code stringlengths 281 23.7M |
|---|
def test_grid_inactivate_outside(tmpdir):
    """Inactivating cells outside a polygon flips actnum inside the layer
    range while leaving cells outside the range untouched."""
    grid = xtgeo.grid_from_file(reekgrid)
    poly = xtgeo.polygons_from_file(reekpoly)

    actnum_before = grid.get_actnum().values3d
    assert actnum_before[3, 56, 1] == 1

    try:
        grid.inactivate_outside(poly, layer_range=(1, 4))
    except RuntimeError as err:
        print(err)

    grid.to_file(os.path.join(tmpdir, 'reek_inact_out_pol.roff'))

    actnum_after = grid.get_actnum().values3d
    assert actnum_after[3, 56, 1] == 0
    # A cell outside the affected layers keeps its original status.
    assert int(actnum_before[20, 38, 4]) == int(actnum_after[20, 38, 4])
class AtomicWriteTestCase(unittest.TestCase):
    """Tests for common.atomic_write: writes must be all-or-nothing."""

    initial_content = 'Initial Content'
    new_content = 'New Content'

    def setUp(self):
        # Seed a real temporary file with known content for each test.
        with tempfile.NamedTemporaryFile('wt', delete=False) as tf:
            tf.write(self.initial_content)
            self.input_file = tf.name

    def test_exception_leaves_unchanged(self):
        """An exception inside the context must leave the target untouched."""
        try:
            with common.atomic_write(self.input_file) as tf:
                raise IOError()
        except IOError:
            # Original content survives and the temp file was cleaned up.
            with open(self.input_file) as fp:
                self.assertEqual(self.initial_content, fp.read())
            self.assertFalse(os.path.exists(tf.name))
        else:
            # FIX: previously the test passed silently if atomic_write
            # swallowed the IOError and none of the assertions ran.
            self.fail('IOError raised inside atomic_write was not propagated')

    def test_write(self):
        """Content goes to a temp file, then is atomically moved in place."""
        with common.atomic_write(self.input_file) as fp:
            # Writes target a separate temporary file, not the destination.
            self.assertNotEqual(self.input_file, fp.name)
            fp.write(self.new_content)
        # On exit the temp file has been renamed over the destination.
        self.assertFalse(os.path.exists(fp.name))
        with open(self.input_file) as fp:
            self.assertEqual(self.new_content, fp.read())

    def tearDown(self):
        os.remove(self.input_file)
class OptionSeriesDumbbellSonificationDefaultspeechoptionsPointgrouping(Options):
    """Point-grouping options for dumbbell-series sonification speech tracks.

    FIX: the getter/setter pairs were plain duplicate method definitions, so
    each setter silently replaced its getter. Restored the @property /
    @<name>.setter decorators so both accessors are reachable.
    """

    @property
    def algorithm(self):
        # Stored config value; defaults to 'last'.
        return self._config_get('last')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        # Defaults to True.
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        # Defaults to 15.
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        # Defaults to 'y'.
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def check_error(esp, efuses, args):
    """Report eFuse coding errors and, with args.recovery, try to re-burn
    the affected blocks from their currently readable values.

    Raises esptool.FatalError if errors remain after (optional) recovery.
    """
    error_in_blocks = get_error_summary(efuses)
    if (args.recovery and error_in_blocks):
        confirmed = False
        for block in reversed(efuses.blocks):
            if (block.fail or (block.num_errors > 0)):
                # Skip blocks that read back as all zeroes - nothing to recover.
                if (not block.get_bitstring().all(False)):
                    # Byte order is reversed before saving - presumably to
                    # match the burn layout (TODO confirm against espefuse docs).
                    block.save(block.get_bitstring().bytes[::(- 1)])
                    if (not confirmed):
                        confirmed = True
                        # Ask the user once, before the first block is modified.
                        efuses.confirm('Recovery of block coding errors', args.do_not_confirm)
                    block.burn()
        if confirmed:
            efuses.update_efuses()
        # Re-check state after the recovery attempt.
        error_in_blocks = get_error_summary(efuses)
    if error_in_blocks:
        raise esptool.FatalError('Error(s) were detected in eFuses')
    print('No errors detected')
# FIX: the bare `.param_file(...)` line was a syntax error - the
# @pytest.mark.param_file decorator prefix had been stripped.
@pytest.mark.param_file(FIXTURE_PATH / 'tables.md')
def test_tables(file_params, sphinx_doctree_no_tr: CreateDoctree):
    """Render each table fixture through Sphinx and compare doctrees."""
    sphinx_doctree_no_tr.set_conf({'extensions': ['myst_parser']})
    result = sphinx_doctree_no_tr(file_params.content, 'index.md')
    # rstrip_lines ignores trailing-whitespace differences in the dump.
    file_params.assert_expected(result.pformat('index'), rstrip_lines=True)
def execute(request, owner_id, cmd, stdin=None, meta=None, callback=None, lock=None, lock_timeout=None, queue=None, expires=EXPIRES, tt=TT_EXEC, tg=TG_DC_BOUND, nolog=False, ping_worker=True, check_user_tasks=True, block_key=None):
    """Create and dispatch an _execute celery task for `cmd`.

    Returns a (task_id, error) tuple:
      - (task.id, None) on success,
      - (None, error) if a queue worker is unresponsive or task creation failed,
      - (task_id, 'Task did not acquire lock') if `lock` was requested but held.
    """
    task_id = task_id_from_request(request, owner_id=owner_id, tt=tt, tg=tg)
    # Human-readable task description, used for logging and the lock's desc.
    task = ('Task %s[%s]("%s")' % (_execute.name, task_id, cmd))
    lock_key = lock
    lock_acquired = False
    if (meta is None):
        meta = {}
    if (ping_worker and queue):
        # Verify worker liveness before dispatching. A callback also needs
        # the management queue, unless the target queue already is Q_MGMT.
        if ((callback is not False) and (queue != Q_MGMT)):
            queues = [queue, Q_MGMT]
        else:
            queues = [queue]
        for q in queues:
            # NOTE(review): ping_worker is passed as the ping timeout; the
            # default True is an odd timeout value - confirm callers supply
            # a number when pinging is enabled.
            if (not ping(q, timeout=ping_worker, count=2)):
                return (None, ('Task queue worker (%s) is not responding!' % queue_to_hostnames(q)))
    try:
        if lock_key:
            if (lock_timeout is None):
                lock_timeout = expires
            lock_key = (KEY_PREFIX + lock)
            task_lock = TaskLock(lock_key, desc=task)
            lock_acquired = task_lock.acquire(task_id, timeout=lock_timeout)
            if (not lock_acquired):
                return (task_id, 'Task did not acquire lock')
        meta['nolog'] = nolog
        args = (cmd, stdin)
        kwargs = {'meta': meta, 'callback': callback, 'lock': lock_key, 'block': block_key, 'check_user_tasks': check_user_tasks}
        # From here on `task` is the AsyncResult returned by celery.
        task = _execute.apply_async(args=args, kwargs=kwargs, queue=queue, task_id=task_id, expires=expires, add_to_parent=False)
    except Exception as e:
        logger.exception(e)
        logger.error('%s could not be created (%s)', task, e)
        if lock_acquired:
            # The task never started; release the lock taken above.
            task_lock.delete(fail_silently=True, premature=True)
        return (None, e)
    else:
        if nolog:
            logger.debug('%s created', task)
        else:
            logger.info('%s created', task)
        return (task.id, None)
class SnapshotPluginBuilder():
    """Builds .tar.gz snapshots of the installed Fledge plugins and prunes
    old snapshot archives / temporary files in the snapshot directory."""

    # Directory where snapshot archives are written.
    _out_file_path = None
    # Directory scanned for leftover temporary files.
    _interim_file_path = None

    def __init__(self, snapshot_plugin_dir):
        try:
            if (not os.path.exists(snapshot_plugin_dir)):
                os.makedirs(snapshot_plugin_dir)
            else:
                # Directory already exists: prune surplus old snapshots now.
                self.check_and_delete_plugins_tar_files(snapshot_plugin_dir)
            self._out_file_path = snapshot_plugin_dir
            self._interim_file_path = snapshot_plugin_dir
        except (OSError, Exception) as ex:
            _LOGGER.error(ex, 'Error in initializing SnapshotPluginBuilder class.')
            raise RuntimeError(str(ex))

    async def build(self):
        """Create a snapshot archive; return (snapshot_id, snapshot_filename).

        On failure the partial archive is removed and RuntimeError is raised.
        """
        def reset(tarinfo):
            # Normalize file ownership inside the archive to root/root.
            tarinfo.uid = tarinfo.gid = 0
            tarinfo.uname = tarinfo.gname = 'root'
            return tarinfo
        tar_file_name = ''
        try:
            # The snapshot id is the creation time in epoch seconds.
            snapshot_id = str(int(time.time()))
            snapshot_filename = '{}-{}.tar.gz'.format(SNAPSHOT_PREFIX, snapshot_id)
            tar_file_name = '{}/{}'.format(self._out_file_path, snapshot_filename)
            pyz = tarfile.open(tar_file_name, 'w:gz')
            try:
                pyz.add('{}/python/fledge/plugins'.format(_FLEDGE_ROOT), arcname='python/fledge/plugins', recursive=True)
                # {root}/bin/fledge present => packaged install; otherwise
                # assume a source tree with separate C build directories.
                if (path.exists('{}/bin'.format(_FLEDGE_ROOT)) and path.exists('{}/bin/fledge'.format(_FLEDGE_ROOT))):
                    pyz.add('{}/plugins'.format(_FLEDGE_ROOT), arcname='plugins', recursive=True, filter=reset)
                else:
                    pyz.add('{}/C/plugins'.format(_FLEDGE_ROOT), arcname='C/plugins', recursive=True)
                    pyz.add('{}/plugins'.format(_FLEDGE_ROOT), arcname='plugins', recursive=True)
                    pyz.add('{}/cmake_build/C/plugins'.format(_FLEDGE_ROOT), arcname='cmake_build/C/plugins', recursive=True)
            finally:
                pyz.close()
        except Exception as ex:
            # Remove the partially written archive before propagating.
            if os.path.isfile(tar_file_name):
                os.remove(tar_file_name)
            _LOGGER.error(ex, 'Error in creating Snapshot .tar.gz file.')
            raise RuntimeError(str(ex))
        self.check_and_delete_temp_files(self._interim_file_path)
        self.check_and_delete_plugins_tar_files(self._out_file_path)
        _LOGGER.info('Snapshot %s successfully created.', tar_file_name)
        return (snapshot_id, snapshot_filename)

    def check_and_delete_plugins_tar_files(self, snapshot_plugin_dir):
        """Keep only the newest _NO_OF_FILES_TO_RETAIN snapshot archives."""
        valid_extension = '.tar.gz'
        valid_files_to_delete = dict()
        try:
            for (root, dirs, files) in os.walk(snapshot_plugin_dir):
                for _file in files:
                    if _file.endswith(valid_extension):
                        # Keyed by the name up to the first '.' (prefix-id).
                        valid_files_to_delete[_file.split('.')[0]] = os.path.join(root, _file)
            # Sorted newest-first, so popitem() removes the oldest entry.
            valid_files_to_delete_sorted = OrderedDict(sorted(valid_files_to_delete.items(), reverse=True))
            while (len(valid_files_to_delete_sorted) > _NO_OF_FILES_TO_RETAIN):
                (_file, _path) = valid_files_to_delete_sorted.popitem()
                _LOGGER.warning('Removing plugin snapshot file %s.', _path)
                os.remove(_path)
        except OSError as ex:
            _LOGGER.error(ex, 'ERROR while deleting plugin file.')

    def check_and_delete_temp_files(self, snapshot_plugin_dir):
        """Delete everything in the directory that is not a snapshot archive."""
        for f in os.listdir(snapshot_plugin_dir):
            if (not fnmatch.fnmatch(f, '{}*.tar.gz'.format(SNAPSHOT_PREFIX))):
                os.remove(os.path.join(snapshot_plugin_dir, f))

    def write_to_tar(self, pyz, temp_file, data):
        """Serialize `data` as JSON into temp_file and append it to `pyz`."""
        with open(temp_file, 'w') as outfile:
            json.dump(data, outfile, indent=4)
        pyz.add(temp_file, arcname=basename(temp_file))

    def extract_files(self, pyz):
        """Extract a snapshot archive into _FLEDGE_ROOT; return True.

        Packaged installs go through the cmdutil helper; source trees are
        extracted directly with tarfile.
        """
        if (path.exists('{}/bin'.format(_FLEDGE_ROOT)) and path.exists('{}/bin/fledge'.format(_FLEDGE_ROOT))):
            cmd = '{}/extras/C/cmdutil tar-extract {}'.format(_FLEDGE_ROOT, pyz)
            retcode = os.system(cmd)
            if (retcode != 0):
                raise OSError('Error {}: {}'.format(retcode, cmd))
            return True
        else:
            try:
                with tarfile.open(pyz, 'r:gz') as tar:
                    # NOTE(review): extractall is vulnerable to path traversal
                    # for untrusted archives - confirm snapshots are trusted.
                    tar.extractall(path=_FLEDGE_ROOT, members=tar.getmembers())
            except Exception as ex:
                raise RuntimeError('Extraction error for snapshot {}. {}'.format(pyz, str(ex)))
            else:
                return True
class DE2000(DeltaE):
    """Delta E 2000 (CIEDE2000) color-distance plugin.

    FIX: `distance` declared its first parameter as `cls` but had no
    @classmethod decorator, so `cls` would have been bound to an instance
    (or left unbound). Restored the decorator.
    """

    NAME = '2000'
    LAB = 'lab-d65'
    # 25 ** 7, constant used by the G and R_C terms of CIEDE2000.
    G_CONST = (25 ** 7)

    @classmethod
    def distance(cls, color: 'Color', sample: 'Color', kl: float = 1, kc: float = 1, kh: float = 1, **kwargs: Any) -> float:
        """Compute the CIEDE2000 distance between `color` and `sample`.

        kl, kc and kh weight the lightness, chroma and hue terms.
        """
        l1, a1, b1 = alg.no_nans(color.convert(cls.LAB)[:-1])
        l2, a2, b2 = alg.no_nans(sample.convert(cls.LAB)[:-1])
        # Chroma of each sample and the a*-axis compensation factor G.
        c1 = math.sqrt(a1 ** 2 + b1 ** 2)
        c2 = math.sqrt(a2 ** 2 + b2 ** 2)
        cm = (c1 + c2) / 2
        c7 = cm ** 7
        g = 0.5 * (1 - math.sqrt(c7 / (c7 + cls.G_CONST)))
        # Adjusted a', C' and hue angles h' (degrees in [0, 360)).
        ap1 = (1 + g) * a1
        ap2 = (1 + g) * a2
        cp1 = math.sqrt(ap1 ** 2 + b1 ** 2)
        cp2 = math.sqrt(ap2 ** 2 + b2 ** 2)
        hp1 = 0 if ap1 == 0 and b1 == 0 else math.atan2(b1, ap1)
        hp2 = 0 if ap2 == 0 and b2 == 0 else math.atan2(b2, ap2)
        hp1 = math.degrees(hp1 + 2 * math.pi if hp1 < 0.0 else hp1)
        hp2 = math.degrees(hp2 + 2 * math.pi if hp2 < 0.0 else hp2)
        # Differences in lightness, chroma and (wrapped) hue.
        dl = l1 - l2
        dc = cp1 - cp2
        hdiff = hp1 - hp2
        if cp1 * cp2 == 0.0:
            dh = 0.0
        elif abs(hdiff) <= 180.0:
            dh = hdiff
        else:
            dh = hdiff + (-360 if hdiff > 180.0 else 360)
        dh = 2 * math.sqrt(cp2 * cp1) * math.sin(math.radians(dh / 2))
        # Mean lightness, chroma and hue.
        lpm = (l1 + l2) / 2
        cpm = (cp1 + cp2) / 2
        hsum = hp1 + hp2
        if cp1 * cp2 == 0:
            hpm = hsum
        elif abs(hp1 - hp2) > 180:
            hpm = (hsum + (360 if hsum < 360 else -360)) / 2
        else:
            hpm = hsum / 2
        # Hue-dependent weighting T and the rotation term components.
        t = (
            1
            - 0.17 * math.cos(math.radians(hpm - 30))
            + 0.24 * math.cos(math.radians(2 * hpm))
            + 0.32 * math.cos(math.radians(3 * hpm + 6))
            - 0.2 * math.cos(math.radians(4 * hpm - 63))
        )
        dt = 30 * math.exp(-(((hpm - 275) / 25) ** 2))
        cpm7 = cpm ** 7
        rc = 2 * math.sqrt(cpm7 / (cpm7 + cls.G_CONST))
        # Compensation factors for lightness, chroma and hue.
        l_temp = (lpm - 50) ** 2
        sl = 1 + 0.015 * l_temp / math.sqrt(20 + l_temp)
        sc = 1 + 0.045 * cpm
        sh = 1 + 0.015 * cpm * t
        rt = -math.sin(math.radians(2 * dt)) * rc
        return math.sqrt(
            (dl / (kl * sl)) ** 2
            + (dc / (kc * sc)) ** 2
            + (dh / (kh * sh)) ** 2
            + rt * (dc / (kc * sc)) * (dh / (kh * sh))
        )
def extractAzurro(item):
    """Map a release-feed item to a release message.

    Returns None for items that should be skipped entirely, a release message
    for known series tags, and False when no series tag matched.
    """
    tags = item['tags']
    if 'translation project' not in tags:
        return None
    if 'review' not in tags:
        return None
    if 'preview' in item['title'].lower():
        return None
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol):
        return None
    # Feed tag -> canonical series title (checked in this order).
    series_by_tag = [
        ('A Naive Short-tempered Girl', 'A Naive Short-tempered Girl'),
        ('Substitute Bride', 'Substitute Bride'),
        ('Husband is Great Black Belly ()', 'Husband is Great Black Belly'),
        ("The CEO's Pregnant Wife ()", "The CEO's Pregnant Wife"),
        ('The Wolf Husband and The Green Plum Wife ()', 'The Wolf Husband and The Green Plum Wife'),
    ]
    for tag, title in series_by_tag:
        if tag in tags:
            return buildReleaseMessageWithType(item, title, vol, chp, frag=frag, postfix=postfix)
    return False
def normalize_selection(*args, **kwargs):
    """Merge positional dict arguments and keyword arguments into one mapping,
    asserting that each value has a supported selection type."""
    from climetlab.arguments.transformers import ALL

    merged = {}
    for arg in args:
        if arg is None:
            continue
        if not isinstance(arg, dict):
            raise ValueError(f'Cannot make a selection with {arg}')
        merged.update(arg)
    merged.update(kwargs)

    for key, value in merged.items():
        supported = (
            value is None
            or value is ALL
            or callable(value)
            or isinstance(value, (list, tuple, set))
            or isinstance(value, (str, int, float, datetime.datetime))
        )
        assert supported, f'Unsupported type: {type(value)} for key {key}'
    return merged
def load_prices_json(tdb, tdenv, jsonText):
    """Load a station price payload from JSON text.

    Looks up (or ignores, per tdenv.ignoreUnknown) the system and station
    named in the payload. Raises UnknownSystemError / UnknownStationError
    when an unknown entity is encountered and ignoring is not enabled.
    """
    data = json.loads(jsonText)
    sysData = data['sys']
    sysName = sysData['name']
    pos = sysData['pos']
    stnData = data['stn']
    stnName = stnData['name']
    lsFromStar = stnData['ls']
    # The black-market flag is optional; anything but Y/N becomes '?'.
    try:
        blackMarket = stnData['bm'].upper()
        if blackMarket not in ['Y', 'N']:
            blackMarket = '?'
    except KeyError:
        blackMarket = '?'
    system = lookup_system(tdb, tdenv, sysName, pos[0], pos[1], pos[2])
    if not system:
        if not tdenv.ignoreUnknown:
            raise UnknownSystemError(sysName)
        if not tdenv.quiet:
            print('NOTE: Ignoring unknown system: {} [{},{},{}]'.format(sysName, pos[0], pos[1], pos[2]))
        return
    if system.dbname != sysName and tdenv.detail:
        # FIX: previously referenced an undefined variable `name` here.
        print("NOTE: Treating '{}' as '{}'".format(sysName, system.dbname))
    tdenv.DEBUG1('- System: {}', system.dbname)
    station = lookup_station(tdb, tdenv, system, stnName, lsFromStar, blackMarket)
    if not station:
        # FIX: the condition was inverted relative to the system case above -
        # it raised exactly when the user asked to ignore unknown stations.
        if not tdenv.ignoreUnknown:
            raise UnknownStationError(stnName)
        if not tdenv.quiet:
            print('NOTE: Ignoring unknown station: {}/{}'.format(sysName.upper(), stnName))
        return
    tdenv.DEBUG1('- Station: {}', station.dbname)
class ConsoleButton(ToggleButton):
    """Toggle button that expands/collapses the shared console widget
    between 3 rows (on) and 1 row (off)."""

    def __init__(self, parent):
        # 'Console' is the button label; 'c' is presumably its hotkey -
        # confirm against the ToggleButton constructor.
        super().__init__(parent, 'Console', 'c')
        self.console = self.root.main.console

    def toggle_on(self):
        self.console.config(height=3)
        # NOTE(review): toggle_on returns True while toggle_off returns
        # None - presumably a signal the ToggleButton base checks; confirm.
        return True

    def toggle_off(self):
        self.console.config(height=1)
class OptionSeriesArearangeSonificationTracksMappingLowpassFrequency(Options):
    """Lowpass-frequency mapping options for arearange sonification tracks.

    FIX: the getter/setter pairs were plain duplicate method definitions, so
    each setter silently replaced its getter. Restored the @property /
    @<name>.setter decorators so both accessors are reachable.
    """

    @property
    def mapFunction(self):
        # No default configured (None).
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_volume_anomalies_with_seasonality(test_id: str, dbt_project: DbtProject):
    """Without seasonality the volume test passes; enabling day_of_week
    seasonality makes the missing last week of data fail the test."""
    today = datetime.utcnow().date()
    dates = generate_dates(
        base_date=today - timedelta(days=1),
        step=timedelta(weeks=1),
        days_back=7 * 14,
    )
    cutoff = today - timedelta(weeks=1)
    data = [
        {TIMESTAMP_COLUMN: d.strftime(DATE_FORMAT)}
        for d in dates
        if d < cutoff
    ]
    result = dbt_project.test(test_id, DBT_TEST_NAME, DBT_TEST_ARGS, data=data)
    assert result['status'] == 'pass'
    seasonal_args = {**DBT_TEST_ARGS, 'seasonality': 'day_of_week'}
    result = dbt_project.test(test_id, DBT_TEST_NAME, seasonal_args)
    assert result['status'] == 'fail'
# FIX: the bare `_toolkit([...])` call was a mangled decorator - the
# `requires_` prefix had been stripped, leaving an undefined name.
@requires_toolkit([ToolkitName.qt])
class TestPanelLayout(unittest.TestCase):
    """Qt-only checks that Group scrollability produces the expected
    QScrollArea / content-widget structure in the built UI."""

    def test_scrollable_group_typical(self):
        from pyface.qt import QtGui
        example = ScrollableGroupExample()
        ui = example.edit_traits(view=scrollable_group_view)
        try:
            # The UI control's first layout item wraps the main window.
            mainwindow = ui.control.layout().itemAt(0).widget()
            scroll_area = mainwindow.centralWidget()
            self.assertIsInstance(scroll_area, QtGui.QScrollArea)
            # A plain scrollable group yields a bare QWidget content pane.
            content = scroll_area.widget()
            self.assertEqual(type(content), QtGui.QWidget)
        finally:
            ui.dispose()

    def test_scrollable_group_box(self):
        from pyface.qt import QtGui
        example = ScrollableGroupExample()
        ui = example.edit_traits(view=scrollable_group_box_view)
        try:
            mainwindow = ui.control.layout().itemAt(0).widget()
            scroll_area = mainwindow.centralWidget()
            self.assertIsInstance(scroll_area, QtGui.QScrollArea)
            # A labelled box group becomes a QGroupBox with the view label.
            group_box = scroll_area.widget()
            self.assertIsInstance(group_box, QtGui.QGroupBox)
            self.assertEqual(group_box.title(), 'Scrollable View')
        finally:
            ui.dispose()

    def test_scrollable_labelled_group(self):
        from pyface.qt import QtGui
        example = ScrollableGroupExample()
        ui = example.edit_traits(view=scrollable_labelled_group_view)
        try:
            mainwindow = ui.control.layout().itemAt(0).widget()
            scroll_area = mainwindow.centralWidget()
            self.assertIsInstance(scroll_area, QtGui.QScrollArea)
            content = scroll_area.widget()
            self.assertEqual(type(content), QtGui.QWidget)
        finally:
            ui.dispose()

    def test_non_scrollable_group_typical(self):
        from pyface.qt import QtGui
        example = ScrollableGroupExample(my_str='The group is not scrollable')
        ui = example.edit_traits(view=non_scrollable_group_view)
        try:
            mainwindow = ui.control.layout().itemAt(0).widget()
            # No QScrollArea is inserted for a non-scrollable group.
            content = mainwindow.centralWidget()
            self.assertEqual(type(content), QtGui.QWidget)
        finally:
            ui.dispose()
def configure_ankihabitica():
    """Load the add-on configuration file if it exists; otherwise mark the
    settings as not configured."""
    if ah.user_settings['keep_log']:
        ah.log.debug('Begin function')
    conf_path = ah.conffile
    if os.path.exists(conf_path):
        read_conf_file(conf_path)
    else:
        ah.settings.configured = False
    # Re-read keep_log: the config load above may have changed it.
    if ah.user_settings['keep_log']:
        ah.log.debug('End function')
def test_footballmatch_module_kickoff_time(lfs_match, monkeypatch):
    """format_time_to_kick_off should report the interval to kick-off."""

    class MockDatetime(datetime):
        # FIX: datetime.now is a classmethod; without the decorator,
        # MockDatetime.now() would not receive `cls` and would fail.
        @classmethod
        def now(cls, *args, **kwargs):
            # Frozen "current" time: 13:45 UTC on match day.
            return cls(2021, 11, 6, 13, 45, 0, tzinfo=UTC())

    monkeypatch.setattr('qtile_extras.resources.footballscores.FootballMatch.is_fixture', (lambda _: True))
    monkeypatch.setattr('qtile_extras.resources.footballscores.footballmatch.datetime', MockDatetime)
    che = lfs_match('Chelsea')
    # 1 hour 15 minutes until kick-off (presumably a 15:00 fixture - from fixture data).
    assert (che.format_time_to_kick_off('{h}:{m}') == '1:15')
class Template():
    """Wraps a raw AD certificate-template LDAP entry and decodes its EKU
    OIDs and flag bitmasks into human-readable lists."""

    def __init__(self, raw_template):
        self.raw_template = raw_template
        self.name = self.raw_template['cn']
        # NOTE: 'signiture' (sic) kept - renaming would break external users.
        self.ra_signiture = self.raw_template['msPKI-RA-Signature']
        self.schema_version = self.raw_template['msPKI-Template-Schema-Version']
        self.cert_app_policy_friendly = []
        self.EKUs_friendly = []
        self.cert_name_flags_friendly = []
        self.enrollment_flags_friendly = []
        # NOTE(review): never populated anywhere in this class - private-key
        # flags are not decoded; confirm whether that is intentional.
        self.private_key_flags_friendly = []
        self.get_EKUs('pKIExtendedKeyUsage', self.EKUs_friendly)
        self.get_EKUs('msPKI-Certificate-Application-Policy', self.cert_app_policy_friendly)
        self.get_flags('msPKI-Certificate-Name-Flag', MS_PKI_CERTIFICATE_NAME_FLAGS, self.cert_name_flags_friendly)
        self.get_flags('msPKI-Enrollment-Flag', MS_PKI_ENROLLMENT_FLAGS, self.enrollment_flags_friendly)

    def get_EKUs(self, attribute, result):
        """Append the friendly name of each OID under `attribute` to result.

        A missing attribute - or an OID absent from OID_TO_STR_MAP - raises
        KeyError, which aborts the loop silently (remaining OIDs are skipped).
        """
        try:
            for eku in self.raw_template[attribute]:
                result.append(OID_TO_STR_MAP[eku])
        except KeyError:
            pass

    def get_flags(self, attribute, PKI_DICT, result):
        """Decode the integer bitmask stored in `attribute` using PKI_DICT and
        append the name of every flag whose bits are fully set."""
        flags = self.raw_template.entry_raw_attribute(attribute)[0]
        for (key, val) in PKI_DICT.items():
            if ((int(flags) & key) == key):
                result.append(val)

    def print_template(self):
        """Print the decoded template via the module-level template_str."""
        print(template_str.format(self.name, self.schema_version, self.join_list(self.cert_name_flags_friendly), self.join_list(self.enrollment_flags_friendly), self.ra_signiture, self.join_list(self.EKUs_friendly), self.join_list(self.cert_app_policy_friendly)))

    def join_list(self, arr):
        """Comma-join arbitrary items as strings."""
        return ', '.join([str(word) for word in arr])
class PosConsensus(ConsensusAPI):
    """No-op consensus engine for proof-of-stake chains: seal validation is
    handled by the consensus layer, so every header is accepted here."""

    def __init__(self, base_db: AtomicDatabaseAPI) -> None:
        # The database handle is unused; accepted for API compatibility.
        pass

    def validate_seal(self, header: BlockHeaderAPI) -> None:
        # PoS headers carry no PoW seal to validate.
        pass

    def validate_seal_extension(self, header: BlockHeaderAPI, parents: Iterable[BlockHeaderAPI]) -> None:
        pass

    # FIX: the method declared `cls` but lacked the @classmethod decorator,
    # so an instance call would have bound the instance to `cls`.
    @classmethod
    def get_fee_recipient(cls, header: BlockHeaderAPI) -> Address:
        """Fees go to the address in the header's coinbase field."""
        return header.coinbase
def get_sector_data():
    """Populate the module-level sector_data with per-sector summary stats
    (median, 10th/90th percentile, scaled std-dev) for each metric column."""
    global sector_data
    global allStockData
    sectors = allStockData['Sector'].unique()
    # Metric columns assumed to sit between column 7 and the last 3 -
    # TODO confirm against the CSV layout.
    metrics = allStockData.columns[7:(- 3)]
    for sector in sectors:
        # FIX: .copy() so the '%'-stripping / numeric coercion below mutates
        # an independent frame instead of a view of allStockData (avoids
        # pandas SettingWithCopy behavior and accidental source mutation).
        rows = allStockData.loc[(allStockData['Sector'] == sector)].copy()
        for metric in metrics:
            # Strip percent signs, then coerce to numbers (bad cells -> NaN).
            rows[metric] = rows[metric].str.rstrip('%')
            rows[metric] = pd.to_numeric(rows[metric], errors='coerce')
            data = remove_outliers(rows[metric], 2)
            sector_data[sector][metric]['Median'] = data.median(skipna=True)
            sector_data[sector][metric]['10Pct'] = data.quantile(0.1)
            sector_data[sector][metric]['90Pct'] = data.quantile(0.9)
            # NOTE(review): std is divided by 5 - presumably an empirical
            # scaling factor; confirm the intent.
            sector_data[sector][metric]['Std'] = (np.std(data, axis=0) / 5)
class Poupanca():
    """Fetches the current Brazilian savings-account (poupanca) rate from a
    Banco Central endpoint."""

    def __init__(self):
        # NOTE(review): the URL literal is truncated/redacted in this copy of
        # the file (unterminated string) - the BCB query URL must be restored.
        self.query_url = '
        self.acesso = AcessarBancoCentral(self.query_url)
        self.req = self.acesso.getURL()

    def get_poupanca_tax(self):
        """Return the rate as a float.

        Raises DataCotacaoNotFound when the expected <div id=value> element
        is absent from the response body.
        """
        poupanca = self.acesso.cleanContent(self.req.content.decode('utf-8'))
        tax = re.search('<div id=value>(\\d*[\\.\\,]?\\d+)</div>', poupanca)
        if (not tax):
            raise DataCotacaoNotFound
        # Brazilian decimal comma -> dot before the float conversion.
        return float(tax.group(1).replace(',', '.'))
class TestRun():
    """Holds the configuration, devices and outcome of a single test run."""

    def __init__(self, test_config):
        self._config = test_config
        self._devices = {}
        self._test_class_instance = None
        self._exceptions = []
        self._output_directory = None
        self._ran = False

    def name(self):
        """Name of this run, taken from its configuration."""
        return self._config['name']

    def config(self):
        return self._config

    def ran(self):
        return self._ran

    def set_ran(self):
        self._ran = True

    def test_class_instance(self):
        return self._test_class_instance

    def set_test_class_instance(self, test_class_instance):
        self._test_class_instance = test_class_instance

    def output_directory(self):
        return self._output_directory

    def set_output_directory(self, output_directory):
        self._output_directory = output_directory

    def devices(self):
        return self._devices

    def set_devices(self, devices):
        self._devices = devices

    def register_exception(self, step, exception):
        # Exceptions are recorded as (step, exception) pairs, in order.
        self._exceptions.append((step, exception))

    def passed(self):
        """True when the run completed without recording any exception."""
        if not self.ran():
            return False
        return len(self._exceptions) == 0

    def failed(self):
        """True when the run completed and its first exception is a LeakTestFail."""
        if not self.ran() or len(self._exceptions) == 0:
            return False
        return isinstance(self._exceptions[0][1], LeakTestFail)

    def errored(self):
        """True when the run completed but neither passed nor failed cleanly."""
        return self.ran() and not self.passed() and not self.failed()

    def exceptions(self):
        return self._exceptions

    def user_interrupt(self):
        """Whether any recorded exception was a KeyboardInterrupt."""
        for _, ex in self._exceptions:
            if isinstance(ex, KeyboardInterrupt):
                return True
        return False
class _coconut_has_iter(_coconut_baseclass):
    """Coconut-runtime base class for wrapper objects that own an iterator
    stored in the single slot `iter`."""
    __slots__ = ('iter',)

    def __new__(cls, iterable):
        # Allocation-time initialization (no __init__), per coconut runtime style.
        self = _coconut.super(_coconut_has_iter, cls).__new__(cls)
        self.iter = iterable
        return self

    def get_new_iter(self):
        """Replace the stored iterator with a reiterable view and return it,
        so the underlying data can be iterated more than once."""
        self.iter = reiterable(self.iter)
        return self.iter

    def __fmap__(self, func):
        # fmap over the wrapper is a lazy map over its elements.
        return map(func, self)
def tile(proc, consumer, old_i_iter, old_j_iter, new_i_iters, new_j_iters, i_tile_size, j_tile_size, perfect=True):
    """Tile the (i, j) loop nest enclosing `consumer`'s store in `proc`.

    Each loop is divided by its tile size into (outer, inner) pairs named by
    new_i_iters / new_j_iters, then the inner i loop is swapped outward past
    the outer j loop. Returns the transformed proc.
    """
    consumer_assign = proc.find(f'{consumer}[_] = _')
    i_loop = _PC.get_enclosing_loop(consumer_assign, old_i_iter)
    j_loop = _PC.get_enclosing_loop(consumer_assign, old_j_iter)
    # The j loop must be directly nested inside the i loop.
    assert (j_loop.parent() == i_loop)
    proc = divide_loop(proc, i_loop, i_tile_size, new_i_iters, perfect=perfect)
    proc = divide_loop(proc, j_loop, j_tile_size, new_j_iters, perfect=perfect)
    # Reorder inner-i before outer-j, yielding (i_out, j_out, i_in, j_in).
    proc = reorder_loops(proc, f'{new_i_iters[1]} {new_j_iters[0]}')
    return proc
class CassandraInstrumentation(AbstractInstrumentedModule):
    """APM instrumentation for the cassandra-driver.

    Wraps Session.execute and Cluster.connect, recording a db span with the
    destination address/port, the keyspace and - for queries - the CQL text.
    """
    name = 'cassandra'
    instrument_list = [('cassandra.cluster', 'Session.execute'), ('cassandra.cluster', 'Cluster.connect')]

    def call(self, module, method, wrapped, instance, args, kwargs):
        name = self.get_wrapped_name(wrapped, instance, method)
        context = {}
        if (method == 'Cluster.connect'):
            span_action = 'connect'
            # Older driver versions expose contact_points_resolved; newer
            # ones expose endpoints_resolved instead.
            if hasattr(instance, 'contact_points_resolved'):
                host = instance.contact_points_resolved[0]
                port = instance.port
            else:
                host = instance.endpoints_resolved[0].address
                port = instance.endpoints_resolved[0].port
            keyspace: Optional[str] = (args[0] if args else kwargs.get('keyspace'))
            if keyspace:
                context['db'] = {'instance': keyspace}
        else:
            # Session.execute: `instance` is the Session object.
            hosts = list(instance.hosts)
            # Same version split: hosts may or may not carry an endpoint.
            if hasattr(hosts[0], 'endpoint'):
                host = hosts[0].endpoint.address
                port = hosts[0].endpoint.port
            else:
                host = hosts[0].address
                port = instance.cluster.port
            db_context = {}
            if instance.keyspace:
                db_context['instance'] = instance.keyspace
            span_action = 'query'
            query = (args[0] if args else kwargs.get('query'))
            # The query may be a Statement (query_string), a bound prepared
            # statement (prepared_statement.query) or a plain string.
            if hasattr(query, 'query_string'):
                query_str = query.query_string
            elif (hasattr(query, 'prepared_statement') and hasattr(query.prepared_statement, 'query')):
                query_str = query.prepared_statement.query
            elif isinstance(query, str):
                query_str = query
            else:
                query_str = None
            if query_str:
                # Use the extracted statement signature as the span name.
                name = extract_signature(query_str)
                db_context.update({'type': 'sql', 'statement': query_str})
            if db_context:
                context['db'] = db_context
        context['destination'] = {'address': host, 'port': port}
        with capture_span(name, span_type='db', span_subtype='cassandra', span_action=span_action, extra=context, leaf=True):
            return wrapped(*args, **kwargs)
def _trace_argument_adapter(t: Union[(LabeledTrace, Trace, TraceDir)], default_label: str) -> LabeledTrace:
    """Coerce any accepted trace argument into a LabeledTrace, attaching
    default_label when the input does not already carry one.

    Raises ValueError for unsupported argument types.
    """
    if isinstance(t, TraceDir):
        return LabeledTrace(label=default_label, trace_dir=t)
    if isinstance(t, Trace):
        return LabeledTrace(label=default_label, t=t)
    if isinstance(t, LabeledTrace):
        return t
    raise ValueError(f'Invalid argument type for ({t}).')
def clusters_from_partitions(partitions, reference, options):
    """Cluster SV-signature partitions with hierarchical clustering.

    Each partition is down-sampled to at most 100 signatures, near-identical
    signatures from the same read are dropped (except for inversions), and
    the remainder is clustered by span-position distance using average
    linkage. Returns the final list of clusters (lists of signatures).
    """
    clusters_final = []
    large_partitions = 0
    duplicate_signatures = 0
    # Fixed seed so the down-sampling below is reproducible between runs.
    seed(1524)
    for partition in partitions:
        # Cap partition size at 100 to bound the O(n^2) distance computation.
        if (len(partition) > 100):
            partition_sample = sample(partition, 100)
            large_partitions += 1
        else:
            partition_sample = partition
        # All signatures in a partition are expected to share one SV type.
        element_type = partition_sample[0].type
        assert (element_type in ['DEL', 'DUP_TAN', 'INV', 'INS', 'DUP_INT', 'BND'])
        if (element_type == 'INV'):
            # Inversions keep same-read signatures - presumably because one
            # read can legitimately yield multiple inversion signatures.
            partition_sample_without_duplicates = partition_sample
        else:
            # Drop signatures that come from the same read and lie within
            # the clustering distance of an earlier one.
            duplicates_from_same_read = set()
            for i in range((len(partition_sample) - 1)):
                for j in range((i + 1), len(partition_sample)):
                    if ((partition_sample[i].read == partition_sample[j].read) and (span_position_distance(partition_sample[i], partition_sample[j], element_type, reference, options.position_distance_normalizer, options.edit_distance_normalizer, options.cluster_max_distance) <= options.cluster_max_distance)):
                        duplicates_from_same_read.add(j)
            duplicate_signatures += len(duplicates_from_same_read)
            partition_sample_without_duplicates = [partition_sample[i] for i in range(len(partition_sample)) if (i not in duplicates_from_same_read)]
        # Singletons form their own cluster; linkage needs >= 2 points.
        if (len(partition_sample_without_duplicates) == 1):
            clusters_final.append([partition_sample_without_duplicates[0]])
            continue
        # Condensed pairwise distance vector (upper triangle, row by row).
        distances = []
        if (element_type == 'INV'):
            for i in range((len(partition_sample_without_duplicates) - 1)):
                for j in range((i + 1), len(partition_sample_without_duplicates)):
                    distances.append(span_position_distance(partition_sample_without_duplicates[i], partition_sample_without_duplicates[j], element_type, reference, options.position_distance_normalizer, options.edit_distance_normalizer, options.cluster_max_distance))
        else:
            for i in range((len(partition_sample_without_duplicates) - 1)):
                for j in range((i + 1), len(partition_sample_without_duplicates)):
                    # Signatures from one read must never cluster together,
                    # hence the effectively infinite distance.
                    if (partition_sample_without_duplicates[i].read == partition_sample_without_duplicates[j].read):
                        distances.append(99999)
                    else:
                        distances.append(span_position_distance(partition_sample_without_duplicates[i], partition_sample_without_duplicates[j], element_type, reference, options.position_distance_normalizer, options.edit_distance_normalizer, options.cluster_max_distance))
        # Average-linkage clustering cut at the configured max distance.
        Z = linkage(np.array(distances), method='average')
        cluster_indices = list(fcluster(Z, options.cluster_max_distance, criterion='distance'))
        new_clusters = [[] for i in range(max(cluster_indices))]
        for (signature_index, cluster_index) in enumerate(cluster_indices):
            # fcluster indices are 1-based.
            new_clusters[(cluster_index - 1)].append(partition_sample_without_duplicates[signature_index])
        clusters_final.extend(new_clusters)
    if (len(partitions) > 0):
        if (len(partitions[0]) > 0):
            logging.debug(('%d out of %d partitions for %s exceeded 100 elements.' % (large_partitions, len(partitions), partitions[0][0].type)))
            logging.debug(('%d %s signatures were removed due to similarity to another signature from the same read.' % (duplicate_signatures, partitions[0][0].type)))
    return clusters_final
def test_feeder_reboot(client: TestClient, with_registered_device: None, mocker):
    """POST /restart returns 200 and forwards a reboot command to the device."""
    from tests.test_database_models import SAMPLE_DEVICE_HID, SAMPLE_GATEWAY_HID

    reboot_cmd = mocker.patch('feeder.api.routers.feeder.router.client.send_cmd_reboot')
    resp = client.post(f'/api/v1/feeder/{SAMPLE_DEVICE_HID}/restart')
    assert resp.status_code == 200
    reboot_cmd.assert_called_once_with(gateway_id=SAMPLE_GATEWAY_HID, device_id=SAMPLE_DEVICE_HID)
class Components():
    """Bootstrap-flavoured component factory: wraps the standard page
    components and applies Bootstrap CSS classes to them."""

    def __init__(self, ui):
        self.page = ui.page

    def img(self, image: str=None, path: str=None, thumbnail: bool=False, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), align: str='center', html_code: str=None, profile: types.PROFILE_TYPE=None, tooltip: str=None, options: dict=None):
        """Responsive image ('img-fluid'); `thumbnail` adds 'img-thumbnail'."""
        component = self.page.web.std.img(image, path, width, height, align, html_code, profile, tooltip, options)
        component.attr['class'].initialise(['img-fluid'])
        if thumbnail:
            component.attr['class'].add('img-thumbnail')
        return component

    def badge(self, text: str='', category: str='primary', width: types.SIZE_TYPE=(None, 'px'), height: types.SIZE_TYPE=(None, 'px'), tooltip: str=None, options: str=None, profile: types.PROFILE_TYPE=None):
        """Bootstrap badge; `category` selects the bg-* contextual class."""
        component = self.page.web.std.tags.span(text, width=width, height=height, tooltip=tooltip, options=options, profile=profile)
        component.attr['class'].initialise(['badge'])
        component.style.css.margin = 2
        if (category is not None):
            component.attr['class'].add(('bg-%s' % category))
        return component

    def pill(self, text: str='', category: str='primary', icon: str=None, width: types.SIZE_TYPE=(None, 'px'), height: types.SIZE_TYPE=(None, 'px'), tooltip: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Rounded-pill badge, optionally prefixed with an icon component."""
        values = []
        if (icon is not None):
            values.append(self.page.web.bs.icon(icon))
        if text:
            values.append(text)
        component = self.page.web.std.tags.span(values, width=width, height=height, tooltip=tooltip, options=options, profile=profile)
        component.attr['class'].initialise(['badge', 'rounded-pill'])
        component.style.css.margin = 2
        if (category is not None):
            component.attr['class'].add(('bg-%s' % category))
        return component

    def carousel(self, images: List[str]=None, active: bool=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(300, 'px'), html_code: str=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Bootstrap carousel over `images`; sizes are normalised to px."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        component = HtmlBsWidgets.BsCarousel(self.page, None, html_code, (options or {}), profile, {'width': width, 'height': height})
        # Auto-advance the slides ("carousel" ride mode).
        component.attr['data-bs-ride'] = 'carousel'
        if (images is not None):
            for image in images:
                component.add_item(image, active=active)
        return component
def upgrade():
    """Alembic upgrade: link orders to access codes and record discount /
    access-code application flags on ticket holders."""
    op.add_column('orders', sa.Column('access_code_id', sa.Integer(), nullable=True))
    # Deleting an access code keeps the order, nulling the reference.
    op.create_foreign_key(u'orders_access_code_id_fkey', 'orders', 'access_codes', ['access_code_id'], ['id'], ondelete='SET NULL')
    op.add_column('ticket_holders', sa.Column('is_discount_applied', sa.Boolean(), nullable=True))
    op.add_column('ticket_holders', sa.Column('is_access_code_applied', sa.Boolean(), nullable=True))
def copy_data(solid_runs, library_defns):
    """Copy primary data files (csfasta/qual, plus F5 pairs for paired-end
    runs) for each matching library into the current working directory.

    Existing destination files are skipped with an error log entry.
    """
    for library_defn in library_defns:
        parts = library_defn.split('/')
        sample, library = parts[0], parts[1]
        print('Copy: look for samples matching pattern %s' % library_defn)
        print('Data files will be copied to %s' % os.getcwd())
        for run in solid_runs:
            for lib in run.fetchLibraries(sample, library):
                print('-> matched %s/%s' % (lib.parent_sample.name, lib.name))
                primary_data_files = [lib.csfasta, lib.qual]
                if run.is_paired_end:
                    primary_data_files += [lib.csfasta_f5, lib.qual_f5]
                for filn in primary_data_files:
                    print('\tCopying .../%s' % os.path.basename(filn))
                    dst = os.path.abspath(os.path.basename(filn))
                    if os.path.exists(dst):
                        logging.error('File %s already exists! Skipped' % dst)
                    else:
                        shutil.copy(filn, dst)
# FIX: the bare string on the line above the def was a stripped
# @mock.patch decorator (its target string survived); without it the
# mock_get_config parameter is never injected.
@mock.patch('tests.fixtures.DummyTransport.get_config')
def test_environment_doesnt_override_central_config(mock_get_config, elasticapm_client):
    """Central config fetched from the APM Server must win over values set
    via environment variables."""
    assert (elasticapm_client.config.transaction_sample_rate == 1.0)
    assert (elasticapm_client.config.config_version is None)
    # Server returns config version 2 with a 0.1 sample rate (TTL 30).
    mock_get_config.return_value = (2, {'transaction_sample_rate': 0.1}, 30)
    with mock.patch.dict('os.environ', {'ELASTIC_APM_TRANSACTION_SAMPLE_RATE': '0.5'}):
        elasticapm_client.config.update_config()
    # The centrally provided value, not the environment one, is applied.
    assert (elasticapm_client.config.transaction_sample_rate == 0.1)
    assert (elasticapm_client.config.config_version == 2)
class WafFirewallResponseData(ModelComposed):
    """Auto-generated composed OpenAPI model for a WAF firewall response.

    NOTE(review): every decorator in this generated class had lost its '@'
    prefix in the source ('_property', '_js_args_to_python_args'); the
    standard openapi-generator decorators ('@cached_property',
    '@classmethod', '@convert_js_args_to_python_args') are restored below —
    confirm they are imported from the package's model_utils at file top.
    """
    allowed_values = {}
    validations = {}

    @cached_property
    def additional_properties_type():
        # Generated code: no `self` — cached_property from model_utils
        # evaluates this lazily at class level.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False

    @cached_property
    def openapi_types():
        # Maps attribute name -> allowed types; lazy to avoid import cycles.
        lazy_import()
        return {'type': (TypeWafFirewall,), 'attributes': (WafFirewallResponseDataAttributes,), 'id': (str,), 'relationships': (RelationshipWafFirewallVersions,)}

    @cached_property
    def discriminator():
        return None
    attribute_map = {'type': 'type', 'attributes': 'attributes', 'id': 'id', 'relationships': 'relationships'}
    read_only_vars = {'id'}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialize server data, allowing read-only attributes to be set."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            # Silently drop unknown keys when the configuration asks for it.
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor; rejects writes to read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')

    @cached_property
    def _composed_schemas():
        # Composition graph: this model is the allOf-merge of the two listed models.
        lazy_import()
        return {'anyOf': [], 'allOf': [WafFirewallData, WafFirewallResponseDataAllOf], 'oneOf': []}
# NOTE(review): this decorator lost its '@...' callee in the source (only the
# keyword arguments remained, which is not even valid syntax); '@hydra.main'
# is reconstructed from the Hydra usage below — confirm, and make sure
# `hydra` is imported at module level.
@hydra.main(config_path='conf', config_name='conf_rollout')
def maze_run(cfg: DictConfig) -> Optional[float]:
    """Hydra entry point: run a single job or a multirun sweep.

    :param cfg: the composed Hydra configuration for this run.
    :return: the maximum mean reward for a multirun sweep, else None.
    """
    instance = HydraConfig.instance()
    # A job number is only present when Hydra launched a multirun sweep.
    is_multi_run = ((instance.cfg is not None) and (instance.cfg.hydra.job.get('num') is not None))
    if (not is_multi_run):
        _run_job(cfg)
    else:
        max_mean_reward = _run_multirun_job(cfg)
        return max_mean_reward
# NOTE(review): the '@pytest.mark' prefix was lost from this decorator in the
# source (a bare '.parametrize(...)' line is a syntax error); restored.
@pytest.mark.parametrize('use_bytes', [False, True])
def test_bulk_rejected_documents_are_retried_when_bytes_or_string(sync_client, use_bytes):
    """streaming_bulk must retry 429-rejected docs whether given str or bytes."""
    failing_client = FailingBulkClient(sync_client, fail_with=ApiError(message='Rejected!', body={}, meta=ApiResponseMeta(status=429, headers={}, duration=0, node=None)))
    docs = [json.dumps({'field': x}, separators=(',', ':')) for x in range(3)]
    if use_bytes:
        docs = [doc.encode() for doc in docs]
    # chunk_size=1 so each doc is its own bulk request; one retry allowed.
    results = list(helpers.streaming_bulk(failing_client, docs, index='i', raise_on_exception=False, raise_on_error=False, chunk_size=1, max_retries=1, initial_backoff=0))
    assert (3 == len(results))
    # Every doc must eventually report success despite the injected rejection.
    assert ([True, True, True] == [r[0] for r in results])
    sync_client.indices.refresh(index='i')
    res = sync_client.search(index='i')
    assert ({'value': 3, 'relation': 'eq'} == res['hits']['total'])
    # 3 initial calls + 1 retry of the failing chunk.
    assert (4 == failing_client._called)
# NOTE(review): the '@pytest.mark.parametrize' callee was lost in the source,
# leaving a no-op tuple expression; restored so `input`/`expected` are
# actually injected at collection time.
@pytest.mark.parametrize('input, expected', [({'type': 'categorical', 'choices': [1, 2, 3]}, CategoricalDistribution([1, 2, 3])), ({'type': 'int', 'low': 0, 'high': 10}, IntUniformDistribution(0, 10)), ({'type': 'int', 'low': 0, 'high': 10, 'step': 2}, IntUniformDistribution(0, 10, step=2)), ({'type': 'int', 'low': 0, 'high': 5}, IntUniformDistribution(0, 5)), ({'type': 'int', 'low': 1, 'high': 100, 'log': True}, IntLogUniformDistribution(1, 100)), ({'type': 'float', 'low': 0, 'high': 1}, UniformDistribution(0, 1)), ({'type': 'float', 'low': 0, 'high': 10, 'step': 2}, DiscreteUniformDistribution(0, 10, 2)), ({'type': 'float', 'low': 1, 'high': 100, 'log': True}, LogUniformDistribution(1, 100))])
def test_create_optuna_distribution_from_config(input: Any, expected: Any) -> None:
    """Each config dict must map to the matching Optuna distribution."""
    actual = _impl.create_optuna_distribution_from_config(input)
    check_distribution(expected, actual)
class AnalysisOutput():
    """One or more static-analysis result files plus their parsed metadata.

    Instances are usually built with the ``from_*`` classmethods from a
    results directory, individual files, a sharded-file spec, or an open
    file handle.

    NOTE(review): this class had lost every '@' character in the source:
    the ``@classmethod`` decorators and the ``'@' in spec`` sharding checks
    below are restored — confirm against the original.
    """

    def __init__(self, *, directory: Optional[str]=None, filename_specs: Optional[List[str]]=None, filename_glob: Optional[str]=None, file_handle: Optional[IO[str]]=None, metadata: Optional[Metadata]=None, tool: Optional[str]=None) -> None:
        self.directory = directory
        self.filename_specs: List[str] = (filename_specs or [])
        self.filename_glob = filename_glob
        self.file_handle = file_handle
        self.metadata = metadata
        self.tool = tool
        # Fall back to the handle's filename when no specs were given.
        # (The original `filename_specs is []` compared identity against a
        # fresh list literal and was therefore always False.)
        if ((not self.filename_specs) and file_handle and hasattr(file_handle, 'name')):
            self.filename_specs = [file_handle.name]

    def __str__(self) -> str:
        if self.directory:
            return f'AnalysisOutput({repr(self.directory)})'
        return f'AnalysisOutput({repr(self.filename_specs)})'

    @classmethod
    def from_strs(cls, identifiers: List[str]) -> 'AnalysisOutput':
        """Build from several directories, or delegate to from_str for one."""
        if (len(identifiers) > 1):
            return cls.from_directories(identifiers)
        else:
            return cls.from_str(identifiers[0])

    @classmethod
    def from_str(cls, identifier: str) -> 'AnalysisOutput':
        """Build from a directory path, a file path, or a sharded-file spec."""
        if os.path.isdir(identifier):
            return cls.from_directory(identifier)
        elif os.path.isfile(identifier):
            return cls.from_file(identifier)
        elif (os.path.isdir(os.path.dirname(identifier)) and ('@' in os.path.basename(identifier))):
            # '@' marks a sharded spec such as "results@*.json"; the source
            # had degraded this to `'' in ...`, which was always true.
            return cls.from_file(identifier)
        else:
            raise AnalysisOutputError(f'Unrecognized identifier `{identifier}`')

    @classmethod
    def from_directories(cls, directories: List[str]) -> 'AnalysisOutput':
        """Merge results and metadata from several output directories."""
        main_metadata = None
        filename_specs = []
        for directory in directories:
            if (not os.path.isdir(directory)):
                raise AnalysisOutputError(f'`{directory}` is not a directory')
            metadata = {}
            for file in glob(os.path.join(directory, METADATA_GLOB)):
                with open(file) as f:
                    metadata.update(json.load(f))
            filename_specs.extend(_get_remapped_filename(metadata, 'filename_spec', directory))
            repo_root = metadata.get('repo_root')
            repo_roots = {(repo_root if (repo_root is not None) else metadata['root'])}
            rules = {rule['code']: rule for rule in metadata.get('rules', [])}
            class_type_intervals_filenames = _get_remapped_filename(metadata, 'class_type_intervals_filename', directory)
            this_metadata = Metadata(analysis_tool_version=metadata['version'], commit_hash=metadata.get('commit'), repo_roots=repo_roots, job_instance=metadata.get('job_instance'), tool=metadata.get('tool'), repository_name=metadata.get('repository_name'), project=metadata.get('project'), rules=rules, class_type_intervals_filenames=class_type_intervals_filenames, category_coverage=metadata.get('category_coverage', []))
            # Fold each directory's metadata into one merged Metadata object.
            if (not main_metadata):
                main_metadata = this_metadata
            else:
                main_metadata = main_metadata.merge(this_metadata)
        return cls(filename_specs=filename_specs, metadata=main_metadata)

    @classmethod
    def from_directory(cls, directory: str) -> 'AnalysisOutput':
        """Build from a single output directory containing metadata files."""
        metadata = {}
        for file in glob(os.path.join(directory, METADATA_GLOB)):
            with open(file) as f:
                metadata.update(json.load(f))
        filename_specs = _get_remapped_filename(metadata, 'filename_spec', directory)
        filename_glob = None
        # Precedence: explicit filename_spec > filename_glob > legacy 'filenames'.
        if filename_specs:
            pass
        elif ('filename_glob' in metadata):
            filename_glob = metadata['filename_glob']
            if (not filename_glob):
                raise AnalysisOutputError(f"Empty 'filename_glob' not allowed. In {METADATA_GLOB}, Use either 'filename_spec' or specify something in 'filename_glob'.")
        else:
            filename_specs = [os.path.join(directory, os.path.basename(metadata['filenames'][0]))]
        repo_root = metadata.get('repo_root')
        repo_roots = {(repo_root if (repo_root is not None) else metadata['root'])}
        rules = {rule['code']: rule for rule in metadata.get('rules', [])}
        class_type_intervals_filenames = _get_remapped_filename(metadata, 'class_type_intervals_filename', directory)
        return cls(directory=directory, filename_specs=filename_specs, filename_glob=filename_glob, metadata=Metadata(analysis_tool_version=metadata['version'], commit_hash=metadata.get('commit'), repo_roots=repo_roots, job_instance=metadata.get('job_instance'), tool=metadata.get('tool'), repository_name=metadata.get('repository_name'), project=metadata.get('project'), rules=rules, class_type_intervals_filenames=class_type_intervals_filenames, category_coverage=metadata.get('category_coverage', [])))

    @classmethod
    def from_file(cls, file_name: str) -> 'AnalysisOutput':
        """Build from a single results file (no metadata)."""
        return cls(filename_specs=[file_name])

    @classmethod
    def from_handle(cls, file_handle: IO[str]) -> 'AnalysisOutput':
        """Build from an already-open file handle."""
        return cls(file_handle=file_handle)

    def file_handles(self) -> Iterable[IO[str]]:
        """Yield open handles for every results file; closes an injected handle."""
        if self.file_handle:
            (yield self.file_handle)
            self.file_handle.close()
            self.file_handle = None
        else:
            for name in self.file_names():
                with open(name, 'r') as f:
                    (yield f)

    def file_names(self) -> Iterable[str]:
        """Yield every concrete results filename, expanding shards and globs."""
        filename_specs = self.filename_specs
        filename_glob = self.filename_glob
        for spec in filename_specs:
            if self._is_sharded(spec):
                (yield from ShardedFile(spec).get_filenames())
            else:
                (yield spec)
        if (filename_glob is not None):
            directory = self.directory
            assert (directory is not None)
            for path in Path(directory).glob(filename_glob):
                (yield str(path))

    @classmethod
    def _is_sharded(cls, spec: str) -> bool:
        # '@' in the spec marks a sharded file set (restored; see from_str).
        return ('@' in spec)

    def has_sharded(self) -> bool:
        return any((self._is_sharded(spec) for spec in self.filename_specs))
class Filtering():
    """Builds and applies the user's torrent filtering configuration.

    Reads the add-on settings once at construction, precomputes the
    allowed resolutions, allow/deny release-type keyword lists and size
    limits, then exposes per-provider query construction
    (``use_movie``/``use_episode``/...) and per-result verification
    (``verify``).
    """

    def __init__(self):
        """Read add-on settings and precompute allow/deny keyword lists."""
        # Ordered lowest-to-highest so determine_resolution() can report the
        # index of the best matching resolution.
        resolutions = OrderedDict()
        resolutions['filter_240p'] = ['240[p]', 'vhs\\-?rip']
        resolutions['filter_480p'] = ['480[p]', 'xvid|dvd|dvdrip|hdtv|web\\-(dl)?rip|iptv|sat\\-?rip|tv\\-?rip']
        resolutions['filter_720p'] = ['720[p]|1280x720', 'hd720p?|hd\\-?rip|b[rd]rip']
        resolutions['filter_1080p'] = ['1080[pi]|1920x1080', 'hd1080p?|fullhd|fhd|blu\\W*ray|bd\\W*remux']
        resolutions['filter_2k'] = ['1440[p]', '2k']
        resolutions['filter_4k'] = ['4k|2160[p]|uhd', '4k|hd4k']
        resolutions['filter_music'] = ['mp3|flac|alac|ost|sound\\-?track']
        self.resolutions = resolutions
        # NOTE(review): 'trailer||' contains empty regex alternatives (non-ASCII
        # keywords appear to have been stripped from this source); an empty
        # alternative matches anything — confirm against the original list.
        self.release_types = {'filter_brrip': ['brrip|bd\\-?rip|blu\\-?ray|bd\\-?remux'], 'filter_webdl': ['web_?\\-?dl|web\\-?rip|dl\\-?rip|yts'], 'filter_hdrip': ['hd\\-?rip'], 'filter_hdtv': ['hd\\-?tv'], 'filter_dvd': ['dvd|dvd\\-?rip|vcd\\-?rip'], 'filter_dvdscr': ['dvd\\-?scr'], 'filter_screener': ['screener|scr'], 'filter_3d': ['3d'], 'filter_telesync': ['telesync|ts|tc'], 'filter_cam': ['cam|hd\\-?cam'], 'filter_tvrip': ['tv\\-?rip|sat\\-?rip|dvb'], 'filter_vhsrip': ['vhs\\-?rip'], 'filter_iptvrip': ['iptv\\-?rip'], 'filter_trailer': ['trailer||'], 'filter_workprint': ['workprint'], 'filter_line': ['line']}
        require = []
        resolutions_allow = []
        releases_allow = []
        releases_deny = []
        # Every enabled resolution contributes its keywords to the allow list.
        for resolution in self.resolutions:
            if get_setting(resolution, bool):
                resolutions_allow.append(resolution)
                releases_allow.extend(self.resolutions[resolution])
        self.resolutions_allow = resolutions_allow
        # If every resolution is enabled there is nothing to filter on.
        self.filter_resolutions = True
        if (len(self.resolutions_allow) == len(self.resolutions)):
            self.filter_resolutions = False
        # Enabled release types go to the allow list, disabled ones to deny.
        for release_type in self.release_types:
            if get_setting(release_type, bool):
                releases_allow.extend(self.release_types[release_type])
            else:
                releases_deny.extend(self.release_types[release_type])
        if use_additional_filters:
            # User-supplied comma-separated accept/block/require keyword lists.
            accept = use_accept
            if accept:
                accept = re.split(',\\s?', accept)
                releases_allow.extend(accept)
            block = use_block
            if block:
                block = re.split(',\\s?', block)
                releases_deny.extend(block)
            require = use_require
            if require:
                require = re.split(',\\s?', require)
        self.releases_allow = releases_allow
        self.releases_deny = releases_deny
        self.require_keywords = require
        self.min_size = get_float(use_min_size)
        self.max_size = get_float(use_max_size)
        self.check_sizes()
        self.filter_title = False
        self.queries = []
        self.extras = []
        self.queries_priorities = []
        self.info = dict(title='', proxy_url='', internal_proxy_url='', elementum_url='', titles=[])
        self.kodi_language = ''
        self.language_exceptions = []
        self.provider_languages = []
        self.get_data = {}
        self.post_data = {}
        self.url = ''
        self.title = ''
        self.reason = ''
        self.results = []

    def _resolved_definition(self, provider):
        """Return the provider definition with user / public-DNS / tor-DNS aliases applied."""
        definition = definitions[provider]
        definition = get_alias(definition, get_setting(('%s_alias' % provider)))
        if (get_setting('use_public_dns', bool) and ('public_dns_alias' in definition)):
            definition = get_alias(definition, definition['public_dns_alias'])
        if (get_setting('use_tor_dns', bool) and ('tor_dns_alias' in definition)):
            definition = get_alias(definition, definition['tor_dns_alias'])
        return definition

    def _apply_size_settings(self, min_key, max_key):
        """Override the global size limits with per-content-type settings when enabled."""
        if get_setting('separate_sizes', bool):
            self.min_size = get_float(get_setting(min_key))
            self.max_size = get_float(get_setting(max_key))
            self.check_sizes()

    def use_general(self, provider, payload):
        """Prepare a generic (non-typed) search for `provider`."""
        definition = self._resolved_definition(provider)
        general_query = (definition['general_query'] if (('general_query' in definition) and definition['general_query']) else '')
        log.debug(('[%s] General URL: %s%s' % (provider, definition['base_url'], general_query)))
        self.info = payload
        self.url = (u'%s%s' % (definition['base_url'], general_query))
        self.collect_queries('general', definition)

    def use_movie(self, provider, payload):
        """Prepare a movie search for `provider`."""
        definition = self._resolved_definition(provider)
        movie_query = (definition['movie_query'] if (('movie_query' in definition) and definition['movie_query']) else '')
        log.debug(('[%s] Movies URL: %s%s' % (provider, definition['base_url'], movie_query)))
        self._apply_size_settings('min_size_movies', 'max_size_movies')
        self.info = payload
        self.url = (u'%s%s' % (definition['base_url'], movie_query))
        self.collect_queries('movie', definition)

    def use_episode(self, provider, payload):
        """Prepare a TV-episode search for `provider`."""
        definition = self._resolved_definition(provider)
        show_query = (definition['show_query'] if (('show_query' in definition) and definition['show_query']) else '')
        log.debug(('[%s] Episode URL: %s%s' % (provider, definition['base_url'], show_query)))
        self._apply_size_settings('min_size_episodes', 'max_size_episodes')
        self.info = payload
        self.url = (u'%s%s' % (definition['base_url'], show_query))
        self.collect_queries('tv', definition)

    def use_season(self, provider, info):
        """Prepare a season-pack search for `provider`."""
        definition = self._resolved_definition(provider)
        season_query = (definition['season_query'] if (('season_query' in definition) and definition['season_query']) else '')
        log.debug(('[%s] Season URL: %s%s' % (provider, definition['base_url'], season_query)))
        self._apply_size_settings('min_size_seasons', 'max_size_seasons')
        self.info = info
        self.url = (u'%s%s' % (definition['base_url'], season_query))
        self.collect_queries('season', definition)

    def use_anime(self, provider, info):
        """Prepare an anime search for `provider` (uses episode size limits)."""
        definition = self._resolved_definition(provider)
        anime_query = (definition['anime_query'] if (('anime_query' in definition) and definition['anime_query']) else '')
        log.debug(('[%s] Anime URL: %s%s' % (provider, definition['base_url'], anime_query)))
        self._apply_size_settings('min_size_episodes', 'max_size_episodes')
        self.info = info
        self.url = (u'%s%s' % (definition['base_url'], anime_query))
        self.collect_queries('anime', definition)

    def split_title_per_languages(self, text, item_type):
        """Expand a '{title:lang1:lang2}' keyword into one query per language."""
        result = []
        modified = False
        keywords = self.read_keywords(text)
        for keyword in keywords:
            keyword = keyword.lower()
            if (('title' in keyword) and (':' in keyword)):
                # General searches ignore language variants entirely.
                if (item_type == 'general'):
                    result.append(text.replace(('{%s}' % keyword), '{title}'))
                    return result
                langs = keyword.lower().split(':')[1:]
                if (len(langs) < 2):
                    continue
                modified = True
                for lang in langs:
                    result.append(text.replace(('{%s}' % keyword), ('{title:%s}' % lang)))
        if (not modified):
            return [text]
        else:
            return result

    def different_years(self):
        """Return the distinct per-language release years (keyed by language)."""
        if (('year' not in self.info) or ('years' not in self.info)):
            return {}
        self.info['years']['default'] = self.info['year']
        res = {}
        seen = set()
        for key in self.info['years']:
            # Deduplicate: keep only one key per distinct year value.
            if (self.info['years'][key] in seen):
                continue
            seen.add(self.info['years'][key])
            res[key] = self.info['years'][key]
        return res

    def split_title_per_year(self, queries, years):
        """Expand every '{year}' keyword into one query per distinct year."""
        res = []
        for item in queries:
            if ('{year}' in item):
                for key in years:
                    query = item.replace('{year}', ('{year:%s}' % key))
                    res.append(query)
            else:
                res.append(item)
        return res

    def collect_queries(self, item_type, definition):
        """Gather primary and fallback query templates for `item_type`.

        Primary templates all get priority 1; each fallback template gets an
        increasing priority so it is only used when earlier ones fail.
        """
        different_years = self.different_years()
        priority = 1
        for item in ['', '2', '3', '4']:
            key = ((item_type + '_keywords') + item)
            extra = ((item_type + '_extra') + item)
            if ((key in definition) and definition[key]):
                qlist = self.split_title_per_languages(definition[key], item_type)
                if (len(different_years) > 1):
                    qlist = self.split_title_per_year(qlist, different_years)
                self.queries.extend(qlist)
                eitem = (definition[extra] if ((extra in definition) and definition[extra]) else '')
                for _ in qlist:
                    self.extras.append(eitem)
                    self.queries_priorities.append(priority)
        for item in ['', '2', '3', '4']:
            key = ((item_type + '_keywords_fallback') + item)
            extra = ((item_type + '_extra_fallback') + item)
            if ((key in definition) and definition[key]):
                qlist = self.split_title_per_languages(definition[key], item_type)
                if (len(different_years) > 1):
                    qlist = self.split_title_per_year(qlist, different_years)
                self.queries.extend(qlist)
                eitem = (definition[extra] if ((extra in definition) and definition[extra]) else '')
                for _ in qlist:
                    priority += 1
                    self.extras.append(eitem)
                    self.queries_priorities.append(priority)

    def information(self, provider):
        """Log the active filter configuration for `provider`."""
        log.debug(('[%s] Accepted resolutions: %s' % (provider, self.resolutions_allow)))
        log.debug(('[%s] Accepted release types: %s' % (provider, self.releases_allow)))
        log.debug(('[%s] Blocked release types: %s' % (provider, self.releases_deny)))
        log.debug(('[%s] Minimum size: %s' % (provider, (str(self.min_size) + ' GB'))))
        log.debug(('[%s] Maximum size: %s' % (provider, (str(self.max_size) + ' GB'))))

    def check_sizes(self):
        """Keep the size range sane: min must not exceed max."""
        if (self.min_size > self.max_size):
            log.warning('Minimum size above maximum, using max size minus 1 GB')
            self.min_size = (self.max_size - 1)

    def read_keywords(self, keywords):
        """Return all '{...}' placeholder names found in `keywords`."""
        results = []
        if keywords:
            for value in re.findall('{(.*?)}', keywords):
                results.append(value)
        return results

    def process_keywords(self, provider, text, definition):
        """Substitute every '{...}' placeholder in a query template.

        Handles title (with optional language variants), year, IDs, season
        and episode numbers (with optional '+offset' and ':zero-pad' forms).
        Returns '' when a required value is unavailable so the query is
        skipped.
        """
        keywords = self.read_keywords(text)
        replacing = use_filter_quotes
        for keyword in keywords:
            keyword = keyword.lower()
            if ('title' in keyword):
                title = self.info['title']
                language = definitions[provider]['language']
                use_language = None
                if (':' in keyword):
                    use_language = keyword.split(':')[1].lower()
                if ((provider not in self.language_exceptions) and (use_language or self.kodi_language) and ('titles' in self.info) and self.info['titles']):
                    try:
                        if ((not use_language) and self.kodi_language and (self.kodi_language in self.info['titles'])):
                            use_language = self.kodi_language
                        if ((not use_language) and language and (language in self.info['titles'])):
                            use_language = language
                        if ((use_language not in self.info['titles']) or (not self.info['titles'][use_language])):
                            log.info(('[%s] Falling back to original title in absence of %s language title' % (provider, use_language)))
                            use_language = 'original'
                        if ((use_language in self.info['titles']) and self.info['titles'][use_language]):
                            title = self.info['titles'][use_language]
                            title = normalize_string(title)
                            # Strip accents unless the provider natively handles the language.
                            if ((use_language != 'original') and ((self.convert_language(use_language) not in self.provider_languages) or (self.convert_language(use_language) == 'en'))):
                                title = remove_accents(title)
                            if (('remove_special_characters' in definition) and definition['remove_special_characters']):
                                for char in definition['remove_special_characters']:
                                    title = title.replace(char, '')
                                title = ' '.join(title.split())
                            log.info(("[%s] Using translated '%s' title %s" % (provider, use_language, repr(title))))
                        else:
                            log.debug(("[%s] Skipping the query '%s' due to missing '%s' language title" % (provider, text, use_language)))
                            return ''
                    except Exception as e:
                        import traceback
                        log.error(('%s failed with: %s' % (provider, repr(e))))
                        # `map` is lazy on Python 3, so the original
                        # `map(log.debug, ...)` never logged anything;
                        # iterate explicitly instead.
                        for tb_line in traceback.format_exc().split('\n'):
                            log.debug(tb_line)
                text = text.replace(('{%s}' % keyword), title)
            if ('year' in keyword):
                if (':' not in keyword):
                    text = text.replace(('{%s}' % keyword), str(self.info['year']))
                else:
                    use_language = keyword.split(':')[1].lower()
                    if ((use_language in self.info['years']) and self.info['years'][use_language]):
                        text = text.replace(('{%s}' % keyword), str(self.info['years'][use_language]))
            if ('show_tmdb_id' in keyword):
                if ('show_tmdb_id' not in self.info):
                    self.info['show_tmdb_id'] = ''
                text = text.replace(('{%s}' % keyword), str(self.info['show_tmdb_id']))
            if ('tmdb_id' in keyword):
                if ('tmdb_id' not in self.info):
                    self.info['tmdb_id'] = ''
                text = text.replace(('{%s}' % keyword), str(self.info['tmdb_id']))
            if ('tvdb_id' in keyword):
                if ('tvdb_id' not in self.info):
                    self.info['tvdb_id'] = ''
                text = text.replace(('{%s}' % keyword), str(self.info['tvdb_id']))
            if ('imdb_id' in keyword):
                if ('imdb_id' not in self.info):
                    self.info['imdb_id'] = ''
                text = text.replace(('{%s}' % keyword), str(self.info['imdb_id']))
            if ('season' in keyword):
                # '{season+N}' adds an offset; '{season:N}' zero-pads to N digits.
                if ('+' in keyword):
                    keys = keyword.split('+')
                    season = str((self.info['season'] + get_int(keys[1])))
                elif (':' in keyword):
                    keys = keyword.split(':')
                    season = (('%%.%sd' % keys[1]) % self.info['season'])
                else:
                    season = ('%s' % self.info['season'])
                text = text.replace(('{%s}' % keyword), season)
            if (('episode' in keyword) and ('absolute' not in keyword)):
                if ('+' in keyword):
                    keys = keyword.split('+')
                    episode = str((self.info['episode'] + get_int(keys[1])))
                elif (':' in keyword):
                    keys = keyword.split(':')
                    episode = (('%%.%sd' % keys[1]) % self.info['episode'])
                else:
                    episode = ('%s' % self.info['episode'])
                text = text.replace(('{%s}' % keyword), episode)
            if ('absolute_episode' in keyword):
                if (('absolute_number' not in self.info) or (not self.info['absolute_number'])):
                    log.debug(("Skipping query '%s' due to missing absolute_number" % text))
                    return ''
                if ('+' in keyword):
                    keys = keyword.split('+')
                    episode = str((self.info['absolute_number'] + get_int(keys[1])))
                elif (':' in keyword):
                    keys = keyword.split(':')
                    episode = (('%%.%sd' % keys[1]) % self.info['absolute_number'])
                else:
                    episode = ('%s' % self.info['absolute_number'])
                text = text.replace(('{%s}' % keyword), episode)
        if replacing:
            text = text.replace(u"'", '')
        return text

    def verify(self, provider, name, size):
        """Check a result `name`/`size` against all active filters.

        Returns True when acceptable; otherwise sets self.reason and
        returns False.
        """
        if (not name):
            self.reason = ('[%s] %s' % (provider, '*** Empty name ***'))
            return False
        name = normalize_string(name)
        if (self.filter_title and self.title):
            self.title = normalize_string(self.title)
        self.reason = ('[%s] %70s ***' % (provider, name))
        if (self.filter_resolutions and use_require_resolution):
            resolution = self.determine_resolution(name)[0]
            if (resolution not in self.resolutions_allow):
                self.reason += ' Resolution not allowed ({0})'.format(resolution)
                return False
        if self.filter_title:
            # Every word of the expected title must appear in the result name.
            if (not all(map((lambda match: (match in name)), re.split('\\s', self.title)))):
                self.reason += ' Name mismatch'
                return False
        if (self.require_keywords and use_require_keywords):
            for required in self.require_keywords:
                if (not self.included(name, keys=[required])):
                    self.reason += ' Missing required keyword'
                    return False
        if ((not self.included_rx(name, keys=self.releases_allow)) and use_require_release_type):
            self.reason += ' Missing release type keyword'
            return False
        if (self.included_rx(name, keys=self.releases_deny) and use_require_release_type):
            self.reason += ' Blocked by release type keyword'
            return False
        if (size and (not self.in_size_range(size)) and use_require_size):
            self.reason += ' Size out of range ({0})'.format(size)
            return False
        return True

    def in_size_range(self, size):
        """Return True when `size` (human-readable string) fits the configured bounds."""
        res = False
        value = size_int(clean_number(size))
        # Convert the configured GB limits to bytes before comparing against
        # the parsed byte value. (The previous code multiplied by `.0`, which
        # collapsed the range to [0, 0] and rejected every non-zero size.)
        min_size = (self.min_size * 1e9)
        max_size = (self.max_size * 1e9)
        if (min_size <= value <= max_size):
            res = True
        return res

    def determine_resolution(self, name):
        """Return (resolution_key, index) of the best resolution matched in `name`."""
        idx = 0
        count = (- 1)
        # Default when nothing matches.
        res = 'filter_480p'
        for resolution in self.resolutions:
            count += 1
            if self.included_rx(name, keys=self.resolutions[resolution]):
                idx = count
                res = resolution
        return (res, idx)

    def included(self, value, keys, strict=False):
        """Substring keyword check: True if any key's words all occur in `value`.

        With strict=True each word must be surrounded by spaces (whole word).
        """
        value = ((' ' + value) + ' ')
        if ('*' in keys):
            res = True
        else:
            value = value.lower()
            res1 = []
            for key in keys:
                res2 = []
                for item in re.split('\\s', key):
                    # Underscores in keys stand for spaces in the name.
                    item = item.replace('_', ' ')
                    if strict:
                        item = ((' ' + item) + ' ')
                    if (item.lower() in value):
                        res2.append(True)
                    else:
                        res2.append(False)
                res1.append(all(res2))
            res = any(res1)
        return res

    def included_rx(self, value, keys):
        """Regex keyword check: True if any key pattern matches a word in `value`."""
        value = ((' ' + value.lower()) + ' ')
        for key in keys:
            rr = (('\\W+(' + key) + ')\\W*')
            if re.search(rr, value):
                return True
        return False

    def unescape(self, name):
        """Strip CDATA wrappers and unescape HTML entities (lowercases the name)."""
        name = name.replace('<![CDATA[', '').replace(']]', '')
        if PY3:
            name = html.unescape(name.lower())
        else:
            name = HTMLParser().unescape(name.lower())
        return name

    def exception(self, title=None):
        """Apply well-known title aliases (lowercases the title first)."""
        if title:
            title = title.lower()
            title = title.replace('csi crime scene investigation', 'CSI')
            title = title.replace('law and order special victims unit', 'law and order svu')
            title = title.replace('law order special victims unit', 'law and order svu')
            # NOTE(review): the title is lowercased above, so this
            # upper-case pattern can never match — confirm intent.
            title = title.replace('S H I E L D', 'SHIELD')
        return title

    def add_provider_language(self, language):
        """Record a language the current provider natively supports."""
        if (language not in self.provider_languages):
            self.provider_languages.append(language)

    def convert_language(self, language):
        """Map Cyrillic-script locales (ru/ua/by) onto the shared 'cr' bucket."""
        if ((language == 'ru') or (language == 'ua') or (language == 'by')):
            return 'cr'
        else:
            return language

    def define_languages(self, provider):
        """Populate provider_languages from the provider definition."""
        definition = definitions[provider]
        if (('language' in definition) and definition['language']):
            self.add_provider_language(self.convert_language(definition['language']))
        if (('languages' in definition) and definition['languages']):
            for lang in definition['languages'].split(','):
                self.add_provider_language(self.convert_language(lang))
def upgrade():
    """Remove the privacyexperiencehistory table and re-point
    privacypreferencehistory at privacyexperience directly.

    Order matters: indexes and the inbound FK must be dropped before the
    table itself, and versioning columns on privacyexperience are removed
    once nothing references their history rows.
    """
    # Drop all indexes on privacyexperiencehistory ahead of the table drop.
    op.drop_index('ix_privacyexperiencehistory_experience_config_history_id', table_name='privacyexperiencehistory')
    op.drop_index('ix_privacyexperiencehistory_experience_config_id', table_name='privacyexperiencehistory')
    op.drop_index('ix_privacyexperiencehistory_id', table_name='privacyexperiencehistory')
    op.drop_index('ix_privacyexperiencehistory_privacy_experience_id', table_name='privacyexperiencehistory')
    op.drop_index('ix_privacyexperiencehistory_region', table_name='privacyexperiencehistory')
    # The inbound FK from privacypreferencehistory must go before the table.
    op.drop_constraint('privacypreferencehistory_privacy_experience_history_id_fkey', 'privacypreferencehistory', type_='foreignkey')
    op.drop_table('privacyexperiencehistory')
    # privacyexperience no longer tracks versioned history state.
    op.drop_index('ix_privacyexperience_experience_config_history_id', table_name='privacyexperience')
    op.drop_constraint('privacyexperience_experience_config_history_id_fkey', 'privacyexperience', type_='foreignkey')
    op.drop_column('privacyexperience', 'disabled')
    op.drop_column('privacyexperience', 'version')
    op.drop_column('privacyexperience', 'experience_config_history_id')
    # Replace the history FK on privacypreferencehistory with a direct
    # privacy_experience_id reference (nullable: existing rows have no value).
    op.drop_index('ix_privacypreferencehistory_privacy_experience_history_id', table_name='privacypreferencehistory')
    op.drop_column('privacypreferencehistory', 'privacy_experience_history_id')
    op.add_column('privacypreferencehistory', sa.Column('privacy_experience_id', sa.String(), nullable=True))
    op.create_index(op.f('ix_privacypreferencehistory_privacy_experience_id'), 'privacypreferencehistory', ['privacy_experience_id'], unique=False)
    op.create_foreign_key('privacypreferencehistory_privacy_experience_id_fkey', 'privacypreferencehistory', 'privacyexperience', ['privacy_experience_id'], ['id'])
# NOTE(review): the decorator's callee was lost in the source (bare kwargs in
# parens are invalid syntax); '@click.command' is reconstructed from the
# click usage below — confirm whether the original registered on a command
# group instead (e.g. '@volume.command').
@click.command('list', help='Volume list', cls=FandoghCommand)
def volume_list():
    """CLI command: print the namespace's volumes as a table."""
    table = present((lambda : list_volumes()), renderer='table', headers=['Name', 'Status', 'Condition', 'Mounted To', 'Volume', 'Capacity', 'Creation Date'], columns=['name', 'status', 'condition', 'mounted_to', 'volume', 'capacity', 'age'])
    if table:
        click.echo(table)
    else:
        click.echo('You have no volumes in your namespace!')
class Solution():
    def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
        """Add two non-negative integers stored as reversed-digit linked
        lists and return the sum in the same representation.

        Returns None only when both inputs are empty.
        """
        dummy = ListNode(0)
        tail = dummy
        carry = 0
        # Walk both lists in lockstep; a missing node contributes 0.
        # Folding the final carry into the loop condition emits the extra
        # high-order digit without a post-loop special case.
        while (l1 is not None) or (l2 is not None) or carry:
            total = carry
            if l1 is not None:
                total += l1.val
                l1 = l1.next
            if l2 is not None:
                total += l2.val
                l2 = l2.next
            carry, digit = divmod(total, 10)
            tail.next = ListNode(digit)
            tail = tail.next
        return dummy.next
def validate_model(model, val_loader):
    """Run one evaluation pass over `val_loader`.

    Computes cross-entropy loss and unweighted recall for language-ID
    predictions, appends misclassified file names to `exp_dir + '/eval'`,
    and logs per-epoch metrics.

    Returns:
        (recall, model) where the model has been switched back to train mode.
    """
    print('Validating the model')
    model.eval()
    y_true = []
    y_pred = []
    fnames = []
    running_loss = 0.0
    criterion = nn.CrossEntropyLoss()
    with torch.no_grad():
        for (step, (mfcc, mfcc_lengths, mel, mol, lid, fname)) in enumerate(val_loader):
            # Sort the batch by descending mfcc length (required by length-aware models).
            (sorted_lengths, indices) = torch.sort(mfcc_lengths.view((- 1)), dim=0, descending=True)
            sorted_lengths = sorted_lengths.long().numpy()
            (mfcc, mel, mol, lid) = (mfcc[indices], mel[indices], mol[indices], lid[indices])
            (mfcc, mel, mol, lid) = (Variable(mfcc).cuda(), Variable(mel).cuda(), Variable(mol).cuda(), Variable(lid).cuda())
            (logits, vq_penalty, encoder_penalty, entropy) = model(mfcc, mel, mol, mfcc_lengths=sorted_lengths)
            loss = criterion(logits, lid.long())
            running_loss += loss.item()
            targets = lid.cpu().view((- 1)).numpy()
            y_true += targets.tolist()
            predictions = return_classes(logits)
            y_pred += predictions.tolist()
            fnames += fname
    # Append misclassified files as "<fname> <predicted> <true>" lines.
    # Using a context manager so the handle is closed even if the sanity
    # assert below fails (the original leaked the handle in that case).
    with open((exp_dir + '/eval'), 'a') as ff:
        assert (len(fnames) == len(y_pred))
        for (f, yp, yt) in zip(fnames, y_pred, y_true):
            if (yp == yt):
                continue
            ff.write((((((f + ' ') + str(yp)) + ' ') + str(yt)) + '\n'))
    averaged_loss = (running_loss / len(val_loader))
    recall = get_metrics(y_pred, y_true)
    log_value('Unweighted Recall per epoch', recall, global_epoch)
    log_value('validation loss (per epoch)', averaged_loss, global_epoch)
    print('Validation Loss: ', averaged_loss)
    print('Unweighted Recall for the validation set: ', recall)
    print('\n')
    return (recall, model.train())
def test_eliminate_dead_code_forwarding3():
    """After eliminate_dead_code removes an always-true `if`, the loop cursor
    still forwards, the `if` cursor is invalidated, and the former `if` body
    statement forwards directly into the loop body."""
    # NOTE(review): `(f32 DRAM)` is not valid Python as written; it looks like
    # a mangled Exo annotation (`f32 @ DRAM`), and `foo` was presumably
    # decorated with @proc in the original -- confirm against the source repo.
    def foo():
        x: (f32 DRAM)
        for i in seq(0, 8):
            if ((i + 3) > (- 1)):
                x = 0.0
                pass
    loop_cursor = foo.find_loop('i')
    if_cursor = loop_cursor.body()[0]
    if_true_stmt = if_cursor.body()[0]
    foo = eliminate_dead_code(foo, 'if _:_ #0')
    loop_cursor = foo.forward(loop_cursor)
    # The deleted `if` can no longer be forwarded to the new procedure.
    with pytest.raises(InvalidCursorError, match=''):
        if_cursor = foo.forward(if_cursor)
    if_true_stmt = foo.forward(if_true_stmt)
    assert isinstance(loop_cursor, ForCursor)
    # The if's two body statements were inlined into the loop.
    assert (len(loop_cursor.body()) == 2)
    assert isinstance(if_true_stmt, AssignCursor)
    assert isinstance(if_true_stmt.parent(), ForCursor)
def test_resolver_load_task():
    """TaskTemplateResolver round-trip: serialize a ContainerTask's template to
    a protobuf file, then load it back with a Placeholder executor."""
    square = ContainerTask(name='square', input_data_dir='/var/inputs', output_data_dir='/var/outputs', inputs=kwtypes(val=int), outputs=kwtypes(out=int), image='alpine', command=['sh', '-c', 'echo $(( {{.Inputs.val}} * {{.Inputs.val}} )) | tee /var/outputs/out'])
    resolver = TaskTemplateResolver()
    ts = get_serializable(OrderedDict(), serialization_settings, square)
    # NOTE(review): NamedTemporaryFile().name only borrows a path -- the temp
    # file may be deleted when the object is collected; OK here because the
    # path is immediately rewritten below, but confirm this is intentional.
    file = tempfile.NamedTemporaryFile().name
    write_proto_to_file(ts.template.to_flyte_idl(), file)
    # Loader args: [template file path, fully-qualified executor class name].
    shim_task = resolver.load_task([file, f'{Placeholder.__module__}.Placeholder'])
    assert isinstance(shim_task.executor, Placeholder)
    assert (shim_task.task_template.id.name == 'square')
    assert (shim_task.task_template.interface.inputs['val'] is not None)
    assert (shim_task.task_template.interface.outputs['out'] is not None)
class Variable():
    """A recovered variable (or function argument) with one or more debug
    locations, able to emit its debug-info byte encoding."""

    def __init__(self, *args, **kwargs):
        self.locs = []                    # location entries (Reg / IndirectOffset / ...)
        self.binary = kwargs['binary']    # owning binary: provides debug_loc, types, predicted
        self.name = kwargs['name']
        self.ttype = kwargs['ttype']
        self.low_pc = None                # min low_pc over all attached locations
        self.high_pc = None               # max high_pc over all attached locations
        self.var_type = LOC_VAR           # flips to FUN_ARG once any location says so
        self.fun_arg_loc = None

    def add_loc(self, loc):
        """Attach a location entry and widen the [low_pc, high_pc] span."""
        self.locs.append(loc)
        self.low_pc = (min(self.low_pc, loc.low_pc) if (self.low_pc is not None) else loc.low_pc)
        self.high_pc = (max(self.high_pc, loc.high_pc) if (self.high_pc is not None) else loc.high_pc)
        if (loc.var_type == FUN_ARG):
            self.var_type = FUN_ARG
            if isinstance(loc, Reg):
                # Register-passed argument: offset -1 marks "no stack slot".
                self.fun_arg_loc = (loc.base_register, (- 1))
            elif isinstance(loc, IndirectOffset):
                self.fun_arg_loc = (loc.base_pointer, loc.offset)

    def debug_info(self):
        """Serialize this variable into a debug-info byte string.

        A single stack location delegates to that location's compact encoding;
        otherwise an abbrev code, NUL-terminated name, debug_loc offset, and
        type offset are emitted.
        """
        if ((len(self.locs) == 1) and isinstance(self.locs[0], IndirectOffset)):
            loc = self.locs[0]
            return loc.debug_info()
        else:
            bs = bytearray()
            if (self.var_type == FUN_ARG):
                bs.append(ENUM_ABBREV_CODE['LOC_FUN_ARG'])
            else:
                bs.append(ENUM_ABBREV_CODE['LOC_VARIABLE'])
            bs.extend(map(ord, self.name))
            bs.append(0)  # NUL terminator for the name
            # Track predicted (non-builtin, non-unknown) names on the binary.
            if ((self.name not in TTYPES) and (self.name != UNKNOWN_LABEL)):
                self.binary.predicted.add(self.name)
            bs += utils.encode_kbytes(len(self.binary.debug_loc.content), 4)
            self.binary.debug_loc.add_locs(self.locs)
            # Fall back to INT when the type is unknown/void/unsupported.
            if ((self.ttype is None) or (self.ttype in (UNKNOWN_LABEL, VOID)) or (self.ttype not in TTYPES)):
                bs += utils.encode_kbytes(self.binary.types.get_offset(INT), 4)
            else:
                bs += utils.encode_kbytes(self.binary.types.get_offset(self.ttype), 4)
            return bs

    def __str__(self):
        # Bug fix: the original evaluated repr(self) but returned None.
        return repr(self)

    def __repr__(self):
        return '[{}]'.format(', '.join(map(repr, self.locs)))
class OptionPlotoptionsDependencywheelOnpointConnectoroptions(Options):
    """Connector options for dependency-wheel on-point markers (config proxy).

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the @property / @<name>.setter decorators appear to be missing from
    this copy -- confirm against the code generator's output.
    """

    def dashstyle(self):
        # Dash style of the connector line; None means the default (solid).
        return self._config_get(None)

    def dashstyle(self, text: str):
        self._config(text, js_type=False)

    def stroke(self):
        # Stroke color of the connector; None means the default.
        return self._config_get(None)

    def stroke(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        # Connector line width in pixels; defaults to 1.
        return self._config_get(1)

    def width(self, num: float):
        self._config(num, js_type=False)
class TestIsSSML():
    """is_ssml() must accept exactly one well-formed top-level <speak> document
    and reject anything else (missing tags, trailing text, multiple roots)."""

    # NOTE(review): the statements starting with `.parametrize(` are not valid
    # Python as written; they look like mangled @pytest.mark.parametrize
    # decorators -- confirm against the original source.
    .parametrize('ssml_text', ['<speak>hello</speak>', "<speak version='1.0'>hello</speak>", '<speak> Hello <break time="3s"/> world </speak>', '<speak><voice-id 453>Hello world</voice-id></speak>', '<speak><voice-id 453>Hello <break time="3s"/> world</voice-id></speak>', '<speak><voice-id 453>Hello <break time="3s"/> <break time="3s"/> <break time="3s"/> world</voice-id></speak>', '<speak><voice-id 453><prosody pitch=0.35>Hello <break time="3s"/> world</prosody></voice-id></speak>'])
    def test__is_valid_ssml(self, ssml_text: str):
        assert (is_ssml(ssml_text) == True), f'ssml_text `{ssml_text}` is valid ssml_text'

    .parametrize('ssml_text', ['hello', '<speak>hello', 'hello</speak>', '<speak>hello</speak><speak>hello</speak>', '<speak>hello</speak><speak>hello</speak><speak>hello</speak>', '<speak>hello</speak>hello', 'hello<speak>hello</speak>', "<speak version='1.0'>hello</speak dsdcs>"])
    def test__is_invalid_ssml(self, ssml_text: str):
        assert (is_ssml(ssml_text) == False), f'ssml_text `{ssml_text}` is not valid ssml_text'
# NOTE(review): `(scope='function')` is not valid Python as written; it looks
# like a mangled @pytest.fixture(scope='function') decorator -- confirm.
(scope='function')
def system_manager(db: Session, system) -> System:
    """Fixture: a viewer-role user registered as manager of `system`.

    Yields the user, then tears down the manager assignment (best-effort) and
    deletes the user.
    """
    user = FidesUser.create(db=db, data={'username': 'test_system_manager_user', 'password': '&%3Qe2fGo7'})
    client = ClientDetail(hashed_secret='thisisatest', salt='thisisstillatest', scopes=[], roles=[VIEWER], user_id=user.id, systems=[system.id])
    FidesUserPermissions.create(db=db, data={'user_id': user.id, 'roles': [VIEWER]})
    db.add(client)
    db.commit()
    db.refresh(client)
    user.set_as_system_manager(db, system)
    (yield user)
    # Teardown: removal may fail if the system/user rows went stale mid-test.
    try:
        user.remove_as_system_manager(db, system)
    except (SystemManagerException, StaleDataError):
        pass
    user.delete(db)
class HaServerTest(unittest.TestCase):
    """End-to-end tests for the high-availability notification server: a
    client must fail over between masters without losing events."""

    # NOTE(review): start_master / wait_for_master_started / setUpClass /
    # tearDownClass take `cls` but carry no @classmethod decorator in this
    # copy -- the decorators were likely lost; confirm against the source.
    def start_master(cls, host, port):
        # Boot an HA notification master serving on host:port, backed by the
        # shared SQLite event storage.
        port = str(port)
        server_uri = ((host + ':') + port)
        ha_manager = SimpleNotificationServerHaManager()
        ha_storage = DbHighAvailabilityStorage(db_conn=_SQLITE_DB_URI)
        service = HighAvailableNotificationService(cls.storage, ha_manager, server_uri, ha_storage, 5000)
        master = NotificationServer(service, port=int(port))
        master.run()
        return master

    def wait_for_master_started(cls, server_uri='localhost:50051'):
        # Poll (up to ~2 minutes) until a client can connect to the master.
        last_exception = None
        for i in range(60):
            try:
                return EmbeddedNotificationClient(server_uri=server_uri, namespace=None, sender=None)
            except Exception as e:
                time.sleep(2)
                last_exception = e
        raise Exception(('The server %s is unavailable.' % server_uri)) from last_exception

    def setUpClass(cls):
        db.create_all_tables(_SQLITE_DB_URI)
        cls.storage = DbEventStorage(db_conn=_SQLITE_DB_URI)
        cls.master1 = None
        cls.master2 = None
        cls.master3 = None

    def tearDownClass(cls) -> None:
        os.remove(_SQLITE_DB_FILE)

    def setUp(self):
        db.prepare_db()
        self.storage.clean_up()
        self.master1 = self.start_master('localhost', '50051')
        # The client is told both URIs so it can fail over to 50052 later.
        self.client = self.wait_for_master_started('localhost:50051,localhost:50052')

    def tearDown(self):
        self.client.disable_high_availability()
        if (self.master1 is not None):
            self.master1.stop()
        if (self.master2 is not None):
            self.master2.stop()
        if (self.master3 is not None):
            self.master3.stop()
        db.clear_engine_and_session()

    def wait_for_new_members_detected(self, new_member_uri):
        # Wait until the client's membership view includes the new master.
        for i in range(100):
            living_member = self.client.living_members
            if (new_member_uri in living_member):
                break
            else:
                time.sleep(10)

    def test_server_change(self):
        # Events written via master1 must stay readable after failing over to
        # master2, and again after failing over to master3.
        self.client.send_event(Event('key', 'value1'))
        self.client.send_event(Event('key', 'value2'))
        self.client.send_event(Event('key', 'value3'))
        results = self.client.list_all_events()
        self.master2 = self.start_master('localhost', '50052')
        self.wait_for_new_members_detected('localhost:50052')
        self.master1.stop()
        results2 = self.client.list_all_events()
        self.assertEqual(results, results2)
        self.assertEqual(self.client.current_uri, 'localhost:50052')
        self.master3 = self.start_master('localhost', '50053')
        self.wait_for_new_members_detected('localhost:50053')
        self.master2.stop()
        results3 = self.client.list_all_events()
        self.assertEqual(results2, results3)
        self.assertEqual(self.client.current_uri, 'localhost:50053')

    def test_send_listening_on_different_server(self):
        # A listener registered through one server must receive events sent
        # through a different server.
        event_list = []

        class TestWatch(ListenerProcessor):
            def __init__(self, event_list) -> None:
                super().__init__()
                self.event_list = event_list

            def process(self, events: List[Event]):
                self.event_list.extend(events)

        self.master2 = self.start_master('localhost', '50052')
        self.wait_for_new_members_detected('localhost:50052')
        another_client = EmbeddedNotificationClient(server_uri='localhost:50052', namespace=None, sender=None)
        try:
            event1 = another_client.send_event(Event('key', 'value1'))
            # Listen from event1's offset, so only value2/value3 are delivered.
            registration_id = self.client.register_listener(listener_processor=TestWatch(event_list), event_keys=['key'], offset=event1.offset)
            another_client.send_event(Event('key', 'value2'))
            another_client.send_event(Event('key', 'value3'))
        finally:
            self.client.unregister_listener(registration_id)
        self.assertEqual(2, len(event_list))

    def test_start_with_multiple_servers(self):
        # First URI is unreachable; the client must still connect via the second.
        self.client.disable_high_availability()
        self.client = EmbeddedNotificationClient(server_uri='localhost:55001,localhost:50051', namespace=None, sender=None)
        # NOTE(review): assertTrue's second argument is a failure *message*,
        # not an expected value -- this probably meant assertEqual; confirm.
        self.assertTrue(self.client.current_uri, 'localhost:50051')
def test_deposit_updates_next_val_index(concise_casper, funded_account, validation_key, deposit_amount, deposit_validator):
    """A new deposit takes the current next_validator_index and bumps it by one."""
    index_before = concise_casper.next_validator_index()
    new_index = deposit_validator(funded_account, validation_key, deposit_amount)
    assert new_index == index_before
    assert concise_casper.next_validator_index() == index_before + 1
class DagdaServerWithoutSysdigFalcoMonitor(DagdaServer):
    """Test double for DagdaServer that swaps the Sysdig/Falco monitor for a mock."""

    def __init__(self, dagda_server_host='127.0.0.1', dagda_server_port=5000, mongodb_host='127.0.0.1', mongodb_port=27017):
        # NOTE(review): super(DagdaServer, self) skips DagdaServer.__init__ and
        # calls its *parent's* __init__ -- presumably deliberate, to avoid
        # starting the real monitor; confirm against DagdaServer's MRO.
        super(DagdaServer, self).__init__()
        self.dagda_server_host = dagda_server_host
        self.dagda_server_port = dagda_server_port
        # Mocked monitor: pre_check() and run() both "succeed" immediately.
        self.sysdig_falco_monitor = Mock()
        self.sysdig_falco_monitor.pre_check.return_value = 0
        self.sysdig_falco_monitor.run.return_value = 0
def test_cli_subcommands_child_extra():
    """A child subcommand declared with extra-args support receives trailing
    unparsed tokens through the `_extra` parameter."""
    cli = Radicli()
    ran_parent = False
    ran_child = False

    # NOTE(review): the bare call expressions below look like mangled
    # decorators (probably cli.command / cli.subcommand_with_extra) -- confirm
    # against the original source.
    ('parent', a=Arg('--a'), b=Arg('--b'))
    def parent(a: int, b: str):
        assert (a == 1)
        assert (b == 'hello')
        nonlocal ran_parent
        ran_parent = True

    _with_extra('parent', 'child', a=Arg('--a'), b=Arg('--b'))
    def child(a: str, b: int, _extra: List[str]):
        assert (a == 'hey')
        assert (b == 2)
        # 'xyz' is not declared as an Arg, so it lands in _extra.
        assert (_extra == ['xyz'])
        nonlocal ran_child
        ran_child = True
    # argv[0] is ignored by the runner, hence the leading ''.
    cli.run(['', 'parent', '--a', '1', '--b', 'hello'])
    assert ran_parent
    cli.run(['', 'parent', 'child', '--a', 'hey', '--b', '2', 'xyz'])
    assert ran_child
def extractDeltatranslationsOrg(item):
    """Feed parser for deltatranslations.org: map post tags to series releases.

    Returns None for non-chapter/preview posts, a release message for a known
    tag, and False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip posts without chapter/volume info and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (series name, translation type); insertion order decides priority.
    series_by_tag = {
        'Summoning the Holy Sword': ('Summoning the Holy Sword', 'translated'),
        'King of Mercenaries': ('King of Mercenaries', 'translated'),
        'For a Prosperous World': ('For a Prosperous World', 'translated'),
        'Battle of the Third Reich': ('Battle of the Third Reich', 'translated'),
        'EDSG': ('Eight Desolate Sword God', 'translated'),
    }
    tags = item['tags']
    for tag, (series, release_kind) in series_by_tag.items():
        if tag in tags:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=release_kind)
    return False
def extractSadhoovysinhumantranslationsWordpressCom(item):
    """Feed parser for sadhoovysinhumantranslations.wordpress.com.

    Returns None for non-chapter/preview posts, a release message for a known
    tag, and False when no known tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip posts without chapter/volume info and preview posts.
    if not (chp or vol) or 'preview' in title.lower():
        return None
    known_series = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, release_kind in known_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=release_kind)
    return False
def extractKermilleWordpressCom(item):
    """Feed parser for kermille.wordpress.com.

    Returns None for non-chapter/preview posts, a release message for a known
    tag, and False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (series name, translation type); insertion order decides priority.
    tag_table = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    post_tags = item['tags']
    for tag, (series, release_kind) in tag_table.items():
        if tag in post_tags:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=release_kind)
    return False
class SchemasVersionResponse(ModelComposed):
    """Generated OpenAPI composed model:
    allOf(SchemasVersion, Timestamps, VersionResponseAllOf).

    NOTE(review): the bare `_property` / `_js_args_to_python_args` lines look
    like mangled decorators (`@cached_property`,
    `@convert_js_args_to_python_args`) -- confirm against the generator output.
    """

    # No enum values or validation constraints for this model.
    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        # Types accepted for unmodeled (additional) properties.
        lazy_import()  # deferred to avoid circular model imports
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False

    _property
    def openapi_types():
        # Attribute name -> tuple of accepted types.
        lazy_import()
        return {'active': (bool,), 'comment': (str, none_type), 'deployed': (bool,), 'locked': (bool,), 'number': (int,), 'staging': (bool,), 'testing': (bool,), 'created_at': (datetime, none_type), 'deleted_at': (datetime, none_type), 'updated_at': (datetime, none_type), 'service_id': (str,)}

    _property
    def discriminator():
        # This model is not polymorphic.
        return None

    # Python attribute name -> JSON key (identical here).
    attribute_map = {'active': 'active', 'comment': 'comment', 'deployed': 'deployed', 'locked': 'locked', 'number': 'number', 'staging': 'staging', 'testing': 'testing', 'created_at': 'created_at', 'deleted_at': 'deleted_at', 'updated_at': 'updated_at', 'service_id': 'service_id'}
    # Server-controlled fields: rejected in __init__, allowed from API data.
    read_only_vars = {'number', 'created_at', 'deleted_at', 'updated_at', 'service_id'}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from raw API data (read-only attributes allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys unknown to the composed schemas.
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes every instance must carry.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Build an instance from user input; setting read-only attrs raises."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')

    _property
    def _composed_schemas():
        # allOf composition of the three source schemas.
        lazy_import()
        return {'anyOf': [], 'allOf': [SchemasVersion, Timestamps, VersionResponseAllOf], 'oneOf': []}
class ExamplePluginWithCli():
    """Example plugin exposing both a MarkdownIt hook and CLI option hooks."""

    # NOTE(review): these methods take no `self` and carry no @staticmethod
    # decorator in this copy -- the decorators were likely lost; confirm.
    def update_mdit(mdit: MarkdownIt):
        # Enable table parsing on the shared MarkdownIt instance.
        mdit.enable('table')

    def add_cli_options(parser: argparse.ArgumentParser) -> None:
        # Register this plugin's command-line flags.
        parser.add_argument('--o1', type=str)
        parser.add_argument('--o2', type=str, default='a')
        # --o3 is stored under the attribute name 'arg_name'.
        parser.add_argument('--o3', dest='arg_name', type=int)
def test_bucket_keys_get_deserialized(aggs_data, aggs_search):
    """Aggregation bucket keys deserialize to `date` objects once the search
    is bound to a Document whose field is Date-typed."""
    class Commit(Document):
        info = Object(properties={'committed_date': Date()})

        class Index():
            name = 'test-commit'
    # Bind the document type so field deserialization rules apply to buckets.
    aggs_search = aggs_search.doc_type(Commit)
    agg_response = response.Response(aggs_search, aggs_data)
    per_month = agg_response.aggregations.per_month
    for b in per_month:
        assert isinstance(b.key, date)
def test_data_integrity_test_duplicated_columns() -> None:
    """TestNumberOfDuplicatedColumns: two identical columns count as one duplicate."""
    df = pd.DataFrame({'numerical_feature': ['1', '1', '1', '1'], 'target': ['1', '1', '1', '1']})

    # Default condition passes on this frame.
    suite = TestSuite(tests=[TestNumberOfDuplicatedColumns()])
    suite.run(current_data=df, reference_data=df, column_mapping=ColumnMapping())
    assert suite

    # Requiring at least 5 duplicated columns must fail.
    suite = TestSuite(tests=[TestNumberOfDuplicatedColumns(gte=5)])
    suite.run(current_data=df, reference_data=None, column_mapping=ColumnMapping())
    assert not suite

    # Exactly one duplicated column matches; rendering/serialization works too.
    suite = TestSuite(tests=[TestNumberOfDuplicatedColumns(eq=1)])
    suite.run(current_data=df, reference_data=None, column_mapping=ColumnMapping())
    assert suite
    assert suite.show()
    assert suite.json()
class MenuLink(BaseMenu):
    """Menu entry pointing at either a fixed URL or a named endpoint."""

    def __init__(self, name, url=None, endpoint=None, category=None, class_name=None, icon_type=None, icon_value=None, target=None):
        super(MenuLink, self).__init__(name, class_name, icon_type, icon_value, target)
        self.url = url
        self.endpoint = endpoint
        self.category = category

    def get_url(self):
        """Prefer the static URL (when truthy); otherwise resolve the endpoint."""
        if self.url:
            return self.url
        return url_for(self.endpoint)
def sources(args, config):
    """Download the lookaside-cached sources listed in the dist-git sources file.

    Derives repo name/namespace from the dist-git URL, resolves the current
    git refspec, then downloads and checksums every entry in the sources file.
    """
    parsed_url, distgit_config = get_distgit_config(config, args.forked_from)

    # Derive <namespace components>/<repo> from the URL path; the namespace
    # list is stored in reversed order.
    path_parts = parsed_url.path.strip('/').split('/')
    repo_name = path_parts.pop()
    if repo_name.endswith('.git'):
        repo_name = repo_name[:(- 4)]
    namespace = list(reversed(path_parts))

    # Resolve the current branch; fall back to the commit hash on detached HEAD.
    refspec = check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode('utf-8').strip()
    if refspec == 'HEAD':
        refspec = check_output(['git', 'rev-parse', 'HEAD']).decode('utf-8').strip()

    specfile = get_spec(distgit_config)
    name = specfile[:(- 5)]  # drop the '.spec' suffix
    sources_file = distgit_config['sources_file'].format(name=name)
    if not os.path.exists(sources_file):
        logging.info("'%s' file not found, download skipped", sources_file)
        return

    logging.info('Reading sources specification file: %s', sources_file)
    with open(sources_file, 'r') as sfd:
        for line in sfd:
            fields = line.split()
            if not fields:
                continue
            kwargs = {'name': repo_name, 'refspec': refspec, 'namespace': namespace}
            if len(fields) == 2:
                # Old format: "<hash> <filename>" using the configured default digest.
                kwargs['hashtype'] = distgit_config['default_sum'].lower()
                kwargs['hash'] = fields[0]
                kwargs['filename'] = fields[1]
            elif len(fields) == 4:
                # New format: "<HASHTYPE> (<filename>) = <hash>".
                kwargs['hashtype'] = fields[0].lower()
                kwargs['hash'] = fields[3]
                kwargs['filename'] = os.path.basename(fields[1]).strip('()')
            else:
                raise RuntimeError('Weird sources line: {0}'.format(line))
            url_file = '/'.join([distgit_config['lookaside_location'], distgit_config['lookaside_uri_pattern'].format(**kwargs)])
            download_file_and_check(url_file, kwargs, distgit_config)
class StringChannelInfo(ChannelInfo):
    """Channel metadata whose payload is a 1-D collection of column names."""

    # Accepts either a plain list of str or a (1-D) numpy array.
    column_names: Union[(List[str], np.array)]

    def __init__(self, column_names: Union[(List[str], np.array)]) -> None:
        """Validate and store `column_names` as a fixed-width 'U32' array.

        Raises:
            ValueError: if `column_names` is not one-dimensional.
        """
        # isinstance (rather than `type(...) is list`) also accepts list
        # subclasses, which the original rejected by accident.
        if isinstance(column_names, list):
            column_names = np.array(column_names, dtype='U32')
        if (not is_one_axis_array(column_names)):
            raise ValueError('column_names must be 1D')
        super().__init__('column_names', len(column_names))
        self.check_and_assign('column_names', column_names, 'U32')
class OptionPlotoptionsOrganizationSonificationTracksMappingPitch(Options):
    """Pitch mapping options for sonification tracks (config proxy).

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the @property / @<name>.setter decorators appear to be missing from
    this copy -- confirm against the code generator's output.
    """

    def mapFunction(self):
        # Mapping curve function; None uses the default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Data property the pitch is mapped from; defaults to 'y'.
        return self._config_get('y')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Highest note of the mapped range; defaults to 'c6'.
        return self._config_get('c6')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        # Lowest note of the mapped range; defaults to 'c2'.
        return self._config_get('c2')

    def min(self, text: str):
        self._config(text, js_type=False)

    def scale(self):
        # Musical scale to constrain notes to; None means unconstrained.
        return self._config_get(None)

    def scale(self, value: Any):
        self._config(value, js_type=False)

    def within(self):
        # Reference frame used for min/max; defaults to 'yAxis'.
        return self._config_get('yAxis')

    def within(self, text: str):
        self._config(text, js_type=False)
def getSpecialCaseHandler(specialcase, joburl=None, netloc=None):
    """Look up a special-case handler, first by network location, then by full job URL.

    Args:
        specialcase: mapping of netloc or job URL to handler.
        joburl: full job URL (netloc is derived from it when not given).
        netloc: explicit network location, takes precedence over joburl.

    Returns:
        The matching handler, or None when neither key is present.

    Raises:
        RuntimeError: when neither joburl nor netloc is supplied.
    """
    if not (netloc or joburl):
        raise RuntimeError('You need to pass either joburl or netloc!')
    if not netloc:
        netloc = urllib.parse.urlsplit(joburl).netloc
    # netloc match wins over a full-URL match.
    for key in (netloc, joburl):
        if key in specialcase:
            return specialcase[key]
    return None
def abi_ens_resolver(w3: 'Web3', type_str: TypeStr, val: Any) -> Tuple[(TypeStr, Any)]:
    """ABI normalizer: resolve ENS names passed where an 'address' is expected.

    Non-address types and values that are not ENS names pass through unchanged.

    Raises:
        InvalidAddress: when no web3 connection / ENS is available, or when a
            non-static ENS instance cannot resolve the name.
    """
    if ((type_str == 'address') and is_ens_name(val)):
        if (w3 is None):
            raise InvalidAddress(f'Could not look up name {val!r} because no web3 connection available')
        _ens = cast(ENS, w3.ens)
        if (_ens is None):
            raise InvalidAddress(f'Could not look up name {val!r} because ENS is set to None')
        else:
            try:
                return (type_str, validate_name_has_address(_ens, val))
            except NameNotFound as e:
                # With a real (non-static) ENS, a missing name is an address
                # error for the caller; a StaticENS propagates NameNotFound.
                if (not isinstance(_ens, StaticENS)):
                    raise InvalidAddress(f'{e}')
                raise e
    else:
        return (type_str, val)
def test_fixed_tuple_array(accounts):
    """Nested fixed-size struct arrays returned from a contract call decode
    into the expected nested tuples/lists."""
    code = '\npragma solidity ^0.6.0;\npragma experimental ABIEncoderV2;\n\ncontract Test {\n\nstruct Foo { uint256 a; string b; }\n\nFoo[2][2] bar;\n\nfunction foo() public returns (Foo[2][2] memory, Foo[2] memory) {\n    bar[0][0].a = 42;\n    bar[0][0].b = "hello";\n    bar[1][1].a = 69;\n    return (bar, bar[1]);\n}\n\n}\n'
    contract = compile_source(code).Test.deploy({'from': accounts[0]})
    # First element is the 2x2 matrix, second is its last row.
    assert (contract.foo.call() == [([(42, 'hello'), (0, '')], [(0, ''), (69, '')]), [(0, ''), (69, '')]])
class TestTaskExecutorBase(unittest.TestCase):
    """Command-processing tests for TaskExecutorBase against a throwaway SQLite DB."""

    def setUp(self) -> None:
        self.file = 'test.db'
        self.db_uri = 'sqlite:///{}'.format(self.file)
        # Start each test from a clean database file and session.
        self._delete_db_file()
        init_db(self.db_uri)
        session.prepare_session(db_uri=self.db_uri)
        self.executor = MockTaskExecutor()
        self.executor.start()

    def tearDown(self) -> None:
        self._delete_db_file()
        self.executor.stop()
        session.clear_engine_and_session()

    def test_executor_not_started(self):
        # Scheduling on an executor that was never started must raise.
        command = TaskScheduleCommand(action=TaskAction.START, new_task_execution=TaskExecutionKey(1, 'task', 1))
        with self.assertRaises(AIFlowException):
            executor = MockTaskExecutor()
            executor.schedule_task(command)

    def test__process_command(self):
        try:
            command_processor = StoppableThread(target=self.executor._process_command)
            command_processor.start()
            key = TaskExecutionKey(1, 'task', 1)
            command = TaskScheduleCommand(action=TaskAction.START, new_task_execution=key)
            self.executor.command_queue.put(command)
            command = TaskScheduleCommand(action=TaskAction.STOP, current_task_execution=key, new_task_execution=None)
            self.executor.command_queue.put(command)
            # Give the worker thread a moment to drain the queue.
            time.sleep(0.5)
            self.assertEqual(str(key), str(self.executor.started_task[0]))
            self.assertEqual(str(key), str(self.executor.stopped_task[0]))
        finally:
            command_processor.stop()

    # NOTE(review): the bare string below looks like a mangled @mock.patch(...)
    # decorator (the method receives `mock_status`) -- confirm.
    ('ai_flow.task_executor.common.task_executor_base.TaskExecutorBase._send_task_status_change')
    def test_process_restart_command(self, mock_status):
        try:
            command_processor = StoppableThread(target=self.executor._process_command)
            command_processor.start()
            key = TaskExecutionKey(1, 'task', 1)
            key_new = TaskExecutionKey(1, 'task', 2)
            command = TaskScheduleCommand(action=TaskAction.RESTART, current_task_execution=key, new_task_execution=key_new)
            self.executor.command_queue.put(command)
            time.sleep(0.5)
            # RESTART stops the old execution and starts the new one.
            self.assertEqual(str(key_new), str(self.executor.started_task[0]))
            self.assertEqual(str(key), str(self.executor.stopped_task[0]))
        finally:
            command_processor.stop()

    def _delete_db_file(self):
        # Remove the SQLite file so each test starts with a fresh schema.
        if os.path.exists(self.file):
            os.remove(self.file)
class ExpiresAt(BaseModel):
    """Model mixin adding a timezone-aware, indexed, non-null `expires_at` column."""

    # NOTE(review): the bare `_attr` below looks like a mangled decorator
    # (probably @declared_attr) -- confirm against the original source.
    _attr
    def expires_at(cls) -> MappedColumn[TIMESTAMPAware]:
        try:
            # If the model declares __lifetime_seconds__, default the column
            # to now + that lifetime, computed at insert time.
            default_lifetime_seconds = getattr(cls, '__lifetime_seconds__')
            default = functools.partial(_get_default_expires_at, timedelta_seconds=default_lifetime_seconds)
        except AttributeError:
            # No declared lifetime: the caller must supply expires_at.
            default = None
        return mapped_column(TIMESTAMPAware(timezone=True), nullable=False, index=True, default=default)
# NOTE(review): `('requests.post')` looks like a mangled @mock.patch
# decorator (the test receives `post`) -- confirm against the source.
('requests.post')
def test_notify_start_failed(post, url, service_match, task_definition):
    """notify_start raises SlackException when the webhook POST is unsuccessful."""
    with raises(SlackException):
        # Make the mocked POST report a failure response.
        post.return_value = NotifyResponseUnsuccessfulMock()
        slack = SlackNotification(url, service_match)
        slack.notify_start('my-cluster', 'my-tag', task_definition, 'my-comment', 'my-user', 'my-service', 'my-rule')
def distribute_numbers(min_value, max_value, num_elements=1, min_distance=0, max_distance=None, base=None):
    """Randomly place up to `num_elements` numbers inside [min_value, max_value].

    New values keep at least `min_distance` (and, within a chosen gap, at most
    roughly `max_distance`) away from each other and from any pre-existing
    `base` values.

    Args:
        min_value: lower bound of the allowed range.
        max_value: upper bound of the allowed range.
        num_elements: how many new numbers to draw.
        min_distance: minimum spacing between any two numbers.
        max_distance: optional cap on the spacing used within a chosen gap.
        base: optional iterable of already-placed numbers to avoid
            (replaces the original mutable-default `base=[]`).

    Returns:
        Sorted list of the newly drawn numbers; may be shorter than
        `num_elements` when no room is left.

    Raises:
        ValueError: if the value range or the distance range is inverted.
    """
    if (max_value < min_value):
        raise ValueError('invalid value range.')
    if (max_distance and (max_distance < min_distance)):
        raise ValueError('invalid distance range.')
    numbers = sorted(base) if base is not None else []
    results = []
    for _ in range(num_elements):
        # Collect every gap between existing numbers (plus the two range
        # edges) still wide enough to accept a new value.
        allowed_range = []
        for i in range((- 1), len(numbers)):
            if (i == (- 1)):
                min_allowed_value = min_value
            else:
                min_allowed_value = max((numbers[i] + min_distance), min_value)
            if (i == (len(numbers) - 1)):
                max_allowed_value = max_value
            else:
                max_allowed_value = min((numbers[(i + 1)] - min_distance), max_value)
            if (min_allowed_value < max_allowed_value):
                allowed_range.append((min_allowed_value, max_allowed_value))
        if (not allowed_range):
            break  # no room left; return what we have so far
        # Weight each gap by how many more numbers could fit into it.
        # Bug fix: the original divided by min_distance unconditionally and
        # crashed with ZeroDivisionError under the default min_distance=0;
        # fall back to uniform gap selection in that case (and when every
        # weight is zero, which would make random.choices raise ValueError).
        if min_distance > 0:
            weights = [min(int(((hi - lo) // min_distance)), num_elements) for (lo, hi) in allowed_range]
            if not any(weights):
                weights = None
        else:
            weights = None
        r = random.choices(allowed_range, k=1, weights=weights)[0]
        d = (r[1] - r[0])
        # Keep min_distance clearance from the range edge as well.
        min_v = ((r[0] + min_distance) if (r[0] == min_value) else r[0])
        max_v = r[1]
        if (max_distance and (d > max_distance)):
            value = random.uniform(min_v, ((r[0] + max_distance) - min_distance))
        else:
            value = random.uniform(min_v, max_v)
        numbers = sorted((numbers + [value]))
        results.append(value)
    return sorted(results)
# NOTE(review): `_action_type(...)` looks like a mangled registration
# decorator (e.g. @OFPAction.register_action_type) -- confirm.
_action_type(ofproto.OFPAT_OUTPUT, ofproto.OFP_ACTION_OUTPUT_SIZE)
class OFPActionOutput(OFPAction):
    """OpenFlow OFPAT_OUTPUT action: forward the packet to `port`, sending at
    most `max_len` bytes when the port is the controller."""

    def __init__(self, port, max_len=65509):
        super(OFPActionOutput, self).__init__()
        self.port = port
        self.max_len = max_len

    # NOTE(review): takes `cls` but carries no @classmethod decorator in this
    # copy -- the decorator was likely lost; confirm.
    def parser(cls, buf, offset):
        """Deserialize an output action from `buf` at `offset`."""
        (type_, len_, port, max_len) = struct.unpack_from(ofproto.OFP_ACTION_OUTPUT_PACK_STR, buf, offset)
        assert (type_ == ofproto.OFPAT_OUTPUT)
        assert (len_ == ofproto.OFP_ACTION_OUTPUT_SIZE)
        return cls(port, max_len)

    def serialize(self, buf, offset):
        """Pack this action into `buf` at `offset`."""
        msg_pack_into(ofproto.OFP_ACTION_OUTPUT_PACK_STR, buf, offset, self.type, self.len, self.port, self.max_len)
def check_exists_ask_overwrite(arg_value, overwrite):
    """Return True when writing to `arg_value` is OK.

    If the path exists and `overwrite` is False, ask the user interactively;
    any answer other than 'y'/'Y' raises RuntimeError.

    Raises:
        RuntimeError: when the file exists and the user declines to overwrite.
    """
    if (os.path.exists(arg_value) and (not overwrite)):
        msg = ('File %s already exists, would you like to overwrite the existing file? (y/n)' % arg_value)
        if (input(msg).lower() == 'y'):
            return True
        # Raise directly instead of the original raise-OSError-then-catch-it
        # dance, which used an exception purely for control flow.
        raise RuntimeError(('File %s already exists, please choose a different name.' % arg_value))
    return True
# NOTE(review): the parenthesized keyword expression below is not valid
# Python as written; it looks like a mangled API doc decorator
# (e.g. @doc(tags=..., description=...)) -- confirm against the source.
(tags=['candidate'], description=docs.CANDIDATE_SEARCH)
class CandidateSearch(CandidateList):
    """Candidate search endpoint: same listing as CandidateList, with
    search-specific schemas and eager-loaded relations."""
    schema = schemas.CandidateSearchSchema
    page_schema = schemas.CandidateSearchPageSchema
    # Eager-load flags and principal committees to avoid N+1 queries.
    query_options = [sa.orm.joinedload(models.Candidate.flags), sa.orm.subqueryload(models.Candidate.principal_committees)]
# NOTE(review): the giant parenthesized tuple below looks like a mangled
# @pytest.mark.parametrize decorator; it also contains a raw line break
# inside the `match=` string -- confirm both against the original source.
('config_name,overrides,expected', [param('include_nested_group', [], DefaultsTreeNode(node=ConfigDefault(path='include_nested_group'), children=[DefaultsTreeNode(node=GroupDefault(group='group1', value='group_item1'), children=[GroupDefault(group='group2', value='file1'), ConfigDefault(path='_self_')]), ConfigDefault(path='_self_')]), id='delete:include_nested_group:baseline'), param('include_nested_group', ['~group1'], DefaultsTreeNode(node=ConfigDefault(path='include_nested_group'), children=[GroupDefault(group='group1', value='group_item1', deleted=True), ConfigDefault(path='_self_')]), id='delete:include_nested_group:group1'), param('include_nested_group', ['~group1/group2'], DefaultsTreeNode(node=ConfigDefault(path='include_nested_group'), children=[DefaultsTreeNode(node=GroupDefault(group='group1', value='group_item1'), children=[GroupDefault(group='group2', value='file1', deleted=True), ConfigDefault(path='_self_')]), ConfigDefault(path='_self_')]), id='delete:include_nested_group:group1/group2'), param('include_nested_group', ['~group1=group_item1'], DefaultsTreeNode(node=ConfigDefault(path='include_nested_group'), children=[GroupDefault(group='group1', value='group_item1', deleted=True), ConfigDefault(path='_self_')]), id='delete:include_nested_group:group1=group_item1'), param('include_nested_group', ['~group1=wrong'], raises(ConfigCompositionException, match="Could not delete 'group1=wrong'. 
No match in the defaults list"), id='delete:include_nested_group:group1=wrong'), param('two_group_defaults_different_pkgs', [], DefaultsTreeNode(node=ConfigDefault(path='two_group_defaults_different_pkgs'), children=[GroupDefault(group='group1', value='file1', package='pkg1'), GroupDefault(group='group1', value='file1', package='pkg2'), ConfigDefault(path='_self_')]), id='delete:two_group_defaults_different_pkgs:baseline'), param('two_group_defaults_different_pkgs', ['~'], DefaultsTreeNode(node=ConfigDefault(path='two_group_defaults_different_pkgs'), children=[GroupDefault(group='group1', value='file1', package='pkg1', deleted=True), GroupDefault(group='group1', value='file1', package='pkg2'), ConfigDefault(path='_self_')]), id='delete:two_group_defaults_different_pkgs:delete_pkg1'), param('two_group_defaults_different_pkgs', ['~'], DefaultsTreeNode(node=ConfigDefault(path='two_group_defaults_different_pkgs'), children=[GroupDefault(group='group1', value='file1', package='pkg1'), GroupDefault(group='group1', value='file1', package='pkg2', deleted=True), ConfigDefault(path='_self_')]), id='delete:two_group_defaults_different_pkgs:delete_pkg1')])
def test_deletion(config_name: str, overrides: List[str], expected: DefaultsTreeNode) -> None:
    """Each case applies `~group[=value]` deletion overrides and checks the
    resulting defaults tree (or the expected composition error)."""
    _test_defaults_tree_impl(config_name=config_name, input_overrides=overrides, expected=expected)
# NOTE(review): `(EcsClient, '__init__')` looks like a mangled @patch.object
# decorator (the test receives a mocked `client`) -- confirm.
(EcsClient, '__init__')
def test_run_action_run(client, task_definition):
    """RunAction.run forwards the task definition to EcsClient.run_task and
    records one started task per returned taskArn."""
    action = RunAction(client, CLUSTER_NAME)
    client.run_task.return_value = dict(tasks=[dict(taskArn='A'), dict(taskArn='B')])
    action.run(task_definition, 2, 'test', LAUNCH_TYPE_EC2, (), (), False, None)
    client.run_task.assert_called_once_with(cluster=CLUSTER_NAME, task_definition=task_definition.family_revision, count=2, started_by='test', overrides=dict(containerOverrides=task_definition.get_overrides()), launchtype=LAUNCH_TYPE_EC2, subnets=(), security_groups=(), public_ip=False, platform_version=None)
    assert (len(action.started_tasks) == 2)
class ConfigSourceExample(ConfigSource):
    """In-memory ConfigSource used for tests: serves configs and package
    headers from hard-coded dictionaries instead of the filesystem.
    """

    def __init__(self, provider: str, path: str):
        super().__init__(provider=provider, path=path)
        # Per-config package directives, keyed by normalized config file name.
        self.headers: Dict[(str, Dict[(str, str)])] = {'package_test/explicit.yaml': {'package': 'a.b'}, 'package_test/global.yaml': {'package': '_global_'}, 'package_test/group.yaml': {'package': '_group_'}, 'package_test/group_name.yaml': {'package': 'foo._group_._name_'}, 'package_test/name.yaml': {'package': '_name_'}, 'package_test/none.yaml': {}, 'primary_config_with_non_global_package.yaml': {'package': 'foo'}, 'configs_with_defaults_list.yaml': {'package': '_global_'}, 'configs_with_defaults_list/global_package.yaml': {'package': '_global_'}, 'configs_with_defaults_list/group_package.yaml': {'package': '_group_'}}
        # Config bodies, keyed by normalized config file name.
        self.configs: Dict[(str, Dict[(str, Any)])] = {'primary_config.yaml': {'primary': True}, 'primary_config_with_non_global_package.yaml': {'primary': True}, 'config_without_group.yaml': {'group': False}, 'config_with_unicode.yaml': {'group': ''}, 'dataset/imagenet.yaml': {'name': 'imagenet', 'path': '/datasets/imagenet'}, 'dataset/cifar10.yaml': {'name': 'cifar10', 'path': '/datasets/cifar10'}, 'level1/level2/nested1.yaml': {'l1_l2_n1': True}, 'level1/level2/nested2.yaml': {'l1_l2_n2': True}, 'package_test/explicit.yaml': {'foo': 'bar'}, 'package_test/global.yaml': {'foo': 'bar'}, 'package_test/group.yaml': {'foo': 'bar'}, 'package_test/group_name.yaml': {'foo': 'bar'}, 'package_test/name.yaml': {'foo': 'bar'}, 'package_test/none.yaml': {'foo': 'bar'}, 'config_with_defaults_list.yaml': {'defaults': [{'dataset': 'imagenet'}], 'key': 'value'}, 'configs_with_defaults_list/global_package.yaml': {'defaults': [{'foo': 'bar'}], 'x': 10}, 'configs_with_defaults_list/group_package.yaml': {'defaults': [{'foo': 'bar'}], 'x': 10}}

    # NOTE(review): no 'self' parameter — presumably a stripped @staticmethod
    # decorator; confirm against the original file.
    def scheme() -> str:
        return 'example'

    def load_config(self, config_path: str, package_override: Optional[str]=None) -> ConfigResult:
        """Return the named config, raising ConfigLoadError if it is unknown."""
        name = self._normalize_file_name(config_path)
        if (name not in self.configs):
            raise ConfigLoadError(('Config not found : ' + config_path))
        # Default header: no package directive.
        res_header: Dict[(str, Optional[str])] = {'package': None}
        if (name in self.headers):
            header = self.headers[name]
            res_header['package'] = (header['package'] if ('package' in header) else None)
        cfg = OmegaConf.create(self.configs[name])
        return ConfigResult(config=cfg, path=f'{self.scheme()}://{self.path}', provider=self.provider, header=res_header)

    def available(self) -> bool:
        # Test hook: only the magic path 'valid_path' reports as available.
        return (self.path == 'valid_path')

    def is_group(self, config_path: str) -> bool:
        groups = {'', 'dataset', 'optimizer', 'level1', 'level1/level2', 'configs_with_defaults_list'}
        return (config_path in groups)

    def is_config(self, config_path: str) -> bool:
        base = {'dataset', 'dataset/imagenet', 'level1/level2/nested1', 'level1/level2/nested2', 'configs_with_defaults_list/global_package', 'configs_with_defaults_list/group_package'}
        # Both bare names and their '.yaml'-suffixed forms count as configs.
        configs = set(([x for x in base] + [f'{x}.yaml' for x in base]))
        return (config_path in configs)

    def list(self, config_path: str, results_filter: Optional[ObjectType]) -> List[str]:
        """List child groups and/or configs of *config_path*, filtered by type."""
        groups: Dict[(str, List[str])] = {'': ['dataset', 'level1', 'optimizer'], 'dataset': [], 'optimizer': [], 'level1': ['level2'], 'level1/level2': []}
        configs: Dict[(str, List[str])] = {'': ['config_without_group', 'dataset'], 'dataset': ['cifar10', 'imagenet'], 'optimizer': ['adam', 'nesterov'], 'level1': [], 'level1/level2': ['nested1', 'nested2'], 'configs_with_defaults_list': ['global_package', 'group_package']}
        if (results_filter is None):
            # No filter: merged, de-duplicated, sorted union of both kinds.
            return sorted(set((groups[config_path] + configs[config_path])))
        elif (results_filter == ObjectType.GROUP):
            return groups[config_path]
        elif (results_filter == ObjectType.CONFIG):
            return configs[config_path]
        else:
            raise ValueError()
class TestWAFCName(unittest.TestCase):
    """Detect WAF/CDN vendors by fingerprint substrings in DNS CNAME records."""

    def setUp(self):
        # CNAME fingerprint substring -> product name.
        self.waf_cname_map = {'incapdns': 'Incapsula', 'edgekey': 'Akamai', 'akamai': 'Akamai', 'edgesuite': 'Akamai', 'distil': 'Distil Networks', 'cloudfront': 'CloudFront', 'netdna-cdn': 'MaxCDN'}

    def detect_by_cname(self, cnames):
        """Return the product whose fingerprint appears in any CNAME, else None."""
        for fingerprint in self.waf_cname_map:
            matched = any(fingerprint in str(cname) for cname in cnames)
            if matched:
                return self.waf_cname_map.get(fingerprint)
        return None

    def test_akamai_detection(self):
        cname_records = {'some_akamai_dns_value': 'Akamai', 'otherkey': 'othervalue'}
        self.assertEqual('Akamai', self.detect_by_cname(cname_records))

    def test_second_akamai_detection(self):
        cname_records = {'example_edgesuite_example': 'Akamai', 'otherkey': 'othervalue'}
        self.assertEqual('Akamai', self.detect_by_cname(cname_records))

    def test_third_akamai_detection(self):
        cname_records = {'example_edgekey_example': 'Akamai', 'otherkey': 'othervalue'}
        self.assertEqual('Akamai', self.detect_by_cname(cname_records))

    def test_incapsula_detection(self):
        cname_records = {'example.incapdns.or.not': 'Incapsula', 'otherkey': 'othervalue'}
        self.assertEqual('Incapsula', self.detect_by_cname(cname_records))

    def test_distil_detection(self):
        cname_records = {'lolz.distil.kthx': 'Distil Networks', 'not': 'real'}
        self.assertEqual('Distil Networks', self.detect_by_cname(cname_records))

    def test_cloudfront_detection(self):
        cname_records = {'aws.cloudfront.is.it': 'CloudFront', 'AWS': 'CO.UK'}
        self.assertEqual('CloudFront', self.detect_by_cname(cname_records))

    def test_maxcdn_detection(self):
        cname_records = {'mycdn.netdna-cdn.godmode': 'MaxCDN', 'HAI1.2': 'IHAZAVAR'}
        self.assertEqual('MaxCDN', self.detect_by_cname(cname_records))
def ddg_search(query: str, numResults: int, region: str=None, time_period=None):
    """Run a DuckDuckGo search and return a DataFrame of enriched results.

    Columns: title, link, text, query, text_length, ada_search (embedding).
    Falls back to ``failed_ddg_search(query)`` on any API error or when the
    search yields no results.
    """
    try:
        results = ddg(query, region, 'on', time_period, numResults)
    except Exception as e:
        # Surface the failure for debugging, then use the fallback path.
        print(e)
        return failed_ddg_search(query)
    # `ddg` returns None for an empty result set; use identity check, not ==.
    if results is None:
        return failed_ddg_search(query)
    st.write(results)
    results = pd.DataFrame(results)
    results.columns = ['title', 'link', 'text']
    # Broadcast the scalar query to every row (no need for a per-row list).
    results['query'] = query
    results['text_length'] = results['text'].str.len()
    # Pass the function directly instead of wrapping it in a lambda.
    results['ada_search'] = results['text'].apply(create_embedding)
    return results
# NOTE(review): the tuples below look like the argument lists of stripped
# @patch('…COMMAND_DEFAULT_CLASS', MuxCommand)-style decorators, and
# `_settings(**DEFAULT_SETTINGS)` like a stripped settings-override decorator
# applied to the class that follows — confirm against the original file; as
# written these are no-op expression statements.
('evennia.commands.account.COMMAND_DEFAULT_CLASS', MuxCommand)
('evennia.commands.admin.COMMAND_DEFAULT_CLASS', MuxCommand)
('evennia.commands.batchprocess.COMMAND_DEFAULT_CLASS', MuxCommand)
('evennia.commands.building.COMMAND_DEFAULT_CLASS', MuxCommand)
('evennia.commands.comms.COMMAND_DEFAULT_CLASS', MuxCommand)
('evennia.commands.general.COMMAND_DEFAULT_CLASS', MuxCommand)
('evennia.commands.help.COMMAND_DEFAULT_CLASS', MuxCommand)
('evennia.commands.syscommands.COMMAND_DEFAULT_CLASS', MuxCommand)
('evennia.commands.system.COMMAND_DEFAULT_CLASS', MuxCommand)
('evennia.commands.unloggedin.COMMAND_DEFAULT_CLASS', MuxCommand)
_settings(**DEFAULT_SETTINGS)
class BaseEvenniaCommandTest(BaseEvenniaTest, EvenniaCommandTestMixin): |
def test_graph_with_input_arguments_more_variable_types_minimization(graph_with_input_arguments_different_variable_types_2, variable):
    """Out-of-SSA with variable-minimization on a CFG whose arguments have
    mixed int32/int64 types: verify the rewritten instructions of every node,
    the single inserted copy node, and the full edge structure.
    """
    (nodes, cfg) = graph_with_input_arguments_different_variable_types_2
    run_out_of_ssa(cfg, SSAOptions.minimization, arguments=[(argument1 := Variable('arg1', Integer.int32_t())), (argument2 := Variable('arg2', Integer.int32_t()))])
    # Out-of-SSA inserts exactly one new node (a copy on the false edge).
    new_node = [node for node in cfg.nodes if (node not in set(nodes))][0]
    variable[3] = Variable('var_3', Integer.int64_t())
    assert ((nodes[0].instructions == [Branch(Condition(OperationType.less, [argument2, argument1]))]) and (nodes[1].instructions == []) and (nodes[2].instructions == [Branch(Condition(OperationType.greater, [argument1, BinaryOperation(OperationType.plus, [argument2, argument2])]))]) and (nodes[3].instructions == [Assignment(argument2, BinaryOperation(OperationType.minus, [argument1, argument2]))]) and (nodes[4].instructions == [Assignment(argument1, BinaryOperation(OperationType.plus, [BinaryOperation(OperationType.minus, [argument1, argument2]), Constant(1)])), Assignment(variable[0], argument1), Assignment(variable[1], Constant(1)), Assignment(argument1, Constant(1))]) and (nodes[5].instructions == [Branch(Condition(OperationType.less_or_equal, [argument1, argument2]))]) and (nodes[6].instructions == [Assignment(variable[2], BinaryOperation(OperationType.plus, [argument1, Constant(1)])), Assignment(variable[3], BinaryOperation(OperationType.divide, [BinaryOperation(OperationType.multiply, [UnaryOperation(OperationType.cast, [variable[0]], vartype=Integer.int64_t()), variable[1]]), argument1])), Assignment(variable[1], variable[3]), Assignment(argument1, variable[2])]) and (nodes[7].instructions == [Return([variable[1]])]) and (new_node.instructions == [Assignment(argument2, argument1)]) and (len(cfg) == 9))
    assert ((len(cfg.edges) == 11) and isinstance(cfg.get_edge(nodes[0], nodes[1]), TrueCase) and isinstance(cfg.get_edge(nodes[0], new_node), FalseCase) and isinstance(cfg.get_edge(new_node, nodes[2]), UnconditionalEdge) and isinstance(cfg.get_edge(nodes[1], nodes[2]), UnconditionalEdge) and isinstance(cfg.get_edge(nodes[2], nodes[3]), FalseCase) and isinstance(cfg.get_edge(nodes[2], nodes[4]), TrueCase) and isinstance(cfg.get_edge(nodes[3], nodes[4]), UnconditionalEdge) and isinstance(cfg.get_edge(nodes[4], nodes[5]), UnconditionalEdge) and isinstance(cfg.get_edge(nodes[5], nodes[6]), TrueCase) and isinstance(cfg.get_edge(nodes[5], nodes[7]), FalseCase) and isinstance(cfg.get_edge(nodes[6], nodes[5]), UnconditionalEdge))
def test_group_post_access_deny(db, client, user, jwt):
    """A verified user who is only a moderator of an event must not be able
    to create a group containing that event — the API must deny with an
    'Event owner access required' error.
    """
    user.is_verified = True
    event = EventFactoryBasic()
    (role, _) = get_or_create(Role, name='moderator', title_name='Moderator')
    UsersEventsRoles(user=user, event=event, role=role)
    db.session.commit()
    data = json.dumps({'data': {'type': 'group', 'relationships': {'events': {'data': [{'id': str(event.id), 'type': 'event'}]}}, 'attributes': {'name': 'eventgp2'}}})
    # Plain string literal: the original used an f-string with no placeholders.
    response = client.post('/v1/groups', content_type='application/vnd.api+json', headers=jwt, data=data)
    assert (json.loads(response.data)['errors'][0]['detail'] == 'Event owner access required')
def fetch_consumption(zone_key: str='MD', session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> (list[dict] | dict):
    """Fetch electricity consumption for Moldova.

    With a ``target_datetime`` this returns a list of datapoints built from
    the archive; otherwise it returns a single live datapoint from the
    realtime feed.
    """
    if target_datetime:
        # Historical request: one datapoint per archive entry.
        archive_data = get_archive_data(session, target_datetime)
        return [
            template_consumption_response(zone_key, entry.datetime, entry.consumption)
            for entry in archive_data
        ]
    # Live request: read the current consumption field and stamp it "now".
    field_values = get_data(session)
    consumption = field_values[other_fields[0]['index']]
    dt = datetime.now(tz=TZ)
    return template_consumption_response(zone_key, dt, consumption)
class IDBTest(unittest.TestCase):
    """Unit tests for the iOS IDB wrapper (ios-deploy / idevice tools)."""

    def setUp(self):
        pass

    def test_set_bundle_id(self):
        """setBundleId stores the bundle id on the instance."""
        idb = IDB()
        idb.setBundleId('TEST')
        self.assertEqual(idb.bundle_id, 'TEST')

    def _util_base_run(self, *args, **kwargs):
        # Stand-in for PlatformUtilBase.run: verify the base command.
        self.assertEqual(args[0], ['ios-deploy'])

    def test_run(self):
        idb = IDB()
        with patch('platforms.platform_util_base.PlatformUtilBase.run', side_effect=self._util_base_run):
            idb.run()

    def _ios_run_for_push(self, *args, **kwargs):
        # Stand-in for IDB.run: echo the arguments so push() output is testable.
        return args

    def test_push(self):
        """push() forwards src/target as --upload/--to arguments."""
        src = os.path.abspath(os.path.join(BENCHMARK_DIR, os.pardir, 'specifications/models/caffe2/squeezenet/squeezenet.json'))
        tgt = 'TEST_TGT'
        idb = IDB()
        with patch('platforms.ios.idb.IDB.run', side_effect=self._ios_run_for_push):
            push_res = idb.push(src, tgt)
        self.assertEqual(push_res, ('--upload', src, '--to', tgt))

    def _ios_run_for_reboot(self, *args, **kwargs):
        # Reboot issues idevicepair/idevicediagnostics with '-u <device>' and
        # either 'pair' or 'restart'.
        self.assertTrue(((args[0] == 'idevicepair') or (args[0] == 'idevicediagnostics')))
        self.assertEqual(args[1], '-u')
        self.assertEqual(args[2], 'TEST_DEVICE')
        self.assertTrue(((args[3] == 'pair') or (args[3] == 'restart')))

    def test_reboot(self):
        idb = IDB(device='TEST_DEVICE')
        with patch('platforms.platform_util_base.PlatformUtilBase.run', side_effect=self._ios_run_for_reboot):
            push_res = idb.reboot()
        self.assertTrue(push_res)
def start_download(download_job):
    """Mark *download_job* as running, zero its progress counters, persist it,
    and log the start.

    Returns the job's file name for the caller to continue processing.
    """
    download_job.job_status_id = JOB_STATUS_DICT['running']
    # Reset all progress metrics before any rows are written.
    for counter_field in ('number_of_rows', 'number_of_columns', 'file_size'):
        setattr(download_job, counter_field, 0)
    download_job.save()
    write_to_log(message=f'Starting to process DownloadJob {download_job.download_job_id}', download_job=download_job)
    return download_job.file_name
def fetch_contract_parent_award_details(parent_piid: str, parent_fpds_agency: str) -> Optional[OrderedDict]:
    """Look up the parent IDV award matching the given PIID/agency pair and
    return its details, or None when no parent award exists.
    """
    # Missing identifier segments are encoded as the literal string "NONE".
    piid_segment = parent_piid or 'NONE'
    agency_segment = parent_fpds_agency or 'NONE'
    parent_guai = 'CONT_IDV_{}_{}'.format(piid_segment, agency_segment)
    parent_award_ids = (
        ParentAward.objects
        .filter(generated_unique_award_id=parent_guai)
        .annotate(parent_award_award_id=F('award_id'), parent_award_guai=F('generated_unique_award_id'))
        .values('parent_award_award_id', 'parent_award_guai')
        .first()
    )
    return _fetch_parent_award_details(parent_award_ids)
# NOTE(review): the lines below look like the argument lists of stripped click
# decorators — @click.command('fetch'), @click.option('--recursive', ...),
# @click.argument(...) for the URI and download path, and @click.pass_context
# (the trailing `_context`) — confirm against the original file.
('fetch')
('--recursive', '-r', is_flag=True, help='Fetch recursively, all variables in the URI. This is not needed for directrories as they are automatically recursively downloaded.')
('flyte-data-uri', type=str, required=True, metavar='FLYTE-DATA-URI (format flyte://...)')
('download-to', type=click.Path(), required=False, default=None, metavar='DOWNLOAD-TO Local path (optional)')
_context
def fetch(ctx: click.Context, recursive: bool, flyte_data_uri: str, download_to: typing.Optional[str]=None):
    """Fetch a flyte:// data URI, pretty-print it, and optionally download it."""
    remote: FlyteRemote = get_and_save_remote_with_click_context(ctx, project='flytesnacks', domain='development')
    click.secho(f'Fetching data from {flyte_data_uri}...', dim=True)
    data = remote.get(flyte_data_uri)
    # Literals and literal maps are converted to a readable representation;
    # anything else is rendered as-is.
    if isinstance(data, Literal):
        p = literal_string_repr(data)
    elif isinstance(data, LiteralsResolver):
        p = literal_map_string_repr(data.literals)
    else:
        p = data
    pretty = Pretty(p)
    panel = Panel(pretty)
    print(panel)
    if download_to:
        remote.download(data, download_to, recursive=recursive)
class TestUniquenessValidation(TestCase):
    """Tests for UniqueValidator behaviour on single-field uniqueness."""

    def setUp(self):
        self.instance = UniquenessModel.objects.create(username='existing')

    def test_repr(self):
        """The serializer repr shows the auto-added UniqueValidator."""
        serializer = UniquenessSerializer()
        expected = dedent("\n            UniquenessSerializer():\n                id = IntegerField(label='ID', read_only=True)\n                username = CharField(max_length=100, validators=[<UniqueValidator(queryset=UniquenessModel.objects.all())>])\n        ")
        assert (repr(serializer) == expected)

    def test_is_not_unique(self):
        data = {'username': 'existing'}
        serializer = UniquenessSerializer(data=data)
        assert (not serializer.is_valid())
        assert (serializer.errors == {'username': ['uniqueness model with this username already exists.']})

    def test_relation_is_not_unique(self):
        RelatedModel.objects.create(user=self.instance)
        data = {'user': self.instance.pk}
        serializer = RelatedModelUserSerializer(data=data)
        assert (not serializer.is_valid())
        assert (serializer.errors == {'user': ['related model with this user already exists.']})

    def test_is_unique(self):
        data = {'username': 'other'}
        serializer = UniquenessSerializer(data=data)
        assert serializer.is_valid()
        assert (serializer.validated_data == {'username': 'other'})

    def test_updated_instance_excluded(self):
        # Updating an instance must not trip over its own existing value.
        data = {'username': 'existing'}
        serializer = UniquenessSerializer(self.instance, data=data)
        assert serializer.is_valid()
        assert (serializer.validated_data == {'username': 'existing'})

    def test_doesnt_pollute_model(self):
        """Serializing must not leave validators behind on the model field."""
        instance = AnotherUniquenessModel.objects.create(code='100')
        serializer = AnotherUniquenessSerializer(instance)
        assert (AnotherUniquenessModel._meta.get_field('code').validators == [])
        # Force serialization, then re-check the model field is untouched.
        serializer.data
        assert (AnotherUniquenessModel._meta.get_field('code').validators == [])

    def test_related_model_is_unique(self):
        data = {'username': 'Existing', 'email': 'new-'}
        rs = RelatedModelSerializer(data=data)
        assert (not rs.is_valid())
        assert (rs.errors == {'username': ['This field must be unique.']})
        data = {'username': 'new-username', 'email': 'new-'}
        rs = RelatedModelSerializer(data=data)
        assert rs.is_valid()

    def test_value_error_treated_as_not_unique(self):
        # A value that cannot be coerced ('abc' for an integer) must not make
        # the uniqueness check itself fail validation.
        serializer = UniquenessIntegerSerializer(data={'integer': 'abc'})
        assert serializer.is_valid()
def insert_diagnostic_report(doc, patient, sample_collection=None):
    """Create and persist a Diagnostic Report linked to *doc* for *patient*."""
    report = frappe.new_doc('Diagnostic Report')
    report_fields = {
        'company': doc.company,
        'patient': patient,
        'ref_doctype': doc.doctype,
        'docname': doc.name,
        'practitioner': doc.ref_practitioner,
        'sample_collection': sample_collection,
    }
    for field_name, value in report_fields.items():
        setattr(report, field_name, value)
    # Save without user permission checks, as in the original behaviour.
    report.save(ignore_permissions=True)
class OptionSeriesDependencywheelDragdropDraghandle(Options):
    """Drag-handle options for dependency-wheel series drag/drop.

    NOTE(review): each option appears twice — a zero-argument reader using
    ``_config_get`` (whose argument is the default) and a one-argument writer
    using ``_config``. The duplicate names suggest stripped @property /
    @<name>.setter decorators; as written, each second definition shadows the
    first. Confirm against the original file.
    """

    def className(self):
        return self._config_get('highcharts-drag-handle')

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        return self._config_get('#fff')

    def color(self, text: str):
        self._config(text, js_type=False)

    def lineColor(self):
        return self._config_get('rgba(0, 0, 0, 0.6)')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        return self._config_get(1)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        return self._config_get(901)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
def sub_comment_list(nid):
    """Return the root comments of article *nid*, newest first, each with its
    nested replies attached on ``comment.sub_comment``.
    """
    comment_query = Comment.objects.filter(article_id=nid).order_by('-create_time')
    comment_list = []
    for comment in comment_query:
        # Only top-level comments are returned directly; replies are gathered
        # recursively under their root comment.
        if comment.parent_comment:
            continue
        replies = []
        find_root_sub_comment(comment, replies)
        comment.sub_comment = replies
        comment_list.append(comment)
    return comment_list
def make_custom_current_source():
    """Build a CustomCurrentSource with scalar data for all six E/H components.

    The dataset contains one scalar field per component: Ex, Ey, Ez, Hx, Hy, Hz.
    """
    # Dict comprehension replaces the original nested append loop (same keys,
    # same insertion order).
    field_components = {
        field + component: make_scalar_data()
        for field in 'EH'
        for component in 'xyz'
    }
    current_dataset = td.FieldDataset(**field_components)
    return td.CustomCurrentSource(size=SIZE, source_time=ST, current_dataset=current_dataset)
class OptionSeriesHistogramDatalabels(Options):
    """Data-label options for histogram series (Highcharts `dataLabels`).

    NOTE(review): each option appears twice — a zero-argument reader using
    ``_config_get`` (whose argument is the documented default) and a
    one-argument writer using ``_config``. The duplicate names suggest
    stripped @property / @<name>.setter decorators; as written, each second
    definition shadows the first. Confirm against the original file.
    """

    def align(self):
        return self._config_get('undefined')

    def align(self, text: str):
        self._config(text, js_type=False)

    def allowOverlap(self):
        return self._config_get(False)

    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    def animation(self) -> 'OptionSeriesHistogramDatalabelsAnimation':
        # Nested sub-option object.
        return self._config_sub_data('animation', OptionSeriesHistogramDatalabelsAnimation)

    def backgroundColor(self):
        return self._config_get(None)

    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    def borderColor(self):
        return self._config_get(None)

    def borderColor(self, text: str):
        self._config(text, js_type=False)

    def borderRadius(self):
        return self._config_get(0)

    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    def borderWidth(self):
        return self._config_get(0)

    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    def className(self):
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def crop(self):
        return self._config_get(True)

    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    def defer(self):
        return self._config_get(True)

    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    def enabled(self):
        return self._config_get(False)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def filter(self) -> 'OptionSeriesHistogramDatalabelsFilter':
        # Nested sub-option object.
        return self._config_sub_data('filter', OptionSeriesHistogramDatalabelsFilter)

    def format(self):
        return self._config_get('point.value')

    def format(self, text: str):
        self._config(text, js_type=False)

    def formatter(self):
        return self._config_get(None)

    def formatter(self, value: Any):
        self._config(value, js_type=False)

    def inside(self):
        return self._config_get(None)

    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    def nullFormat(self):
        return self._config_get(None)

    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    def nullFormatter(self):
        return self._config_get(None)

    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    def overflow(self):
        return self._config_get('justify')

    def overflow(self, text: str):
        self._config(text, js_type=False)

    def padding(self):
        return self._config_get(5)

    def padding(self, num: float):
        self._config(num, js_type=False)

    def position(self):
        return self._config_get('center')

    def position(self, text: str):
        self._config(text, js_type=False)

    def rotation(self):
        return self._config_get(0)

    def rotation(self, num: float):
        self._config(num, js_type=False)

    def shadow(self):
        return self._config_get(False)

    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    def shape(self):
        return self._config_get('square')

    def shape(self, text: str):
        self._config(text, js_type=False)

    def style(self):
        return self._config_get(None)

    def style(self, value: Any):
        self._config(value, js_type=False)

    def textPath(self) -> 'OptionSeriesHistogramDatalabelsTextpath':
        # Nested sub-option object.
        return self._config_sub_data('textPath', OptionSeriesHistogramDatalabelsTextpath)

    def useHTML(self):
        return self._config_get(False)

    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    def verticalAlign(self):
        return self._config_get('undefined')

    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    def x(self):
        return self._config_get(0)

    def x(self, num: float):
        self._config(num, js_type=False)

    def y(self):
        return self._config_get('undefined')

    def y(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        return self._config_get(6)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
def main():
    """Ansible module entry point: apply the firewall_gtp configuration to a
    FortiOS device over the httpapi connection.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # 'name' is the mkey (object identifier) for firewall_gtp entries.
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_gtp': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Mirror every schema-derived option into the nested spec; the mkey
    # becomes required because it identifies the object.
    for attribute_name in module_spec['options']:
        fields['firewall_gtp']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_gtp']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Warn (below) when the schema version does not match the device.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_gtp')
        (is_error, has_changed, result, diff) = fortios_firewall(module.params, fos)
    else:
        # No persistent connection available: fail immediately.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def insert_missing_rows(source: ETLObjectBase, destination: ETLWritableObjectBase) -> int:
    """Insert rows present in *source* but absent from *destination*.

    Rows are matched on the destination's key columns via a left anti-join
    (left outer join + null-excluder on the destination side); only columns
    shared by both objects — plus the destination's insert overrides — are
    copied. Returns the row count reported by the DML execution.
    """
    insertable_columns = _get_shared_columns((source.columns + list(destination.insert_overrides)), destination.columns)
    sql = '\n        insert into {destination_object_representation} ({insert_columns})\n        select      {select_columns}\n        from        {source_object_representation} as s\n        left outer join {destination_object_representation} as d on {join}\n        where       {excluder}\n    '
    sql = SQL(sql).format(destination_object_representation=destination.object_representation, insert_columns=primatives.make_column_list(insertable_columns), select_columns=primatives.make_column_list(insertable_columns, 's', destination.insert_overrides), source_object_representation=source.object_representation, join=primatives.make_join_conditional(destination.key_columns, 'd', 's'), excluder=primatives.make_join_excluder_conditional(destination.key_columns, 'd'))
    return sql_helpers.execute_dml_sql(sql)
class OptionSeriesArcdiagramSonificationTracksMappingLowpassResonance(Options):
    """Lowpass-filter resonance mapping options for arc-diagram sonification.

    NOTE(review): each option appears twice — a zero-argument reader using
    ``_config_get`` and a one-argument writer using ``_config``. The duplicate
    names suggest stripped @property / @<name>.setter decorators; confirm
    against the original file.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def _merge_larger_casts_into_smaller(instruction: Instruction):
    """Collapse nested casts in *instruction*: replace an outer cast by its
    inner cast when both are plain (non-contracting) casts and
    ``_is_larger_int_type`` approves the type pair.
    """
    for outer_cast in _find_cast_subexpressions(instruction):
        inner = outer_cast.operand
        if not _is_cast(inner):
            continue
        # Contracting casts change semantics, so never merge through them.
        if outer_cast.contraction or inner.contraction:
            continue
        if _is_larger_int_type(outer_cast.type, inner.type):
            instruction.substitute(outer_cast, inner)
class OptionPlotoptionsPyramid3dSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Lowpass-filter resonance mapping options for pyramid3d sonification.

    NOTE(review): each option appears twice — a zero-argument reader using
    ``_config_get`` and a one-argument writer using ``_config``. The duplicate
    names suggest stripped @property / @<name>.setter decorators; confirm
    against the original file.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class HeaderHelpersResource():
    """Test resource exercising falcon Response header helpers: property
    setters, set/get/delete of raw headers, and case-insensitive overwrites.
    """

    def __init__(self, last_modified=None):
        if (last_modified is not None):
            self.last_modified = last_modified
        else:
            self.last_modified = _utcnow()

    def _overwrite_headers(self, req, resp):
        # Overwrite headers previously set via set_header(s) with property
        # assignments, so tests can verify last-write-wins.
        resp.content_type = 'x-falcon/peregrine'
        resp.cache_control = ['no-store']

    def on_get(self, req, resp):
        resp.text = '{}'
        resp.content_type = 'x-falcon/peregrine'
        resp.cache_control = ['public', 'private', 'no-cache', 'no-store', 'must-revalidate', 'proxy-revalidate', 'max-age=3600', 's-maxage=60', 'no-transform']
        # Assign None first to confirm a later real value replaces it.
        resp.etag = None
        resp.etag = 'fa0d1a60ef6616bbc8ea4cb2'
        resp.last_modified = self.last_modified
        resp.retry_after = 3601
        resp.location = '/things/87'
        resp.content_location = '/things/78'
        resp.downloadable_as = None
        resp.downloadable_as = 'Some File.zip'
        # Content-Range form depends on whether the client asked for a
        # non-byte range unit.
        if ((req.range_unit is None) or (req.range_unit == 'bytes')):
            resp.content_range = (0, 499, (10 * 1024))
        else:
            resp.content_range = (0, 25, 100, req.range_unit)
        resp.accept_ranges = None
        resp.accept_ranges = 'bytes'
        # Set, read back, then delete — the client must never see this header.
        resp.set_header('X-Client-Should-Never-See-This', 'abc')
        assert (resp.get_header('x-client-should-never-see-this') == 'abc')
        resp.delete_header('x-client-should-never-see-this')
        self.req = req
        self.resp = resp

    def on_head(self, req, resp):
        resp.set_header('Content-Type', 'x-swallow/unladen')
        # Same header set twice with different casing: last one wins.
        resp.set_header('X-Auth-Token', 'setecastronomy')
        resp.set_header('X-AUTH-TOKEN', 'toomanysecrets')
        resp.location = '/things/87'
        del resp.location
        self._overwrite_headers(req, resp)
        self.resp = resp

    def on_post(self, req, resp):
        # set_headers with a list of (name, value) pairs, mixed casing.
        resp.set_headers([('CONTENT-TYPE', 'x-swallow/unladen'), ('X-Auth-Token', 'setecastronomy'), ('X-AUTH-TOKEN', 'toomanysecrets')])
        self._overwrite_headers(req, resp)
        self.resp = resp

    def on_put(self, req, resp):
        # set_headers with a dict, mixed casing.
        resp.set_headers({'CONTENT-TYPE': 'x-swallow/unladen', 'X-aUTH-tOKEN': 'toomanysecrets'})
        self._overwrite_headers(req, resp)
        self.resp = resp
class Transaction(models.Model):
    """Double-entry transaction: valid only when it has exactly two entries
    that balance to zero and share a currency.
    """
    reason = models.CharField(max_length=200)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    date = models.DateTimeField(default=timezone.now)
    # NOTE(review): no on_delete argument — required on ForeignKey since
    # Django 2.0; confirm the project's Django version.
    approver = models.ForeignKey(User, related_name='approved_transactions', blank=True, null=True)
    valid = models.BooleanField(default=False)

    def __str__(self):
        return ('Transaction %s' % self.pk)

    def save(self, *args, **kwargs):
        """Validate the double-entry invariants before saving.

        Fewer than two entries simply marks the transaction invalid; exactly
        two entries must balance to zero and share a currency, otherwise an
        exception is raised. A valid save also flags all entries as valid.
        """
        entries = self.entries.all()
        if (len(entries) < 2):
            self.valid = False
        else:
            if (len(entries) != 2):
                raise Exception('Transactions must have 2 entries')
            balance = sum([e.amount for e in entries])
            if (balance != 0):
                raise Exception('Transaction entries must balance out and there must be only 2')
            if (len(set([e.account.currency for e in entries])) > 1):
                raise Exception('Transaction entries must be between accounts of the same currency')
            self.valid = True
            Entry.objects.filter(transaction=self).update(valid=True)
        super(Transaction, self).save(*args, **kwargs)

    def magnitude(self):
        # Sum of the positive-side entries (equals the transferred amount).
        resp = self.entries.filter(amount__gt=0).aggregate(Sum('amount'))
        return resp['amount__sum']
class SteamInfo(Base):
    """SQLAlchemy model for Steam store metadata attached to a Game.

    All columns except ``id`` and ``url`` default to None so a row can be
    created from partial scrape results.
    """
    __tablename__ = 'steam_info'
    # Primary key.
    id: Mapped[int] = mapped_column(primary_key=True)
    # Steam store page URL (required).
    url: Mapped[str]
    # Back-reference to the owning Game row.
    game: Mapped[(Game | None)] = relationship('Game', back_populates='steam_info', default=None)
    name: Mapped[(str | None)] = mapped_column(default=None)
    short_description: Mapped[(str | None)] = mapped_column(default=None)
    # Stored timezone-aware via the custom AwareDateTime type.
    release_date: Mapped[(datetime | None)] = mapped_column(AwareDateTime, default=None)
    genres: Mapped[(str | None)] = mapped_column(default=None)
    publishers: Mapped[(str | None)] = mapped_column(default=None)
    image_url: Mapped[(str | None)] = mapped_column(default=None)
    recommendations: Mapped[(int | None)] = mapped_column(default=None)
    percent: Mapped[(int | None)] = mapped_column(default=None)
    score: Mapped[(int | None)] = mapped_column(default=None)
    metacritic_score: Mapped[(int | None)] = mapped_column(default=None)
    metacritic_url: Mapped[(str | None)] = mapped_column(default=None)
    recommended_price_eur: Mapped[(float | None)] = mapped_column(default=None)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.