code stringlengths 281 23.7M |
|---|
class Phyml(TreeTask):
    """Tree-building task that runs PhyML on a phylip alignment.

    Decodes a pmodeltest/prottest model string into PhyML command-line
    arguments, optionally applies a topology constraint tree, and on
    completion parses the PhyML output for the tree and its log-likelihood.
    """

    def __init__(self, nodeid, alg_phylip_file, constrain_id, model,
                 seqtype, conf, confname, parts_id=None):
        GLOBALS['citator'].add('phyml')
        # '--constraint_tree' is a placeholder; load_jobs() removes it when
        # no constraint tree is supplied.
        base_args = OrderedDict({'--model': '', '--no_memory_check': '',
                                 '--quiet': '', '--constraint_tree': ''})
        if model and model.startswith('pmodeltest-'):
            # Model selected by pmodeltest: translate the +X/!X feature
            # suffixes into the corresponding PhyML flags.
            model = model.replace('pmodeltest-', '')
            self.fullmodel = model
            if '+I' in model:
                conf[confname]['-v'] = ' e'   # estimate prop. of invariant sites
            elif '!I' in model:
                conf[confname]['-v'] = ' 0'   # no invariant sites
            if '+G' in model:
                conf[confname]['-a'] = 'e'    # estimate gamma shape
            elif '!G' in model:
                conf[confname]['-c'] = ' 1'   # single rate category
                conf[confname].pop('-a', None)
            if '+F' in model:
                conf[confname]['-f'] = 'm' if seqtype == 'nt' else 'e'
            elif '!F' in model:
                conf[confname]['-f'] = ('0.25,0.25,0.25,0.25'
                                        if seqtype == 'nt' else 'm')
            # Strip feature suffixes, keeping the bare model name.
            model = model.split('+')[0].split('!')[0]
            if seqtype == 'nt':
                model = modelcodes[model]
        elif not model:
            # No model supplied: fall back to the configured default.
            model = (conf[confname]['_aa_model'] if seqtype == 'aa'
                     else conf[confname]['_nt_model'])
            self.fullmodel = ''
        else:
            # Model selected by prottest; keep the name as-is.
            self.fullmodel = model + '-prottest'
        self.model = model
        self.confname = confname
        self.conf = conf
        self.constrain_tree = None
        if constrain_id:
            self.constrain_tree = db.get_dataid(constrain_id,
                                                DATATYPES.constrain_tree)
        self.alg_phylip_file = alg_phylip_file
        TreeTask.__init__(self, nodeid, 'tree', 'Phyml', base_args,
                          conf[confname])
        self.seqtype = seqtype
        self.lk = None
        self.init()

    def load_jobs(self):
        """Create the single PhyML job for this node."""
        appname = self.conf[self.confname]['_app']
        args = OrderedDict(self.args)
        args['--datatype'] = self.seqtype
        args['--model'] = self.model
        args['--input'] = self.alg_phylip_file
        if self.constrain_tree:
            args['--constraint_tree'] = self.constrain_tree
            args['-u'] = self.constrain_tree
        else:
            # Drop the placeholder added in __init__.
            del args['--constraint_tree']
        job = Job(self.conf['app'][appname], args, parent_ids=[self.nodeid])
        job.add_input_file(self.alg_phylip_file, job.jobdir)
        if self.constrain_tree:
            job.add_input_file(self.constrain_tree, job.jobdir)
        job.jobname += '-' + self.fullmodel
        self.jobs.append(job)

    def finish(self):
        """Parse PhyML output and store the tree plus its log-likelihood."""
        j = self.jobs[0]
        tree_file = os.path.join(j.jobdir,
                                 self.alg_phylip_file + '_phyml_tree.txt')
        stats_file = os.path.join(j.jobdir,
                                  self.alg_phylip_file + '_phyml_stats.txt')
        # Close the stats file promptly instead of leaking the handle.
        with open(stats_file) as fh:
            m = re.search(r'Log-likelihood:\s+(-?\d+\.\d+)', fh.read())
        lk = float(m.group(1))
        stats = {'lk': lk}
        # NOTE(review): PhyloTree appears to accept an open handle here;
        # confirm it reads and closes it eagerly.
        tree = PhyloTree(open(tree_file))
        TreeTask.store_data(self, tree.write(), stats)
class OptionPlotoptionsSolidgauge(Options):
    """Accessors for the Highcharts ``plotOptions.solidgauge`` options.

    Every scalar option is a property pair: the getter returns the stored
    value via ``_config_get`` (with the Highcharts default as fallback) and
    the setter records the value via ``_config``.  Nested option groups are
    exposed through ``_config_sub_data``.

    NOTE(review): this copy of the file had all ``@property`` /
    ``@<name>.setter`` decorators stripped, so each setter definition
    silently replaced its getter; the decorators are restored here.
    """

    @property
    def accessibility(self) -> 'OptionPlotoptionsSolidgaugeAccessibility':
        """Nested ``accessibility`` option group."""
        return self._config_sub_data('accessibility', OptionPlotoptionsSolidgaugeAccessibility)

    @property
    def allowPointSelect(self):
        """Option ``allowPointSelect`` (default ``False``)."""
        return self._config_get(False)

    @allowPointSelect.setter
    def allowPointSelect(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self):
        """Option ``animation`` (default ``True``)."""
        return self._config_get(True)

    @animation.setter
    def animation(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def className(self):
        """Option ``className`` (default ``None``)."""
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def clip(self):
        """Option ``clip`` (default ``True``)."""
        return self._config_get(True)

    @clip.setter
    def clip(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def color(self):
        """Option ``color`` (default ``None``)."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def colorByPoint(self):
        """Option ``colorByPoint`` (default ``True``)."""
        return self._config_get(True)

    @colorByPoint.setter
    def colorByPoint(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def colorIndex(self):
        """Option ``colorIndex`` (default ``None``)."""
        return self._config_get(None)

    @colorIndex.setter
    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    @property
    def crisp(self):
        """Option ``crisp`` (default ``True``)."""
        return self._config_get(True)

    @crisp.setter
    def crisp(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def cursor(self):
        """Option ``cursor`` (default ``None``)."""
        return self._config_get(None)

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def custom(self):
        """Option ``custom`` (default ``None``)."""
        return self._config_get(None)

    @custom.setter
    def custom(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dataLabels(self) -> 'OptionPlotoptionsSolidgaugeDatalabels':
        """Nested ``dataLabels`` option group."""
        return self._config_sub_data('dataLabels', OptionPlotoptionsSolidgaugeDatalabels)

    @property
    def description(self):
        """Option ``description`` (default ``None``)."""
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def enableMouseTracking(self):
        """Option ``enableMouseTracking`` (default ``True``)."""
        return self._config_get(True)

    @enableMouseTracking.setter
    def enableMouseTracking(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def events(self) -> 'OptionPlotoptionsSolidgaugeEvents':
        """Nested ``events`` option group."""
        return self._config_sub_data('events', OptionPlotoptionsSolidgaugeEvents)

    @property
    def inactiveOtherPoints(self):
        """Option ``inactiveOtherPoints`` (default ``False``)."""
        return self._config_get(False)

    @inactiveOtherPoints.setter
    def inactiveOtherPoints(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def includeInDataExport(self):
        """Option ``includeInDataExport`` (default ``None``)."""
        return self._config_get(None)

    @includeInDataExport.setter
    def includeInDataExport(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def innerRadius(self):
        """Option ``innerRadius`` (default ``'"60%"'``)."""
        return self._config_get('"60%"')

    @innerRadius.setter
    def innerRadius(self, text: str):
        self._config(text, js_type=False)

    @property
    def keys(self):
        """Option ``keys`` (default ``None``)."""
        return self._config_get(None)

    @keys.setter
    def keys(self, value: Any):
        self._config(value, js_type=False)

    @property
    def label(self) -> 'OptionPlotoptionsSolidgaugeLabel':
        """Nested ``label`` option group."""
        return self._config_sub_data('label', OptionPlotoptionsSolidgaugeLabel)

    @property
    def legendSymbol(self):
        """Option ``legendSymbol`` (default ``'rectangle'``)."""
        return self._config_get('rectangle')

    @legendSymbol.setter
    def legendSymbol(self, text: str):
        self._config(text, js_type=False)

    @property
    def linecap(self):
        """Option ``linecap`` (default ``'round'``).

        BUGFIX: the default used to be the *builtin function* ``round``,
        which is meaningless as a config value; Highcharts documents the
        default for ``linecap`` as the string ``'round'``.
        """
        return self._config_get('round')

    @linecap.setter
    def linecap(self, value: Any):
        self._config(value, js_type=False)

    @property
    def lineWidth(self):
        """Option ``lineWidth`` (default ``2``)."""
        return self._config_get(2)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def linkedTo(self):
        """Option ``linkedTo`` (default ``None``)."""
        return self._config_get(None)

    @linkedTo.setter
    def linkedTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def onPoint(self) -> 'OptionPlotoptionsSolidgaugeOnpoint':
        """Nested ``onPoint`` option group."""
        return self._config_sub_data('onPoint', OptionPlotoptionsSolidgaugeOnpoint)

    @property
    def opacity(self):
        """Option ``opacity`` (default ``1``)."""
        return self._config_get(1)

    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False)

    @property
    def overshoot(self):
        """Option ``overshoot`` (default ``0``)."""
        return self._config_get(0)

    @overshoot.setter
    def overshoot(self, num: float):
        self._config(num, js_type=False)

    @property
    def point(self) -> 'OptionPlotoptionsSolidgaugePoint':
        """Nested ``point`` option group."""
        return self._config_sub_data('point', OptionPlotoptionsSolidgaugePoint)

    @property
    def pointDescriptionFormat(self):
        """Option ``pointDescriptionFormat`` (default ``None``)."""
        return self._config_get(None)

    @pointDescriptionFormat.setter
    def pointDescriptionFormat(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointDescriptionFormatter(self):
        """Option ``pointDescriptionFormatter`` (default ``None``)."""
        return self._config_get(None)

    @pointDescriptionFormatter.setter
    def pointDescriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointInterval(self):
        """Option ``pointInterval`` (default ``1``)."""
        return self._config_get(1)

    @pointInterval.setter
    def pointInterval(self, num: float):
        self._config(num, js_type=False)

    @property
    def pointIntervalUnit(self):
        """Option ``pointIntervalUnit`` (default ``None``)."""
        return self._config_get(None)

    @pointIntervalUnit.setter
    def pointIntervalUnit(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointStart(self):
        """Option ``pointStart`` (default ``0``)."""
        return self._config_get(0)

    @pointStart.setter
    def pointStart(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        """Option ``radius`` (default ``'"100%"'``)."""
        return self._config_get('"100%"')

    @radius.setter
    def radius(self, text: str):
        self._config(text, js_type=False)

    @property
    def relativeXValue(self):
        """Option ``relativeXValue`` (default ``False``)."""
        return self._config_get(False)

    @relativeXValue.setter
    def relativeXValue(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def rounded(self):
        """Option ``rounded`` (default ``False``)."""
        return self._config_get(False)

    @rounded.setter
    def rounded(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def selected(self):
        """Option ``selected`` (default ``False``)."""
        return self._config_get(False)

    @selected.setter
    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showCheckbox(self):
        """Option ``showCheckbox`` (default ``False``)."""
        return self._config_get(False)

    @showCheckbox.setter
    def showCheckbox(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showInLegend(self):
        """Option ``showInLegend`` (default ``False``)."""
        return self._config_get(False)

    @showInLegend.setter
    def showInLegend(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def skipKeyboardNavigation(self):
        """Option ``skipKeyboardNavigation`` (default ``None``)."""
        return self._config_get(None)

    @skipKeyboardNavigation.setter
    def skipKeyboardNavigation(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def sonification(self) -> 'OptionPlotoptionsSolidgaugeSonification':
        """Nested ``sonification`` option group."""
        return self._config_sub_data('sonification', OptionPlotoptionsSolidgaugeSonification)

    @property
    def stickyTracking(self):
        """Option ``stickyTracking`` (default ``True``)."""
        return self._config_get(True)

    @stickyTracking.setter
    def stickyTracking(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def threshold(self):
        """Option ``threshold`` (default ``None``)."""
        return self._config_get(None)

    @threshold.setter
    def threshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def tooltip(self) -> 'OptionPlotoptionsSolidgaugeTooltip':
        """Nested ``tooltip`` option group."""
        return self._config_sub_data('tooltip', OptionPlotoptionsSolidgaugeTooltip)

    @property
    def visible(self):
        """Option ``visible`` (default ``True``)."""
        return self._config_get(True)

    @visible.setter
    def visible(self, flag: bool):
        self._config(flag, js_type=False)
def from_grid3d(grid, template=None, where='top', mode='depth', rfactor=1):
    """Sample a regular surface from a layer of a 3D grid.

    Args:
        grid: 3D grid object (xtgeo Grid-like); only normalized in place
            via ``_xtgformat1()``.
        template: Optional regular-surface template for ``_update_regsurf``.
        where: 'top', 'base', or '<klayer>_<top|base>' with a 1-based layer.
        mode: 'i' or 'j' returns the corresponding index surface; anything
            else returns the depth surface plus both index arrays.
        rfactor: Refinement factor for sampling; must be >= 0.5.

    Returns:
        ``(args, ivalues, jvalues)`` in depth mode, or ``(args, None, None)``
        for 'i'/'j' modes, where ``args['values']`` holds the masked surface.

    Raises:
        ValueError: If the requested klayer is out of range.
        KeyError: If ``rfactor < 0.5``. (Kept for backward compatibility;
            a ValueError would arguably fit better.)
    """
    if where == 'top':
        klayer = 1
        option = 0
    elif where == 'base':
        klayer = grid.nlay
        option = 1
    else:
        klayer, what = where.split('_')
        klayer = int(klayer)
        # BUGFIX: the previous check ``grid.nlay < klayer < 0`` could never
        # be true (a value cannot exceed nlay and be negative at once), so
        # out-of-range layers were silently accepted.  Validate the 1-based
        # layer index properly.
        if klayer < 1 or klayer > grid.nlay:
            raise ValueError(f'Klayer out of range in where={where}')
        option = 1 if what == 'base' else 0
    if rfactor < 0.5:
        raise KeyError('Refinefactor rfactor is too small, should be >= 0.5')
    args = _update_regsurf(template, grid, rfactor=float(rfactor))
    args['rotation'] = 0.0
    # Initialise all sample arrays to UNDEF; the C routine fills them in.
    val = ma.filled(args['values'].ravel(), fill_value=xtgeo.UNDEF)
    svalues = (val * 0.0) + xtgeo.UNDEF
    ivalues = svalues.copy()
    jvalues = svalues.copy()
    grid._xtgformat1()
    _cxtgeo.surf_sample_grd3d_lay(
        grid.ncol, grid.nrow, grid.nlay,
        grid._coordsv, grid._zcornsv, grid._actnumsv,
        klayer,
        args['ncol'], args['nrow'],
        args['xori'], args['xinc'], args['yori'], args['yinc'],
        args['rotation'],
        svalues, ivalues, jvalues,
        option,
    )
    logger.info('Extracted surfaces from 3D grid...')
    svalues = np.ma.masked_greater(svalues, xtgeo.UNDEF_LIMIT)
    ivalues = np.ma.masked_greater(ivalues, xtgeo.UNDEF_LIMIT)
    jvalues = np.ma.masked_greater(jvalues, xtgeo.UNDEF_LIMIT)
    if mode == 'i':
        ivalues = ma.masked_invalid(ivalues.reshape((args['ncol'], args['nrow'])))
        args['values'] = ivalues
        return (args, None, None)
    if mode == 'j':
        jvalues = ma.masked_invalid(jvalues.reshape((args['ncol'], args['nrow'])))
        args['values'] = jvalues
        return (args, None, None)
    svalues = ma.masked_invalid(svalues.reshape((args['ncol'], args['nrow'])))
    args['values'] = svalues
    return (args, ivalues, jvalues)
def update_field(field: dict, migration_direction: str) -> dict:
    """Migrate a field's data categories up or down, recursing into subfields.

    Removed categories are logged and dropped; mapped categories are
    replaced; falsy (empty) categories are preserved as-is.  Mutates
    ``field`` in place and returns it.

    Args:
        field: Field dict with optional ``data_categories`` and ``fields``.
        migration_direction: 'up' applies the upgrade map; anything else
            applies the downgrade map.

    Returns:
        The same ``field`` dict that was passed in, mutated.
    """
    # Select the predicate/mapping pair once instead of duplicating the loop.
    if migration_direction == 'up':
        is_removed, data_map = is_removed_upgrade, DATA_MAP_UPGRADE
    else:
        is_removed, data_map = is_removed_downgrade, DATA_MAP_DOWNGRADE
    updated = []
    for data_category in field.get('data_categories', []):
        if is_removed(data_category):
            logger.info('Removing %s, this is no longer a valid category', data_category)
        if data_category:
            if not is_removed(data_category):
                new = data_map.get(data_category)
                updated.append(new if new else data_category)
        else:
            # Falsy (empty) categories are kept, matching prior behavior.
            updated.append(data_category)
    field['data_categories'] = updated
    if field.get('fields'):
        # BUGFIX: the loop variable used to shadow ``field``, so the function
        # returned the *last subfield* instead of the field passed in.
        for subfield in field['fields']:
            update_field(subfield, migration_direction)
    return field
# NOTE(review): the decorator below was mangled to a bare ``.parametrize``
# in this copy (a syntax error); restored as ``@pytest.mark.parametrize``.
@pytest.mark.parametrize(
    'vm_class, address, expected',
    (
        (ConstantinopleVM, ADDRESS_NOT_IN_STATE, '0x'),
        (ConstantinopleVM, EMPTY_ADDRESS_IN_STATE, '0x'),
        (ConstantinopleVM, ADDRESS_WITH_JUST_BALANCE,
         '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470'),
        (ConstantinopleVM, ADDRESS_WITH_CODE[0],
         '0xb6f5188e2984211a0de167a56a92d85bee084d7a469d97a59e1e2b573dbb4301'),
    ),
)
def test_extcodehash(vm_class, address, expected):
    """EXTCODEHASH pushes the code hash (or '0x' marker) for ``address``."""
    computation = run_general_computation(vm_class)
    computation.stack_push_bytes(decode_hex(address))
    computation.opcodes[opcode_values.EXTCODEHASH](computation)
    result = computation.stack_pop1_bytes()
    assert encode_hex(pad32(result)) == expected
class BrokerIntegrationTestCase(TestCase):
    """Integration tests for the Broker DB connection and transactionality.

    NOTE(review): this copy had the ``@classmethod`` and
    ``@pytest.mark.usefixtures`` decorators stripped (bare ``.usefixtures``
    lines, plain ``setUpClass``/``tearDownClass``); they are restored here.
    """

    databases = {'default', 'data_broker'}
    # Table created by test_broker_transactional_test; must be rolled back.
    dummy_table_name = 'dummy_broker_table_to_be_rolled_back'

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        # Verify the table created during the transactional test was rolled
        # back; if it survived, the test did not run transactionally.
        with connections['data_broker'].cursor() as cursor:
            cursor.execute("select * from pg_tables where tablename = '{}'".format(cls.dummy_table_name))
            results = cursor.fetchall()
            assert results is not None
            if len(results) != 0:
                pytest.fail('Test test_broker_transactional_test did not run transactionally. Creation of table {} in Broker DB was not rolled back and still exists.'.format(cls.dummy_table_name))

    @pytest.mark.usefixtures('broker_db_setup')
    def test_can_connect_to_broker(self):
        connection = connections['data_broker']
        with connection.cursor() as cursor:
            cursor.execute('SELECT now()')
            results = cursor.fetchall()
            assert results is not None
            assert len(str(results[0][0])) > 0

    @pytest.mark.usefixtures('broker_db_setup')
    def test_broker_transactional_test(self):
        # SQL is built with format() on class-controlled identifiers only;
        # no untrusted input reaches these statements.
        dummy_contents = 'dummy_text'
        connection = connections['data_broker']
        with connection.cursor() as cursor:
            cursor.execute('create table {} (contents text)'.format(self.dummy_table_name))
            cursor.execute("insert into {} values ('{}')".format(self.dummy_table_name, dummy_contents))
        with connection.cursor() as cursor:
            cursor.execute("select * from pg_tables where tablename = '{}'".format(self.dummy_table_name))
            results = cursor.fetchall()
            assert results is not None
            assert len(str(results[0][0])) > 0
        with connection.cursor() as cursor:
            cursor.execute('select * from {}'.format(self.dummy_table_name))
            results = cursor.fetchall()
            assert results is not None
            assert str(results[0][0]) == dummy_contents

    @pytest.mark.usefixtures('broker_db_setup')
    def test_broker_db_fully_setup(self):
        connection = connections['data_broker']
        with connection.cursor() as cursor:
            cursor.execute("select * from pg_tables where tablename = 'alembic_version'")
            results = cursor.fetchall()
            assert results is not None
            assert len(results) > 0
            assert len(str(results[0][0])) > 0
class ErrorDocument(_common.FlyteIdlEntity):
    """Wrapper around an ``errors_pb2.ErrorDocument`` protobuf message."""

    def __init__(self, error):
        """:param error: ContainerError-like object describing the failure."""
        self._error = error

    @property
    def error(self):
        """The wrapped error object.

        BUGFIX: restored the ``@property`` decorator — ``to_flyte_idl``
        calls ``self.error.to_flyte_idl()``, which fails if ``error`` is a
        plain (bound) method rather than a property.
        """
        return self._error

    def to_flyte_idl(self):
        """Serialize this object to its protobuf representation."""
        return _errors_pb2.ErrorDocument(error=self.error.to_flyte_idl())

    @classmethod
    def from_flyte_idl(cls, proto):
        """Build an :class:`ErrorDocument` from a protobuf message.

        BUGFIX: restored the ``@classmethod`` decorator implied by the
        ``cls`` parameter.
        """
        return cls(ContainerError.from_flyte_idl(proto.error))
class BlockIndexer(object):
    """Index helper that slices a tensor into cached sub-blocks.

    ``indexer[key]`` normalizes ``key`` to one entry per tensor axis,
    expands slices into explicit index tuples, and returns either the whole
    tensor (when every axis is fully selected) or a cached ``Block``.
    """

    __slots__ = ['tensor', 'block_cache']

    def __init__(self, tensor):
        self.tensor = tensor
        self.block_cache = {}

    def __getitem__(self, key):
        rank = self.tensor.rank
        key = as_tuple(key)
        # Pad missing trailing axes with full slices.
        padding = tuple(slice(None) for _ in range(rank - len(key)))
        key = key + padding
        if len(key) > rank:
            raise ValueError('Attempting to index a rank-%s tensor with %s indices.' % (self.tensor.rank, len(key)))
        block_shape = tuple(len(V) for V in self.tensor.arg_function_spaces)
        # Expand each slice into the explicit indices it selects.
        blocks = tuple(
            tuple(range(k.start or 0, k.stop or n, k.step or 1)) if isinstance(k, slice) else (k,)
            for k, n in zip(key, block_shape)
        )
        # Selecting every index on every axis is just the tensor itself.
        if blocks == tuple(tuple(range(n)) for n in block_shape):
            return self.tensor
        cached = self.block_cache.get(blocks)
        if cached is None:
            cached = Block(tensor=self.tensor, indices=blocks)
            self.block_cache[blocks] = cached
        return cached
class AMIResponseTest(unittest.TestCase):
def test_login_response(self):
    """A successful login reply parses as a non-error, non-follows response."""
    raw = '\r\n'.join(['Response: Success', 'Message: Authentication accepted']) + '\r\n'
    self.assertTrue(Response.match(raw))
    parsed = Response.read(raw)
    self.assertFalse(parsed.is_error())
    self.assertFalse(parsed.follows)
    self.assertEqual(raw, str(parsed))
def test_login_response_fail(self):
    """A failed login reply parses as an error response without follows."""
    raw = '\r\n'.join(['Response: Error', 'Message: Authentication failed']) + '\r\n'
    self.assertTrue(Response.match(raw))
    parsed = Response.read(raw)
    self.assertTrue(parsed.is_error())
    self.assertFalse(parsed.follows)
    self.assertEqual(raw, str(parsed))
def test_goodbye_response(self):
    """A goodbye reply parses as a non-error, non-follows response."""
    raw = '\r\n'.join(['Response: Goodbye', 'Message: Thanks for all the fish.']) + '\r\n'
    self.assertTrue(Response.match(raw))
    parsed = Response.read(raw)
    self.assertFalse(parsed.is_error())
    self.assertFalse(parsed.follows)
    self.assertEqual(raw, str(parsed))
def test_with_follows(self):
    """A 'Follows' command reply exposes its payload lines via ``follows``."""
    payload = [
        'Response: Follows\r',
        'Channel (Context Extension Pri ) State Appl. Data',
        '0 active channel(s)',
        '--END COMMAND--',
    ]
    raw = '\n'.join(payload) + '\r\n'
    self.assertTrue(Response.match(raw))
    parsed = Response.read(raw)
    self.assertFalse(parsed.is_error())
    self.assertIsNotNone(parsed.follows)
    # Everything between the header and the trailing terminator is payload.
    self.assertListEqual(LINE_REGEX.split(raw)[1:-1], parsed.follows)
    self.assertEqual(raw, str(parsed))
def test_with_key_value_follows(self):
follows_response = ('\n'.join(['Response: Follows\r', 'Privilege: Command\r', '/AMPUSER/*/concurrency_limit : 4 ', '/AMPUSER/2000/answermode : disabled ', '/AMPUSER/2000/cfringtimer : 0 ', '/AMPUSER/2000/cidname : Wainer ', '/AMPUSER/2000/cidnum : 2000 ', '/AMPUSER/2000/concurrency_limit : 0 ', '/AMPUSER/2000/device : 2000 ', '/AMPUSER/2000/language : ', '/AMPUSER/2000/noanswer : ', '/AMPUSER/2000/outboundcid : ', '/AMPUSER/2000/password : ', '/AMPUSER/2000/queues/qnostate : usestate ', '/AMPUSER/2000/recording : ', '/AMPUSER/2000/recording/in/external : dontcare ', '/AMPUSER/2000/recording/in/internal : dontcare ', '/AMPUSER/2000/recording/ondemand : disabled ', '/AMPUSER/2000/recording/out/external : dontcare ', '/AMPUSER/2000/recording/out/internal : dontcare ', '/AMPUSER/2000/recording/priority : 0 ', '/AMPUSER/2000/ringtimer : 0 ', '/AMPUSER/2000/voicemail : novm ', '/AMPUSER/2001/answermode : disabled ', '/AMPUSER/2001/cfringtimer : 0 ', '/AMPUSER/2001/cidname : Wainer - Local ', '/AMPUSER/2001/cidnum : 2001 ', '/AMPUSER/2001/concurrency_limit : 0 ', '/AMPUSER/2001/device : 2001 ', '/AMPUSER/2001/language : ', '/AMPUSER/2001/noanswer : ', '/AMPUSER/2001/outboundcid : ', '/AMPUSER/2001/password : ', '/AMPUSER/2001/queues/qnostate : usestate ', '/AMPUSER/2001/recording : ', '/AMPUSER/2001/recording/in/external : always ', '/AMPUSER/2001/recording/in/internal : always ', '/AMPUSER/2001/recording/ondemand : disabled ', '/AMPUSER/2001/recording/out/external : always ', '/AMPUSER/2001/recording/out/internal : always ', '/AMPUSER/2001/recording/priority : 0 ', '/AMPUSER/2001/ringtimer : 0 ', '/AMPUSER/2001/voicemail : novm ', '/AMPUSER/2003/answermode : disabled ', '/AMPUSER/2003/cfringtimer : 0 ', '/AMPUSER/2003/cidname : Teo ', '/AMPUSER/2003/cidnum : 2003 ', '/AMPUSER/2003/concurrency_limit : 0 ', '/AMPUSER/2003/device : 2003 ', '/AMPUSER/2003/language : ', '/AMPUSER/2003/noanswer : ', '/AMPUSER/2003/outboundcid : ', '/AMPUSER/2003/password : ', 
'/AMPUSER/2003/queues/qnostate : usestate ', '/AMPUSER/2003/recording : ', '/AMPUSER/2003/recording/in/external : always ', '/AMPUSER/2003/recording/in/internal : always ', '/AMPUSER/2003/recording/ondemand : disabled ', '/AMPUSER/2003/recording/out/external : always ', '/AMPUSER/2003/recording/out/internal : always ', '/AMPUSER/2003/recording/priority : 0 ', '/AMPUSER/2003/ringtimer : 0 ', '/AMPUSER/2003/voicemail : novm ', '/AMPUSER/2004/answermode : disabled ', '/AMPUSER/2004/cfringtimer : 0 ', '/AMPUSER/2004/cidname : Gustavo - Suporte ', '/AMPUSER/2004/cidnum : 2004 ', '/AMPUSER/2004/concurrency_limit : 0 ', '/AMPUSER/2004/device : 2004 ', '/AMPUSER/2004/language : ', '/AMPUSER/2004/noanswer : ', '/AMPUSER/2004/outboundcid : ', '/AMPUSER/2004/password : ', '/AMPUSER/2004/queues/qnostate : usestate ', '/AMPUSER/2004/recording : ', '/AMPUSER/2004/recording/in/external : always ', '/AMPUSER/2004/recording/in/internal : always ', '/AMPUSER/2004/recording/ondemand : disabled ', '/AMPUSER/2004/recording/out/external : always ', '/AMPUSER/2004/recording/out/internal : always ', '/AMPUSER/2004/recording/priority : 0 ', '/AMPUSER/2004/ringtimer : 0 ', '/AMPUSER/2004/voicemail : novm ', '/AMPUSER/2005/answermode : disabled ', '/AMPUSER/2005/cfringtimer : 0 ', '/AMPUSER/2005/cidname : NoteTeo ', '/AMPUSER/2005/cidnum : 2005 ', '/AMPUSER/2005/concurrency_limit : 0 ', '/AMPUSER/2005/device : 2005 ', '/AMPUSER/2005/language : ', '/AMPUSER/2005/noanswer : ', '/AMPUSER/2005/outboundcid : ', '/AMPUSER/2005/password : ', '/AMPUSER/2005/queues/qnostate : usestate ', '/AMPUSER/2005/recording : ', '/AMPUSER/2005/recording/in/external : always ', '/AMPUSER/2005/recording/in/internal : always ', '/AMPUSER/2005/recording/ondemand : disabled ', '/AMPUSER/2005/recording/out/external : always ', '/AMPUSER/2005/recording/out/internal : always ', '/AMPUSER/2005/recording/priority : 0 ', '/AMPUSER/2005/ringtimer : 0 ', '/AMPUSER/2005/voicemail : novm ', '/AMPUSER/2007/answermode : disabled 
', '/AMPUSER/2007/cfringtimer : 0 ', '/AMPUSER/2007/cidname : Carol - Financeiro ', '/AMPUSER/2007/cidnum : 2007 ', '/AMPUSER/2007/concurrency_limit : 0 ', '/AMPUSER/2007/device : 2007 ', '/AMPUSER/2007/language : ', '/AMPUSER/2007/noanswer : ', '/AMPUSER/2007/outboundcid : ', '/AMPUSER/2007/password : ', '/AMPUSER/2007/queues/qnostate : usestate ', '/AMPUSER/2007/recording : ', '/AMPUSER/2007/recording/in/external : always ', '/AMPUSER/2007/recording/in/internal : always ', '/AMPUSER/2007/recording/ondemand : disabled ', '/AMPUSER/2007/recording/out/external : always ', '/AMPUSER/2007/recording/out/internal : always ', '/AMPUSER/2007/recording/priority : 0 ', '/AMPUSER/2007/ringtimer : 0 ', '/AMPUSER/2007/voicemail : novm ', '/AMPUSER/2008/answermode : disabled ', '/AMPUSER/2008/cfringtimer : 0 ', '/AMPUSER/2008/cidname : Suporte NoteHP ', '/AMPUSER/2008/cidnum : 2008 ', '/AMPUSER/2008/concurrency_limit : 0 ', '/AMPUSER/2008/device : 2008 ', '/AMPUSER/2008/language : ', '/AMPUSER/2008/noanswer : ', '/AMPUSER/2008/outboundcid : ', '/AMPUSER/2008/password : ', '/AMPUSER/2008/queues/qnostate : usestate ', '/AMPUSER/2008/recording : ', '/AMPUSER/2008/recording/in/external : always ', '/AMPUSER/2008/recording/in/internal : always ', '/AMPUSER/2008/recording/ondemand : disabled ', '/AMPUSER/2008/recording/out/external : always ', '/AMPUSER/2008/recording/out/internal : always ', '/AMPUSER/2008/recording/priority : 0 ', '/AMPUSER/2008/ringtimer : 0 ', '/AMPUSER/2008/voicemail : novm ', '/AMPUSER/2009/answermode : disabled ', '/AMPUSER/2009/cfringtimer : 0 ', '/AMPUSER/2009/cidname : Fernando ', '/AMPUSER/2009/cidnum : 2009 ', '/AMPUSER/2009/concurrency_limit : 0 ', '/AMPUSER/2009/device : 2009 ', '/AMPUSER/2009/language : ', '/AMPUSER/2009/noanswer : ', '/AMPUSER/2009/outboundcid : ', '/AMPUSER/2009/password : ', '/AMPUSER/2009/queues/qnostate : usestate ', '/AMPUSER/2009/recording : ', '/AMPUSER/2009/recording/in/external : always ', '/AMPUSER/2009/recording/in/internal 
: always ', '/AMPUSER/2009/recording/ondemand : disabled ', '/AMPUSER/2009/recording/out/external : always ', '/AMPUSER/2009/recording/out/internal : always ', '/AMPUSER/2009/recording/priority : 10 ', '/AMPUSER/2009/ringtimer : 0 ', '/AMPUSER/2009/voicemail : novm ', '/AMPUSER/2010/answermode : disabled ', '/AMPUSER/2010/cfringtimer : 0 ', '/AMPUSER/2010/cidname : Ettore - Desenvolvimento ', '/AMPUSER/2010/cidnum : 2010 ', '/AMPUSER/2010/concurrency_limit : 0 ', '/AMPUSER/2010/device : 3000&2010 ', '/AMPUSER/2010/language : ', '/AMPUSER/2010/noanswer : ', '/AMPUSER/2010/outboundcid : ', '/AMPUSER/2010/password : 1234 ', '/AMPUSER/2010/queues/qnostate : usestate ', '/AMPUSER/2010/recording : ', '/AMPUSER/2010/recording/in/external : always ', '/AMPUSER/2010/recording/in/internal : always ', '/AMPUSER/2010/recording/ondemand : disabled ', '/AMPUSER/2010/recording/out/external : always ', '/AMPUSER/2010/recording/out/internal : always ', '/AMPUSER/2010/recording/priority : 10 ', '/AMPUSER/2010/ringtimer : 0 ', '/AMPUSER/2010/voicemail : novm ', '/AMPUSER/2011/answermode : disabled ', '/AMPUSER/2011/cfringtimer : 0 ', '/AMPUSER/2011/cidname : ATA Treinamento p1 ', '/AMPUSER/2011/cidnum : 2011 ', '/AMPUSER/2011/concurrency_limit : 0 ', '/AMPUSER/2011/device : 2011 ', '/AMPUSER/2011/language : ', '/AMPUSER/2011/noanswer : ', '/AMPUSER/2011/outboundcid : ', '/AMPUSER/2011/password : ', '/AMPUSER/2011/queues/qnostate : usestate ', '/AMPUSER/2011/recording : ', '/AMPUSER/2011/recording/in/external : always ', '/AMPUSER/2011/recording/in/internal : always ', '/AMPUSER/2011/recording/ondemand : disabled ', '/AMPUSER/2011/recording/out/external : always ', '/AMPUSER/2011/recording/out/internal : always ', '/AMPUSER/2011/recording/priority : 0 ', '/AMPUSER/2011/ringtimer : 0 ', '/AMPUSER/2011/voicemail : novm ', '/AMPUSER/2012/answermode : disabled ', '/AMPUSER/2012/cfringtimer : 0 ', '/AMPUSER/2012/cidname : ATA Treinamento p2 ', '/AMPUSER/2012/cidnum : 2012 ', 
'/AMPUSER/2012/concurrency_limit : 0 ', '/AMPUSER/2012/device : 2012 ', '/AMPUSER/2012/language : ', '/AMPUSER/2012/noanswer : ', '/AMPUSER/2012/outboundcid : ', '/AMPUSER/2012/password : ', '/AMPUSER/2012/queues/qnostate : usestate ', '/AMPUSER/2012/recording : ', '/AMPUSER/2012/recording/in/external : always ', '/AMPUSER/2012/recording/in/internal : always ', '/AMPUSER/2012/recording/ondemand : disabled ', '/AMPUSER/2012/recording/out/external : always ', '/AMPUSER/2012/recording/out/internal : always ', '/AMPUSER/2012/recording/priority : 0 ', '/AMPUSER/2012/ringtimer : 0 ', '/AMPUSER/2012/voicemail : novm ', '/AMPUSER/2013/answermode : disabled ', '/AMPUSER/2013/cfringtimer : 0 ', '/AMPUSER/2013/cidname : Suporte SmartPhone ', '/AMPUSER/2013/cidnum : 2013 ', '/AMPUSER/2013/concurrency_limit : 4 ', '/AMPUSER/2013/device : 2013 ', '/AMPUSER/2013/language : ', '/AMPUSER/2013/noanswer : ', '/AMPUSER/2013/outboundcid : ', '/AMPUSER/2013/password : ', '/AMPUSER/2013/queues/qnostate : usestate ', '/AMPUSER/2013/recording : ', '/AMPUSER/2013/recording/in/external : always ', '/AMPUSER/2013/recording/in/internal : always ', '/AMPUSER/2013/recording/ondemand : disabled ', '/AMPUSER/2013/recording/out/external : always ', '/AMPUSER/2013/recording/out/internal : always ', '/AMPUSER/2013/recording/priority : 0 ', '/AMPUSER/2013/ringtimer : 0 ', '/AMPUSER/2013/voicemail : novm ', '/AMPUSER/2014/answermode : disabled ', '/AMPUSER/2014/cfringtimer : 0 ', '/AMPUSER/2014/cidname : Teste Dispositivo ', '/AMPUSER/2014/cidnum : 2014 ', '/AMPUSER/2014/concurrency_limit : 0 ', '/AMPUSER/2014/device : 2014 ', '/AMPUSER/2014/language : ', '/AMPUSER/2014/noanswer : ', '/AMPUSER/2014/outboundcid : ', '/AMPUSER/2014/password : ', '/AMPUSER/2014/queues/qnostate : usestate ', '/AMPUSER/2014/recording : ', '/AMPUSER/2014/recording/in/external : always ', '/AMPUSER/2014/recording/in/internal : always ', '/AMPUSER/2014/recording/ondemand : disabled ', '/AMPUSER/2014/recording/out/external : 
always ', '/AMPUSER/2014/recording/out/internal : always ', '/AMPUSER/2014/recording/priority : 0 ', '/AMPUSER/2014/ringtimer : 0 ', '/AMPUSER/2014/voicemail : novm ', '/AMPUSER/2015/answermode : disabled ', '/AMPUSER/2015/cfringtimer : 0 ', '/AMPUSER/2015/cidname : Julio - Suporte ', '/AMPUSER/2015/cidnum : 2015 ', '/AMPUSER/2015/concurrency_limit : 0 ', '/AMPUSER/2015/device : 2015 ', '/AMPUSER/2015/language : ', '/AMPUSER/2015/noanswer : ', '/AMPUSER/2015/outboundcid : ', '/AMPUSER/2015/password : ', '/AMPUSER/2015/queues/qnostate : usestate ', '/AMPUSER/2015/recording : ', '/AMPUSER/2015/recording/in/external : always ', '/AMPUSER/2015/recording/in/internal : always ', '/AMPUSER/2015/recording/ondemand : disabled ', '/AMPUSER/2015/recording/out/external : always ', '/AMPUSER/2015/recording/out/internal : always ', '/AMPUSER/2015/recording/priority : 10 ', '/AMPUSER/2015/ringtimer : 0 ', '/AMPUSER/2015/voicemail : novm ', '/AMPUSER/2016/answermode : disabled ', '/AMPUSER/2016/cfringtimer : 0 ', '/AMPUSER/2016/cidname : Suporte Movel ', '/AMPUSER/2016/cidnum : 2016 ', '/AMPUSER/2016/concurrency_limit : 0 ', '/AMPUSER/2016/device : 2016 ', '/AMPUSER/2016/followme/changecid : default ', '/AMPUSER/2016/followme/ddial : DIRECT ', '/AMPUSER/2016/followme/fixedcid : ', '/AMPUSER/2016/followme/grpconf : DISABLED ', '/AMPUSER/2016/followme/grplist : # ', '/AMPUSER/2016/followme/grptime : 60 ', '/AMPUSER/2016/followme/prering : 0 ', '/AMPUSER/2016/language : ', '/AMPUSER/2016/noanswer : ', '/AMPUSER/2016/outboundcid : ', '/AMPUSER/2016/password : ', '/AMPUSER/2016/queues/qnostate : usestate ', '/AMPUSER/2016/recording : ', '/AMPUSER/2016/recording/in/external : always ', '/AMPUSER/2016/recording/in/internal : always ', '/AMPUSER/2016/recording/ondemand : disabled ', '/AMPUSER/2016/recording/out/external : always ', '/AMPUSER/2016/recording/out/internal : always ', '/AMPUSER/2016/recording/priority : 10 ', '/AMPUSER/2016/ringtimer : 0 ', '/AMPUSER/2016/voicemail : novm ', 
'/AMPUSER/2020/answermode : disabled ', '/AMPUSER/2020/cfringtimer : 0 ', '/AMPUSER/2020/cidname : UraPrincipal IMF ', '/AMPUSER/2020/cidnum : 2020 ', '/AMPUSER/2020/concurrency_limit : 0 ', '/AMPUSER/2020/device : 2020 ', '/AMPUSER/2020/language : ', '/AMPUSER/2020/noanswer : ', '/AMPUSER/2020/outboundcid : ', '/AMPUSER/2020/password : ', '/AMPUSER/2020/queues/qnostate : usestate ', '/AMPUSER/2020/recording : ', '/AMPUSER/2020/recording/in/external : always ', '/AMPUSER/2020/recording/in/internal : always ', '/AMPUSER/2020/recording/ondemand : disabled ', '/AMPUSER/2020/recording/out/external : always ', '/AMPUSER/2020/recording/out/internal : always ', '/AMPUSER/2020/recording/priority : 10 ', '/AMPUSER/2020/ringtimer : 0 ', '/AMPUSER/2020/voicemail : novm ', '/AMPUSER/2021/answermode : disabled ', '/AMPUSER/2021/cfringtimer : 0 ', '/AMPUSER/2021/cidname : UraNetwork - IMF ', '/AMPUSER/2021/cidnum : 2021 ', '/AMPUSER/2021/concurrency_limit : 0 ', '/AMPUSER/2021/device : 2021 ', '/AMPUSER/2021/language : ', '/AMPUSER/2021/noanswer : ', '/AMPUSER/2021/outboundcid : ', '/AMPUSER/2021/password : ', '/AMPUSER/2021/queues/qnostate : usestate ', '/AMPUSER/2021/recording : ', '/AMPUSER/2021/recording/in/external : always ', '/AMPUSER/2021/recording/in/internal : always ', '/AMPUSER/2021/recording/ondemand : disabled ', '/AMPUSER/2021/recording/out/external : always ', '/AMPUSER/2021/recording/out/internal : always ', '/AMPUSER/2021/recording/priority : 10 ', '/AMPUSER/2021/ringtimer : 0 ', '/AMPUSER/2021/voicemail : novm ', '/AMPUSER/2022/answermode : disabled ', '/AMPUSER/2022/cfringtimer : 0 ', '/AMPUSER/2022/cidname : Rafael - IMF ', '/AMPUSER/2022/cidnum : 2022 ', '/AMPUSER/2022/concurrency_limit : 0 ', '/AMPUSER/2022/device : 2022 ', '/AMPUSER/2022/language : ', '/AMPUSER/2022/noanswer : ', '/AMPUSER/2022/outboundcid : ', '/AMPUSER/2022/password : ', '/AMPUSER/2022/queues/qnostate : usestate ', '/AMPUSER/2022/recording : ', '/AMPUSER/2022/recording/in/external : 
always ', '/AMPUSER/2022/recording/in/internal : always ', '/AMPUSER/2022/recording/ondemand : disabled ', '/AMPUSER/2022/recording/out/external : always ', '/AMPUSER/2022/recording/out/internal : always ', '/AMPUSER/2022/recording/priority : 0 ', '/AMPUSER/2022/ringtimer : 0 ', '/AMPUSER/2022/voicemail : novm ', '/AMPUSER/2023/answermode : disabled ', '/AMPUSER/2023/cfringtimer : 0 ', '/AMPUSER/2023/cidname : Shima - IMF ', '/AMPUSER/2023/cidnum : 2023 ', '/AMPUSER/2023/concurrency_limit : 0 ', '/AMPUSER/2023/device : 2023 ', '/AMPUSER/2023/language : ', '/AMPUSER/2023/noanswer : ', '/AMPUSER/2023/outboundcid : ', '/AMPUSER/2023/password : ', '/AMPUSER/2023/queues/qnostate : usestate ', '/AMPUSER/2023/recording : ', '/AMPUSER/2023/recording/in/external : always ', '/AMPUSER/2023/recording/in/internal : always ', '/AMPUSER/2023/recording/ondemand : disabled ', '/AMPUSER/2023/recording/out/external : always ', '/AMPUSER/2023/recording/out/internal : always ', '/AMPUSER/2023/recording/priority : 10 ', '/AMPUSER/2023/ringtimer : 0 ', '/AMPUSER/2023/voicemail : novm ', '/AMPUSER/2098/answermode : disabled ', '/AMPUSER/2098/callmenum : 2098 ', '/AMPUSER/2098/cfringtimer : 0 ', '/AMPUSER/2098/cidname : VoicemailSuporte ', '/AMPUSER/2098/cidnum : 2098 ', '/AMPUSER/2098/concurrency_limit : 0 ', '/AMPUSER/2098/device : 2098 ', '/AMPUSER/2098/language : ', '/AMPUSER/2098/noanswer : ', '/AMPUSER/2098/outboundcid : ', '/AMPUSER/2098/password : ', '/AMPUSER/2098/queues/qnostate : usestate ', '/AMPUSER/2098/recording : ', '/AMPUSER/2098/recording/in/external : always ', '/AMPUSER/2098/recording/in/internal : always ', '/AMPUSER/2098/recording/ondemand : disabled ', '/AMPUSER/2098/recording/out/external : always ', '/AMPUSER/2098/recording/out/internal : always ', '/AMPUSER/2098/recording/priority : 0 ', '/AMPUSER/2098/ringtimer : 0 ', '/AMPUSER/2098/voicemail : default ', '/AMPUSER/2099/answermode : disabled ', '/AMPUSER/2099/cfringtimer : 0 ', '/AMPUSER/2099/cidname : Fax 
Digital ', '/AMPUSER/2099/cidnum : 2099 ', '/AMPUSER/2099/concurrency_limit : 0 ', '/AMPUSER/2099/device : 2099 ', '/AMPUSER/2099/language : ', '/AMPUSER/2099/noanswer : ', '/AMPUSER/2099/outboundcid : ', '/AMPUSER/2099/password : ', '/AMPUSER/2099/queues/qnostate : usestate ', '/AMPUSER/2099/recording : ', '/AMPUSER/2099/recording/in/external : dontcare ', '/AMPUSER/2099/recording/in/internal : dontcare ', '/AMPUSER/2099/recording/ondemand : disabled ', '/AMPUSER/2099/recording/out/external : dontcare ', '/AMPUSER/2099/recording/out/internal : dontcare ', '/AMPUSER/2099/recording/priority : 0 ', '/AMPUSER/2099/ringtimer : 0 ', '/AMPUSER/2099/voicemail : novm ', '/AMPUSER/3000/answermode : disabled ', '/AMPUSER/3000/cfringtimer : 0 ', '/AMPUSER/3000/cidname : Tecnosuper ', '/AMPUSER/3000/cidnum : 3000 ', '/AMPUSER/3000/concurrency_limit : 0 ', '/AMPUSER/3000/language : ', '/AMPUSER/3000/noanswer : ', '/AMPUSER/3000/outboundcid : ', '/AMPUSER/3000/password : ', '/AMPUSER/3000/queues/qnostate : usestate ', '/AMPUSER/3000/recording : ', '/AMPUSER/3000/recording/in/external : always ', '/AMPUSER/3000/recording/in/internal : always ', '/AMPUSER/3000/recording/ondemand : disabled ', '/AMPUSER/3000/recording/out/external : always ', '/AMPUSER/3000/recording/out/internal : always ', '/AMPUSER/3000/recording/priority : 0 ', '/AMPUSER/3000/ringtimer : 0 ', '/AMPUSER/3000/voicemail : novm ', '/AMPUSER/400/answermode : disabled ', '/AMPUSER/400/cfringtimer : 0 ', '/AMPUSER/400/cidname : Videoconf ', '/AMPUSER/400/cidnum : 400 ', '/AMPUSER/400/concurrency_limit : 0 ', '/AMPUSER/400/device : 400 ', '/AMPUSER/400/language : ', '/AMPUSER/400/noanswer : ', '/AMPUSER/400/outboundcid : ', '/AMPUSER/400/password : ', '/AMPUSER/400/queues/qnostate : usestate ', '/AMPUSER/400/recording : ', '/AMPUSER/400/recording/in/external : dontcare ', '/AMPUSER/400/recording/in/internal : dontcare ', '/AMPUSER/400/recording/ondemand : disabled ', '/AMPUSER/400/recording/out/external : dontcare ', 
'/AMPUSER/400/recording/out/internal : dontcare ', '/AMPUSER/400/recording/priority : 10 ', '/AMPUSER/400/ringtimer : 0 ', '/AMPUSER/400/voicemail : novm ', '468 results found.', '--END COMMAND--']) + '\r\n')
self.assertTrue(Response.match(follows_response))
response = Response.read(follows_response)
self.assertFalse(response.is_error())
self.assertIsNotNone(response.follows)
self.assertListEqual(LINE_REGEX.split(follows_response)[2:(- 1)], response.follows)
self.assertEqual(follows_response, str(response))
def test_with_follows_queue_show_command(self):
    """A 'Response: Follows' payload from a `queue show` CLI command parses cleanly.

    Header lines end with '\r' (the list is joined with '\n'), followed by
    free-form command output terminated by the '--END COMMAND--' sentinel.
    """
    follows_response = ('\n'.join(['Response: Follows\r', 'Privilege: Command\r', "911 has 0 calls (max unlimited) in 'ringall' strategy (0s holdtime, 0s talktime), W:0, C:0, A:0, SL:0.0% within 60s", '   Members: ', '      Carol - Financeiro (Local/-queue/n from hint:-local) (ringinuse enabled) (Unavailable) has taken no calls yet', '   No Callers', '', "910 has 0 calls (max unlimited) in 'ringall' strategy (0s holdtime, 0s talktime), W:0, C:0, A:0, SL:0.0% within 60s", '   Members: ', '      Julio - Suporte (Local/-queue/n from hint:-local) (ringinuse enabled) (Unavailable) has taken no calls yet', '      Ettore - Desenvolvimento (Local/-queue/n from hint:-local) (ringinuse enabled) (dynamic) (Unavailable) has taken no calls yet', '      Gustavo - Suporte (Local/-queue/n from hint:-local) (ringinuse enabled) (Unavailable) has taken no calls yet', '   No Callers', '', "912 has 0 calls (max unlimited) in 'ringall' strategy (0s holdtime, 0s talktime), W:0, C:0, A:0, SL:0.0% within 60s", '   Members: ', '      Carol - Financeiro (Local/-queue/n from hint:-local) (ringinuse enabled) (Unavailable) has taken no calls yet', '   No Callers', '', "901 has 0 calls (max unlimited) in 'ringall' strategy (0s holdtime, 0s talktime), W:0, C:0, A:0, SL:0.0% within 60s", '   Members: ', '      NoteTeo (Local/-queue/n from hint:-local) (ringinuse enabled) (Unavailable) has taken no calls yet', '      Carol - Financeiro (Local/-queue/n from hint:-local) (ringinuse enabled) (Unavailable) has taken no calls yet', '   No Callers', '', "default has 0 calls (max unlimited) in 'ringall' strategy (0s holdtime, 0s talktime), W:0, C:0, A:0, SL:0.0% within 0s", '   No Members', '   No Callers', '', "902 has 0 calls (max unlimited) in 'ringall' strategy (0s holdtime, 0s talktime), W:0, C:0, A:0, SL:0.0% within 60s", '   Members: ', '      Julio - Suporte (Local/-queue/n from hint:-local) (ringinuse enabled) (dynamic) (Unavailable) has taken no calls yet', '      Wainer (Local/-queue/n from hint:-local) (ringinuse enabled) (dynamic) (Unavailable) has taken no calls yet', '      Gustavo - Suporte (Local/-queue/n from hint:-local) (ringinuse enabled) (dynamic) (Unavailable) has taken no calls yet', '   No Callers', '', "903 has 0 calls (max unlimited) in 'ringall' strategy (0s holdtime, 0s talktime), W:0, C:0, A:0, SL:0.0% within 60s", '   Members: ', '      Ettore - Desenvolvimento (Local/-queue/n from hint:-local) (ringinuse enabled) (dynamic) (paused) (Unavailable) has taken no calls yet', '   No Callers', '', "951 has 0 calls (max unlimited) in 'ringall' strategy (0s holdtime, 0s talktime), W:0, C:0, A:0, SL:0.0% within 60s", '   Members: ', '      Wainer (Local/-queue/n from hint:-local) (ringinuse enabled) (dynamic) (Unavailable) has taken no calls yet', '   No Callers', '', "950 has 0 calls (max 1) in 'ringall' strategy (0s holdtime, 0s talktime), W:0, C:0, A:0, SL:0.0% within 60s", '   Members: ', '      Ettore - Desenvolvimento (Local/-queue/n from hint:-local) (ringinuse enabled) (dynamic) (paused) (Unavailable) has taken no calls yet', '   No Callers', '', '--END COMMAND--']) + '\r\n')
    self.assertTrue(Response.match(follows_response))
    response = Response.read(follows_response)
    self.assertFalse(response.is_error())
    self.assertIsNotNone(response.follows)
    # follows must contain every payload line between the two header lines
    # and the final terminator (hence the [2:-1] slice of the split text).
    self.assertListEqual(LINE_REGEX.split(follows_response)[2:(- 1)], response.follows)
    # Serializing the parsed response must reproduce the raw text exactly.
    self.assertEqual(follows_response, str(response))
def test_event(self):
    """An AMI *event* packet must never be recognized or parsed as a response."""
    header_lines = [
        'Event: FullyBooted',
        'Privilege: system, all',
        'Status: Fully Booted',
    ]
    event = '\r\n'.join(header_lines)
    # Events are a different packet kind: match() rejects, read() raises.
    self.assertFalse(Response.match(event))
    with self.assertRaises(Exception):
        Response.read(event)
def __convert_2_gpt_messages(messages: List[ModelMessage]):
    """Convert framework ModelMessage history into OpenAI chat-completion format.

    Human/AI messages are paired into ``{'role': 'user'}`` / ``{'role':
    'assistant'}`` entries; a pair is emitted only once the AI reply is seen.
    System messages are collected separately:

    * exactly one system message -> prepended as the ``system`` entry, and the
      last user message is re-appended as the final ``user`` turn;
    * two or more -> the *second* system message is sent as the final ``user``
      turn (existing behavior, preserved as-is);
    * none -> a trailing human message becomes the final ``user`` turn.

    Args:
        messages: ordered conversation history.

    Returns:
        A list of OpenAI-style ``{'role': ..., 'content': ...}`` dicts.
    """
    gpt_messages = []
    last_usr_message = ''
    system_messages = []
    if not messages:
        # Fix: the original indexed messages[-1] below, raising IndexError on
        # an empty history. An empty history converts to an empty payload.
        return gpt_messages
    for message in messages:
        if message.role == ModelMessageRoleType.HUMAN or message.role == 'user':
            last_usr_message = message.content
        elif message.role == ModelMessageRoleType.SYSTEM:
            system_messages.append(message.content)
        elif message.role == ModelMessageRoleType.AI or message.role == 'assistant':
            last_ai_message = message.content
            # Emit the (user, assistant) pair once the assistant reply arrives.
            gpt_messages.append({'role': 'user', 'content': last_usr_message})
            gpt_messages.append({'role': 'assistant', 'content': last_ai_message})
    if len(system_messages) > 0:
        if len(system_messages) < 2:
            gpt_messages.insert(0, {'role': 'system', 'content': system_messages[0]})
            gpt_messages.append({'role': 'user', 'content': last_usr_message})
        else:
            gpt_messages.append({'role': 'user', 'content': system_messages[1]})
    else:
        last_message = messages[-1]
        if last_message.role == ModelMessageRoleType.HUMAN:
            gpt_messages.append({'role': 'user', 'content': last_message.content})
    return gpt_messages
def load_data(worker: TaskSpec, records: List[dict], client: Elasticsearch) -> Tuple[int, int]:
    """Bulk-index ``records`` into the worker's Elasticsearch index.

    Args:
        worker: task spec providing the index name, display name, and the
            ``is_incremental`` flag (pre-delete before indexing when set).
        records: documents to stream into Elasticsearch.
        client: connected Elasticsearch client.

    Returns:
        ``(success, failed)`` document counts from the streaming bulk load.
    """
    start = perf_counter()
    # Fix: the message had an f-string prefix with no placeholders.
    logger.info(format_log('Starting Index operation', name=worker.name, action='Index'))
    success, failed = streaming_post_to_es(client, records, worker.index, worker.name, delete_before_index=worker.is_incremental)
    logger.info(format_log(f'Index operation took {perf_counter() - start:.2f}s', name=worker.name, action='Index'))
    return success, failed
class ResourceTreeService(object):
    """Thin facade over a resource-tree service implementation.

    Every method delegates verbatim to the injected ``service_cls``; this
    class exists so callers can depend on one stable interface while the
    backing implementation (and its ``model``) is swapped at construction.
    """
    # The model class shared with the wrapped service (set in __init__).
    model = None
    def __init__(self, service_cls):
        """Bind ``service_cls`` as the backing implementation and share the model."""
        service_cls.model = self.model
        self.service = service_cls
    def from_resource_deeper(self, resource_id=None, limit_depth=1000000, db_session=None, *args, **kwargs):
        """Return the subtree rooted at ``resource_id`` (inclusive), down to ``limit_depth``."""
        return self.service.from_resource_deeper(*args, resource_id=resource_id, limit_depth=limit_depth, db_session=db_session, **kwargs)
    def delete_branch(self, resource_id=None, db_session=None, *args, **kwargs):
        """Delete the branch rooted at ``resource_id``."""
        return self.service.delete_branch(*args, resource_id=resource_id, db_session=db_session, **kwargs)
    def from_parent_deeper(self, parent_id=None, limit_depth=1000000, db_session=None, *args, **kwargs):
        """Return descendants of ``parent_id`` (exclusive), down to ``limit_depth``."""
        return self.service.from_parent_deeper(*args, parent_id=parent_id, limit_depth=limit_depth, db_session=db_session, **kwargs)
    def build_subtree_strut(self, result, *args, **kwargs):
        """Assemble a nested tree structure from a flat query ``result``."""
        return self.service.build_subtree_strut(*args, result=result, **kwargs)
    def path_upper(self, object_id, limit_depth=1000000, db_session=None, *args, **kwargs):
        """Return the ancestor path from ``object_id`` upward, up to ``limit_depth``."""
        return self.service.path_upper(*args, object_id=object_id, limit_depth=limit_depth, db_session=db_session, **kwargs)
    def move_to_position(self, resource_id, to_position, new_parent_id=noop, db_session=None, *args, **kwargs):
        """Move ``resource_id`` to ``to_position``, optionally under a new parent.

        ``new_parent_id`` defaults to the ``noop`` sentinel so that "keep the
        current parent" is distinguishable from "re-parent to None/root".
        """
        return self.service.move_to_position(*args, resource_id=resource_id, to_position=to_position, new_parent_id=new_parent_id, db_session=db_session, **kwargs)
    def shift_ordering_down(self, parent_id, position, db_session=None, *args, **kwargs):
        """Shift sibling ordering downward from ``position`` under ``parent_id``."""
        return self.service.shift_ordering_down(*args, parent_id=parent_id, position=position, db_session=db_session, **kwargs)
    def shift_ordering_up(self, parent_id, position, db_session=None, *args, **kwargs):
        """Shift sibling ordering upward from ``position`` under ``parent_id``."""
        return self.service.shift_ordering_up(*args, parent_id=parent_id, position=position, db_session=db_session, **kwargs)
    def set_position(self, resource_id, to_position, db_session=None, *args, **kwargs):
        """Set the ordering position of ``resource_id`` among its siblings."""
        return self.service.set_position(*args, resource_id=resource_id, to_position=to_position, db_session=db_session, **kwargs)
    def check_node_parent(self, resource_id, new_parent_id, db_session=None, *args, **kwargs):
        """Validate that ``new_parent_id`` is a legal parent for ``resource_id``."""
        return self.service.check_node_parent(*args, resource_id=resource_id, new_parent_id=new_parent_id, db_session=db_session, **kwargs)
    def count_children(self, resource_id, db_session=None, *args, **kwargs):
        """Return the number of direct children of ``resource_id``."""
        return self.service.count_children(*args, resource_id=resource_id, db_session=db_session, **kwargs)
    def check_node_position(self, parent_id, position, on_same_branch, db_session=None, *args, **kwargs):
        """Validate that ``position`` is legal under ``parent_id``."""
        return self.service.check_node_position(*args, parent_id=parent_id, position=position, on_same_branch=on_same_branch, db_session=db_session, **kwargs)
def test_warning_when_df_contains_unseen_categories(df_enc, df_enc_rare):
    """Transforming data with unseen categories emits exactly one UserWarning."""
    expected_msg = 'During the encoding, NaN values were introduced in the feature(s) var_A.'
    transformer = CountFrequencyEncoder(unseen='ignore')
    transformer.fit(df_enc)
    with pytest.warns(UserWarning) as captured:
        transformer.transform(df_enc_rare)
    assert len(captured) == 1
    assert captured[0].message.args[0] == expected_msg
def to_png_sprite(index, shortname, alias, uc, alt, title, category, options, md):
    """Build a <span> element representing an emoji rendered via a PNG CSS sprite.

    The CSS class encodes the sprite sheet name, tile size, category and the
    emoji's Unicode code point; ``alt`` becomes the element's text content.
    """
    css_class = ('%(class)s-%(size)s-%(category)s _%(unicode)s' % {
        'class': options.get('classes', index),
        'size': options.get('size', '64'),
        'category': category if category else '',
        'unicode': uc,
    })
    attributes = {'class': css_class}
    if title:
        attributes['title'] = title
    # NOTE: helper is spelled 'add_attriubtes' upstream; keep the name as-is.
    add_attriubtes(options, attributes)
    el = etree.Element('span', attributes)
    # AtomicString prevents Markdown from re-processing the alt text.
    el.text = md_util.AtomicString(alt)
    return el
def add_HealthServicer_to_server(servicer, server):
    """Register a Health servicer's RPC handlers on a gRPC server.

    Generated gRPC glue: maps each RPC name of ``grpc.health.v1.Health``
    (unary 'Check', server-streaming 'Watch') to the servicer method with the
    matching protobuf (de)serializers.
    """
    rpc_method_handlers = {'Check': grpc.unary_unary_rpc_method_handler(servicer.Check, request_deserializer=health__pb2.HealthCheckRequest.FromString, response_serializer=health__pb2.HealthCheckResponse.SerializeToString), 'Watch': grpc.unary_stream_rpc_method_handler(servicer.Watch, request_deserializer=health__pb2.HealthCheckRequest.FromString, response_serializer=health__pb2.HealthCheckResponse.SerializeToString)}
    generic_handler = grpc.method_handlers_generic_handler('grpc.health.v1.Health', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
def slice_by_max_duration(gen: np.ndarray, slice_max_duration: float, rate: int) -> Iterable[np.ndarray]:
    """Yield ``gen`` split into near-equal chunks no longer than the max duration.

    ``slice_max_duration`` (seconds) times ``rate`` (samples per second) gives
    the maximum number of samples per chunk. Inputs at or under that limit are
    yielded whole and unchanged; longer inputs are cut into the smallest
    number of chunks that fits, with chunk sizes balanced via ceiling division.
    """
    max_samples = slice_max_duration * rate
    total = len(gen)
    if total <= max_samples:
        yield gen
        return
    n_chunks = math.ceil(total / max_samples)
    chunk_size = math.ceil(total / n_chunks)
    for start in range(0, total, chunk_size):
        yield gen[start:start + chunk_size]
def _fuse_split_and_group_gemm(sorted_graph: List[Tensor]) -> List[Tensor]:
    """Fuse eligible split -> group_gemm patterns in a sorted tensor graph.

    When every output of a ``split`` op feeds exactly the "A" operands of one
    ``group_gemm_r*`` op (one output per group, each consumed only there), the
    split is eliminated: each group's A input-accessor is rebased onto the
    split's single input tensor at the appropriate offset along the split dim.
    Returns the sanitized graph.
    """
    sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
    for op in sorted_ops:
        op_type = op._attrs['op']
        if (op_type != 'split'):
            continue
        split_op = op
        if (not _can_fuse_split_op(split_op)):
            continue
        def _optional_group_gemm_op(dst_ops):
            # Return the consuming op only if it is the *sole* consumer and a
            # row-major group_gemm variant; otherwise the pattern doesn't apply.
            if (len(dst_ops) != 1):
                return None
            dst_op = list(dst_ops)[0]
            if dst_op._attrs['op'].startswith('group_gemm_r'):
                return dst_op
            return None
        split_outputs = split_op._attrs['outputs']
        group_gemm_op = _optional_group_gemm_op(split_outputs[0]._attrs['dst_ops'])
        if (group_gemm_op is None):
            continue
        # Each split output must map to exactly one gemm group.
        if (group_gemm_op._attrs['groups'] != len(split_outputs)):
            continue
        all_as = []
        all_a_indices = {}
        # With bias each group consumes (A, B, bias); without, (A, B).
        stride = (3 if group_gemm_op._attrs['op'].endswith('bias') else 2)
        group_gemm_inputs = group_gemm_op._attrs['inputs']
        for i in range(group_gemm_op._attrs['groups']):
            t = group_gemm_inputs[(i * stride)]
            all_as.append(t)
            # Map tensor -> (group index, position in the flat inputs list).
            all_a_indices[t] = (i, (i * stride))
        def _valid_input(input_tensor):
            # Fusable only if the tensor has a single producer and consumer.
            return (len(input_tensor._attrs['src_ops']) == len(input_tensor._attrs['dst_ops']) == 1)
        # The gemm's A operands must be exactly the split's outputs.
        if (set(all_as) != set(split_outputs)):
            continue
        if all((_valid_input(x) for x in all_as)):
            input_indices = all_a_indices
            input_accessors = group_gemm_op.input_a_accessors()
        else:
            continue
        split_input = split_op._attrs['inputs'][0]
        split_dim = split_op._attrs['split_dim']
        split_dim_offset = 0
        # Rebase each group's accessor onto the unsplit tensor at its offset,
        # then drop the now-unused split output from the graph.
        for split_output_tensor in split_outputs:
            (accessor_idx, input_idx) = input_indices[split_output_tensor]
            input_accessors[accessor_idx].update_base_tensor(split_input, split_dim, split_dim_offset)
            group_gemm_op._attrs['inputs'][input_idx] = split_input
            split_dim_offset += split_output_tensor._attrs['shape'][split_dim]._attrs['values'][0]
            transform_utils.remove_tensor_from_sorted_graph(split_output_tensor)
        # Offsets must tile the split dimension exactly.
        assert (split_dim_offset == split_input._attrs['shape'][split_dim]._attrs['values'][0])
        split_input._attrs['dst_ops'] = StableSet([group_gemm_op])
    return transform_utils.sanitize_sorted_graph(sorted_graph)
def make_pydoc(dotted_path: str, destination_file: Path) -> None:
    """Generate pydoc-markdown API docs for ``dotted_path`` into ``destination_file``.

    Progress is printed inline; any failure is reported as 'Error: ...' and
    the function returns without writing.
    """
    print(f'Running with dotted path={dotted_path} and destination_file={destination_file}... ', end='')
    try:
        api_doc_content = run_pydoc_markdown(dotted_path)
        # Ensure the target directory tree exists before writing.
        destination_file.parent.mkdir(parents=True, exist_ok=True)
        destination_file.write_text(api_doc_content)
    except Exception as e:
        print(f'Error: {str(e)}')
    else:
        print('Done!')
def extractDistractedTranslations(item):
    """Map a Distracted Translations feed item onto a known series release.

    Returns a release message for recognized series, None for previews or
    items without chapter/volume info, and False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title = item['title']
    if not (chp or vol) or 'preview' in title.lower():
        return None
    ltags = [tag.lower() for tag in item['tags']]
    if 'gonna get captured' in ltags or 'Get Captured: Chapter' in title:
        return buildReleaseMessageWithType(item, 'Like Hell Im Gonna Get Captured!', vol, chp, frag=frag, postfix=postfix)
    if 'Girl Who Ate Death' in title:
        return buildReleaseMessageWithType(item, 'Shinigami wo Tabeta Shouko', vol, chp, frag=frag, postfix=postfix)
    return False
# Fix: the decorator line was truncated to a bare `.usefixtures(...)`
# (a syntax error); restored as the standard pytest marker.
@pytest.mark.usefixtures('use_tmpdir')
def test_mocked_simulator_configuration(monkeypatch):
    """Exercise Ecl100Config/EclRun against a fully mocked simulator install.

    Builds a fake ECL100 site config plus dummy executables in ./bin, then
    checks run-path resolution, base-name parsing, and CPU-count validation
    for both the scalar and MPI simulator variants.
    """
    conf = {'versions': {'mocked': {'scalar': {'executable': 'bin/scalar_exe'}, 'mpi': {'executable': 'bin/mpi_exe', 'mpirun': 'bin/mpirun'}}}}
    with open('ecl100_config.yml', 'w', encoding='utf-8') as filehandle:
        filehandle.write(yaml.dump(conf))
    os.mkdir('bin')
    monkeypatch.setenv('ECL100_SITE_CONFIG', 'ecl100_config.yml')
    # Create placeholder executables referenced by the config above.
    for filename in ['scalar_exe', 'mpi_exe', 'mpirun']:
        fname = os.path.join('bin', filename)
        with open(fname, 'w', encoding='utf-8') as filehandle:
            filehandle.write('This is an executable ...')
        os.chmod(fname, stat.S_IEXEC)
    with open('ECLIPSE.DATA', 'w', encoding='utf-8') as filehandle:
        filehandle.write('Mock eclipse data file')
    econfig = ecl_config.Ecl100Config()
    sim = econfig.sim('mocked')
    mpi_sim = econfig.mpi_sim('mocked')
    erun = ecl_run.EclRun('ECLIPSE.DATA', sim)
    assert (erun.runPath() == os.getcwd())
    # A data file inside a subdirectory makes that directory the run path.
    os.mkdir('path')
    with open('path/ECLIPSE.DATA', 'w', encoding='utf-8') as filehandle:
        filehandle.write('Mock eclipse data file')
    erun = ecl_run.EclRun('path/ECLIPSE.DATA', sim)
    assert (erun.runPath() == os.path.join(os.getcwd(), 'path'))
    assert (erun.baseName() == 'ECLIPSE')
    assert (erun.numCpu() == 1)
    # Non-numeric CPU counts are rejected; numeric strings are accepted.
    with pytest.raises(ValueError):
        ecl_run.EclRun('path/ECLIPSE.DATA', sim, num_cpu='xxx')
    erun = ecl_run.EclRun('path/ECLIPSE.DATA', mpi_sim, num_cpu='10')
    assert (erun.numCpu() == 10)
    # Missing data files fail fast with IOError.
    with pytest.raises(IOError):
        ecl_run.EclRun('DOES/NOT/EXIST', mpi_sim, num_cpu='10')
# Fix: the decorator line was truncated to a bare `.parametrize(...)`
# (a syntax error); restored as the standard pytest marker.
@pytest.mark.parametrize('degrees', (False, True), ids=('radians', 'degrees'))
def test_magnetic_vec_to_angles_array(arrays, degrees):
    """Array inputs convert to (intensity, inclination, declination) in both units."""
    intensity, inclination, declination = arrays[0]
    magnetic_e, magnetic_n, magnetic_u = arrays[1]
    if not degrees:
        # Reference angles in the fixture are in degrees; convert for radians mode.
        inclination, declination = np.radians(inclination), np.radians(declination)
    npt.assert_allclose(magnetic_vec_to_angles(magnetic_e, magnetic_n, magnetic_u, degrees=degrees), (intensity, inclination, declination))
def call_resource(path, qs):
    """Resolve ``path`` to a Flask resource, apply query-string args and collect metadata.

    Returns a dict bundling the resolved resource, its query/model/schema, a
    row count, the parsed field/kwarg sets, and a UTC timestamp — used to
    describe one cacheable API call.
    """
    app = task_utils.get_app()
    # Match the path against the app's URL map to find the view endpoint.
    (endpoint, arguments) = app.url_map.bind('').match(path)
    resource_type = app.view_functions[endpoint].view_class
    resource = resource_type()
    (fields, kwargs) = parse_kwargs(resource, qs)
    # URL-path arguments take part in the query alongside query-string kwargs.
    kwargs = utils.extend(arguments, kwargs)
    for field in IGNORE_FIELDS:
        kwargs.pop(field, None)
    (query, model, schema) = unpack(resource.build_query(**kwargs), 3)
    (count, _) = counts.get_count(resource, query)
    # schema may be None for some resources; fall back to the resource default.
    return {'path': path, 'qs': qs, 'name': get_s3_name(path, qs), 'query': query, 'schema': (schema or resource.schema), 'resource': resource, 'count': count, 'timestamp': datetime.datetime.utcnow(), 'fields': fields, 'kwargs': kwargs}
class TestFileHook(fake_filesystem_unittest.TestCase):
    """Tests for FileHook using pyfakefs: before() creates the declared
    files/dirs, after() removes them, and permission errors propagate."""
    def setUp(self):
        """Set up an in-memory filesystem and the hook under test."""
        self.setUpPyfakefs()
        self.os = fake_filesystem.FakeOsModule(self.fs)
        self.hook = FileHook()
    def test_directory_pre(self):
        """before() creates a missing directory."""
        self.assertFalse(self.fs.exists('/fake/dir'))
        self.hook.before([{'path': '/fake/dir', 'type': 'dir'}], MagicMock())
        self.assertTrue(self.fs.isdir('/fake/dir'))
    def test_directory_pre_exists(self):
        """before() is a no-op (not an error) when the directory exists."""
        self.fs.create_dir('/fake/dir')
        self.hook.before([{'path': '/fake/dir', 'type': 'dir'}], MagicMock())
        self.assertTrue(self.fs.isdir('/fake/dir'))
    def test_directory_pre_no_permissions(self):
        """before() raises OSError when the parent is not writable (mode 0o444)."""
        self.fs.create_dir('/fake')
        self.os.chmod('/fake', 292)
        with self.assertRaises(OSError):
            self.hook.before([{'path': '/fake/dir', 'type': 'dir'}], MagicMock())
    def test_directory_post(self):
        """after() removes the directory it manages."""
        self.fs.create_dir('/fake/dir')
        self.hook.after([{'path': '/fake/dir', 'type': 'dir'}], MagicMock())
        self.assertFalse(self.fs.exists('/fake/dir'))
    def test_file_pre(self):
        """before() creates a missing file."""
        self.fs.create_dir('/fake')
        self.assertFalse(self.fs.exists('/fake/file'))
        self.hook.before([{'path': '/fake/file', 'type': 'file'}], MagicMock())
        self.assertTrue(self.fs.isfile('/fake/file'))
    def test_file_post(self):
        """after() removes the file it manages."""
        self.fs.create_dir('/fake')
        self.fs.create_file('/fake/file')
        self.hook.after([{'path': '/fake/file', 'type': 'file'}], MagicMock())
        self.assertFalse(self.fs.exists('/fake/file'))
from unittest import mock


# Fix: the source showed a bare string statement ('ecs_deploy.cli.get_client')
# above this test — a stripped decorator. Reconstructed as a mock.patch (the
# test's first parameter receives the patched client) — TODO confirm upstream.
@mock.patch('ecs_deploy.cli.get_client')
def test_cron(get_client, runner):
    """The cron command creates, applies and deregisters a task def revision."""
    get_client.return_value = EcsTestClient('acces_key', 'secret_key')
    result = runner.invoke(cli.cron, (CLUSTER_NAME, TASK_DEFINITION_FAMILY_1, 'rule'))
    assert (not result.exception)
    assert (result.exit_code == 0)
    assert (u'Update task definition based on: test-task:2' in result.output)
    assert (u'Creating new task definition revision' in result.output)
    assert (u'Successfully created revision: 2' in result.output)
    assert (u'Updating scheduled task' in result.output)
    assert (u'Deregister task definition revision' in result.output)
    assert (u'Successfully deregistered revision: 2' in result.output)
from typing import Optional


def flake8_entrypoint(argv: Optional[List[str]] = None) -> NoReturn:
    """Run the 'lint' subcommand with flake8-style arguments and exit.

    Args:
        argv: command-line arguments; defaults to ``sys.argv[1:]``.
            (Fix: annotation was the deprecated implicit-Optional
            ``List[str]=None``.)

    Raises:
        SystemExit: always, with the lint command's exit code.
    """
    if argv is None:
        argv = sys.argv[1:]
    exit_code, msg = main(['lint'] + argv)
    if msg:
        print(colored(msg, 'red'))
    # sys.exit needs the raw int, not the ExitCode enum member.
    if isinstance(exit_code, ExitCode):
        exit_code = exit_code.value
    sys.exit(exit_code)
class PublisherElement(object):
    """One UI row for a publisher check: Check/Fix buttons, a progress bar,
    a pass/fail icon, timing info and a help button.

    Fix applied: ``state`` was defined twice as plain methods, so the second
    definition shadowed the first and ``self.state = False`` silently bypassed
    the icon/tooltip/color updates. Restored the clearly intended
    ``@property`` / ``@state.setter`` pair backed by ``self._state``.
    """
    passing_text = 'Passing'
    not_passing_text = 'Not Passing'

    def __init__(self, publisher=None):
        """Store the publisher callable; widgets are created later in create()."""
        self.publisher = publisher
        self.layout = None
        self.check_push_button = None
        self.fix_push_button = None
        # Suffix used to look up an auto-fix function named '<publisher>___fix'.
        self.fix_identifier = '___fix'
        self.help_push_button = None
        self.publisher_name_label = None
        self.publisher_state_ok_icon = None
        self.publisher_state_not_ok_icon = None
        self.publisher_state_label = None
        self.performance_label = None
        self.exception_message = None
        self.progress_bar = None
        self.progress_bar_manager = None
        self.duration = 0.0
        self._state = False

    def create(self, parent=None):
        """Build the widget row inside ``parent`` and wire up the signals."""
        self.layout = QtWidgets.QHBoxLayout(parent)
        self.publisher_state_ok_icon = parent.style().standardIcon(QtWidgets.QStyle.SP_DialogYesButton)
        self.publisher_state_not_ok_icon = parent.style().standardIcon(QtWidgets.QStyle.SP_DialogNoButton)
        self.check_push_button = QtWidgets.QPushButton(parent)
        self.check_push_button.setText('Check')
        self.layout.addWidget(self.check_push_button)
        self.check_push_button.setSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        self.fix_push_button = QtWidgets.QPushButton(parent)
        self.fix_push_button.setText('Fix')
        self.layout.addWidget(self.fix_push_button)
        self.fix_push_button.setSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        self.fix_push_button.setMaximumWidth(40)
        self.progress_bar = QtWidgets.QProgressBar(parent)
        self.progress_bar.setFixedWidth(100)
        self.progress_bar.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
        self.layout.addWidget(self.progress_bar)
        self.progress_bar_manager = QProgressBarWrapper(progress_bar=self.progress_bar, minimum=0, maximum=100.0, value=0.0)
        self.publisher_state_label = QtWidgets.QLabel(parent)
        self.publisher_state_label.setSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        self.layout.addWidget(self.publisher_state_label)
        self.performance_label = QtWidgets.QLabel(parent)
        self.performance_label.setText('x.x sec')
        self.performance_label.setSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        self.layout.addWidget(self.performance_label)
        # The first docstring line of the publisher is its display name.
        self.publisher_name_label = QtWidgets.QLabel(parent)
        self.publisher_name_label.setText(self.publisher.__doc__.split('\n')[0].strip())
        self.publisher_name_label.setAlignment((QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter))
        self.publisher_name_label.setToolTip(self.publisher.__doc__)
        self.performance_label.setSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        self.layout.addWidget(self.publisher_name_label)
        self.help_push_button = QtWidgets.QPushButton(parent)
        self.help_push_button.setText('?')
        self.layout.addWidget(self.help_push_button)
        self.help_push_button.setSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        self.help_push_button.setMaximumWidth(20)
        spacer = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.layout.addItem(spacer)
        # Start in the "not passing" visual state.
        self.state = False
        QtCore.QObject.connect(self.check_push_button, QtCore.SIGNAL('clicked()'), self.run_publisher)
        QtCore.QObject.connect(self.help_push_button, QtCore.SIGNAL('clicked()'), self.show_publisher_docs)
        QtCore.QObject.connect(self.fix_push_button, QtCore.SIGNAL('clicked()'), self.run_fix_definition)

    def run_fix_definition(self):
        """Ask for confirmation, run the matching '<publisher>___fix' function
        (if one exists in the publish module), then re-run the check."""
        m = QtWidgets.QMessageBox()
        m.setWindowTitle('Continue?')
        m.setText(('This command will try to fix<br/>[ <b>%s</b> ] issue<br/>automatically.<br/><br/>Confirm ? <b>(BETA)</b>' % self.publisher.__doc__.split('\n')[0].strip()))
        m.setStandardButtons((QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No))
        m.setDefaultButton(QtWidgets.QMessageBox.Yes)
        m.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
        m.exec_()
        if (m.clickedButton() == m.defaultButton()):
            fix_def_name = ('%s%s' % (self.publisher.__name__, self.fix_identifier))
            try:
                from anima.dcc.mayaEnv import publish
                fix_func = publish.__dict__[fix_def_name]
                fix_func()
            except KeyError:
                # No auto-fix defined for this publisher; just re-check.
                pass
            self.check_push_button.click()

    def show_publisher_docs(self):
        """Show the publisher docstring plus any captured publish error text."""
        if self.publisher:
            m = QtWidgets.QMessageBox()
            m.setWindowTitle('Help')
            import sys
            if (sys.version_info.major > 2):
                stringify = str
            else:
                stringify = unicode
            import re
            # Strip non-ASCII characters from the stored traceback tooltip.
            error = str(''.join([i for i in stringify(self.publisher_state_label.toolTip()) if (ord(i) < 128)]))
            publish_error = ''
            for exception in ['PublishError:', 'RuntimeError:']:
                try:
                    clean_error = error.split(exception)[1]
                    formatted_error = clean_error.replace('<br>', '\n')
                    # Remove any HTML tags from the error body.
                    publish_error = re.sub('<[^>]*>', '', formatted_error)
                except IndexError:
                    continue
            message = self.publisher.__doc__
            message += '\n\n'
            message += publish_error
            m.setText(message)
            m.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
            m.exec_()

    def _set_label_icon(self, label, icon):
        """Render ``icon`` into ``label`` as a 16x16 pixmap."""
        pixmap = icon.pixmap(16, 16)
        label.setPixmap(pixmap)
        label.setMask(pixmap.mask())

    @property
    def state(self):
        """Last check result (True means the publisher passed)."""
        return self._state

    @state.setter
    def state(self, state):
        # Keep the icon, tooltip and label color in sync with the new state.
        if state:
            self._set_label_icon(self.publisher_state_label, self.publisher_state_ok_icon)
            self.publisher_state_label.setToolTip(self.passing_text)
            self.publisher_name_label.setStyleSheet('color: ;')
        else:
            self._set_label_icon(self.publisher_state_label, self.publisher_state_not_ok_icon)
            self.publisher_state_label.setToolTip(self.not_passing_text)
            self.publisher_name_label.setStyleSheet('color: red;')
        self._state = state

    def run_publisher(self):
        """Execute the publisher, time it, and reflect the result in the UI."""
        if self.publisher:
            self.state = False
            self.performance_label.setText('x.x sec')
            self.progress_bar.setValue(0)
            import sys
            import traceback
            import time
            start = time.time()
            try:
                self.check_push_button.setText('Checking...')
                self.check_push_button.setEnabled(False)
                try:
                    qApp = QtWidgets.qApp
                except AttributeError:
                    qApp = QtWidgets.QApplication
                # Flush pending UI events so the button repaints before the check.
                qApp.sendPostedEvents()
                self.publisher(progress_controller=self.progress_bar_manager)
                end = time.time()
            except Exception as e:
                end = time.time()
                self.state = False
                # Keep only the last 25 traceback lines in the tooltip.
                self.publisher_state_label.setToolTip('\n'.join(traceback.format_exc().splitlines()[(- 25):]))
            else:
                self.state = True
                self.publisher_state_label.setToolTip('')
            self.duration = (end - start)
            self.performance_label.setText(('%0.1f sec' % self.duration))
            self.check_push_button.setText('Check')
            self.check_push_button.setEnabled(True)
            if (self.state is True):
                self.fix_push_button.setDisabled(True)
                self.fix_push_button.setStyleSheet('background-color: None')
            else:
                # Enable the Fix button only if a matching fix function exists.
                fix_def_name = ('%s%s' % (self.publisher.__name__, self.fix_identifier))
                from anima.dcc.mayaEnv import publish
                self.fix_push_button.setDisabled(True)
                self.fix_push_button.setStyleSheet('background-color: None')
                if (fix_def_name in publish.__dict__):
                    self.fix_push_button.setEnabled(True)
                    self.fix_push_button.setStyleSheet('background-color: green')
# Fix: the decorator line was truncated to a bare `(auto_attribs=True)`;
# reconstructed as `@attr.s(auto_attribs=True)` based on the attr.ib field
# below. `from_dict` takes `cls` and calls `cls(...)`, so `@classmethod` was
# also restored; `additional_keys` restored as a property per the generated
# openapi-python-client model shape — TODO confirm against upstream.
@attr.s(auto_attribs=True)
class InitiateUploadInfo():
    """Request payload for initiating a file upload (generated API model)."""
    file_name: str
    content_type: str
    # Extra JSON properties not covered by the declared fields above.
    additional_properties: Dict[(str, Any)] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[(str, Any)]:
        """Serialize to a plain dict; declared fields overwrite extras on clash."""
        file_name = self.file_name
        content_type = self.content_type
        field_dict: Dict[(str, Any)] = {}
        field_dict.update(self.additional_properties)
        field_dict.update({'file_name': file_name, 'content_type': content_type})
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[(str, Any)]) -> T:
        """Build an instance from a dict; leftover keys become extras."""
        d = src_dict.copy()
        file_name = d.pop('file_name')
        content_type = d.pop('content_type')
        initiate_upload_info = cls(file_name=file_name, content_type=content_type)
        initiate_upload_info.additional_properties = d
        return initiate_upload_info

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (undeclared) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return (key in self.additional_properties)
def setup_selinux_enforcing():
    """Temporarily force SELinux into enforcing mode; restore on teardown.

    Generator-style setup/teardown: everything before ``yield`` runs as setup
    (backing up the persistent config first), everything after restores the
    original runtime mode and config file.
    """
    original_status = shellexec('getenforce').stdout[0]
    # Back up the persistent config before editing it in place.
    shutil.copy('/etc/selinux/config', '/etc/selinux/config.bak')
    shellexec('setenforce 1')
    shellexec("sed -i '/^SELINUX=/ s/=.*/=enforcing/' /etc/selinux/config")
    (yield None)
    # Teardown: restore the previous runtime mode and the original config.
    shellexec(f'setenforce {original_status}')
    shutil.move('/etc/selinux/config.bak', '/etc/selinux/config')
class AutumnWindHandler(THBEventHandler):
    """Triggers the AutumnWind skill after a DropCardStage discards cards.

    If the skill owner discarded n > 0 cards, they may choose up to n living
    opponents who still hold cards/equips, and AutumnWindAction is applied.
    """
    interested = ['action_after']
    def handle(self, evt_type, act):
        """React after a DropCardStage resolves; returns ``act`` unchanged."""
        if ((evt_type == 'action_after') and isinstance(act, DropCardStage)):
            # Number of cards actually dropped caps the target count below.
            self.n = n = act.dropn
            if (n <= 0):
                return act
            tgt = act.target
            if (not tgt.has_skill(AutumnWind)):
                return act
            g = self.game
            # Skill is optional: ask the owner whether to activate it.
            if (not g.user_input([tgt], ChooseOptionInputlet(self, (False, True)))):
                return act
            # Eligible targets: other living players with something to lose.
            candidates = [p for p in g.players if ((p is not tgt) and (p.cards or p.showncards or p.equips) and (not p.dead))]
            pl = (candidates and user_choose_players(self, tgt, candidates))
            if (not pl):
                return act
            g.process_action(AutumnWindAction(tgt, pl))
        return act
    def choose_player_target(self, tl):
        """Limit the chosen target list to at most ``self.n`` players."""
        if (not tl):
            return (tl, False)
        return (tl[:self.n], True)
class SystemInfo(BaseTest):
    """Integration tests for the 'system_info' module loaded into a session."""
    def setUp(self):
        """Load all modules into a fresh volatile session and grab run_argv."""
        session = SessionURL(self.url, self.password, volatile=True)
        modules.load_modules(session)
        self.run_argv = modules.loaded['system_info'].run_argv
    # NOTE(review): this bare call looks like a stripped decorator (the test
    # below takes a `log_captured` argument) — confirm against the original.
    _capture()
    def test_commands(self, log_captured):
        """-extended returns every vector; -info script resolves this script's path."""
        vectors_names = [v.name for v in modules.loaded['system_info'].vectors]
        self.assertEqual(set(self.run_argv(['-extended']).keys()), set(vectors_names))
        self.assertEqual(os.path.split(self.run_argv(['-info', 'script']))[1], os.path.split(self.path)[1])
        # Unknown info keys are rejected with an argparse error.
        self.assertRaises(ArgparseError, self.run_argv, ['-info', 'BOGUS'])
class PrivateComputationBaseStageFlow(StageFlow):
    """Base stage flow: each member carries per-stage status/retry metadata.

    Fix applied: ``cls_name_to_cls`` and ``get_cls_name`` take ``cls`` and use
    ``cls.__subclasses__()`` / ``cls.__name__`` — restored the missing
    ``@classmethod`` decorators so they work when called on instances too.
    """

    def __init__(self, data: PrivateComputationStageFlowData) -> None:
        """Copy stage metadata from the flow-data record onto this member."""
        super().__init__()
        self.initialized_status: PrivateComputationInstanceStatus = data.initialized_status
        self.started_status: PrivateComputationInstanceStatus = data.started_status
        self.failed_status: PrivateComputationInstanceStatus = data.failed_status
        self.completed_status: PrivateComputationInstanceStatus = data.completed_status
        self.is_joint_stage: bool = data.is_joint_stage
        self.timeout: int = data.timeout
        self.is_retryable: bool = data.is_retryable

    @classmethod
    def cls_name_to_cls(cls: Type[C], name: str) -> Type[C]:
        """Resolve a direct subclass by its class name.

        Raises:
            PCStageFlowNotFoundException: if no imported subclass matches.
        """
        for subclass in cls.__subclasses__():
            if (name == subclass.__name__):
                return subclass
        raise PCStageFlowNotFoundException(f'Could not find subclass with name={name!r}. Make sure it has been imported in stage_flows/__init__.py')

    @classmethod
    def get_cls_name(cls: Type[C]) -> str:
        """Return this flow class's name (inverse of cls_name_to_cls)."""
        return cls.__name__

    def get_stage_service(self, args: 'PrivateComputationStageServiceArgs') -> 'PrivateComputationStageService':
        """Hook for subclasses to provide a stage service; abstract by default."""
        raise NotImplementedError(f'get_stage_service not implemented for {self.__class__}')

    def get_default_stage_service(self, args: PrivateComputationStageServiceArgs) -> PrivateComputationStageService:
        """Resolve the stage service via StageSelector.

        Raises:
            NotImplementedError: if no service is configured for this stage.
        """
        stage = StageSelector.get_stage_service(self, args)
        if (stage is None):
            raise NotImplementedError(f'No stage service configured for {self}')
        return stage
class JSONParser(BaseParser):
    """Parses JSON-serialized request bodies (media type application/json)."""
    media_type = 'application/json'
    renderer_class = renderers.JSONRenderer
    strict = api_settings.STRICT_JSON

    def parse(self, stream, media_type=None, parser_context=None):
        """Decode the incoming byte stream and return the parsed JSON data.

        Raises ParseError on malformed JSON (or, in strict mode, on
        non-standard constants such as NaN/Infinity).
        """
        context = parser_context or {}
        charset = context.get('encoding', settings.DEFAULT_CHARSET)
        try:
            reader = codecs.getreader(charset)(stream)
            if self.strict:
                # Strict mode rejects NaN/Infinity via the constant hook.
                return json.load(reader, parse_constant=json.strict_constant)
            return json.load(reader, parse_constant=None)
        except ValueError as exc:
            raise ParseError(('JSON parse error - %s' % str(exc)))
def get_lab_tests(filters):
    """Fetch Lab Test records matching ``filters``, newest submissions first."""
    conditions = get_conditions(filters)
    fields = ['name', 'template', 'company', 'patient', 'patient_name', 'practitioner', 'employee', 'status', 'invoiced', 'result_date', 'department']
    return frappe.get_all(doctype='Lab Test', fields=fields, filters=conditions, order_by='submitted_date desc')
def reverse(func):
    """Wrap a colormap factory so the colormap it builds is reversed.

    The wrapper calls ``func`` to construct the colormap, reverses it in
    place via ``reverse_colormap()``, and returns it. The wrapper's
    ``__name__`` and ``__doc__`` are prefixed so reversed maps are
    self-describing (falling back to the function name when undocumented).
    """
    def cmap(range, **traits):
        cm = func(range, **traits)
        cm.reverse_colormap()
        return cm
    cmap.__name__ = 'reversed_' + func.__name__
    original_doc = func.__doc__ if func.__doc__ is not None else func.__name__
    cmap.__doc__ = 'Reversed: ' + original_doc
    return cmap
class DifferentStrategySubClass(SomeAbstractStrategyClass):
    """Test fixture: a second concrete strategy with its own registry name."""
    name = 'different subclassed strategy'
    configuration_model = SomeStrategyConfiguration
    def some_abstract_method(self):
        """Satisfy the abstract interface; intentionally does nothing."""
        pass
    def __init__(self, configuration: SomeStrategyConfiguration):
        # Only the single config key is retained from the configuration model.
        self.some_config = configuration.some_key
    def process(self, data: Any, identity_data: Dict[(str, Any)]=None) -> Union[(List[Dict[(str, Any)]], Dict[(str, Any)])]:
        """No-op processing stub (returns None); present to satisfy the interface."""
        pass
class flow_stats_request(stats_request):
version = 4
type = 18
stats_type = 1
def __init__(self, xid=None, flags=None, table_id=None, out_port=None, out_group=None, cookie=None, cookie_mask=None, match=None):
if (xid != None):
self.xid = xid
else:
self.xid = None
if (flags != None):
self.flags = flags
else:
self.flags = 0
if (table_id != None):
self.table_id = table_id
else:
self.table_id = 0
if (out_port != None):
self.out_port = out_port
else:
self.out_port = 0
if (out_group != None):
self.out_group = out_group
else:
self.out_group = 0
if (cookie != None):
self.cookie = cookie
else:
self.cookie = 0
if (cookie_mask != None):
self.cookie_mask = cookie_mask
else:
self.cookie_mask = 0
if (match != None):
self.match = match
else:
self.match = ofp.match()
return
def pack(self):
packed = []
packed.append(struct.pack('!B', self.version))
packed.append(struct.pack('!B', self.type))
packed.append(struct.pack('!H', 0))
packed.append(struct.pack('!L', self.xid))
packed.append(struct.pack('!H', self.stats_type))
packed.append(struct.pack('!H', self.flags))
packed.append(('\x00' * 4))
packed.append(struct.pack('!B', self.table_id))
packed.append(('\x00' * 3))
packed.append(util.pack_port_no(self.out_port))
packed.append(struct.pack('!L', self.out_group))
packed.append(('\x00' * 4))
packed.append(struct.pack('!Q', self.cookie))
packed.append(struct.pack('!Q', self.cookie_mask))
packed.append(self.match.pack())
length = sum([len(x) for x in packed])
packed[2] = struct.pack('!H', length)
return ''.join(packed)
def unpack(reader):
obj = flow_stats_request()
_version = reader.read('!B')[0]
assert (_version == 4)
_type = reader.read('!B')[0]
assert (_type == 18)
_length = reader.read('!H')[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.xid = reader.read('!L')[0]
_stats_type = reader.read('!H')[0]
assert (_stats_type == 1)
obj.flags = reader.read('!H')[0]
reader.skip(4)
obj.table_id = reader.read('!B')[0]
reader.skip(3)
obj.out_port = util.unpack_port_no(reader)
obj.out_group = reader.read('!L')[0]
reader.skip(4)
obj.cookie = reader.read('!Q')[0]
obj.cookie_mask = reader.read('!Q')[0]
obj.match = ofp.match.unpack(reader)
return obj
def __eq__(self, other):
    """Field-by-field equality; False when concrete types differ."""
    if type(self) != type(other):
        return False
    compared_fields = ('xid', 'flags', 'table_id', 'out_port', 'out_group', 'cookie', 'cookie_mask', 'match')
    return all(getattr(self, field) == getattr(other, field) for field in compared_fields)
def pretty_print(self, q):
    """Write a human-readable rendering of this message to *q*.

    *q* is a pretty-printer exposing text/breakable/group/indent/pp.
    Numeric fields are shown in hex; flags are expanded to symbolic
    names where known.
    """
    q.text('flow_stats_request {')
    with q.group():
        with q.indent(2):
            q.breakable()
            q.text('xid = ')
            if (self.xid != None):
                q.text(('%#x' % self.xid))
            else:
                q.text('None')
            q.text(',')
            q.breakable()
            q.text('flags = ')
            # Known flag bits for this message type.
            value_name_map = {1: 'OFPSF_REQ_MORE'}
            q.text(util.pretty_flags(self.flags, value_name_map.values()))
            q.text(',')
            q.breakable()
            q.text('table_id = ')
            q.text(('%#x' % self.table_id))
            q.text(',')
            q.breakable()
            q.text('out_port = ')
            q.text(util.pretty_port(self.out_port))
            q.text(',')
            q.breakable()
            q.text('out_group = ')
            q.text(('%#x' % self.out_group))
            q.text(',')
            q.breakable()
            q.text('cookie = ')
            q.text(('%#x' % self.cookie))
            q.text(',')
            q.breakable()
            q.text('cookie_mask = ')
            q.text(('%#x' % self.cookie_mask))
            q.text(',')
            q.breakable()
            q.text('match = ')
            # Delegate to the match object's own pretty-printer.
            q.pp(self.match)
            q.breakable()
    q.text('}')
class MeanMetricWithRecall(torchmetrics.Metric):
    """Accumulates per-sample error tensors and reports their mean.

    `recall` additionally reports, per threshold, the percentage of
    accumulated errors strictly below that threshold.
    """

    full_state_update = True

    def __init__(self):
        super().__init__()
        # One tensor appended per update; concatenated across processes.
        self.add_state('value', default=[], dist_reduce_fx='cat')

    def compute(self):
        """Mean of all accumulated error tensors along dim 0."""
        return dim_zero_cat(self.value).mean(0)

    def get_errors(self):
        """All accumulated errors concatenated into one tensor."""
        return dim_zero_cat(self.value)

    def recall(self, thresholds):
        """Percentage of errors below each threshold (same dtype/device)."""
        errors = self.get_errors()
        thresholds = errors.new_tensor(thresholds)
        below = (errors.unsqueeze(-1) < thresholds).float()
        return below.mean(0) * 100
class FunctionCall(Expression):
    """A function (or Lua-style method) call expression node.

    NOTE(review): `render` evaluates `self.signature.alternate_render`
    and `callback` does `self.signature.get_callback(...)`, which only
    works if `signature`/`callback` are properties; the `@property`
    decorators appear to have been lost and are restored here — confirm
    against the original file.
    """

    __slots__ = ('name', 'arguments', 'as_method')
    precedence = (Literal.precedence + 1)
    template = Template('$name($arguments)')
    delims = {'arguments': ', '}

    def __init__(self, name, arguments, as_method=False):
        self.name = name
        self.arguments = (arguments or [])
        # When True, render using `base:name(rest)` method-call syntax.
        self.as_method = as_method

    @property
    def callback(self):
        """Callback produced by this call's signature for its arguments."""
        return self.signature.get_callback(*self.arguments)

    @property
    def signature(self):
        """Registered function signature looked up by name."""
        return get_function(self.name)

    def _render(self):
        if self.as_method:
            # First argument is the receiver; remaining args go in parens.
            return '{base}:{name}({remaining})'.format(base=self.arguments[0].render(self.precedence), name=self.name, remaining=', '.join((arg.render(self.precedence) for arg in self.arguments[1:])))
        return super(FunctionCall, self)._render()

    def render(self, precedence=None):
        # Let the signature override rendering when it defines one.
        if self.signature:
            alternate_render = self.signature.alternate_render(self.arguments, precedence)
            if alternate_render:
                return alternate_render
        return super(FunctionCall, self).render()

    def __or__(self, other):
        # Merge two wildcard/match calls on the same subject into one.
        if (isinstance(other, FunctionCall) and (self.name in ('wildcard', 'match', 'matchLite')) and (other.name == self.name)):
            if (self.arguments[0] == other.arguments[0]):
                return FunctionCall(self.name, (self.arguments + other.arguments[1:]))
        return super(FunctionCall, self).__or__(other)
class TestLinkedInGetIdEmail():
    """Tests for the LinkedIn client's get_id_email against mocked endpoints.

    NOTE(review): decorators were mangled in extraction (bare `.asyncio`
    lines). Restored as `@pytest.mark.asyncio` plus `@respx.mock` (the
    respx routes below need an active mock transport) — confirm against
    the original file.
    """

    @pytest.mark.asyncio
    @respx.mock
    async def test_success(self, get_respx_call_args):
        """Happy path: returns id/email and sends the bearer token to both endpoints."""
        profile_request = respx.get(re.compile(f'^{PROFILE_ENDPOINT}')).mock(return_value=Response(200, json=profile_response))
        email_request = respx.get(re.compile(f'^{EMAIL_ENDPOINT}')).mock(return_value=Response(200, json=email_response))
        (user_id, user_email) = (await client.get_id_email('TOKEN'))
        (profile_url, profile_headers, profile_content) = (await get_respx_call_args(profile_request))
        (email_url, email_headers, email_content) = (await get_respx_call_args(email_request))
        assert (profile_headers['Authorization'] == 'Bearer TOKEN')
        assert (email_headers['Authorization'] == 'Bearer TOKEN')
        assert (user_id == '424242')
        assert (user_email == '')

    @pytest.mark.asyncio
    @respx.mock
    async def test_profile_error(self):
        """A 4xx from the profile endpoint surfaces as GetIdEmailError with the body."""
        respx.get(re.compile(f'^{PROFILE_ENDPOINT}')).mock(return_value=Response(400, json={'error': 'message'}))
        respx.get(re.compile(f'^{EMAIL_ENDPOINT}')).mock(return_value=Response(200, json=email_response))
        with pytest.raises(GetIdEmailError) as excinfo:
            (await client.get_id_email('TOKEN'))
        assert isinstance(excinfo.value.args[0], dict)
        assert (excinfo.value.args[0] == {'error': 'message'})

    @pytest.mark.asyncio
    @respx.mock
    async def test_email_error(self):
        """A 4xx from the email endpoint surfaces as GetIdEmailError with the body."""
        respx.get(re.compile(f'^{PROFILE_ENDPOINT}')).mock(return_value=Response(200, json=profile_response))
        respx.get(re.compile(f'^{EMAIL_ENDPOINT}')).mock(return_value=Response(400, json={'error': 'message'}))
        with pytest.raises(GetIdEmailError) as excinfo:
            (await client.get_id_email('TOKEN'))
        assert isinstance(excinfo.value.args[0], dict)
        assert (excinfo.value.args[0] == {'error': 'message'})
class TestYum(testslide.TestCase):
    """Tests for yum.Yum.check_stuck with pidutil calls mocked via testslide."""

    def setUp(self):
        super(TestYum, self).setUp()
        self.yum = yum.Yum()
        self.yum.yum = 'fakeyum'

    def _mock_pidfile_info(self, result=None, exception=None):
        """Stub pidutil.pidfile_info for YUM_PID_PATH, asserting one call."""
        stub = self.mock_callable(pidutil, 'pidfile_info').for_call(yum.YUM_PID_PATH)
        if exception is not None:
            stub.to_raise(exception).and_assert_called_once()
        else:
            stub.to_return_value(result).and_assert_called_once()

    def _mock_process(self, pid, proc):
        """Stub pidutil.process for *pid*, asserting one call."""
        self.mock_callable(pidutil, 'process').for_call(pid).to_return_value(proc).and_assert_called_once()

    def test_check_stuck_filenotfound(self):
        self._mock_pidfile_info(exception=IOError())
        self.assertTrue(self.yum.check_stuck())

    def test_check_stuck_valueerror(self):
        self._mock_pidfile_info(exception=ValueError())
        self.assertFalse(self.yum.check_stuck())

    def test_check_stuck_other_exception(self):
        self._mock_pidfile_info(exception=Exception())
        self.assertFalse(self.yum.check_stuck())

    def test_check_stuck_yumpid_not_old_enough(self):
        # Pidfile only one hour old: not considered stuck yet.
        self._mock_pidfile_info(result=(1234, (int(time.time()) - 3600)))
        self.assertTrue(self.yum.check_stuck())

    def test_check_stuck_yumpid_no_such_process(self):
        self._mock_pidfile_info(result=(12345, (int(time.time()) - (7 * 3600))))
        self._mock_process(12345, None)
        self.assertFalse(self.yum.check_stuck())

    def test_check_stuck_kill_yumpid(self):
        self._mock_pidfile_info(result=(12345, (int(time.time()) - (7 * 3600))))
        self._mock_process(12345, make_mock_process(pid=12345, open_files=[], name='fakeyum'))
        self.assertTrue(self.yum.check_stuck())

    def test_check_stuck_kill_yumpid_no_such_process(self):
        self._mock_pidfile_info(result=(12345, (int(time.time()) - (7 * 3600))))
        self._mock_process(12345, None)
        self.assertFalse(self.yum.check_stuck())

    def test_check_stuck_kill_yumpid_timeout(self):
        self._mock_pidfile_info(result=(12345, (int(time.time()) - (7 * 3600))))
        self._mock_process(12345, None)
        self.assertFalse(self.yum.check_stuck())
# NOTE(review): the decorator line was mangled in extraction (bare
# `.parametrize`); restored as `@pytest.mark.parametrize`.
@pytest.mark.parametrize('test_file', ['log3_Caller.json', 'log3_emptyMem.json', 'log3_logMemsizeZero.json', 'log3_MaxTopic.json', 'log3_nonEmptyMem.json', 'log3_nonEmptyMem_logMemSize1.json', 'log3_nonEmptyMem_logMemSize1_logMemStart31.json', 'log3_PC.json', 'log3_logMemsizeTooHigh.json', 'log3_logMemStartTooHigh.json'])
def test_log3(test_file: str) -> None:
    """Run each LOG3 opcode fixture through the logging-ops VM test runner."""
    run_logging_ops_vm_test(test_file)
class VRRPV2StateMaster(VRRPState):
    """Master-state event handlers for a VRRPv2 router."""

    def master_down(self, ev):
        # A master only logs master-down timer events.
        router = self.vrrp_router
        router.logger.debug('%s master_down %s %s', self.__class__.__name__, ev.__class__.__name__, router.state)

    def _adver(self):
        """Send an advertisement and re-arm the advertisement timer."""
        router = self.vrrp_router
        router.send_advertisement()
        router.adver_timer.start(router.config.advertisement_interval)

    def adver(self, ev):
        self.vrrp_router.logger.debug('%s adver', self.__class__.__name__)
        self._adver()

    def preempt_delay(self, ev):
        # Unexpected in master state; log loudly.
        self.vrrp_router.logger.warning('%s preempt_delay', self.__class__.__name__)

    def vrrp_received(self, ev):
        """Handle an advertisement from a peer while we are master."""
        router = self.vrrp_router
        router.logger.debug('%s vrrp_received', self.__class__.__name__)
        (ip, advert) = vrrp.vrrp.get_payload(ev.packet)
        config = router.config
        if (advert.priority == 0):
            # Peer is resigning; reassert mastership immediately.
            router.send_advertisement()
            router.adver_timer.start(config.advertisement_interval)
            return
        higher_priority = (config.priority < advert.priority)
        loses_tie_break = ((config.priority == advert.priority) and vrrp.ip_address_lt(router.interface.primary_ip_address, ip.src))
        if (higher_priority or loses_tie_break):
            # Yield mastership to the better candidate and fall back to backup.
            router.adver_timer.cancel()
            router.state_change(vrrp_event.VRRP_STATE_BACKUP)
            router.master_down_timer.start(router.params.master_down_interval)

    def vrrp_shutdown_request(self, ev):
        """Leave master state: send a final advertisement, go to INITIALIZE."""
        router = self.vrrp_router
        router.logger.debug('%s vrrp_shutdown_request', self.__class__.__name__)
        router.adver_timer.cancel()
        router.send_advertisement(True)
        router.state_change(vrrp_event.VRRP_STATE_INITIALIZE)

    def vrrp_config_change_request(self, ev):
        router = self.vrrp_router
        router.logger.warning('%s vrrp_config_change_request', self.__class__.__name__)
        if ((ev.priority is not None) or (ev.advertisement_interval is not None)):
            # Timing-related parameters changed: restart the advertisement cycle.
            router.adver_timer.cancel()
            self._adver()
# NOTE(review): the decorator was mangled in extraction (bare
# `('rocm.softmax.gen_function')`); restored as `@registry.reg(...)`
# following the backend registration pattern — confirm the registry name
# against the original file.
@registry.reg('rocm.softmax.gen_function')
def softmax_gen_function(func_attrs: Dict[str, Any]) -> str:
    """Generate ROCm softmax kernel source for *func_attrs*.

    Only reduction over the innermost (last) dimension is supported, and
    that dimension must be statically known (IntImm).
    """
    dim = func_attrs['dim']
    shapes = func_attrs['inputs'][0]._attrs['shape']
    rank = len(shapes)
    assert (dim == (rank - 1)), f'rocm softmax only supports dim == rank - 1, dim={dim}, rank={rank}'
    assert isinstance(shapes[dim], IntImm), 'softmax requires reduction dim to be static'
    return norm_common.gen_function(func_attrs, EXEC_TEMPLATE, EXTRA_HEADERS, get_func_signature)
# NOTE(review): `_toolkit(...)` decorator lines were mangled in extraction;
# restored as `requires_toolkit(...)` throughout — confirm against the
# original file.
@requires_toolkit([ToolkitName.qt, ToolkitName.wx])
class TestRangeEditor(BaseTestMixin, unittest.TestCase, UnittestTools):
    """UI tests for RangeEditor across slider, text, spinner and enum modes."""

    def setUp(self):
        BaseTestMixin.setUp(self)

    def tearDown(self):
        BaseTestMixin.tearDown(self)

    def check_range_enum_editor_format_func(self, style):
        """Enum-mode editor exposes names/mapping built from its range."""
        obj = RangeModel()
        view = View(UItem('value', editor=RangeEditor(low=1, high=3, format_func=(lambda v: '{:02d}'.format(v)), mode='enum'), style=style))
        tester = UITester()
        with tester.create_ui(obj, dict(view=view)) as ui:
            editor = ui.get_editors('value')[0]
            self.assertEqual(editor.names[:3], ['1', '2', '3'])
            self.assertEqual(editor.mapping, {'1': 1, '2': 2, '3': 3})
            self.assertEqual(editor.inverse_mapping, {1: '1', 2: '2', 3: '3'})

    def test_simple_editor_format_func(self):
        self.check_range_enum_editor_format_func('simple')

    def test_custom_editor_format_func(self):
        self.check_range_enum_editor_format_func('custom')

    def check_slider_set_with_text_valid(self, mode):
        """Typing into the slider's textbox updates the model value."""
        model = RangeModel()
        view = View(Item('value', editor=RangeEditor(low=1, high=12, mode=mode)))
        tester = UITester()
        with tester.create_ui(model, dict(view=view)) as ui:
            self.assertEqual(model.value, 1)
            number_field = tester.find_by_name(ui, 'value')
            text = number_field.locate(Textbox())
            text.perform(KeyClick('0'))
            text.perform(KeyClick('Enter'))
            displayed = text.inspect(DisplayedText())
            self.assertEqual(model.value, 10)
            self.assertEqual(displayed, str(model.value))

    def test_simple_slider_editor_set_with_text_valid(self):
        return self.check_slider_set_with_text_valid(mode='slider')

    def test_large_range_slider_editor_set_with_text_valid(self):
        return self.check_slider_set_with_text_valid(mode='xslider')

    def test_log_range_slider_editor_set_with_text_valid(self):
        return self.check_slider_set_with_text_valid(mode='logslider')

    def test_range_text_editor_set_with_text_valid(self):
        """Text-mode editor accepts typed values within range."""
        model = RangeModel()
        view = View(Item('value', editor=RangeEditor(low=1, high=12, mode='text')))
        tester = UITester()
        with tester.create_ui(model, dict(view=view)) as ui:
            self.assertEqual(model.value, 1)
            number_field_text = tester.find_by_name(ui, 'value')
            # On wx/Windows the caret starts at the front of the field.
            if (is_windows and is_wx()):
                number_field_text.perform(KeyClick('End'))
            number_field_text.perform(KeyClick('0'))
            number_field_text.perform(KeyClick('Enter'))
            displayed = number_field_text.inspect(DisplayedText())
            self.assertEqual(model.value, 10)
            self.assertEqual(displayed, str(model.value))

    def test_range_text_editor_set_with_text_valid_and_none_bound(self):
        """Same as above but via a Range trait with a None bound."""
        model = ModelWithRangeTrait()
        tester = UITester()
        with tester.create_ui(model) as ui:
            self.assertEqual(model.value, 1)
            number_field_text = tester.find_by_name(ui, 'value')
            if (is_windows and is_wx()):
                number_field_text.perform(KeyClick('End'))
            number_field_text.perform(KeyClick('0'))
            number_field_text.perform(KeyClick('Enter'))
            displayed = number_field_text.inspect(DisplayedText())
            self.assertEqual(model.value, 10)
            self.assertEqual(displayed, str(model.value))

    @requires_toolkit([ToolkitName.qt])
    def test_simple_spin_editor_set_with_text_valid(self):
        """Spinner-mode editor accepts typed digits immediately."""
        model = RangeModel()
        view = View(Item('value', editor=RangeEditor(low=1, high=12, mode='spinner')))
        LOCAL_REGISTRY = TargetRegistry()
        _register_simple_spin(LOCAL_REGISTRY)
        tester = UITester(registries=[LOCAL_REGISTRY])
        with tester.create_ui(model, dict(view=view)) as ui:
            self.assertEqual(model.value, 1)
            number_field = tester.find_by_name(ui, 'value')
            number_field.perform(KeyClick('Right'))
            number_field.perform(KeyClick('0'))
            displayed = number_field.inspect(DisplayedText())
            self.assertEqual(model.value, 10)
            self.assertEqual(displayed, str(model.value))

    @requires_toolkit([ToolkitName.qt])
    def test_simple_spin_editor_auto_set_false(self):
        """With auto_set=False the model updates only on Enter."""
        model = RangeModel()
        view = View(Item('value', editor=RangeEditor(low=1, high=12, mode='spinner', auto_set=False)))
        LOCAL_REGISTRY = TargetRegistry()
        _register_simple_spin(LOCAL_REGISTRY)
        tester = UITester(registries=[LOCAL_REGISTRY])
        with tester.create_ui(model, dict(view=view)) as ui:
            self.assertEqual(model.value, 1)
            number_field = tester.find_by_name(ui, 'value')
            number_field.perform(KeyClick('Right'))
            with self.assertTraitDoesNotChange(model, 'value'):
                number_field.perform(KeyClick('0'))
            displayed = number_field.inspect(DisplayedText())
            self.assertEqual(displayed, '10')
            with self.assertTraitChanges(model, 'value'):
                number_field.perform(KeyClick('Enter'))
            self.assertEqual(model.value, 10)

    def check_slider_set_with_text_after_empty(self, mode):
        """Clearing the textbox and retyping still commits the new value."""
        model = RangeModel()
        view = View(Item('value', editor=RangeEditor(low=1, high=12, mode=mode)))
        tester = UITester()
        with tester.create_ui(model, dict(view=view)) as ui:
            number_field = tester.find_by_name(ui, 'value')
            text = number_field.locate(Textbox())
            for _ in range(5):
                text.perform(KeyClick('Backspace'))
            text.perform(KeySequence('11'))
            text.perform(KeyClick('Enter'))
            displayed = text.inspect(DisplayedText())
            self.assertEqual(model.value, 11)
            self.assertEqual(displayed, str(model.value))

    def test_simple_slider_editor_set_with_text_after_empty(self):
        return self.check_slider_set_with_text_after_empty(mode='slider')

    def test_large_range_slider_editor_set_with_text_after_empty(self):
        return self.check_slider_set_with_text_after_empty(mode='xslider')

    def test_log_range_slider_editor_set_with_text_after_empty(self):
        return self.check_slider_set_with_text_after_empty(mode='logslider')

    @requires_toolkit([ToolkitName.qt])
    def test_range_text_editor_set_with_text_after_empty(self):
        model = RangeModel()
        view = View(Item('value', editor=RangeEditor(low=1, high=12, mode='text')))
        tester = UITester()
        with tester.create_ui(model, dict(view=view)) as ui:
            number_field_text = tester.find_by_name(ui, 'value')
            for _ in range(5):
                number_field_text.perform(KeyClick('Backspace'))
            number_field_text.perform(KeySequence('11'))
            number_field_text.perform(KeyClick('Enter'))
            displayed = number_field_text.inspect(DisplayedText())
            self.assertEqual(model.value, 11)
            self.assertEqual(displayed, str(model.value))

    @requires_toolkit([ToolkitName.qt])
    def test_simple_spin_editor_set_with_text_after_empty(self):
        model = RangeModel()
        view = View(Item('value', editor=RangeEditor(low=1, high=12, mode='spinner')))
        LOCAL_REGISTRY = TargetRegistry()
        _register_simple_spin(LOCAL_REGISTRY)
        tester = UITester(registries=[LOCAL_REGISTRY])
        with tester.create_ui(model, dict(view=view)) as ui:
            number_field_text = tester.find_by_name(ui, 'value')
            number_field_text.perform(KeyClick('Right'))
            for _ in range(5):
                number_field_text.perform(KeyClick('Backspace'))
            number_field_text.perform(KeySequence('11'))
            number_field_text.perform(KeyClick('Enter'))
            displayed = number_field_text.inspect(DisplayedText())
            self.assertEqual(model.value, 11)
            self.assertEqual(displayed, str(model.value))

    def check_modify_slider(self, mode):
        """Arrow keys and Page Up on the slider drive the model value."""
        model = RangeModel(value=0)
        view = View(Item('value', editor=RangeEditor(low=0, high=10, mode=mode)))
        tester = UITester()
        with tester.create_ui(model, dict(view=view)) as ui:
            number_field = tester.find_by_name(ui, 'value')
            slider = number_field.locate(Slider())
            text = number_field.locate(Textbox())
            for _ in range(10):
                slider.perform(KeyClick('Right'))
            displayed = text.inspect(DisplayedText())
            self.assertEqual(model.value, 1)
            self.assertEqual(displayed, str(model.value))
            slider.perform(KeyClick('Page Up'))
            displayed = text.inspect(DisplayedText())
            self.assertEqual(model.value, 2)
            self.assertEqual(displayed, str(model.value))

    def test_modify_slider_simple_slider(self):
        return self.check_modify_slider('slider')

    def test_modify_slider_large_range_slider(self):
        return self.check_modify_slider('xslider')

    def test_modify_slider_log_range_slider(self):
        # NOTE(review): the `high=` literal was truncated in extraction;
        # 1000000000.0 is consistent with the assertions below (ten Right
        # presses move one decade: 0.1 -> 1.0) — confirm against the
        # original file.
        model = RangeModel()
        view = View(Item('float_value', editor=RangeEditor(low=0.1, high=1000000000.0, mode='logslider')))
        tester = UITester()
        with tester.create_ui(model, dict(view=view)) as ui:
            number_field = tester.find_by_name(ui, 'float_value')
            slider = number_field.locate(Slider())
            text = number_field.locate(Textbox())
            for _ in range(10):
                slider.perform(KeyClick('Right'))
            displayed = text.inspect(DisplayedText())
            self.assertEqual(model.float_value, 1.0)
            self.assertEqual(displayed, str(model.float_value))
            slider.perform(KeyClick('Page Up'))
            displayed = text.inspect(DisplayedText())
            self.assertEqual(model.float_value, 10.0)
            self.assertEqual(displayed, str(model.float_value))

    def test_format_func(self):
        """A custom format_func controls the displayed text."""
        def num_to_time(num):
            # Render a number of seconds as MM:SS with zero padding.
            minutes = int((num / 60))
            if (minutes < 10):
                minutes_str = ('0' + str(minutes))
            else:
                minutes_str = str(minutes)
            seconds = (num % 60)
            if (seconds < 10):
                seconds_str = ('0' + str(seconds))
            else:
                seconds_str = str(seconds)
            return ((minutes_str + ':') + seconds_str)
        model = RangeModel()
        view = View(Item('float_value', editor=RangeEditor(format_func=num_to_time)))
        tester = UITester()
        with tester.create_ui(model, dict(view=view)) as ui:
            float_value_field = tester.find_by_name(ui, 'float_value')
            float_value_text = float_value_field.locate(Textbox())
            self.assertEqual(float_value_text.inspect(DisplayedText()), '00:00.1')

    def test_editor_factory_format(self):
        """The deprecated `format` factory argument raises TraitError."""
        model = RangeModel()
        with self.assertRaises(TraitError):
            view = View(Item('float_value', editor=RangeEditor(format='%s ...')))

    def test_editor_factory_format_str(self):
        """`format_str` on the factory controls the displayed text."""
        model = RangeModel()
        view = View(Item('float_value', editor=RangeEditor(format_str='%s ...')))
        tester = UITester()
        with tester.create_ui(model, dict(view=view)) as ui:
            float_value_field = tester.find_by_name(ui, 'float_value')
            float_value_text = float_value_field.locate(Textbox())
            self.assertEqual(float_value_text.inspect(DisplayedText()), '0.1 ...')

    def test_editor_format_str(self):
        """Setting `format` on a live editor raises TraitError."""
        model = RangeModel()
        view = View(Item('float_value', editor=RangeEditor(format_str='%s ...')))
        tester = UITester()
        with tester.create_ui(model, dict(view=view)) as ui:
            float_value_field = tester.find_by_name(ui, 'float_value')
            with self.assertRaises(TraitError):
                float_value_field._target.format = '%s +++'

    @requires_toolkit([ToolkitName.qt])
    def test_set_text_out_of_range(self):
        """Typing a value above `high` must not push the model out of range."""
        model = RangeModel()
        view = View(Item('float_value', editor=RangeEditor(mode='text', low=0.0, high=1)))
        tester = UITester()
        with tester.create_ui(model, dict(view=view)) as ui:
            float_value_field = tester.find_by_name(ui, 'float_value')
            for _ in range(3):
                float_value_field.perform(KeyClick('Backspace'))
            float_value_field.perform(KeySequence('2.0'))
            float_value_field.perform(KeyClick('Enter'))
            self.assertTrue((0.0 <= model.float_value <= 1))

    @requires_toolkit([ToolkitName.qt])
    def test_modify_out_of_range(self):
        """Typing an excluded-bound value raises a modal error dialog."""
        obj = RangeExcludeLow()
        tester = UITester(auto_process_events=False)
        with tester.create_ui(obj) as ui:
            number_field = tester.find_by_name(ui, 'x')
            text = number_field.locate(Textbox())
            def set_out_of_range():
                text.perform(KeyClick('Backspace'))
                text.perform(KeyClick('0'))
                text.perform(KeyClick('Enter'))
            mdtester = ModalDialogTester(set_out_of_range)
            mdtester.open_and_run((lambda x: x.close(accept=True)))

    @requires_toolkit([ToolkitName.qt])
    def test_modify_out_of_range_with_slider(self):
        """Sliding onto an excluded bound raises a modal error dialog."""
        obj = RangeExcludeLow()
        tester = UITester(auto_process_events=False)
        with tester.create_ui(obj) as ui:
            number_field = tester.find_by_name(ui, 'x')
            slider = number_field.locate(Slider())
            def move_slider_out_of_range():
                slider.perform(KeyClick('Page Down'))
            mdtester = ModalDialogTester(move_slider_out_of_range)
            mdtester.open_and_run((lambda x: x.click_button(OK)))
def extractAsherahBlue(item):
    """Build a release message for an Asherah Blue feed item.

    Returns None for preview posts or items without chapter/volume info,
    a release message for recognized tags, and False otherwise.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    # Nothing to announce without a chapter or volume, or for previews.
    if ((not (chp or vol)) or ('preview' in title.lower())):
        return None
    if ('Juvenile Medical God' in item['tags']):
        return buildReleaseMessageWithType(item, 'Shaonian Yixian', vol, chp, frag=frag, postfix=postfix)
    return False
class TestThriftFunctionCall(ThriftArgsTestCase, TestCase):
    """Round-trip tests for ThriftFunctionCall framing at frame-size boundaries."""

    def setUp(self) -> None:
        self.method_name = 'WorkflowService::StartWorkflowExecution'
        self.service_name = 'cadence-frontend'
        # Payload sizes chosen to fit in one frame vs. span several frames.
        self.single_frame = ThriftFunctionCall.create(self.service_name, self.method_name, bytes(100))
        self.multiple_frames = ThriftFunctionCall.create(self.service_name, self.method_name, bytes(100000))
        # Space left in a frame carrying an empty payload: a payload of
        # exactly this size fills the frame; one extra byte spills over.
        balance = ThriftFunctionCall.create(self.service_name, self.method_name, bytes()).build_frames(1)[0].space_available()
        self.single_frame_last_argument_frame_boundary = ThriftFunctionCall.create(self.service_name, self.method_name, bytes(balance))
        self.multiple_frames_last_argument_one_byte_over = ThriftFunctionCall.create(self.service_name, self.method_name, bytes((balance + 1)))
        # Size the method name so the FIRST argument ends exactly at the
        # 65535-byte frame limit; presumably args[1]/args[2] are the other
        # call arguments whose sizes are added back — TODO confirm against
        # the frame layout.
        first_frame = ThriftFunctionCall.create(self.service_name, ' ', bytes(100)).build_frames(1)[0]
        balance = (((65535 - first_frame.get_size()) + first_frame.args[1].size()) + first_frame.args[2].size())
        self.multiple_frames_first_argument_frame_boundary = ThriftFunctionCall.create(self.service_name, (' ' * balance), bytes(100))

    def validate_equal(self, original_call: ThriftFunctionCall):
        """Rebuild a call from original_call's frames and compare every field."""
        call = ThriftFunctionCall()
        for f in original_call.build_frames(1):
            call.process_frame(f)
        self.assertEqual(1, call.message_id)
        self.assertEqual(self.service_name, call.service)
        self.assertEqual(original_call.method_name, call.method_name)
        self.assertEqual(original_call.thrift_payload, call.thrift_payload)
        self.assertEqual(original_call.tchannel_headers, call.tchannel_headers)
        self.assertEqual(original_call.application_headers, call.application_headers)
        self.assertEqual(original_call.ttl, call.ttl)
# NOTE(review): the decorator was mangled in extraction (bare
# `_handler(...)`); restored as `@bot.message_handler(...)` — confirm
# against the original file.
@bot.message_handler(func=(lambda message: (message.chat.type == 'private')), content_types=['text', 'photo', 'document'])
def echo_all(message):
    """Handle a private-chat message: filter spam, then forward or open a ticket.

    Loops so that when no ticket is open yet, one is created and the
    message is re-processed (second pass forwards it).
    """
    while True:
        mysql.start_bot(message.chat.id)
        user_id = message.chat.id
        # Fetch the user's state once per pass instead of three separate
        # queries (original read `banned`, `open_ticket` and an unused
        # `open_ticket_spam` via independent user_tables() calls).
        tables = mysql.user_tables(user_id)
        if (tables['banned'] == 1):
            return
        elif msg.spam_handler_warning(bot, user_id, message):
            return
        elif msg.bad_words_handler(bot, message):
            return
        elif msg.spam_handler_blocked(bot, user_id, message):
            return
        elif (tables['open_ticket'] == 0):
            # No open ticket yet: create one, then re-run the checks and
            # forward on the next pass.
            mysql.open_ticket(user_id)
            continue
        else:
            msg.fwd_handler(user_id, bot, message)
            return
def get_portstats(self, port_num):
    """Aggregate counters across all stats entries for *port_num*.

    Returns a 13-tuple: (rx_pkts, tx_pkts, rx_byts, tx_byts, rx_drp,
    tx_drp, rx_err, tx_err, rx_fr_err, rx_ovr_err, rx_crc_err,
    collisions, tx_err).

    NOTE(review): `tx_err` appears twice in the returned tuple (positions
    8 and 13); the shape is preserved for backward compatibility with
    existing callers.  The duplicate `tx_err = 0` initialization in the
    original has been removed.
    """
    entries = get_port_stats(self, port_num)
    rx_pkts = tx_pkts = rx_byts = tx_byts = 0
    rx_drp = tx_drp = rx_err = tx_err = 0
    rx_fr_err = rx_ovr_err = rx_crc_err = collisions = 0
    for obj in entries:
        rx_pkts += obj.rx_packets
        tx_pkts += obj.tx_packets
        rx_byts += obj.rx_bytes
        tx_byts += obj.tx_bytes
        rx_drp += obj.rx_dropped
        tx_drp += obj.tx_dropped
        rx_err += obj.rx_errors
        rx_fr_err += obj.rx_frame_err
        rx_ovr_err += obj.rx_over_err
        rx_crc_err += obj.rx_crc_err
        collisions += obj.collisions
        tx_err += obj.tx_errors
    return (rx_pkts, tx_pkts, rx_byts, tx_byts, rx_drp, tx_drp, rx_err, tx_err, rx_fr_err, rx_ovr_err, rx_crc_err, collisions, tx_err)
# NOTE(review): the decorator line was mangled in extraction (bare
# `.parametrize`); restored as `@pytest.mark.parametrize`.
@pytest.mark.parametrize('m,script_hex', [(2, '006b6376a914e91d89f4b52fe2a04d1d225e14dbe868d824a92e88ad6c8b6b686376ad879479f3b80c938045a68c1da1c02715f88ad6c8b6b686c52a2'), (3, '006b6376a914e91d89f4b52fe2a04d1d225e14dbe868d824a92e88ad6c8b6b686376ad879479f3b80c938045a68c1da1c02715f88ad6c8b6b686376a914fa95306e6d18d4508f555bab2c22d124f4e009f588ad6c8b6b686c53a2')])
def test_accumulator_multisig_scriptpubkey_ofM(m, script_hex):
    """An M-of-M accumulator multisig output serializes to the expected script."""
    public_keys = []
    for i in range(m):
        public_keys.append(PrivateKey.from_hex(private_keys_hex[i]).public_key)
    output = AccumulatorMultiSigOutput(public_keys, m)
    assert (output.to_script_bytes().hex() == script_hex)
def test_ge_batchrequest_pandas_config():
    """Run a Great Expectations task configured with a pandas batch request."""
    batch_request_config = BatchRequestConfig(data_connector_query={'batch_filter_parameters': {'year': '2019', 'month': '01'}, 'limit': 10})
    task_object = GreatExpectationsTask(name='test2', datasource_name='data', inputs=kwtypes(data=str), expectation_suite_name='test.demo', data_connector_name='my_data_connector', task_config=batch_request_config)
    task_object(data='my_assets')
class Decoder(nn.Module):
    """VAE-style decoder: conv-in -> mid block -> up blocks -> norm -> conv-out."""

    def __init__(self, batch_size, height, width, in_channels=3, out_channels=3, up_block_types=('UpDecoderBlock2D',), block_out_channels=(64,), layers_per_block=2, act_fn='silu'):
        super().__init__()
        self.layers_per_block = layers_per_block
        # The stem maps the input channels onto the widest feature width.
        self.conv_in = nn.Conv2dBias(in_channels, block_out_channels[(- 1)], kernel_size=3, stride=1, padding=1)
        self.mid_block = UNetMidBlock2D(batch_size, height, width, in_channels=block_out_channels[(- 1)], resnet_eps=1e-06, resnet_act_fn=act_fn, output_scale_factor=1, resnet_time_scale_shift='default', attn_num_head_channels=None, resnet_groups=32, temb_channels=None)
        # Up blocks walk the channel widths in reverse order.
        self.up_blocks = nn.ModuleList([])
        channels = list(reversed(block_out_channels))
        current_channels = channels[0]
        for (idx, block_type) in enumerate(up_block_types):
            previous_channels = current_channels
            current_channels = channels[idx]
            is_last = (idx == (len(block_out_channels) - 1))
            block = get_up_block(block_type, num_layers=(self.layers_per_block + 1), in_channels=previous_channels, out_channels=current_channels, prev_output_channel=None, temb_channels=None, add_upsample=(not is_last), resnet_eps=1e-06, resnet_act_fn=act_fn, attn_num_head_channels=None)
            self.up_blocks.append(block)
        num_groups_out = 32
        self.conv_norm_out = nn.GroupNorm(num_channels=block_out_channels[0], num_groups=num_groups_out, eps=1e-06, use_swish=True)
        self.conv_out = nn.Conv2dBias(block_out_channels[0], out_channels, kernel_size=3, padding=1, stride=1)

    def forward(self, z) -> Tensor:
        """Decode latent *z* into the output image tensor."""
        sample = self.conv_in(z)
        sample = self.mid_block(sample)
        for up_block in self.up_blocks:
            sample = up_block(sample)
        sample = self.conv_norm_out(sample)
        return self.conv_out(sample)
def stable_diffusion_inference(p):
    """Run the pipeline p.iters times, saving every image under ./output.

    Filenames are built from a sanitized prompt (filesystem-unsafe
    characters removed, truncated to 170 UTF-8 bytes) plus the run
    parameters and a 1-based sample index.
    """
    sanitized = re.sub('[\\\\/:*?\\"<>|]', '', p.prompt).replace(' ', '_')
    prefix = sanitized.encode('utf-8')[:170].decode('utf-8', 'ignore')
    for batch in range(p.iters):
        result = p.pipeline(**remove_unused_args(p))
        for (offset, image) in enumerate(result.images):
            index = (((batch * p.samples) + offset) + 1)
            filename = f'{prefix}__steps_{p.steps}__scale_{p.scale:.2f}__seed_{p.seed}__n_{index}.png'
            image.save(os.path.join('output', filename))
    print('completed pipeline:', iso_date_time(), flush=True)
class OptionSeriesSankeyStatesSelect(Options):
    """Options for the `select` state of a sankey series.

    NOTE(review): the getter/setter pairs below had identical names with
    no decorators (later defs would shadow earlier ones), so the
    `@property` / `@<name>.setter` decorators were evidently stripped in
    extraction; restored here — confirm against the original file.
    """

    @property
    def animation(self) -> 'OptionSeriesSankeyStatesSelectAnimation':
        """Sub-options controlling the state-change animation."""
        return self._config_sub_data('animation', OptionSeriesSankeyStatesSelectAnimation)

    @property
    def borderColor(self):
        """Border color in the selected state. Default: '#000000'."""
        return self._config_get('#000000')

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Fill color in the selected state. Default: '#cccccc'."""
        return self._config_get('#cccccc')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether the select state is enabled. Default: True."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def halo(self) -> 'OptionSeriesSankeyStatesSelectHalo':
        """Sub-options for the halo shown around the selected point."""
        return self._config_sub_data('halo', OptionSeriesSankeyStatesSelectHalo)

    @property
    def lineWidth(self):
        """Line width in the selected state. Default: None."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Additional line width applied in the selected state. Default: 1."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionSeriesSankeyStatesSelectMarker':
        """Sub-options for the point marker in the selected state."""
        return self._config_sub_data('marker', OptionSeriesSankeyStatesSelectMarker)
class TestGetBlockStatusWithBlockStartForSingleToken(_TestBaseGetBlockStatus):
    """Exercises the single-token variant of the block-status function."""

    def get_block_status(self, *args) -> str:
        # Delegate to the function under test for the shared base cases.
        return get_block_status_with_blockstart_for_single_token(*args)

    def test_should_return_blockend_for_single_token(self):
        status = self.get_block_status(0, 1, 'LINESTART')
        assert (status == 'BLOCKSTART')
class TestTask(BasePyTestCase):
    """Verifies approve_testing_task wires up config, DB, buildsys and bugs.

    NOTE(review): the decorator stack was mangled in extraction (bare
    `('bodhi...')` lines); restored as `@patch(...)` — the bottom-most
    decorator maps to the first mock argument, matching the parameter
    order below. Confirm the patch callable name against the original
    file's imports.
    """

    @patch('bodhi.server.tasks.bugs')
    @patch('bodhi.server.tasks.buildsys')
    @patch('bodhi.server.tasks.initialize_db')
    @patch('bodhi.server.tasks.config')
    @patch('bodhi.server.tasks.approve_testing.main')
    def test_task(self, main_function, config_mock, init_db_mock, buildsys, bugs):
        approve_testing_task()
        config_mock.load_config.assert_called_with()
        init_db_mock.assert_called_with(config_mock)
        buildsys.setup_buildsystem.assert_called_with(config_mock)
        bugs.set_bugtracker.assert_called_with()
        main_function.assert_called_with()
class GraphicCollection(Graphic):
    """A Graphic holding a homogeneous collection of child graphics.

    Children are stored as `hex(id(...))` location keys into the global
    COLLECTION_GRAPHICS registry; `graphics` exposes them as a read-only
    array of weak proxies.
    """

    def __init__(self, name: str = None):
        super(GraphicCollection, self).__init__(name)
        self._graphics: List[str] = list()  # loc keys into COLLECTION_GRAPHICS
        self._graphics_changed: bool = True  # dirty flag for the proxy array
        self._graphics_array: np.ndarray[Graphic] = None

    # NOTE(review): `__getitem__` indexes `self.graphics`, which only works
    # if this is a property; the `@property` decorator appears to have been
    # lost in extraction and is restored here.
    @property
    def graphics(self) -> np.ndarray[Graphic]:
        """Read-only array of weak proxies to the child graphics."""
        if self._graphics_changed:
            proxies = [weakref.proxy(COLLECTION_GRAPHICS[loc]) for loc in self._graphics]
            self._graphics_array = np.array(proxies)
            self._graphics_array.flags['WRITEABLE'] = False
            self._graphics_changed = False
        return self._graphics_array

    # NOTE(review): the original annotated `reset_index: False` (an
    # annotation, not a default), making the argument required; changed to
    # a proper `bool = False` default — backward compatible for callers
    # that passed it explicitly.
    def add_graphic(self, graphic: Graphic, reset_index: bool = False):
        """Register *graphic* in the collection and add it to the scene."""
        if (not (type(graphic).__name__ == self.child_type)):
            raise TypeError(f'Can only add graphics of the same type to a collection, You can only add {self.child_type} to a {self.__class__.__name__}, you are trying to add a {graphic.__class__.__name__}.')
        loc = hex(id(graphic))
        COLLECTION_GRAPHICS[loc] = graphic
        self._graphics.append(loc)
        if reset_index:
            self._reset_index()
        elif (graphic.collection_index is None):
            graphic.collection_index = len(self)
        self.world_object.add(graphic.world_object)
        self._graphics_changed = True

    # NOTE(review): same annotation-vs-default fix as add_graphic.
    def remove_graphic(self, graphic: Graphic, reset_index: bool = True):
        """Remove *graphic* from the collection and the scene."""
        self._graphics.remove(graphic.loc)
        if reset_index:
            self._reset_index()
        self.world_object.remove(graphic.world_object)
        self._graphics_changed = True

    def __getitem__(self, key):
        return CollectionIndexer(parent=self, selection=self.graphics[key])

    def __del__(self):
        self.world_object.clear()
        for loc in self._graphics:
            del COLLECTION_GRAPHICS[loc]
        super().__del__()

    def _reset_index(self):
        # NOTE(review): the original enumerated self._graphics (loc strings)
        # and set `collection_index` on the strings themselves, which would
        # raise AttributeError; resolve each loc through the registry instead.
        for (new_index, loc) in enumerate(self._graphics):
            COLLECTION_GRAPHICS[loc].collection_index = new_index

    def __len__(self):
        return len(self._graphics)

    def __repr__(self):
        rval = super().__repr__()
        return f'''{rval}
Collection of <{len(self._graphics)}> Graphics'''
def _config_to_hf(cls, curated_config: LlamaConfig) -> Dict[(str, Any)]:
    """Convert a curated Llama config into a Hugging Face config dict.

    Module-specific overrides depend on whether *cls* is a decoder or a
    causal-LM module.
    """
    hf_keys = [key for (key, _) in HF_CONFIG_KEYS]
    base = config_to_hf(curated_config, hf_keys)
    specific = HF_SPECIFIC_CONFIG_DECODER if issubclass(cls, DecoderModule) else HF_SPECIFIC_CONFIG_CAUSAL_LM
    return specific.merge(base)
# NOTE(review): the decorator was mangled in extraction (bare
# `_rally_config`); restored as `@it.random_rally_config`, consistent
# with the stripping pattern seen elsewhere in this file — confirm
# against the original file.
@it.random_rally_config
def test_create_api_key_per_client(cfg):
    """Race in test mode with per-client API keys against a secured cluster."""
    port = 19200
    it.wait_until_port_is_free(port_number=port)
    dist = it.DISTRIBUTIONS[(- 1)]
    opts = "use_ssl:true,verify_certs:false,basic_auth_user:'rally',basic_auth_password:'rally-password',create_api_key_per_client:true"
    assert (it.race(cfg, f'--distribution-version={dist} --track="geonames" --test-mode --car=4gheap,trial-license,x-pack-security --target-hosts=127.0.0.1:{port} --client-options={opts}') == 0)
def schema_name(schema: DictSchema, parent_ns: str) -> Tuple[(str, str)]:
    """Resolve a named schema's (namespace, fullname) pair.

    A dotted ``name`` carries its own namespace; otherwise the schema's
    ``namespace`` field is used, falling back to *parent_ns*.

    Raises ``SchemaParseException`` when ``name`` is absent.
    """
    try:
        name = schema['name']
    except KeyError:
        raise SchemaParseException(f'"name" is a required field missing from the schema: {schema}')
    if '.' in name:
        # fully-qualified name: namespace is everything before the last dot
        return (name.rsplit('.', 1)[0], name)
    namespace = schema.get('namespace', parent_ns)
    if namespace:
        return (namespace, f'{namespace}.{name}')
    return ('', name)
_set_msg_type(ofproto.OFPT_GROUP_MOD)
# NOTE(review): the line above appears to be a class decorator whose '@' was
# lost in extraction (ryu uses `@_set_msg_type(...)` to tag message classes)
# -- confirm against upstream.
class OFPGroupMod(MsgBase):
    """OpenFlow GROUP_MOD message: add/modify/delete a group table entry."""

    def __init__(self, datapath, command=ofproto.OFPGC_ADD, type_=ofproto.OFPGT_ALL, group_id=0, buckets=None):
        # avoid a shared mutable default for buckets
        buckets = (buckets if buckets else [])
        super(OFPGroupMod, self).__init__(datapath)
        self.command = command          # OFPGC_* group command
        self.type = type_               # OFPGT_* group type
        self.group_id = group_id
        self.buckets = buckets          # list of bucket objects, serialized in order

    def _serialize_body(self):
        """Pack the fixed GROUP_MOD header, then append each bucket in place."""
        msg_pack_into(ofproto.OFP_GROUP_MOD_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.command, self.type, self.group_id)
        # buckets are variable-length; each serialize() advances by b.len
        offset = ofproto.OFP_GROUP_MOD_SIZE
        for b in self.buckets:
            b.serialize(self.buf, offset)
            offset += b.len
class OptionSeriesGaugeSonificationTracksActivewhen(Options):
    """Generated Highcharts option wrapper for
    ``series.gauge.sonification.tracks.activeWhen``.

    NOTE(review): each option appears twice (getter returning
    ``_config_get`` and setter calling ``_config``); as written the second
    ``def`` shadows the first. This matches generated code whose
    ``@property`` / ``@<name>.setter`` decorators were stripped in
    extraction -- confirm against the original generator output.
    """

    def crossingDown(self):
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
def search_by_term(q, obj_types, include):
    """Search every dm+d object class for *q*.

    ``obj_types`` (if non-empty) restricts which classes are searched;
    ``include`` flags ('invalid', 'unavailable', 'no_bnf_code') relax the
    default queryset filters. Returns a list of ``{'cls': ..., 'objs': [...]}``
    entries, one per class that produced matches.
    """
    results = []
    for model in (VTM, VMP, VMPP, AMP, AMPP):
        if obj_types and model.obj_type not in obj_types:
            continue
        queryset = model.objects
        # each filter is applied unless explicitly opted out via `include`
        if 'invalid' not in include:
            queryset = queryset.valid()
        if 'unavailable' not in include:
            queryset = queryset.available()
        if 'no_bnf_code' not in include:
            queryset = queryset.with_bnf_code()
        matches = list(queryset.search(q))
        if matches:
            results.append({'cls': model, 'objs': matches})
    return results
def extractYuzukiteaWordpressCom(item):
    """Release parser for 'yuzukitea.wordpress.com' feed items.

    Returns a release message for recognized tags, ``None`` for previews or
    untitled chapters, and ``False`` when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class ESP32S2ROM(ESP32ROM):
    """esptool ROM-loader support for the ESP32-S2.

    NOTE(review): several register-address constants below have lost their
    values (bare ``NAME =``) and ``MEMORY_MAP`` has empty tuple slots -- this
    looks like extraction damage (hex literals stripped). Restore the values
    from upstream esptool before using this file; as-is it will not parse.
    """
    CHIP_NAME = 'ESP32-S2'
    IMAGE_CHIP_ID = 2
    # flash-mapped instruction/data ROM windows (values stripped -- see note)
    IROM_MAP_START =
    IROM_MAP_END =
    DROM_MAP_START =
    DROM_MAP_END =
    CHIP_DETECT_MAGIC_VALUE = [1990]
    # SPI peripheral register base and offsets used for flash access
    SPI_REG_BASE =
    SPI_USR_OFFS = 24
    SPI_USR1_OFFS = 28
    SPI_USR2_OFFS = 32
    SPI_MOSI_DLEN_OFFS = 36
    SPI_MISO_DLEN_OFFS = 40
    SPI_W0_OFFS = 88
    MAC_EFUSE_REG =
    UART_CLKDIV_REG =
    SUPPORTS_ENCRYPTED_FLASH = True
    FLASH_ENCRYPTED_WRITE_ALIGN = 16
    # eFuse block addresses and per-key purpose fields
    EFUSE_BASE =
    EFUSE_RD_REG_BASE = (EFUSE_BASE + 48)
    EFUSE_BLOCK1_ADDR = (EFUSE_BASE + 68)
    EFUSE_BLOCK2_ADDR = (EFUSE_BASE + 92)
    EFUSE_PURPOSE_KEY0_REG = (EFUSE_BASE + 52)
    EFUSE_PURPOSE_KEY0_SHIFT = 24
    EFUSE_PURPOSE_KEY1_REG = (EFUSE_BASE + 52)
    EFUSE_PURPOSE_KEY1_SHIFT = 28
    EFUSE_PURPOSE_KEY2_REG = (EFUSE_BASE + 56)
    EFUSE_PURPOSE_KEY2_SHIFT = 0
    EFUSE_PURPOSE_KEY3_REG = (EFUSE_BASE + 56)
    EFUSE_PURPOSE_KEY3_SHIFT = 4
    EFUSE_PURPOSE_KEY4_REG = (EFUSE_BASE + 56)
    EFUSE_PURPOSE_KEY4_SHIFT = 8
    EFUSE_PURPOSE_KEY5_REG = (EFUSE_BASE + 56)
    EFUSE_PURPOSE_KEY5_SHIFT = 12
    EFUSE_DIS_DOWNLOAD_MANUAL_ENCRYPT_REG = EFUSE_RD_REG_BASE
    EFUSE_DIS_DOWNLOAD_MANUAL_ENCRYPT = (1 << 19)
    EFUSE_SPI_BOOT_CRYPT_CNT_REG = (EFUSE_BASE + 52)
    EFUSE_SPI_BOOT_CRYPT_CNT_MASK = (7 << 18)
    EFUSE_SECURE_BOOT_EN_REG = (EFUSE_BASE + 56)
    EFUSE_SECURE_BOOT_EN_MASK = (1 << 20)
    EFUSE_RD_REPEAT_DATA3_REG = (EFUSE_BASE + 60)
    EFUSE_RD_REPEAT_DATA3_REG_FLASH_TYPE_MASK = (1 << 9)
    # key-purpose values accepted for flash encryption
    PURPOSE_VAL_XTS_AES256_KEY_1 = 2
    PURPOSE_VAL_XTS_AES256_KEY_2 = 3
    PURPOSE_VAL_XTS_AES128_KEY = 4
    UARTDEV_BUF_NO =
    UARTDEV_BUF_NO_USB_OTG = 2
    USB_RAM_BLOCK = 2048
    GPIO_STRAP_REG =
    GPIO_STRAP_SPI_BOOT_MASK = 8
    RTC_CNTL_OPTION1_REG =
    RTC_CNTL_FORCE_DOWNLOAD_BOOT_MASK = 1
    # NOTE(review): per-region start/end addresses are missing from every entry
    MEMORY_MAP = [[0, 65536, 'PADDING'], [, , 'DROM'], [, , 'EXTRAM_DATA'], [, , 'RTC_DRAM'], [, , 'BYTE_ACCESSIBLE'], [, , 'MEM_INTERNAL'], [, , 'DRAM'], [, , 'IROM_MASK'], [, , 'IRAM'], [, , 'RTC_IRAM'], [, , 'IROM'], [, , 'RTC_DATA']]
    UF2_FAMILY_ID =

    def get_pkg_version(self):
        """Package version: eFuse BLOCK1 word 4, bits [3:0]."""
        num_word = 4
        return ((self.read_reg((self.EFUSE_BLOCK1_ADDR + (4 * num_word))) >> 0) & 15)

    def get_minor_chip_version(self):
        """Minor wafer revision, split across BLOCK1 words 3 (high bit) and 4 (low 3 bits)."""
        hi_num_word = 3
        hi = ((self.read_reg((self.EFUSE_BLOCK1_ADDR + (4 * hi_num_word))) >> 20) & 1)
        low_num_word = 4
        low = ((self.read_reg((self.EFUSE_BLOCK1_ADDR + (4 * low_num_word))) >> 4) & 7)
        return ((hi << 3) + low)

    def get_major_chip_version(self):
        """Major wafer revision: BLOCK1 word 3, bits [19:18]."""
        num_word = 3
        return ((self.read_reg((self.EFUSE_BLOCK1_ADDR + (4 * num_word))) >> 18) & 3)

    def get_flash_version(self):
        """Embedded-flash capability: BLOCK1 word 3, bits [24:21]."""
        num_word = 3
        return ((self.read_reg((self.EFUSE_BLOCK1_ADDR + (4 * num_word))) >> 21) & 15)

    def get_flash_cap(self):
        # alias kept for naming symmetry with get_psram_cap()
        return self.get_flash_version()

    def get_psram_version(self):
        """Embedded-PSRAM capability: BLOCK1 word 3, bits [31:28]."""
        num_word = 3
        return ((self.read_reg((self.EFUSE_BLOCK1_ADDR + (4 * num_word))) >> 28) & 15)

    def get_psram_cap(self):
        # alias kept for naming symmetry with get_flash_cap()
        return self.get_psram_version()

    def get_block2_version(self):
        """ADC/temp-sensor calibration scheme version: BLOCK2 word 4, bits [6:4]."""
        num_word = 4
        return ((self.read_reg((self.EFUSE_BLOCK2_ADDR + (4 * num_word))) >> 4) & 7)

    def get_chip_description(self):
        """Human-readable chip variant + revision, derived from flash/psram caps."""
        # flash cap selects base variant; psram cap * 100 selects R-variants
        chip_name = {0: 'ESP32-S2', 1: 'ESP32-S2FH2', 2: 'ESP32-S2FH4', 102: 'ESP32-S2FNR2', 100: 'ESP32-S2R2'}.get((self.get_flash_cap() + (self.get_psram_cap() * 100)), 'unknown ESP32-S2')
        major_rev = self.get_major_chip_version()
        minor_rev = self.get_minor_chip_version()
        return f'{chip_name} (revision v{major_rev}.{minor_rev})'

    def get_chip_features(self):
        """List of feature strings describing this chip instance."""
        features = ['WiFi']
        if self.secure_download_mode:
            features += ['Secure Download Mode Enabled']
        flash_version = {0: 'No Embedded Flash', 1: 'Embedded Flash 2MB', 2: 'Embedded Flash 4MB'}.get(self.get_flash_cap(), 'Unknown Embedded Flash')
        features += [flash_version]
        psram_version = {0: 'No Embedded PSRAM', 1: 'Embedded PSRAM 2MB', 2: 'Embedded PSRAM 4MB'}.get(self.get_psram_cap(), 'Unknown Embedded PSRAM')
        features += [psram_version]
        block2_version = {0: 'No calibration in BLK2 of efuse', 1: 'ADC and temperature sensor calibration in BLK2 of efuse V1', 2: 'ADC and temperature sensor calibration in BLK2 of efuse V2'}.get(self.get_block2_version(), 'Unknown Calibration in BLK2')
        features += [block2_version]
        return features

    def get_crystal_freq(self):
        # ESP32-S2 always uses a 40 MHz crystal
        return 40

    def override_vddsdio(self, new_voltage):
        raise NotImplementedInROMError('VDD_SDIO overrides are not supported for ESP32-S2')

    def read_mac(self, mac_type='BASE_MAC'):
        """Read the 6-byte base MAC from eFuse; returns None for other mac types."""
        if (mac_type != 'BASE_MAC'):
            return None
        mac0 = self.read_reg(self.MAC_EFUSE_REG)
        mac1 = self.read_reg((self.MAC_EFUSE_REG + 4))
        # pack big-endian and drop the top two bytes of the 8-byte pair
        bitstring = struct.pack('>II', mac1, mac0)[2:]
        return tuple(bitstring)

    def flash_type(self):
        """1 if the eFuse flash-type bit is set, else 0."""
        return (1 if (self.read_reg(self.EFUSE_RD_REPEAT_DATA3_REG) & self.EFUSE_RD_REPEAT_DATA3_REG_FLASH_TYPE_MASK) else 0)

    def get_flash_crypt_config(self):
        # not applicable on ESP32-S2 (key purposes are used instead)
        return None

    def get_secure_boot_enabled(self):
        """Non-zero when the secure-boot eFuse bit is burned."""
        return (self.read_reg(self.EFUSE_SECURE_BOOT_EN_REG) & self.EFUSE_SECURE_BOOT_EN_MASK)

    def get_key_block_purpose(self, key_block):
        """Return the 4-bit purpose value for eFuse key block 0-5."""
        if ((key_block < 0) or (key_block > 5)):
            raise FatalError('Valid key block numbers must be in range 0-5')
        (reg, shift) = [(self.EFUSE_PURPOSE_KEY0_REG, self.EFUSE_PURPOSE_KEY0_SHIFT), (self.EFUSE_PURPOSE_KEY1_REG, self.EFUSE_PURPOSE_KEY1_SHIFT), (self.EFUSE_PURPOSE_KEY2_REG, self.EFUSE_PURPOSE_KEY2_SHIFT), (self.EFUSE_PURPOSE_KEY3_REG, self.EFUSE_PURPOSE_KEY3_SHIFT), (self.EFUSE_PURPOSE_KEY4_REG, self.EFUSE_PURPOSE_KEY4_SHIFT), (self.EFUSE_PURPOSE_KEY5_REG, self.EFUSE_PURPOSE_KEY5_SHIFT)][key_block]
        return ((self.read_reg(reg) >> shift) & 15)

    def is_flash_encryption_key_valid(self):
        """True when a usable flash-encryption key is programmed:
        either one XTS-AES-128 key, or both halves of an XTS-AES-256 key."""
        purposes = [self.get_key_block_purpose(b) for b in range(6)]
        if any(((p == self.PURPOSE_VAL_XTS_AES128_KEY) for p in purposes)):
            return True
        return (any(((p == self.PURPOSE_VAL_XTS_AES256_KEY_1) for p in purposes)) and any(((p == self.PURPOSE_VAL_XTS_AES256_KEY_2) for p in purposes)))

    def uses_usb_otg(self):
        """True when the console is routed over USB-OTG (not detectable in secure download mode)."""
        if self.secure_download_mode:
            return False
        return (self.get_uart_no() == self.UARTDEV_BUF_NO_USB_OTG)

    def _post_connect(self):
        # USB-OTG transfers are limited to smaller RAM blocks
        if self.uses_usb_otg():
            self.ESP_RAM_BLOCK = self.USB_RAM_BLOCK

    def _check_if_can_reset(self):
        """Abort if the chip entered download mode via GPIO0 strapping --
        esptool cannot exit that mode over USB."""
        if (os.getenv('ESPTOOL_TESTING') is not None):
            print('ESPTOOL_TESTING is set, ignoring strapping mode check')
            return
        strap_reg = self.read_reg(self.GPIO_STRAP_REG)
        force_dl_reg = self.read_reg(self.RTC_CNTL_OPTION1_REG)
        if (((strap_reg & self.GPIO_STRAP_SPI_BOOT_MASK) == 0) and ((force_dl_reg & self.RTC_CNTL_FORCE_DOWNLOAD_BOOT_MASK) == 0)):
            print("WARNING: {} chip was placed into download mode using GPIO0.\nesptool.py can not exit the download mode over USB. To run the app, reset the chip manually.\nTo suppress this note, set --after option to 'no_reset'.".format(self.get_chip_description()))
            raise SystemExit(1)

    def hard_reset(self):
        """Reset via RTS; when on USB-OTG, first verify a reset can work."""
        uses_usb_otg = self.uses_usb_otg()
        if uses_usb_otg:
            self._check_if_can_reset()
        print('Hard resetting via RTS pin...')
        HardReset(self._port, uses_usb_otg)()

    def change_baud(self, baud):
        ESPLoader.change_baud(self, baud)

    def check_spi_connection(self, spi_connection):
        """Validate custom SPI pin assignments for this chip's GPIO range."""
        if (not set(spi_connection).issubset((set(range(0, 22)) | set(range(26, 47))))):
            raise FatalError('SPI Pin numbers must be in the range 0-21, or 26-46.')
        # pins 19/20 also carry USB D-/D+ -- warn, but allow
        if any([v for v in spi_connection if (v in [19, 20])]):
            print('WARNING: GPIO pins 19 and 20 are used by USB-OTG, consider using other pins for SPI flash connection.')
def extractKeevareadsBlogspotCom(item):
    """Release parser for 'keevareads.blogspot.com' feed items.

    Returns a release message for recognized tags, ``None`` for previews or
    untitled chapters, and ``False`` when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [('bbhi', 'Black Bellied Husbands Indulgence: His Wife is not Allowed to Hide', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def gettaskvaluefromname(taskname):
    """Look up a task value by a 'taskname#valuename' string.

    The lookup is case-insensitive on both parts. Returns the matching
    ``uservar`` entry, or ``None`` when the name is malformed, no task/value
    matches, or the task table is unreadable. When several tasks share the
    same name+value, the last match wins (preserved from original behavior).
    """
    # Parse "task#value"; only malformed input errors are expected here
    # (original used a bare `except:` -- narrowed to the actual failure modes:
    # no '#' separator -> IndexError, non-string input -> AttributeError).
    try:
        parts = taskname.split('#')
        task_key = parts[0].strip().lower()
        value_key = parts[1].strip().lower()
    except (IndexError, AttributeError):
        return None
    res = None
    # The task table may contain placeholder booleans and partially-initialized
    # task objects; keep a broad guard so a bad entry degrades to "not found"
    # rather than crashing (deliberate best-effort, as in the original).
    try:
        for task in Settings.Tasks:
            if isinstance(task, bool):
                continue
            if task.taskname.lower() == task_key:
                for idx, valuename in enumerate(task.valuenames):
                    if valuename.lower() == value_key:
                        res = task.uservar[idx]
                        break
    except Exception:
        res = None
    return res
class LeadGenAppointmentSlotsByDay(AbstractObject):
    """Generated Graph API object: appointment slots grouped by day."""

    def __init__(self, api=None):
        super(LeadGenAppointmentSlotsByDay, self).__init__()
        # marker flag used by the SDK to identify this generated type
        self._isLeadGenAppointmentSlotsByDay = True
        self._api = api

    class Field(AbstractObject.Field):
        # field-name constants for API requests/responses
        appointment_slots = 'appointment_slots'
        day = 'day'

    # maps field name -> declared API type, used for deserialization
    _field_types = {'appointment_slots': 'list<LeadGenAppointmentTimeSlot>', 'day': 'string'}

    # NOTE(review): takes `cls` -- looks like a @classmethod whose decorator
    # was lost in extraction; confirm against the SDK generator output.
    def _get_field_enum_info(cls):
        """Return the (empty) enum metadata for this object's fields."""
        field_enum_info = {}
        return field_enum_info
def test_automatically_find_variables_and_gaussian_imputation_on_right_tail(df_na):
    """EndTailImputer with variables=None should auto-detect the numeric
    columns and impute them at mean + 3*std on the right tail."""
    transformer = EndTailImputer(imputation_method='gaussian', tail='right', fold=3, variables=None)
    X_transformed = transformer.fit_transform(df_na)

    # hand-built expected frame: numeric NAs filled at the gaussian right tail
    expected = df_na.copy()
    expected['Age'] = expected['Age'].fillna(58.)
    expected['Marks'] = expected['Marks'].fillna(1.)

    # init params are stored untouched
    assert (transformer.imputation_method == 'gaussian')
    assert (transformer.tail == 'right')
    assert (transformer.fold == 3)
    assert (transformer.variables is None)
    # fit attributes: detected variables and learned imputation values
    assert (transformer.variables_ == ['Age', 'Marks'])
    assert (transformer.n_features_in_ == 6)
    transformer.imputer_dict_ = {key: round(value, 3) for (key, value) in transformer.imputer_dict_.items()}
    assert (transformer.imputer_dict_ == {'Age': 58.949, 'Marks': 1.324})
    # numeric columns fully imputed; non-numeric columns left untouched
    assert (X_transformed[['Age', 'Marks']].isnull().sum().sum() == 0)
    assert (X_transformed[['City', 'Name']].isnull().sum().sum() > 0)
    pd.testing.assert_frame_equal(X_transformed, expected)
def test_ttcompile_ttf_compile_default(tmpdir):
    """Compile TestTTF.ttx with default options and check that the expected
    tables all survive in the resulting TTF."""
    input_ttx = os.path.join('Tests', 'ttx', 'data', 'TestTTF.ttx')
    output_ttf = tmpdir.join('TestTTF.ttf')
    options = ttx.Options([], 1)
    ttx.ttCompile(input_ttx, str(output_ttf), options)
    assert output_ttf.check(file=True)
    compiled_font = TTFont(str(output_ttf))
    for table_tag in ('head', 'hhea', 'maxp', 'OS/2', 'name', 'cmap', 'hmtx', 'fpgm', 'prep', 'cvt ', 'loca', 'glyf', 'post', 'gasp', 'DSIG'):
        assert (table_tag in compiled_font)
class BloodHoundSchema(object):
    """Parses an LDAP Schema entry into a name + schemaIdGuid pair.

    ``Name`` and ``SchemaIdGuid`` stay ``None`` unless the entry carries both
    attributes and the GUID decodes cleanly.
    """

    def __init__(self, object):
        # NOTE: parameter name shadows the `object` builtin; kept unchanged
        # for backward compatibility with keyword callers.
        self.Name = None
        self.SchemaIdGuid = None
        if (('name' in object) and ('schemaidguid' in object)):
            self.Name = object.get('name').lower()
            try:
                # schemaIDGUID is stored base64-encoded in little-endian
                # (bytes_le) GUID layout
                self.SchemaIdGuid = str(UUID(bytes_le=base64.b64decode(object.get('schemaidguid')))).lower()
                logging.debug(f'Reading Schema object {ColorScheme.schema}{self.Name}[/]', extra=OBJ_EXTRA_FMT)
            except (TypeError, ValueError):
                # narrowed from a bare `except:` -- b64decode raises
                # binascii.Error (a ValueError subclass) or TypeError on a
                # non-bytes value; UUID raises ValueError on a bad length
                logging.warning(f'Error base64 decoding SchemaIDGUID attribute on Schema {ColorScheme.schema}{self.Name}[/]', extra=OBJ_EXTRA_FMT)
def cbFun(snmpEngine, sendRequestHandle, errorIndication, errorStatus, errorIndex, varBinds, cbCtx):
    """SNMP response callback: print transport errors, protocol errors
    (ignoring status 2, noSuchName), or the returned var-binds."""
    if errorIndication:
        # transport/engine-level failure
        print(errorIndication)
        return
    if errorStatus and (errorStatus != 2):
        # protocol-level error; point at the offending var-bind when known
        location = ((errorIndex and varBinds[(int(errorIndex) - 1)][0]) or '?')
        print(('%s at %s' % (errorStatus.prettyPrint(), location)))
        return
    for (oid, val) in varBinds:
        print(('%s = %s' % (oid.prettyPrint(), val.prettyPrint())))
def get_room_session_stream(db, user=None, **kwargs):
    """Build a video stream plus its room (microlocation) and a session.

    When *user* is given, that user is also made owner of the room's event.
    Returns ``(room, stream, session)`` after committing the db session.
    """
    video_stream = VideoStreamFactoryBase(**kwargs)
    microlocation = MicrolocationSubVideoStreamFactory(video_stream=video_stream)
    event_session = SessionSubFactory(microlocation=microlocation, event=microlocation.event)
    if user:
        owner_role, _ = get_or_create(Role, name='owner', title_name='Owner')
        UsersEventsRoles(user=user, event=microlocation.event, role=owner_role)
    db.session.commit()
    return (microlocation, video_stream, event_session)
class OptionSeriesVariwideSonificationContexttracksMappingPan(Options):
    """Generated Highcharts option wrapper for
    ``series.variwide.sonification.contextTracks.mapping.pan``.

    NOTE(review): each option appears twice (getter via ``_config_get``,
    setter via ``_config``); the second ``def`` shadows the first as written.
    This matches generated code whose ``@property`` / ``@<name>.setter``
    decorators were stripped in extraction -- confirm against the generator.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def extractFreshcottontranslationshomeWpcomstagingCom(item):
    """Release parser for 'freshcottontranslationshome.wpcomstaging.com' items.

    Returns a release message for recognized tags, ``None`` for previews or
    untitled chapters, and ``False`` when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class rest_server(bsn_tlv):
    """Generated loxigen TLV: BSN 'rest_server' (type 152), a value-less TLV.

    NOTE(review): this is Python-2-era generated code -- ``pack`` joins
    ``struct.pack`` results with ``''.join`` (py2 str == bytes) and
    ``unpack(reader)`` takes no ``self`` (its ``@staticmethod`` decorator
    appears stripped). Both would fail on Python 3 as written; confirm
    against the loxigen output before modifying.
    """
    type = 152

    def __init__(self):
        return

    def pack(self):
        """Serialize as type(2B) + length(2B); length is back-patched."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a rest_server TLV from *reader*; asserts the type tag."""
        obj = rest_server()
        _type = reader.read('!H')[0]
        assert (_type == 152)
        _length = reader.read('!H')[0]
        # confine the reader to this TLV's payload (offset 4 = header size)
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        return obj

    def __eq__(self, other):
        # value-less TLV: equality is purely type identity
        if (type(self) != type(other)):
            return False
        return True

    def pretty_print(self, q):
        q.text('rest_server {')
        with q.group():
            with q.indent(2):
                q.breakable()
            q.breakable()
        q.text('}')
class SimpleRouter(BaseRouter):
    """Router that wires ViewSets to list/detail URL patterns, plus the
    dynamic routes declared with the ``@action`` decorator.

    Supports both regex (``re_path``) and path-converter (``path``) URL
    styles, selected by ``use_regex_path``.
    """
    routes = [Route(url='^{prefix}{trailing_slash}$', mapping={'get': 'list', 'post': 'create'}, name='{basename}-list', detail=False, initkwargs={'suffix': 'List'}), DynamicRoute(url='^{prefix}/{url_path}{trailing_slash}$', name='{basename}-{url_name}', detail=False, initkwargs={}), Route(url='^{prefix}/{lookup}{trailing_slash}$', mapping={'get': 'retrieve', 'put': 'update', 'patch': 'partial_update', 'delete': 'destroy'}, name='{basename}-detail', detail=True, initkwargs={'suffix': 'Instance'}), DynamicRoute(url='^{prefix}/{lookup}/{url_path}{trailing_slash}$', name='{basename}-{url_name}', detail=True, initkwargs={})]

    def __init__(self, trailing_slash=True, use_regex_path=True):
        """Configure slash handling and choose regex vs path-converter URLs."""
        self.trailing_slash = ('/' if trailing_slash else '')
        self._use_regex = use_regex_path
        if use_regex_path:
            # regex style: named group with a regex for the lookup value
            self._base_pattern = '(?P<{lookup_prefix}{lookup_url_kwarg}>{lookup_value})'
            self._default_value_pattern = '[^/.]+'
            self._url_conf = re_path
        else:
            # path style: <converter:kwarg> placeholder
            self._base_pattern = '<{lookup_value}:{lookup_prefix}{lookup_url_kwarg}>'
            self._default_value_pattern = 'str'
            self._url_conf = path
        # strip regex anchors from the route templates; the chosen url conf
        # re-applies anchoring as appropriate
        _routes = []
        for route in self.routes:
            url_param = route.url
            if (url_param[0] == '^'):
                url_param = url_param[1:]
            if (url_param[(- 1)] == '$'):
                url_param = url_param[:(- 1)]
            _routes.append(route._replace(url=url_param))
        self.routes = _routes
        super().__init__()

    def get_default_basename(self, viewset):
        """Derive a basename from the viewset's queryset model name."""
        queryset = getattr(viewset, 'queryset', None)
        assert (queryset is not None), '`basename` argument not specified, and could not automatically determine the name from the viewset, as it does not have a `.queryset` attribute.'
        return queryset.model._meta.object_name.lower()

    def get_routes(self, viewset):
        """Expand this router's route templates with the viewset's extra
        ``@action`` methods; rejects actions that shadow known routes."""
        known_actions = list(flatten([route.mapping.values() for route in self.routes if isinstance(route, Route)]))
        extra_actions = viewset.get_extra_actions()
        not_allowed = [action.__name__ for action in extra_actions if (action.__name__ in known_actions)]
        if not_allowed:
            msg = 'Cannot use the decorator on the following methods, as they are existing routes: %s'
            raise ImproperlyConfigured((msg % ', '.join(not_allowed)))
        detail_actions = [action for action in extra_actions if action.detail]
        list_actions = [action for action in extra_actions if (not action.detail)]
        routes = []
        for route in self.routes:
            if (isinstance(route, DynamicRoute) and route.detail):
                routes += [self._get_dynamic_route(route, action) for action in detail_actions]
            elif (isinstance(route, DynamicRoute) and (not route.detail)):
                routes += [self._get_dynamic_route(route, action) for action in list_actions]
            else:
                routes.append(route)
        return routes

    def _get_dynamic_route(self, route, action):
        """Instantiate a DynamicRoute template for one ``@action``."""
        initkwargs = route.initkwargs.copy()
        initkwargs.update(action.kwargs)
        url_path = escape_curly_brackets(action.url_path)
        return Route(url=route.url.replace('{url_path}', url_path), mapping=action.mapping, name=route.name.replace('{url_name}', action.url_name), detail=route.detail, initkwargs=initkwargs)

    def get_method_map(self, viewset, method_map):
        """Keep only HTTP-method -> action pairs the viewset implements."""
        bound_methods = {}
        for (method, action) in method_map.items():
            if hasattr(viewset, action):
                bound_methods[method] = action
        return bound_methods

    def get_lookup_regex(self, viewset, lookup_prefix=''):
        """Build the URL fragment that captures the detail lookup value,
        honoring the viewset's lookup_field / lookup_url_kwarg / value regex
        (or path converter, in non-regex mode)."""
        lookup_field = getattr(viewset, 'lookup_field', 'pk')
        lookup_url_kwarg = (getattr(viewset, 'lookup_url_kwarg', None) or lookup_field)
        lookup_value = None
        if (not self._use_regex):
            lookup_value = getattr(viewset, 'lookup_value_converter', None)
        if (lookup_value is None):
            lookup_value = getattr(viewset, 'lookup_value_regex', self._default_value_pattern)
        return self._base_pattern.format(lookup_prefix=lookup_prefix, lookup_url_kwarg=lookup_url_kwarg, lookup_value=lookup_value)

    def get_urls(self):
        """Generate the URL patterns for every registered viewset."""
        ret = []
        for (prefix, viewset, basename) in self.registry:
            lookup = self.get_lookup_regex(viewset)
            routes = self.get_routes(viewset)
            for route in routes:
                mapping = self.get_method_map(viewset, route.mapping)
                if (not mapping):
                    # viewset implements none of this route's actions
                    continue
                regex = route.url.format(prefix=prefix, lookup=lookup, trailing_slash=self.trailing_slash)
                if (not prefix):
                    # with an empty prefix, drop the leading slash the
                    # template would otherwise produce
                    if (self._url_conf is path):
                        if (regex[0] == '/'):
                            regex = regex[1:]
                    elif (regex[:2] == '^/'):
                        regex = ('^' + regex[2:])
                initkwargs = route.initkwargs.copy()
                initkwargs.update({'basename': basename, 'detail': route.detail})
                view = viewset.as_view(mapping, **initkwargs)
                name = route.name.format(basename=basename)
                ret.append(self._url_conf(regex, view, name=name))
        return ret
.skipif(('pandas' not in sys.modules), reason='Pandas is not installed.')
# NOTE(review): the line above is the tail of a decorator whose prefix
# (presumably `@pytest.mark`) was lost in extraction; the inner defs below
# also look like flytekit `@task` / `@workflow` functions whose decorators
# were stripped -- restore from upstream before running.
def test_wf_schema_to_df():
    """Workflow test: a task producing a FlyteSchema feeds a task that
    counts the DataFrame's columns; the workflow should return 2."""
    import pandas as pd
    from flytekit.types.schema import FlyteSchema
    schema1 = FlyteSchema[kwtypes(x=int, y=str)]

    def t1() -> schema1:
        s = schema1()
        s.open().write(pd.DataFrame(data={'x': [1, 2], 'y': ['3', '4']}))
        return s

    def t2(df: pd.DataFrame) -> int:
        return len(df.columns.values)

    def wf() -> int:
        return t2(df=t1())
    x = wf()
    assert (x == 2)
_all_methods(bind_proxy)
# NOTE(review): the line above looks like a class decorator whose '@' and
# prefix (likely `@for_all_methods(bind_proxy)`) were lost in extraction --
# restore from upstream copr before running.
class ProjectProxy(BaseProxy):
    """Copr API v3 client proxy for /project endpoints (CRUD, search,
    forking, permissions, repo regeneration). All methods return Munch
    objects built from the JSON response."""

    def get(self, ownername, projectname):
        """Fetch a single project by owner and name."""
        endpoint = '/project'
        params = {'ownername': ownername, 'projectname': projectname}
        response = self.request.send(endpoint=endpoint, params=params)
        return munchify(response)

    def get_list(self, ownername=None, pagination=None):
        """List projects, optionally filtered by owner and paginated."""
        endpoint = '/project/list'
        params = {'ownername': ownername}
        params.update((pagination or {}))
        response = self.request.send(endpoint=endpoint, params=params)
        return munchify(response)

    def search(self, query, pagination=None):
        """Full-text project search."""
        endpoint = '/project/search'
        params = {'query': query}
        params.update((pagination or {}))
        response = self.request.send(endpoint=endpoint, params=params)
        return munchify(response)

    def add(self, ownername, projectname, chroots, description=None, instructions=None, homepage=None, contact=None, additional_repos=None, unlisted_on_hp=False, enable_net=False, persistent=False, auto_prune=True, use_bootstrap_container=None, devel_mode=False, delete_after_days=None, multilib=False, module_hotfixes=False, bootstrap=None, bootstrap_image=None, isolation=None, follow_fedora_branching=True, fedora_review=None, appstream=False, runtime_dependencies=None, packit_forge_projects_allowed=None, repo_priority=None):
        """Create a new project (authenticated POST)."""
        endpoint = '/project/add/{ownername}'
        params = {'ownername': ownername}
        data = {'name': projectname, 'chroots': chroots, 'description': description, 'instructions': instructions, 'homepage': homepage, 'contact': contact, 'additional_repos': additional_repos, 'unlisted_on_hp': unlisted_on_hp, 'enable_net': enable_net, 'persistent': persistent, 'auto_prune': auto_prune, 'bootstrap': bootstrap, 'bootstrap_image': bootstrap_image, 'isolation': isolation, 'follow_fedora_branching': follow_fedora_branching, 'devel_mode': devel_mode, 'delete_after_days': delete_after_days, 'multilib': multilib, 'module_hotfixes': module_hotfixes, 'fedora_review': fedora_review, 'appstream': appstream, 'runtime_dependencies': runtime_dependencies, 'packit_forge_projects_allowed': packit_forge_projects_allowed, 'repo_priority': repo_priority}
        # translates the deprecated use_bootstrap_container flag into `bootstrap`
        _compat_use_bootstrap_container(data, use_bootstrap_container)
        response = self.request.send(endpoint=endpoint, method=POST, params=params, data=data, auth=self.auth)
        return munchify(response)

    def edit(self, ownername, projectname, chroots=None, description=None, instructions=None, homepage=None, contact=None, additional_repos=None, unlisted_on_hp=None, enable_net=None, auto_prune=None, use_bootstrap_container=None, devel_mode=None, delete_after_days=None, multilib=None, module_hotfixes=None, bootstrap=None, bootstrap_image=None, isolation=None, follow_fedora_branching=None, fedora_review=None, appstream=None, runtime_dependencies=None, packit_forge_projects_allowed=None, repo_priority=None):
        """Edit an existing project; None-valued fields are left unchanged."""
        endpoint = '/project/edit/{ownername}/{projectname}'
        params = {'ownername': ownername, 'projectname': projectname}
        data = {'chroots': chroots, 'description': description, 'instructions': instructions, 'homepage': homepage, 'contact': contact, 'repos': additional_repos, 'unlisted_on_hp': unlisted_on_hp, 'enable_net': enable_net, 'auto_prune': auto_prune, 'bootstrap': bootstrap, 'isolation': isolation, 'follow_fedora_branching': follow_fedora_branching, 'bootstrap_image': bootstrap_image, 'devel_mode': devel_mode, 'delete_after_days': delete_after_days, 'multilib': multilib, 'module_hotfixes': module_hotfixes, 'fedora_review': fedora_review, 'appstream': appstream, 'runtime_dependencies': runtime_dependencies, 'packit_forge_projects_allowed': packit_forge_projects_allowed, 'repo_priority': repo_priority}
        _compat_use_bootstrap_container(data, use_bootstrap_container)
        response = self.request.send(endpoint=endpoint, method=POST, params=params, data=data, auth=self.auth)
        return munchify(response)

    def delete(self, ownername, projectname):
        """Delete a project (requires the server-side 'verify' flag)."""
        endpoint = '/project/delete/{ownername}/{projectname}'
        params = {'ownername': ownername, 'projectname': projectname}
        data = {'verify': True}
        response = self.request.send(endpoint=endpoint, method=POST, params=params, data=data, auth=self.auth)
        return munchify(response)

    def fork(self, ownername, projectname, dstownername, dstprojectname, confirm=False):
        """Fork a project into another owner/name; `confirm` overwrites an existing target."""
        endpoint = '/project/fork/{ownername}/{projectname}'
        params = {'ownername': ownername, 'projectname': projectname}
        data = {'name': dstprojectname, 'ownername': dstownername, 'confirm': confirm}
        response = self.request.send(endpoint=endpoint, method=POST, params=params, data=data, auth=self.auth)
        return munchify(response)

    def can_build_in(self, who, ownername, projectname):
        """Return whether user *who* may build in the given project."""
        endpoint = '/project/permissions/can_build_in/{who}/{ownername}/{projectname}/'
        params = {'who': who, 'ownername': ownername, 'projectname': projectname}
        response = self.request.send(endpoint=endpoint, params=params, auth=self.auth)
        return munchify(response).can_build_in

    def get_permissions(self, ownername, projectname):
        """Fetch the project's permission table."""
        endpoint = '/project/permissions/get/{ownername}/{projectname}/'
        params = {'ownername': ownername, 'projectname': projectname}
        response = self.request.send(endpoint=endpoint, params=params, auth=self.auth)
        return munchify(response)

    def set_permissions(self, ownername, projectname, permissions):
        """Set project permissions (no response body expected)."""
        endpoint = '/project/permissions/set/{ownername}/{projectname}/'
        params = {'ownername': ownername, 'projectname': projectname}
        self.request.send(endpoint=endpoint, method=PUT, params=params, data=permissions, auth=self.auth)

    def request_permissions(self, ownername, projectname, permissions):
        """Request permissions on a project (no response body expected)."""
        endpoint = '/project/permissions/request/{ownername}/{projectname}/'
        params = {'ownername': ownername, 'projectname': projectname}
        self.request.send(endpoint=endpoint, method=PUT, params=params, data=permissions, auth=self.auth)

    def regenerate_repos(self, ownername, projectname):
        """Trigger regeneration of the project's repositories."""
        endpoint = '/project/regenerate-repos/{ownername}/{projectname}'
        params = {'ownername': ownername, 'projectname': projectname}
        response = self.request.send(endpoint=endpoint, method=PUT, params=params, auth=self.auth)
        return munchify(response)
class TestData(TypedDict):
    """Typed container for all fixture model mappings used by the test suite.

    Each entry maps fixture identifiers to model instances of one kind.
    """
    # tells pytest not to collect this class as a test case
    __test__ = False
    tenants: ModelMapping[Tenant]
    clients: ModelMapping[Client]
    oauth_providers: ModelMapping[OAuthProvider]
    user_fields: ModelMapping[UserField]
    users: ModelMapping[User]
    user_field_values: ModelMapping[UserFieldValue]
    login_sessions: ModelMapping[LoginSession]
    registration_sessions: ModelMapping[RegistrationSession]
    oauth_sessions: ModelMapping[OAuthSession]
    authorization_codes: ModelMapping[AuthorizationCode]
    email_verifications: ModelMapping[EmailVerification]
    refresh_tokens: ModelMapping[RefreshToken]
    session_tokens: ModelMapping[SessionToken]
    oauth_accounts: ModelMapping[OAuthAccount]
    grants: ModelMapping[Grant]
    permissions: ModelMapping[Permission]
    roles: ModelMapping[Role]
    user_permissions: ModelMapping[UserPermission]
    user_roles: ModelMapping[UserRole]
    email_templates: ModelMapping[EmailTemplate]
    themes: ModelMapping[Theme]
    webhooks: ModelMapping[Webhook]
    webhook_logs: ModelMapping[WebhookLog]
    email_domains: ModelMapping[EmailDomain]
def validate_is_node(node):
    """Validate that *node* is a structurally well-formed trie node.

    Accepts the blank node, a 2-item leaf/extension node, or a 17-item
    branch node (16 children plus a value slot). Raises ``ValidationError``
    for anything else.
    """
    if node == BLANK_NODE:
        return
    if len(node) == 2:
        # leaf or extension node: (encoded key, value-or-subnode)
        key, value = node
        validate_is_bytes(key)
        if isinstance(value, list):
            validate_is_node(value)
        else:
            validate_is_bytes(value)
    elif len(node) == 17:
        # branch node: slot 16 holds the value, slots 0-15 the children
        validate_is_bytes(node[16])
        for child in node[:16]:
            if child == BLANK_NODE:
                continue
            if isinstance(child, list):
                validate_is_node(child)
            else:
                # inline reference: must be a 32-byte hash
                validate_is_bytes(child)
                validate_length(child, 32)
    else:
        raise ValidationError(f'Invalid Node: {node}')
class CfgFormatJson(CfgFormatBase):
    """Configuration loader for JSON sources (inline strings or files)."""

    def create_cfg_from_str(self, url):
        """Parse a JSON string (optionally prefixed ``json:``) into a dict."""
        payload = url[5:] if url.startswith('json:') else url
        jdict = json.loads(payload)
        if not isinstance(jdict, dict):
            raise CfgEx('Given JSON string encodes no dictionary.')
        return jdict

    def create_cfg_from_file(self, url):
        """Load a JSON file (optionally prefixed ``file://``) into a dict."""
        filepath = url[7:] if url.startswith('file://') else url
        with open(filepath, 'r') as jfd:
            jdict = json.load(jfd)
            if not isinstance(jdict, dict):
                raise CfgEx('Given JSON string encodes no dictionary.')
            return jdict
class LoopIR_Rewrite():
def apply_proc(self, old):
return (self.map_proc(old) or old)
def apply_fnarg(self, old):
return (self.map_fnarg(old) or old)
def apply_stmts(self, old):
if ((new := self.map_stmts(old)) is not None):
return new
return old
def apply_exprs(self, old):
if ((new := self.map_exprs(old)) is not None):
return new
return old
def apply_s(self, old):
if ((new := self.map_s(old)) is not None):
return new
return [old]
def apply_e(self, old):
return (self.map_e(old) or old)
def apply_w_access(self, old):
return (self.map_w_access(old) or old)
def apply_t(self, old):
return (self.map_t(old) or old)
def map_proc(self, p):
new_args = self._map_list(self.map_fnarg, p.args)
new_preds = self.map_exprs(p.preds)
new_body = self.map_stmts(p.body)
if any(((new_args is not None), (new_preds is not None), (new_body is not None))):
new_preds = (new_preds or p.preds)
new_preds = [p for p in new_preds if (not (isinstance(p, LoopIR.Const) and p.val))]
return p.update(args=(new_args or p.args), preds=new_preds, body=(new_body or p.body))
return None
def map_fnarg(self, a):
if (t := self.map_t(a.type)):
return a.update(type=t)
return None
def map_stmts(self, stmts):
return self._map_list(self.map_s, stmts)
def map_exprs(self, exprs):
return self._map_list(self.map_e, exprs)
def map_s(self, s):
if isinstance(s, (LoopIR.Assign, LoopIR.Reduce)):
new_type = self.map_t(s.type)
new_idx = self.map_exprs(s.idx)
new_rhs = self.map_e(s.rhs)
if any((new_type, (new_idx is not None), new_rhs)):
return [s.update(type=(new_type or s.type), idx=(new_idx or s.idx), rhs=(new_rhs or s.rhs))]
elif isinstance(s, (LoopIR.WriteConfig, LoopIR.WindowStmt)):
new_rhs = self.map_e(s.rhs)
if new_rhs:
return [s.update(rhs=(new_rhs or s.rhs))]
elif isinstance(s, LoopIR.If):
new_cond = self.map_e(s.cond)
new_body = self.map_stmts(s.body)
new_orelse = self.map_stmts(s.orelse)
if any((new_cond, (new_body is not None), (new_orelse is not None))):
return [s.update(cond=(new_cond or s.cond), body=(new_body or s.body), orelse=(new_orelse or s.orelse))]
elif isinstance(s, LoopIR.For):
new_lo = self.map_e(s.lo)
new_hi = self.map_e(s.hi)
new_body = self.map_stmts(s.body)
if any((new_lo, new_hi, (new_body is not None))):
return [s.update(lo=(new_lo or s.lo), hi=(new_hi or s.hi), body=(new_body or s.body))]
elif isinstance(s, LoopIR.Call):
new_args = self.map_exprs(s.args)
if (new_args is not None):
return [s.update(args=(new_args or s.args))]
elif isinstance(s, LoopIR.Alloc):
new_type = self.map_t(s.type)
if new_type:
return [s.update(type=(new_type or s.type))]
elif isinstance(s, LoopIR.Pass):
return None
else:
raise NotImplementedError(f'bad case {type(s)}')
return None
def map_e(self, e):
if isinstance(e, LoopIR.Read):
new_type = self.map_t(e.type)
new_idx = self.map_exprs(e.idx)
if any((new_type, (new_idx is not None))):
return e.update(idx=(new_idx or e.idx), type=(new_type or e.type))
elif isinstance(e, LoopIR.BinOp):
new_lhs = self.map_e(e.lhs)
new_rhs = self.map_e(e.rhs)
new_type = self.map_t(e.type)
if any((new_lhs, new_rhs, new_type)):
return e.update(lhs=(new_lhs or e.lhs), rhs=(new_rhs or e.rhs), type=(new_type or e.type))
elif isinstance(e, LoopIR.BuiltIn):
new_type = self.map_t(e.type)
new_args = self.map_exprs(e.args)
if any((new_type, (new_args is not None))):
return e.update(args=(new_args or e.args), type=(new_type or e.type))
elif isinstance(e, LoopIR.USub):
new_arg = self.map_e(e.arg)
new_type = self.map_t(e.type)
if any((new_arg, new_type)):
return e.update(arg=(new_arg or e.arg), type=(new_type or e.type))
elif isinstance(e, LoopIR.WindowExpr):
new_idx = self._map_list(self.map_w_access, e.idx)
new_type = self.map_t(e.type)
if any(((new_idx is not None), new_type)):
return e.update(idx=(new_idx or e.idx), type=(new_type or e.type))
elif isinstance(e, LoopIR.ReadConfig):
if (new_type := self.map_t(e.type)):
return e.update(type=(new_type or e.type))
elif isinstance(e, (LoopIR.Const, LoopIR.StrideExpr)):
return None
else:
raise NotImplementedError(f'bad case {type(e)}')
return None
def map_w_access(self, w):
    """Rewrite one window access (an Interval or a Point).

    Returns the updated access, or None when no sub-expression changed.
    """
    if isinstance(w, LoopIR.Interval):
        lo = self.map_e(w.lo)
        hi = self.map_e(w.hi)
        if lo or hi:
            return w.update(lo=lo or w.lo, hi=hi or w.hi)
        return None
    # Point access: a single index expression.
    pt = self.map_e(w.pt)
    return w.update(pt=pt) if pt else None
def map_t(self, t):
    """Rewrite a LoopIR type (Tensor or Window).

    Returns the updated type, or None if nothing changed; other types
    have no rewritable sub-terms.
    """
    if isinstance(t, T.Tensor):
        hi = self.map_exprs(t.hi)
        inner = self.map_t(t.type)
        if (hi is not None) or inner:
            return t.update(hi=hi or t.hi, type=inner or t.type)
        return None
    if isinstance(t, T.Window):
        src = self.map_t(t.src_type)
        as_tensor = self.map_t(t.as_tensor)
        idx = self._map_list(self.map_w_access, t.idx)
        if src or as_tensor or (idx is not None):
            return t.update(
                src_type=src or t.src_type,
                as_tensor=as_tensor or t.as_tensor,
                idx=idx or t.idx,
            )
    return None
def _map_list(fn, nodes):
new_stmts = []
needs_update = False
for s in nodes:
s2 = fn(s)
if (s2 is None):
new_stmts.append(s)
else:
needs_update = True
if isinstance(s2, list):
new_stmts.extend(s2)
else:
new_stmts.append(s2)
if (not needs_update):
return None
return new_stmts |
class Test_icmpv6_echo_reply(Test_icmpv6_echo_request):
    """Echo-reply variant of the echo-request test case.

    Inherits the parent's test methods; only the ICMPv6 type (129),
    the expected checksum and the packed reference buffer differ.
    """

    def setUp(self):
        # Type 129 = ICMPv6 Echo Reply (RFC 4443); csum/buf are the
        # serialized reference values the inherited tests compare against.
        self.type_ = 129
        self.csum = 42098
        self.buf = b'\x81\x00\xa4rv \x00\x00'

    def test_default_args(self):
        """Serialize an echo reply with default id/seq and verify the
        header fields, checksum and zeroed echo payload."""
        prev = ipv6(nxt=inet.IPPROTO_ICMPV6)
        ic = icmpv6.icmpv6(type_=icmpv6.ICMPV6_ECHO_REPLY, data=icmpv6.echo())
        prev.serialize(ic, None)
        buf = ic.serialize(bytearray(), prev)
        # First 4 bytes: type, code, checksum.
        res = struct.unpack(icmpv6.icmpv6._PACK_STR, six.binary_type(buf[:4]))
        eq_(res[0], icmpv6.ICMPV6_ECHO_REPLY)
        eq_(res[1], 0)
        eq_(res[2], icmpv6_csum(prev, buf))
        # Remainder: echo identifier and sequence number default to 0.
        res = struct.unpack(icmpv6.echo._PACK_STR, six.binary_type(buf[4:]))
        eq_(res[0], 0)
        eq_(res[1], 0)
class DatatableQueryParametersGetter():
    """FastAPI dependency that assembles ``DatatableQueryParameters``
    (pagination, ordering, column selection and extra query params)
    from an incoming request."""

    def __init__(self, default_columns: list[str], params: (list[str] | None)=None) -> None:
        # Columns used when the request does not supply a "columns" arg.
        self.default_columns = default_columns
        # Names of extra query parameters to forward when present.
        self.params = params or []

    def __call__(self, request: Request, pagination: Pagination=Depends(get_pagination), ordering: Ordering=Depends(OrderingGetter()), columns: (str | None)=Query(None)) -> DatatableQueryParameters:
        """Build the parameter object for one request."""
        # "columns" overrides the defaults: comma-separated, lowercased.
        if columns is None:
            selected = self.default_columns
        else:
            selected = columns.lower().split(',')
        # Forward only whitelisted extras that actually appear in the query.
        extras = {
            name: request.query_params[name]
            for name in self.params
            if name in request.query_params
        }
        return DatatableQueryParameters(pagination, ordering, selected, extras)
class OptionPlotoptionsBubbleSonificationTracksMappingPitch(Options):
    """Highcharts sonification pitch-mapping options for bubble tracks.

    Bug fix: each getter/setter pair shared a single name, so the later
    plain ``def`` shadowed the earlier one and the getters were
    unreachable dead code.  They are restored as proper properties
    (getter + setter), the accessor pattern Options subclasses use.
    """

    @property
    def mapFunction(self):
        # No package-level default.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Pitch is mapped from the point's "y" value by default.
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # Highest note played: c6.
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        # Lowest note played: c2.
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        # Values are mapped within the "yAxis" extremes by default.
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class Features(str, Enum):
    """Closed set of storefront feature labels for a game listing."""

    AMIIBO = 'Amiibo Supported'
    DEMO = 'Demo Available'
    DLC = 'DLC Available'
    GAME_VOUCHER = 'Game Voucher Eligible'
    ONLINE_PLAY = 'Online Play'
    SAVE_DATA_CLOUD = 'Save Data Cloud Supported'
    VOICE_CHAT = 'Voice Chat Supported'

    def __str__(self):
        # Show the human-readable label rather than "Features.AMIIBO".
        return self.value
def _get_error_names(mod, type_, code):
    """Resolve an OpenFlow error (type, code) pair to constant names.

    The code-name prefix is normally derived from the type name as
    'OFP' + the first letter after each underscore + 'C_'
    (e.g. OFPET_BAD_REQUEST -> OFPBRC_).

    Bug fix: OFPET_FLOW_MONITOR_FAILED uses the irregular prefix
    OFPMOFC_; previously the derivation loop still ran after the
    special case and appended extra letters to it.  The loop is now
    confined to the regular branch.

    Returns a (type_name, code_name) tuple; ('Unknown', 'Unknown')
    when the type cannot be resolved.
    """
    t_name = _get_value_name(mod, type_, 'OFPET_')
    if t_name == 'Unknown':
        return ('Unknown', 'Unknown')
    if t_name == 'OFPET_FLOW_MONITOR_FAILED':
        # Irregular prefix the letter-extraction rule cannot produce.
        c_name_p = 'OFPMOFC_'
    else:
        c_name_p = 'OFP'
        for m in re.findall('_(.)', t_name):
            c_name_p += m.upper()
        c_name_p += 'C_'
    c_name = _get_value_name(mod, code, c_name_p)
    return (t_name, c_name)
def test_adding_a_pod_affinity_rule():
    """A podAffinity rule supplied via chart values must appear verbatim
    in the rendered statefulset's pod-spec affinity section."""
    # YAML values snippet: require co-scheduling with pods labeled
    # app=elasticsearch on the same hostname.
    config = '\npodAffinity:\n  requiredDuringSchedulingIgnoredDuringExecution:\n  - labelSelector:\n      matchExpressions:\n      - key: app\n        operator: In\n        values:\n        - elasticsearch\n    topologyKey: kubernetes.io/hostname\n'
    r = helm_template(config)
    assert (r['statefulset'][name]['spec']['template']['spec']['affinity']['podAffinity']['requiredDuringSchedulingIgnoredDuringExecution'][0]['topologyKey'] == 'kubernetes.io/hostname')
class StorageNodeMonHistorySerializer(MonHistorySerializer):
    """Monitoring-history serializer for one zpool on a storage node."""

    # Name of the zpool whose history is requested.
    zpool = s.CharField(required=True)

    def validate(self, attrs):
        """Check that the requested zpool exists on the compute node and
        record it as the item id.

        Bug fix: presence of the zpool was previously checked with a
        bare ``assert``, which is stripped under ``python -O`` and
        raises AssertionError instead of a client-facing validation
        error.  Missing and unknown zpools now both raise
        ``s.ValidationError``.
        """
        zpool = attrs.get('zpool')
        if not zpool or zpool not in self.obj.zpools:
            raise s.ValidationError(_('Zpool not defined on compute node.'))
        self.item_id = zpool
        return attrs
def SparseMatFromDict(nr, nc, aDict):
    """Build an ``nr`` x ``nc`` CSR SparseMat from ``{(i, j): value}``.

    Sorting the (row, col) keys yields row-major order with ascending
    columns within each row, as CSR requires.

    Bug fix: the original rowptr fill advanced the row index by at most
    one per entry and only wrote one final entry, so any matrix with an
    empty row (or trailing empty rows) got a corrupt rowptr.  Skipped
    rows are now filled explicitly.

    Returns:
        (SparseMat, nzval) — the matrix and its nonzero-value array
        (shared storage, matching the original contract).
    """
    from . import superluWrappers
    indices = sorted(aDict.keys())
    nnz = len(indices)
    nzval = numpy.zeros((nnz,), 'd')
    rowptr = numpy.zeros((nr + 1,), 'i')
    colind = numpy.zeros((nnz,), 'i')
    row = 0
    k = 0
    rowptr[0] = 0
    for ij in indices:
        # Close out any empty rows between the previous entry and this one.
        while row < ij[0]:
            row += 1
            rowptr[row] = k
        nzval[k] = aDict[ij]
        colind[k] = ij[1]
        k += 1
    # Close out the last occupied row and any trailing empty rows.
    while row < nr:
        row += 1
        rowptr[row] = k
    return (SparseMat(nr, nc, nnz, nzval, colind, rowptr), nzval)
class StackableHoleEdgeTop(StackableBaseEdge):
    """Top counterpart of the stackable edge: a straight edge preceded
    by a row of finger holes."""

    char = 'S'
    description = 'Stackable edge with finger holes (top)'
    bottom = False

    def startwidth(self) -> float:
        """Width consumed before the edge: material plus hole offset."""
        cfg = self.settings
        return cfg.thickness + cfg.holedistance

    def __call__(self, length, **kw):
        # Drill the finger-hole row, offset by the hole distance plus
        # half the material thickness, then draw the base edge.
        offset = self.settings.holedistance + 0.5 * self.boxes.thickness
        self.boxes.fingerHolesAt(0, offset, length, 0)
        super().__call__(length, **kw)
class CampaignGroupCollaborativeAdsPartnerInfo(AbstractObject):
    """Graph-API object for collaborative-ads partner info on a campaign
    group.  Generated-SDK style: no declared fields or field enums."""

    def __init__(self, api=None):
        super(CampaignGroupCollaborativeAdsPartnerInfo, self).__init__()
        # Type marker used by the SDK's object machinery.
        self._isCampaignGroupCollaborativeAdsPartnerInfo = True
        self._api = api

    class Field(AbstractObject.Field):
        # No fields defined for this object type.
        pass

    # Field name -> type mapping (empty for this object).
    _field_types = {}

    # Bug fix: this takes ``cls`` and is invoked on the class by the SDK,
    # but the @classmethod decorator was missing.
    @classmethod
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
class TestNull(util.ColorAsserts, unittest.TestCase):
    """Tests that achromatic / undefined hues in LCh-99o come out as NaN."""

    def test_null_input(self):
        """An explicitly NaN hue channel is preserved."""
        c = Color('lch99o', [90, 50, NaN], 1)
        self.assertTrue(c.is_nan('hue'))

    def test_none_input(self):
        """The CSS keyword ``none`` parses to a NaN hue."""
        c = Color('color(--lch99o 90% 0 none / 1)')
        self.assertTrue(c.is_nan('hue'))

    def test_near_zero_null(self):
        """Zero chroma forces the hue to NaN after a round trip through
        din99o."""
        c = Color('color(--lch99o 90% 0. 120 / 1)').convert('din99o').convert('lch99o')
        self.assertTrue(c.is_nan('hue'))

    def test_from_din99o(self):
        """Converting an achromatic din99o color yields a NaN hue."""
        c1 = Color('color(--din99o 90% 0 0)')
        c2 = c1.convert('lch99o')
        self.assertColorEqual(c2, Color('color(--lch99o 90% 0 0)'))
        self.assertTrue(c2.is_nan('hue'))

    def test_achromatic_hue(self):
        """Every gray (r == g == b) in several RGB spaces converts to
        lch99o with a NaN hue."""
        for space in ('srgb', 'display-p3', 'rec2020', 'a98-rgb', 'prophoto-rgb'):
            for x in range(0, 256):
                color = Color('color({space} {num:f} {num:f} {num:f})'.format(space=space, num=(x / 255)))
                color2 = color.convert('lch99o')
                self.assertTrue(color2.is_nan('hue'))
def _check_find_width_args(data, direction, threshold, min_width, max_width, delta):
    """Validate arguments shared by the find-width routines.

    Raises:
        TypeError: for arguments of the wrong type.
        ValueError: for out-of-range widths/delta, or a delta that is
            not strictly below min_width when max_width is omitted.

    Warns when both 'max_width' and 'delta' are supplied, since delta
    is then ignored.
    """
    _check_data(data)
    if direction not in Direction:
        raise TypeError(f"'direction' should be a Direction object, not a {type(direction)}.")
    # bool is a subclass of int and is accepted here, as before.
    if not isinstance(threshold, (int, float)):
        raise TypeError(f"'threshold' should be an int or a float, not a {type(threshold)}.")
    if not isinstance(min_width, int):
        raise TypeError(f"'min_width' should be an int, not a {type(min_width)}.")
    if min_width <= 0:
        raise ValueError(f"'min_width' should be bigger than 0, here it is {min_width}.")
    if max_width is not None:
        if not isinstance(max_width, int):
            raise TypeError(f"'max_width' should be an int, not a {type(max_width)}.")
        if max_width <= 0:
            raise ValueError(f"'max_width' should be bigger than 0, here it is {max_width}.")
    if delta is not None:
        if not isinstance(delta, int):
            raise TypeError(f"'delta' should be an int, not a {type(delta)}.")
        if delta <= 0:
            raise ValueError(f"'delta' should be bigger than 0, here it is {delta}.")
    if (max_width is None) and (delta is not None) and (min_width <= delta):
        raise ValueError("'delta' argument should be lower than 'min_width'")
    if (max_width is not None) and (delta is not None):
        # Fixed misplaced quotes in the original warning text.
        _warnings.warn("'max_width' and 'delta' arguments are specified. 'delta' will be ignored.")
class TestMathOps(unittest.TestCase):
    """Exercises the running-statistics Solution: insert plus the
    max/min/mean/mode accessors."""

    def test_math_ops(self):
        solution = Solution()
        # Inserting None must be rejected with TypeError.
        self.assertRaises(TypeError, solution.insert, None)
        # Sample stream: 5 2 7 9 9 2 9 4 3 3 2
        solution.insert(5)
        solution.insert(2)
        solution.insert(7)
        solution.insert(9)
        solution.insert(9)
        solution.insert(2)
        solution.insert(9)
        solution.insert(4)
        solution.insert(3)
        solution.insert(3)
        solution.insert(2)
        self.assertEqual(solution.max, 9)
        self.assertEqual(solution.min, 2)
        # Sum is 55 over 11 values -> mean 5.
        self.assertEqual(solution.mean, 5)
        # 2 and 9 both occur three times; either is an acceptable mode.
        self.assertTrue((solution.mode in (2, 9)))
        print('Success: test_math_ops')
def _update_package(module, conda, installed, name):
    """Update an installed conda package, reporting through the Ansible
    module exit/fail API.

    Runs ``conda update --dry-run`` first to detect "already up to
    date" and to honor check mode; only then performs the real update.
    Every exit path goes through module.exit_json/fail_json.
    """
    if not installed:
        module.fail_json(msg="can't update a package that is not installed")

    def _build_command(flag):
        # Base command plus the user-configured channels and extra args.
        command = [conda, 'update', flag, name]
        command = _add_channels_to_command(command, module.params['channels'])
        return _add_extras_to_command(command, module.params['extra_args'])

    (rc, stdout, stderr) = module.run_command(_build_command('--dry-run'))
    if rc != 0:
        # Fixed: this path previously reused the misleading
        # "not installed" message even though the package IS installed.
        module.fail_json(msg=('failed to check for updates to package ' + name), stdout=stdout, stderr=stderr)
    if 'requested packages already installed' in stdout:
        module.exit_json(changed=False, name=name)
    if module.check_mode:
        # Dry-run showed a pending update; report it without applying.
        module.exit_json(changed=True, name=name)
    (rc, stdout, stderr) = module.run_command(_build_command('--yes'))
    if rc != 0:
        module.fail_json(msg=('failed to update package ' + name))
    module.exit_json(changed=True, name=name, stdout=stdout, stderr=stderr)
class OptionPlotoptionsParetoSonificationDefaultinstrumentoptionsMappingVolume(Options):
    """Highcharts sonification volume-mapping options for pareto series.

    Bug fix: each getter/setter pair shared a single name, so the later
    plain ``def`` shadowed the earlier one and the getters were
    unreachable dead code.  They are restored as proper properties
    (getter + setter), the accessor pattern Options subclasses use.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_format_string_version(ghidra_analysis, key_string):
    """Collect format-string function calls reachable from references to
    ``key_string``.

    Finds the first occurrence of ``key_string`` in the binary, walks
    all references to that address, and for each distinct basic block
    containing such a reference gathers the format-string function
    calls in that block.

    Returns:
        dict mapping function -> list of format-string calls; empty
        when the string or its references cannot be found.
    """
    result = {}
    address = ghidra_analysis.flat_api.find(key_string)
    if (address is None):
        logging.error('key string address not found')
        return result
    logging.info('found address of key string: {}'.format(address))
    reference_list = ghidra_analysis.flat_api.getReferencesTo(address)
    if (reference_list is None):
        logging.warning('found no references to address')
        return result
    logging.info('found references to address:')
    basic_block_list = []
    # set() drops duplicate reference objects before iterating.
    for reference in set(reference_list):
        logging.info(' \t{}'.format(reference))
        source_addr = reference.getFromAddress()
        # NOTE(review): getFunctionBefore may return a function that does
        # not actually contain source_addr; the basic-block lookup below
        # filters that case out.
        func = ghidra_analysis.flat_api.getFunctionBefore(source_addr)
        logging.info(' \tin function: {}'.format(func))
        basic_block = find_basic_block_containing(ghidra_analysis, func, source_addr)
        if (not basic_block):
            logging.warning(' \taddress not in function -> skipping')
            continue
        if (basic_block in basic_block_list):
            logging.info(' \tskipping duplicate basic block')
            continue
        basic_block_list.append(basic_block)
        format_string_function_calls = get_format_string_function_calls(ghidra_analysis, basic_block)
        # A function may own several referencing blocks; accumulate.
        result.setdefault(func, []).extend(format_string_function_calls)
    return result
class BotFlow():
    """A named group of bot commands ("flow") that can be attached to
    and detached from a bot at runtime."""

    def __init__(self, bot, name=None):
        super().__init__()
        self._bot = bot
        # Whether this flow's commands are currently injected into the bot.
        self.is_activated = False
        self._name = name

    def name(self) -> str:
        """Return the flow's display name (may be None)."""
        return self._name

    def activate(self) -> None:
        """Inject this flow's commands into the bot."""
        self._bot.inject_flows_from(self)
        self.is_activated = True

    def deactivate(self) -> None:
        """Remove this flow's commands from the bot."""
        self._bot.remove_flows_from(self)
        self.is_activated = False

    def get_command(self, command_name: str):
        """Look up a registered command by name; None if not found.

        Bug fix: the lookup result was previously computed but never
        returned, so this method always returned None.
        """
        return self._bot.all_commands.get(command_name, None)
def attach_profile_to_role(client, role_name='forrest_unicorn_role', profile_name='forrest_unicorn_profile'):
    """Ensure exactly the desired Instance Profile is attached to an IAM Role.

    If ``profile_name`` is already attached, nothing changes.  Otherwise
    every currently attached profile is removed and ``profile_name`` is
    attached in its place.

    Args:
        client: boto3 IAM client.
        role_name (str): IAM Role to attach the Instance Profile to.
        profile_name (str): Instance Profile that should end up attached.

    Returns:
        bool: True on completion.
    """
    current_instance_profiles = resource_action(client, action='list_instance_profiles_for_role', log_format='Found Instance Profiles for %(RoleName)s.', RoleName=role_name)['InstanceProfiles']
    for profile in current_instance_profiles:
        if (profile['InstanceProfileName'] == profile_name):
            LOG.info('Found Instance Profile attached to Role: %s -> %s', profile_name, role_name)
            break
    else:
        # for/else: this branch runs only when no attached profile matched.
        for remove_profile in current_instance_profiles:
            resource_action(client, action='remove_role_from_instance_profile', log_format='Removed Instance Profile from Role: %(InstanceProfileName)s -> %(RoleName)s', InstanceProfileName=remove_profile['InstanceProfileName'], RoleName=role_name)
        resource_action(client, action='add_role_to_instance_profile', log_format='Added Instance Profile to Role: %(InstanceProfileName)s -> %(RoleName)s', InstanceProfileName=profile_name, RoleName=role_name)
    return True
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.