code stringlengths 281 23.7M |
|---|
class OrderBase(OrderOrSelection):
    """Base class for ordering strategies.

    Subclasses supply ``build_actions`` to turn keyword arguments into an
    ordered mapping of {metadata key -> comparator callable}.
    """

    def __init__(self, kwargs, remapping):
        # Build the comparator table first; remapping is only needed at
        # comparison time.
        self.actions = self.build_actions(kwargs)
        self.remapping = remapping

    def build_actions(self, kwargs):
        """Return the ordered {key: comparator} mapping (subclass hook)."""
        raise NotImplementedError()

    def compare_elements(self, a, b):
        """Compare two elements field by field; the first nonzero comparator
        result decides, otherwise the elements are considered equal (0)."""
        assert callable(self.remapping), (type(self.remapping), self.remapping)
        lhs = self.remapping(a.metadata)
        rhs = self.remapping(b.metadata)
        for key, comparator in self.actions.items():
            outcome = comparator(lhs(key), rhs(key))
            if outcome != 0:
                return outcome
        return 0
@_registry.register_policy
class EAIPolicy(NetPolicy):
    """Habitat-baselines policy wrapping an EAINet visual encoder + recurrent
    state encoder.

    Fixes: the flattened source contained a bare ``_registry.register_policy``
    expression (a stripped ``@`` decorator — restored so the policy gets
    registered), and ``from_config`` uses ``cls`` but was missing its
    ``@classmethod`` decorator.
    """

    def __init__(self, observation_space: spaces.Dict, action_space, input_image_size, backbone_config, hidden_size: int=512, rnn_type: str='GRU', num_recurrent_layers: int=1, use_augmentations: bool=False, use_augmentations_test_time: bool=False, run_type: str='train', freeze_backbone: bool=False, freeze_batchnorm: bool=False, global_pool: bool=False, use_cls: bool=False, policy_config: DictConfig=None, aux_loss_config: Optional[DictConfig]=None, **kwargs):
        # Encoder-related arguments are forwarded to EAINet; the NetPolicy base
        # receives the network plus action/policy/aux-loss configuration.
        super().__init__(EAINet(observation_space=observation_space, action_space=action_space, input_image_size=input_image_size, backbone_config=backbone_config, hidden_size=hidden_size, rnn_type=rnn_type, num_recurrent_layers=num_recurrent_layers, use_augmentations=use_augmentations, use_augmentations_test_time=use_augmentations_test_time, run_type=run_type, freeze_backbone=freeze_backbone, freeze_batchnorm=freeze_batchnorm, global_pool=global_pool, use_cls=use_cls), action_space=action_space, policy_config=policy_config, aux_loss_config=aux_loss_config)

    @classmethod
    def from_config(cls, config: DictConfig, observation_space: spaces.Dict, action_space, **kwargs):
        """Alternate constructor building the policy from a Hydra config."""
        # NOTE(review): freeze_batchnorm reuses the freeze_backbone flag in the
        # original source — confirm upstream before changing.
        return cls(observation_space=observation_space, action_space=action_space, hidden_size=config.habitat_baselines.rl.policy.hidden_size, rnn_type=config.habitat_baselines.rl.policy.rnn_type, num_recurrent_layers=config.habitat_baselines.rl.policy.num_recurrent_layers, backbone_config=config.model, freeze_backbone=config.habitat_baselines.rl.policy.freeze_backbone, freeze_batchnorm=config.habitat_baselines.rl.policy.freeze_backbone, aux_loss_config=config.habitat_baselines.rl.auxiliary_losses, input_image_size=config.habitat_baselines.rl.policy.input_image_size, use_augmentations=config.habitat_baselines.rl.policy.use_augmentations, use_augmentations_test_time=config.habitat_baselines.rl.policy.use_augmentations_test_time, run_type=config.RUN_TYPE, policy_config=config.habitat_baselines.rl.policy, global_pool=config.habitat_baselines.rl.policy.global_pool, use_cls=config.habitat_baselines.rl.policy.use_cls)
class OptionPlotoptionsTimelineSonificationTracksPointgrouping(Options):
    """Point-grouping options for timeline sonification tracks.

    Fix: in the flattened source each setter ``def`` shadowed the getter of
    the same name, leaving the getters unreachable; the ``@property`` /
    ``@<name>.setter`` decorators are restored here.
    """

    @property
    def algorithm(self):
        """Grouping algorithm (default: 'minmax')."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is enabled (default: True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan (ms) of each grouping window (default: 15)."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property the grouping operates on (default: 'y')."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def test_flows_stop(testbot):
assert ('c' in testbot.exec_command('!c'))
flow_message = testbot.bot.pop_message()
assert ('You are in the flow w2' in flow_message)
assert ('w2 stopped' in testbot.exec_command('!flows stop w2'))
assert (len(testbot.bot.flow_executor.in_flight) == 0) |
def extractPathOfTranslation(item):
    """Map a Path of Translation feed `item` onto a release message.

    Returns None for previews/untagged items and podcast episodes, a release
    message when the series can be identified by tag or title prefix, and
    False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title = item['title']
    lower_title = title.lower()
    if not (chp or vol or frag) or 'preview' in lower_title:
        return None
    if title.startswith('Path of Discord Episode'):
        return None
    snames = ["Emperor's Domination", 'Martial God Realm', 'Big Life', "I'm a Villain", 'Grasping Evil', 'The Human Emperor', "Post-80's Cultivation Journal"]
    tlut = {sname.lower(): sname for sname in snames}
    ltags = [tag.lower() for tag in item['tags']]
    # First try tag-based identification (case-insensitive).
    for lowered, canonical in tlut.items():
        if lowered in ltags:
            return buildReleaseMessageWithType(item, canonical, vol, chp, frag=frag, postfix=postfix)
    # Then fall back to title-prefix matching; the flag says whether the word
    # 'chapter' must also be present in the title.
    chp_prefixes = [("He's the Legendary Guard, Isn't He?", True), ("Black Iron's Glory", True), ('Spiritual furnace', True), ('Possessing Nothing', True), ('Lord of the Star Ocean', True), ('The Ancestor of our Sect Isnt Acting like an Elder', True), ('Dragon-Marked War God', False), ('The Daoist Seal', True), ('Eternal Life', True), ('When God Made Me', True), ('Big Life', True), ('Deva Wizard', True), ('Urban Banished Immortal', True), ('The Prodigy Daughter Of The Medicine God', True), ('Emperor of Tomorrow', True), ('ID The Greatest Fusion Fantasy', True), ('God Hunter', True), ('Immortal', True), ('Martial Emperor Reborn ', True), ('Martial God Conqueror', True), ('My Wife Is a Beautiful CEO', True), ('World Defying Dan God', True), ('Game Market 1983', False), ('Spirit Vessel', False), ('Instant Kill', False), ('My Daoist Life', False), ('Tales of the Reincarnated Lord', False), ('Cohen of the Rebellion', False), ('Post-80s Cultivation Journal', False), ('Immortal', False), ('Everlasting Immortal Firmament -', False), ('The Great Game', False), ('Grasping Evil', False), ('My Cold and Beautiful Wife', False), ('The Daoist Seal', False)]
    for series, require_chp in chp_prefixes:
        if lower_title.startswith(series.lower()) and (not require_chp or 'chapter' in lower_title):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
def analyze_called_function(ghidra_analysis, func, index, prev):
    """Trace the sources feeding every RETURN value of a decompiled function.

    Decompiles `func`, scans its p-code for RETURN operations, and for each
    resolvable return value collects results from ``process_one_varnode``
    using the call-site sources relevant at the return address.

    :param ghidra_analysis: wrapper holding the Ghidra decompiler/monitor state
    :param func: the Ghidra function to analyze
    :param index: varnode/argument index, passed through to process_one_varnode
    :param prev: previously accumulated traversal state, passed through as well
    :return: flat list of results from all processed return values
    """
    result = []
    high_func = decompile_function(ghidra_analysis, func)
    pcode_ops = high_func.getPcodeOps()
    for pcode_op in iter_array(pcode_ops, ghidra_analysis.monitor):
        # Only RETURN p-code ops are of interest here.
        if (pcode_op.getOpcode() != PcodeOp.RETURN):
            continue
        # Input 1 of a RETURN op is the returned varnode; it may be absent
        # (e.g. for void returns).
        return_value = pcode_op.getInput(1)
        if (return_value is None):
            print('--> Could not resolve return value from {}'.format(func.getName()))
            continue
        pc_address = pcode_op.getSeqnum().getTarget()
        # NOTE(review): call-site sources are recomputed per RETURN op; if
        # get_call_site_pcode_ops is pure this could be hoisted out of the
        # loop — confirm before changing.
        (_, sources_pcode_ops) = get_call_site_pcode_ops(ghidra_analysis, func)
        relevant_sources = get_relevant_sources(ghidra_analysis, func, pc_address, sources_pcode_ops)
        result.extend(process_one_varnode(ghidra_analysis, func, index, return_value, relevant_sources, prev))
    return result
class TransformTest(unittest.TestCase):
    """Unit tests for Event-stream transformation operators.

    `array` is a module-level test fixture sequence defined elsewhere in this
    file; `Event.marble(...)` builds a timed stream from a marble diagram where
    leading '_' characters delay the stream's start.
    """
    def test_constant(self):
        # constant() replaces every emitted value with the given one.
        event = Event.sequence(array).constant(42)
        self.assertEqual(event.run(), ([42] * len(array)))
    def test_previous(self):
        # previous(2) emits the value seen two events earlier, so the last
        # two inputs never appear.
        event = Event.sequence(array).previous(2)
        self.assertEqual(event.run(), array[:(- 2)])
    def test_iterate(self):
        event = Event.sequence(array).iterate([5, 4, 3, 2, 1])
        self.assertEqual(event.run(), [5, 4, 3, 2, 1])
    def test_count(self):
        s = 'abcdefghij'
        event = Event.sequence(s).count()
        self.assertEqual(event.run(), array[:len(s)])
    def test_enumerate(self):
        s = 'abcdefghij'
        event = Event.sequence(s).enumerate()
        self.assertEqual(event.run(), list(enumerate(s)))
    def test_timestamp(self):
        # Emission jitter should stay below one interval on average.
        interval = 0.002
        event = Event.sequence(array, interval=interval).timestamp()
        times = event.pluck(0).run()
        std = np.std((np.diff(times) - interval))
        self.assertLess(std, interval)
    def test_partial(self):
        event = Event.sequence(array).partial(42)
        self.assertEqual(event.run(), [(42, i) for i in array])
    def test_partial_right(self):
        event = Event.sequence(array).partial_right(42)
        self.assertEqual(event.run(), [(i, 42) for i in array])
    def test_star(self):
        # star() unpacks tuple events into positional arguments of the sink.
        def f(i, j):
            r.append((i, j))
        r = []
        event = Event.sequence(array).map((lambda i: (i, i))).star().connect(f)
        self.assertEqual(event.run(), r)
    def test_pack(self):
        event = Event.sequence(array).pack()
        self.assertEqual(event.run(), [(i,) for i in array])
    def test_pluck(self):
        Person = namedtuple('Person', 'name address')
        Address = namedtuple('Address', 'city street number zipcode')
        data = [Person('Max', Address('Delft', 'Levelstreet', '3', '2333AS')), Person('Elena', Address('Leiden', 'Punt', '122', '2412DE')), Person('Fem', Address('Rotterdam', 'Burgundy', '12', '3001RT'))]
        def event():
            return Event.sequence(data)
        # pluck supports both index paths ('0.name') and attribute paths
        # ('.address.street').
        self.assertEqual(event().pluck('0.name', '.address.street').run(), [(d.name, d.address.street) for d in data])
    def test_sync_map(self):
        event = Event.sequence(array).map((lambda x: (x * x)))
        self.assertEqual(event.run(), [(i * i) for i in array])
    def test_sync_star_map(self):
        event = Event.sequence(array)
        event = event.map((lambda i: (i, i))).star().map((lambda x, y: ((x / 2) - y)))
        self.assertEqual(event.run(), [((x / 2) - y) for (x, y) in zip(array, array)])
    def test_async_map(self):
        # Coroutine mappers are awaited; ordering is preserved by default.
        async def coro(x):
            (await asyncio.sleep(0.1))
            return (x * x)
        event = Event.sequence(array).map(coro)
        self.assertEqual(event.run(), [(i * i) for i in array])
    def test_async_map_unordered(self):
        # Each call sleeps slightly less than the previous one, so unordered
        # mapping completes in roughly reverse order.
        class A():
            def __init__(self):
                self.t = 0.1
            async def coro(self, x):
                self.t -= 0.01
                (await asyncio.sleep(self.t))
                return (x * x)
        a = A()
        event = Event.range(10).map(a.coro, ordered=False)
        result = set(event.run())
        expected = set(((i * i) for i in reversed(range(10))))
        self.assertEqual(result, expected)
    def test_mergemap(self):
        # mergemap interleaves all inner streams as their events arrive.
        marbles = ['A B C D', '_1 2 3 4', '__K L M N']
        event = Event.range(3).mergemap((lambda v: Event.marble(marbles[v])))
        self.assertEqual(event.run(), ['A', '1', 'K', 'B', '2', 'L', '3', 'C', 'M', '4', 'D', 'N'])
    def test_mergemap2(self):
        a = ['ABC', 'UVW', 'XYZ']
        event = Event.range(3, interval=0.01).mergemap((lambda v: Event.sequence(a[v], (0.05 * v))))
        self.assertEqual(event.run(), ['A', 'B', 'C', 'U', 'X', 'V', 'W', 'Y', 'Z'])
    def test_concatmap(self):
        # concatmap runs inner streams one after another, dropping events the
        # next stream produced before it was subscribed.
        marbles = ['A B C D', '_ 1 2 3 4', '__ K L M N']
        event = Event.range(3).concatmap((lambda v: Event.marble(marbles[v])))
        self.assertEqual(event.run(), ['A', 'B', '1', '2', '3', 'K', 'L', 'M', 'N'])
    def test_chainmap(self):
        # chainmap plays every inner stream to completion, in order.
        marbles = ['A B C D ', '_ 1 2 3 4', '__ K L M N']
        event = Event.range(3).chainmap((lambda v: Event.marble(marbles[v])))
        self.assertEqual(event.run(), ['A', 'B', 'C', 'D', '1', '2', '3', '4', 'K', 'L', 'M', 'N'])
    def test_switchmap(self):
        # switchmap always switches to the newest inner stream.
        marbles = ['A B C D ', '_ K L M N', '__ 1 2 3 4']
        event = Event.range(3).switchmap((lambda v: Event.marble(marbles[v])))
        self.assertEqual(event.run(), ['A', 'B', '1', '2', 'K', 'L', 'M', 'N'])
def extractMysakuratranslationsOrg(item):
    """Map a mysakuratranslations.org feed `item` onto a release message.

    Returns None for previews or items without chapter/volume info, a release
    message when a known tag matches, otherwise False.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, canonical series name, translation type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def is_data_dir_initialized(trinity_config: 'TrinityConfig') -> bool:
    """Return True if every on-disk artifact the node needs already exists.

    Checks, in order: the data/pid/ipc/enr-db directories, the logfile (and
    its parent directory), an on-disk nodekey file when one is configured,
    and finally that a nodekey value is available at all.
    """
    # Required directories, checked lazily in the same order as before.
    for dir_attr in ('data_dir', 'pid_dir', 'ipc_dir', 'enr_db_dir'):
        if not os.path.exists(getattr(trinity_config, dir_attr)):
            return False
    log_path = trinity_config.logfile_path
    if not log_path.parent.exists() or not log_path.exists():
        return False
    # When a nodekey path is configured, the key file itself must exist.
    if trinity_config.nodekey_path is not None:
        if not os.path.exists(trinity_config.nodekey_path):
            return False
    # A nodekey value must be resolvable either way.
    if trinity_config.nodekey is None:
        return False
    return True
def main(args=None):
    """CLI entry point: parse the argument list and dispatch to a subcommand.

    When `args` is None the process argv (minus the program name) is used.
    Parse failures are reported on stderr and terminate with exit code 1.
    """
    check_python_version()
    parser = Parser()
    if args is None:
        args = sys.argv[1:]
    try:
        cmd_name, cmd_args = parser.parseopts(args)
    except TodoError as err:
        sys.stderr.write(f'{err}\n')
        sys.exit(1)
    return commands_dict[cmd_name]()
class FailmanagerComplex(unittest.TestCase):
def setUp(self):
super(FailmanagerComplex, self).setUp()
self.__failManager = FailManager()
self.__saved_ll = failmanager.logLevel
failmanager.logLevel = 3
def tearDown(self):
super(FailmanagerComplex, self).tearDown()
failmanager.logLevel = self.__saved_ll
def _ip_range(maxips):
class _ip(list):
def __str__(self):
return '.'.join(map(str, self))
def __repr__(self):
return str(self)
def __key__(self):
return str(self)
def __hash__(self):
return int(((((self[0] << 24) | (self[1] << 16)) | (self[2] << 8)) | self[3]))
i = 0
c = [127, 0, 0, 0]
while (i < maxips):
for n in range(3, 0, (- 1)):
if (c[n] < 255):
c[n] += 1
break
c[n] = 0
(yield (i, _ip(c)))
i += 1
def testCheckIPGenerator(self):
for (i, ip) in self._ip_range((65536 if (not unittest.F2B.fast) else 1000)):
if (i == 254):
self.assertEqual(str(ip), '127.0.0.255')
elif (i == 255):
self.assertEqual(str(ip), '127.0.1.0')
elif (i == 1000):
self.assertEqual(str(ip), '127.0.3.233')
elif (i == 65534):
self.assertEqual(str(ip), '127.0.255.255')
elif (i == 65535):
self.assertEqual(str(ip), '127.1.0.0') |
class FxTask():
    """Fiat exchange-rate service.

    Tracks a configured currency and exchange, periodically refreshes spot
    quotes (and, when enabled, historical rates), and formats satoshi amounts
    as fiat strings. Configuration is persisted through `self.config`.
    """
    def __init__(self, config, network):
        self.config = config
        self.network = network
        self.ccy = self.get_currency()
        # When True, the next refresh cycle also downloads historical rates.
        self.fetch_history = False
        self.refresh_event = app_state.async_.event()
        # Set by history_rate() when a historical lookup fell back to spot.
        self.history_used_spot = False
        # UI widget handles (populated elsewhere, if at all).
        self.ccy_combo = None
        self.hist_checkbox = None
        self.cache_dir = os.path.join(config.path, 'cache')
        self.set_exchange(self.config_exchange())
        if (not os.path.exists(self.cache_dir)):
            os.mkdir(self.cache_dir)
    def get_currencies(self):
        """Return the sorted list of currencies supported by any exchange."""
        h = self.get_history_config()
        d = get_exchanges_by_ccy(h)
        return sorted(d.keys())
    def get_exchanges_by_ccy(self, ccy, h):
        """Return the exchanges quoting `ccy` (with history support if `h`)."""
        d = get_exchanges_by_ccy(h)
        return d.get(ccy, [])
    def ccy_amount_str(self, amount, commas, default_prec=2):
        """Format `amount` with the currency's precision, optionally with
        thousands separators."""
        prec = CCY_PRECISIONS.get(self.ccy, default_prec)
        fmt_str = ('{:%s.%df}' % ((',' if commas else ''), max(0, prec)))
        try:
            rounded_amount = round(amount, prec)
        except decimal.InvalidOperation:
            # e.g. signaling NaN; fall back to the unrounded value.
            rounded_amount = amount
        return fmt_str.format(rounded_amount)
    async def refresh_loop(self):
        """Refresh quotes every 150s or whenever `refresh_event` fires."""
        while True:
            async with ignore_after(150):
                (await self.refresh_event.wait())
                self.refresh_event.clear()
            if (not self.is_enabled()):
                continue
            if (self.fetch_history and self.show_history()):
                self.fetch_history = False
                (await self.exchange.get_historical_rates(self.ccy, self.cache_dir))
                if self.network:
                    self.network.trigger_callback(NetworkEventNames.HISTORICAL_EXCHANGE_RATES)
            (await self.exchange.update(self.ccy))
            if self.network:
                self.network.trigger_callback(NetworkEventNames.EXCHANGE_RATE_QUOTES)
    def is_enabled(self):
        return bool(self.config.get('use_exchange_rate'))
    def set_enabled(self, enabled):
        return self.config.set_key('use_exchange_rate', enabled)
    def get_history_config(self):
        return bool(self.config.get('history_rates'))
    def set_history_config(self, enabled):
        self.config.set_key('history_rates', enabled)
        if (self.is_enabled() and enabled):
            self.trigger_history_refresh()
    def get_fiat_address_config(self):
        return bool(self.config.get('fiat_address'))
    def set_fiat_address_config(self, b):
        self.config.set_key('fiat_address', b)
    def get_currency(self):
        """Currently configured fiat currency (default 'EUR')."""
        return self.config.get('currency', 'EUR')
    def config_exchange(self):
        """Currently configured exchange name (default 'CoinGecko')."""
        return self.config.get('use_exchange', 'CoinGecko')
    def show_history(self):
        """True when historical rates are enabled and available for `ccy`."""
        return (self.is_enabled() and self.get_history_config() and (self.ccy in self.exchange.history_ccys()))
    def trigger_history_refresh(self):
        # Wakes refresh_loop so history is re-downloaded on the next cycle.
        self.fetch_history = True
        self.refresh_event.set()
    def set_currency(self, ccy):
        if (self.get_currency() != ccy):
            self.ccy = ccy
            self.config.set_key('currency', ccy, True)
            self.trigger_history_refresh()
    def set_exchange(self, name):
        """Instantiate the exchange class named `name` (fallback: CoinGecko)."""
        # Exchange classes are looked up by name in this module's globals.
        class_ = globals().get(name, CoinGecko)
        logger.debug('using exchange %s', name)
        if (self.config_exchange() != name):
            self.config.set_key('use_exchange', name, True)
        self.exchange = class_()
        self.trigger_history_refresh()
    def exchange_rate(self):
        """Spot rate for `ccy` as a Decimal, or None when unavailable."""
        rate = self.exchange.quotes.get(self.ccy)
        if rate:
            return Decimal(rate)
    def format_amount_and_units(self, btc_balance):
        amount_str = self.format_amount(btc_balance)
        return ('' if (not amount_str) else ('%s %s' % (amount_str, self.ccy)))
    def format_amount(self, btc_balance):
        rate = self.exchange_rate()
        return ('' if (rate is None) else self.value_str(btc_balance, rate))
    def get_fiat_status(self, btc_balance, base_unit, decimal_point):
        """Return ('1 <unit>', '<fiat value> <ccy>') or (None, None)."""
        rate = self.exchange_rate()
        if (rate is None):
            return (None, None)
        default_prec = 2
        if (base_unit == 'bits'):
            # 'bits' are small units, so show extra fiat precision.
            default_prec = 4
        bitcoin_value = f'1 {base_unit}'
        fiat_value = (f'{self.value_str((COIN / (10 ** (8 - decimal_point))), rate, default_prec)} ' + f'{self.ccy}')
        return (bitcoin_value, fiat_value)
    def value_str(self, satoshis, rate, default_prec=2):
        """Convert `satoshis` at `rate` to a formatted fiat string."""
        if (satoshis is None):
            return _('Unknown')
        if rate:
            value = ((Decimal(satoshis) / COIN) * Decimal(rate))
            return ('%s' % self.ccy_amount_str(value, True, default_prec))
        return _('No data')
    def history_rate(self, d_t):
        """Historical rate at datetime `d_t`; falls back to the spot rate for
        dates within the last two days."""
        rate = self.exchange.historical_rate(self.ccy, d_t)
        if ((rate is None) and ((datetime.datetime.today().date() - d_t.date()).days <= 2)):
            rate = self.exchange.quotes.get(self.ccy)
            self.history_used_spot = True
        return (Decimal(rate) if (rate is not None) else None)
    def historical_value_str(self, satoshis, d_t):
        rate = self.history_rate(d_t)
        return self.value_str(satoshis, rate)
    def historical_value(self, satoshis, d_t):
        rate = self.history_rate(d_t)
        if rate:
            return ((Decimal(satoshis) / COIN) * Decimal(rate))
    def timestamp_rate(self, timestamp):
        from .util import timestamp_to_datetime
        date = timestamp_to_datetime(timestamp)
        return self.history_rate(date)
class OptionSeriesPyramid3dSonificationDefaultspeechoptionsMapping(Options):
    """Mapping options for pyramid3d sonification default speech options.

    Fix: in the flattened source the setter ``def text`` shadowed its getter;
    the ``@property`` / ``@text.setter`` decorators are restored (sub-data
    accessors are restored as read-only properties for consistency).
    """

    @property
    def pitch(self) -> 'OptionSeriesPyramid3dSonificationDefaultspeechoptionsMappingPitch':
        return self._config_sub_data('pitch', OptionSeriesPyramid3dSonificationDefaultspeechoptionsMappingPitch)

    @property
    def playDelay(self) -> 'OptionSeriesPyramid3dSonificationDefaultspeechoptionsMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesPyramid3dSonificationDefaultspeechoptionsMappingPlaydelay)

    @property
    def rate(self) -> 'OptionSeriesPyramid3dSonificationDefaultspeechoptionsMappingRate':
        return self._config_sub_data('rate', OptionSeriesPyramid3dSonificationDefaultspeechoptionsMappingRate)

    @property
    def text(self):
        """Text mapped to speech (default: None)."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionSeriesPyramid3dSonificationDefaultspeechoptionsMappingTime':
        return self._config_sub_data('time', OptionSeriesPyramid3dSonificationDefaultspeechoptionsMappingTime)

    @property
    def volume(self) -> 'OptionSeriesPyramid3dSonificationDefaultspeechoptionsMappingVolume':
        return self._config_sub_data('volume', OptionSeriesPyramid3dSonificationDefaultspeechoptionsMappingVolume)
class C3Stanford(OptChart.OptionsChart):
    """Chart-options wrapper for a C3 'stanford' diagram.

    Each accessor lazily creates and returns the corresponding configuration
    sub-section object via ``_config_sub_data``.

    NOTE(review): sibling Option* classes in this source appear to have lost
    ``@property`` decorators during extraction; these accessors may originally
    have been properties too — confirm against the upstream package.
    """
    def axis(self) -> OptionAxis:
        return self._config_sub_data('axis', OptionAxis)
    def point(self) -> OptionsPoints:
        return self._config_sub_data('point', OptionsPoints)
    def data(self) -> C3StanfordData:
        return self._config_sub_data('data', C3StanfordData)
    def grid(self) -> OptionsGrid:
        return self._config_sub_data('grid', OptionsGrid)
    def zoom(self) -> OptionsZoom:
        return self._config_sub_data('zoom', OptionsZoom)
class MockEdge():
    """Test double for a control-flow-graph edge.

    Carries a source and target "basic block" plus the branch type; the
    blocks are lightweight namedtuples where both the block index and the
    block start address are set to the same integer id.
    """
    # `index` is the block index; `source_block.start` mimics a real block's
    # start address attribute.
    BasicBlock = namedtuple('BasicBlock', ['index', 'source_block'])
    fakeblock = namedtuple('fakeblock', 'start')
    def __init__(self, source: int, target: int, _type: BranchType):
        self.source = self.BasicBlock(index=source, source_block=self.fakeblock(start=source))
        self.target = self.BasicBlock(index=target, source_block=self.fakeblock(start=target))
        self.type = _type
class TestAdobeASCII85(TestUnpackerBase):
    """Integration tests for the 'Adobe ASCII85' unpacker plugin."""
    def test_unpacker_selection_generic(self):
        # The plugin must be selected for the 'firmware/adobe85' MIME type.
        self.check_unpacker_selection('firmware/adobe85', 'Adobe ASCII85')
    def test_extraction(self):
        # A valid a85 test file yields exactly one decoded file whose content
        # and success status are verified.
        (files, meta_data) = self.unpacker.extract_files_from_file(Path(TEST_DATA_DIR, 'testfile.adobe85'), self.tmp_dir.name)
        assert (len(files) == 1)
        content = Path(files[0]).read_bytes()
        assert (b'test for a FACT plugin' in content)
        assert ('Success' in meta_data['output'])
def test_extraction_decoding_error():
    """A crashing a85 decoder must be reported as 'Unknown' in the output.

    Fix: the flattened source contained the bare tuple
    ``('base64.a85decode', i_always_crash)`` — a stripped ``@patch``
    decorator that had become a no-op, so the decoder was never replaced.
    Restored here as a context manager with a local stdlib import.
    """
    from unittest.mock import patch
    file_path = Path(TEST_DATA_DIR, 'testfile.adobe85')
    with patch('base64.a85decode', i_always_crash):
        with TemporaryDirectory() as tmp_dir:
            meta_data = unpack_function(file_path, tmp_dir)
    assert ('Unknown' in meta_data['output'])
def test_extraction_filenotfound_error():
    """An unreadable input file must be reported as 'Failed to open file'.

    Fix: the flattened source contained the bare tuple
    ``('pathlib.Path.open', i_always_crash_file_not_found)`` — a stripped
    ``@patch`` decorator that had become a no-op. Restored as a context
    manager with a local stdlib import.
    """
    from unittest.mock import patch
    file_path = Path(TEST_DATA_DIR, 'testfile2.adobe85')
    with patch('pathlib.Path.open', i_always_crash_file_not_found):
        with TemporaryDirectory() as tmp_dir:
            meta_data = unpack_function(file_path, tmp_dir)
    assert ('Failed to open file' in meta_data['output'])
def test_dolt_table_read_task_config(db, dolt_config):
    """Reading the 'bar' Dolt table through the workflow yields 'Dilly'.

    NOTE(review): the inner functions look like flytekit @task/@workflow
    definitions whose decorators were lost during extraction — as written
    they execute as plain Python functions; confirm against upstream.
    """
    def my_dolt(t: DoltTable) -> str:
        # First value of the 'name' column of the table's dataframe.
        df = t.data
        return df.name.values[0]
    def my_table() -> DoltTable:
        # Point the shared config at the 'bar' table before constructing.
        dolt_config.tablename = 'bar'
        t = DoltTable(config=dolt_config)
        return t
    def my_wf() -> str:
        t = my_table()
        return my_dolt(t=t)
    x = my_wf()
    assert (x == 'Dilly')
def __stopOnIOError(logSys=None, logHndlr=None):
    """Neutralize logging I/O after an IOError and optionally exit.

    Detaches the first handler from `logSys`, turns the given handler's
    ``close`` and every StreamHandler's ``flush`` into no-ops (so subsequent
    logging cannot raise again), then exits the process when the
    monkey-patched flag ``logging.exitOnIOError`` is set.
    """
    if (logSys and len(logSys.handlers)):
        logSys.removeHandler(logSys.handlers[0])
    if logHndlr:
        logHndlr.close = (lambda : None)
    # NOTE: patches the *class* attribute — affects all StreamHandler
    # instances process-wide, deliberately.
    logging.StreamHandler.flush = (lambda self: None)
    # `exitOnIOError` is not a stdlib attribute; presumably injected elsewhere
    # in this suite — verify before reuse.
    if logging.exitOnIOError:
        try:
            sys.stderr.close()
        except:
            pass
        sys.exit(0)
class TestIntegerField(FieldValues):
    """Valid and invalid input/output values for `serializers.IntegerField`."""
    # Integer-like strings and whole floats coerce to int.
    valid_inputs = {'1': 1, '0': 0, 1: 1, 0: 0, 1.0: 1, 0.0: 0, '1.0': 1}
    # Non-integral numbers and non-numeric strings are rejected with the
    # field's standard error message.
    invalid_inputs = {0.5: ['A valid integer is required.'], 'abc': ['A valid integer is required.'], '0.5': ['A valid integer is required.']}
    outputs = {'1': 1, '0': 0, 1: 1, 0: 0, 1.0: 1, 0.0: 0}
    field = serializers.IntegerField()
def test_incorrect_values_in_forward_init_file_fails(tmp_path):
    """A forward-init file containing non-numeric text must raise ValueError
    with a message naming the offending file."""
    (tmp_path / 'forward_init_1').write_text('incorrect', encoding='utf-8')
    # The '%d' in the template is filled with the realization number (1).
    with pytest.raises(ValueError, match=f"{(tmp_path / 'forward_init_1')} did not contain numbers, got object"):
        GenKwConfig('GEN_KW', True, None, None, [], str((tmp_path / 'forward_init_%d'))).read_from_runpath(tmp_path, 1)
class GlobalNamespace():
    """Context-variable backed per-request globals (request UUID and captured
    API timing).

    Fix: all four accessors take ``cls`` but were missing their
    ``@classmethod`` decorators, so calling e.g.
    ``GlobalNamespace.set_request_uuid('x')`` bound the string to ``cls``.
    """
    _request_uuid = contextvars.ContextVar('request_uuid', default='N/A')
    _api_captured_time_ms = contextvars.ContextVar('api_captured_time_ms', default=CapturedTimeMS())

    @classmethod
    def set_request_uuid(cls, uuid: str) -> None:
        cls._request_uuid.set(uuid)

    @classmethod
    def get_request_uuid(cls) -> str:
        return cls._request_uuid.get()

    @classmethod
    def set_api_captured_time_ms(cls, api_captured_time_ms: CapturedTimeMS) -> None:
        cls._api_captured_time_ms.set(api_captured_time_ms)

    @classmethod
    def get_api_captured_time_ms(cls) -> CapturedTimeMS:
        return cls._api_captured_time_ms.get()
class AnyReply(base_tests.SimpleDataPlane):
    """Verify the switch answers a Nicira controller-role request either with
    a role reply (role echoed back) or a bad-vendor error."""

    def runTest(self):
        request = ofp.message.nicira_controller_role_request(role=NX_ROLE_MASTER)
        (response, pkt) = self.controller.transact(request)
        self.assertTrue((response is not None), 'No reply to Nicira role request')
        if isinstance(response, ofp.message.nicira_controller_role_reply):
            logging.info('Role reply received')
            logging.info(response.show())
            # Fix: assertEquals is a deprecated alias removed in Python 3.12.
            self.assertEqual(response.role, NX_ROLE_MASTER)
        elif isinstance(response, ofp.message.bad_request_error_msg):
            logging.info('Error message received')
            logging.info(response.show())
            self.assertEqual(response.code, ofp.OFPBRC_BAD_VENDOR)
        else:
            raise AssertionError('Unexpected reply type')
class OptionSeriesHistogramSonificationDefaultinstrumentoptions(Options):
    """Default instrument options for histogram-series sonification.

    Fix: in the flattened source each setter ``def`` shadowed the getter of
    the same name; the ``@property`` / ``@<name>.setter`` decorators are
    restored (sub-data accessors as read-only properties).
    """

    @property
    def activeWhen(self) -> 'OptionSeriesHistogramSonificationDefaultinstrumentoptionsActivewhen':
        return self._config_sub_data('activeWhen', OptionSeriesHistogramSonificationDefaultinstrumentoptionsActivewhen)

    @property
    def instrument(self):
        """Instrument name (default: 'piano')."""
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionSeriesHistogramSonificationDefaultinstrumentoptionsMapping':
        return self._config_sub_data('mapping', OptionSeriesHistogramSonificationDefaultinstrumentoptionsMapping)

    @property
    def midiName(self):
        """MIDI track name (default: None)."""
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionSeriesHistogramSonificationDefaultinstrumentoptionsPointgrouping':
        return self._config_sub_data('pointGrouping', OptionSeriesHistogramSonificationDefaultinstrumentoptionsPointgrouping)

    @property
    def roundToMusicalNotes(self):
        """Whether pitches snap to musical notes (default: True)."""
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        """Whether the play marker is shown (default: True)."""
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        """Track type (default: 'instrument')."""
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
class ServiceMappingPluginView(ModelViewSet):
    """CRUD + download endpoints for service-mapping plugin archives.

    Fixes: ``list``/``destroy`` called ``super().<m>(self, request, args,
    kwargs)``, passing the instance twice and the packed tuples unstarred;
    ``destroy`` mutated a shared class-level ``response_data`` dict; and the
    ``download`` decorator had been garbled into a syntactically invalid bare
    keyword tuple — restored as ``@action`` (requires
    ``from rest_framework.decorators import action`` at the top of this file).
    """
    queryset = ServiceMappingPluginModel.objects.all()
    serializer_class = ServiceMappingPluginSerializer
    # Kept for backward compatibility; destroy() now assigns a fresh
    # per-instance dict instead of mutating this shared mapping.
    response_data = dict()

    def list(self, request, *args, **kwargs):
        return super().list(request, *args, **kwargs)

    def create(self, request, *args, **kwargs):
        return super().create(request, *args, **kwargs)

    def retrieve(self, request, *args, **kwargs):
        return super().retrieve(request, *args, **kwargs)

    def update(self, request, *args, **kwargs):
        return super().update(request, *args, **kwargs)

    def destroy(self, request, *args, **kwargs):
        """Delete the plugin row plus its uploaded file and on-disk folder."""
        self_object = self.get_object()
        file = self_object.pluginFile
        if file:
            file_folder = os.path.join(settings.PLUGIN_ROOT, self_object.name)
            shutil.rmtree(file_folder)
            file.delete()
        super().destroy(request, *args, **kwargs)
        self.response_data = {'status': PluginOperationStatus.DELETE}
        return JsonResponse(self.response_data, status=200)

    @action(detail=False, methods=['get'], url_path='download/(?P<name>(.)*)/(?P<filename>(.)*)')
    def download(self, request, *args, **kwargs):
        """Stream the stored plugin zip back to the client; 404 on I/O error."""
        try:
            plugin_obj = ServiceMappingPluginModel.objects.get(name=kwargs['name'])
            with plugin_obj.pluginFile.open() as f:
                response = HttpResponse(f.read(), content_type='application/zip')
                response['Content-Disposition'] = ('inline; filename=' + kwargs['filename'])
                return response
        except IOError:
            raise Http404
class Ebml():
    """Minimal EBML parser (the container framing used by Matroska/WebM).

    Decodes elements from a file into a dict keyed by the names in `tags`,
    which maps element IDs to ``(name, type)`` pairs; IDs not present in
    `tags` are skipped.
    """

    def __init__(self, location, tags):
        self.tags = tags
        self.open(location)

    def __del__(self):
        # open() may have failed before `file` was bound; never raise in __del__.
        if getattr(self, 'file', None) is not None:
            self.close()

    def open(self, location):
        """Open `location` for binary reading and record its total size."""
        self.file = f = open(location, 'rb')
        f.seek(0, 2)
        self.size = f.tell()
        f.seek(0, 0)

    def seek(self, offset, mode):
        self.file.seek(offset, mode)

    def tell(self):
        return self.file.tell()

    def read(self, length):
        return self.file.read(length)

    def close(self):
        self.file.close()

    def readID(self):
        """Read a variable-length element ID (1-4 bytes, EBML varint)."""
        b = self.read(1)
        b1 = ord(b)
        if (b1 & 128):
            # 1-byte ID is returned as the raw byte (matching 1-byte tag keys).
            return b
        elif (b1 & 64):
            return unpack('>H', (b + self.read(1)))[0]
        elif (b1 & 32):
            return unpack('>L', ((b'\x00' + b) + self.read(2)))[0]
        elif (b1 & 16):
            return unpack('>L', (b + self.read(3)))[0]
        else:
            raise EbmlException(('invalid element ID (leading byte 0x%02X)' % b1))

    def readSize(self):
        """Read a variable-length element size (1-8 bytes, EBML varint).

        The position of the highest set bit in the first byte gives the total
        length; the remaining low bits of that byte are data bits.
        """
        b1 = ord(self.read(1))
        if (b1 & 128):
            return (b1 & 127)
        elif (b1 & 64):
            return unpack('>H', (bchr((b1 & 63)) + self.read(1)))[0]
        elif (b1 & 32):
            return unpack('>L', ((b'\x00' + bchr((b1 & 31))) + self.read(2)))[0]
        elif (b1 & 16):
            return unpack('>L', (bchr((b1 & 15)) + self.read(3)))[0]
        elif (b1 & 8):
            # BUG FIX: was `b1 & 4096`, which can never be true for a single
            # byte (0-255), making 5-byte sizes unparseable.
            return unpack('>Q', ((b'\x00\x00\x00' + bchr((b1 & 7))) + self.read(4)))[0]
        elif (b1 & 4):
            return unpack('>Q', ((b'\x00\x00' + bchr((b1 & 3))) + self.read(5)))[0]
        elif (b1 & 2):
            return unpack('>Q', ((b'\x00' + bchr((b1 & 1))) + self.read(6)))[0]
        elif (b1 & 1):
            return unpack('>Q', (b'\x00' + self.read(7)))[0]
        else:
            assert (b1 == 0)
            raise EbmlException('undefined element size')

    def readInteger(self, length, signed):
        """Read a big-endian integer of `length` (1-8) bytes, optionally
        applying two's-complement sign correction."""
        if (length == 1):
            value = ord(self.read(1))
        elif (length == 2):
            value = unpack('>H', self.read(2))[0]
        elif (length == 3):
            value = unpack('>L', (b'\x00' + self.read(3)))[0]
        elif (length == 4):
            value = unpack('>L', self.read(4))[0]
        elif (length == 5):
            value = unpack('>Q', (b'\x00\x00\x00' + self.read(5)))[0]
        elif (length == 6):
            value = unpack('>Q', (b'\x00\x00' + self.read(6)))[0]
        elif (length == 7):
            value = unpack('>Q', (b'\x00' + self.read(7)))[0]
        elif (length == 8):
            value = unpack('>Q', self.read(8))[0]
        else:
            raise EbmlException(("don't know how to read %r-byte integer" % length))
        if signed:
            nbits = ((8 - length) + (8 * (length - 1)))
            if (value >= (1 << (nbits - 1))):
                value -= (1 << nbits)
        return value

    def readFloat(self, length):
        """Read an IEEE-754 float: 4-byte single or 8-byte double only."""
        if (length == 4):
            return unpack('>f', self.read(4))[0]
        elif (length == 8):
            return unpack('>d', self.read(8))[0]
        else:
            raise EbmlException(("don't know how to read %r-byte float" % length))

    def parse(self, from_=0, to=None):
        """Parse elements in [from_, to) and return {name: [values]}.

        Repeated elements accumulate in a list under their tag name; MASTER
        elements recurse into nested dicts. Malformed IDs/values produce
        warnings rather than exceptions.
        """
        if (to is None):
            to = self.size
        self.seek(from_, 0)
        node = {}
        while (self.tell() < to):
            try:
                id = self.readID()
            except EbmlException as e:
                warn(EbmlWarning(e))
                return node
            size = self.readSize()
            if (size == 127):
                # All-ones one-byte size marks "unknown length"; consume the
                # remainder of the current scope.
                warn(EbmlWarning("don't know how to handle unknown-sized element"))
                size = (to - self.tell())
            try:
                (key, type_) = self.tags[id]
            except KeyError:
                # Element not of interest: skip its payload.
                self.seek(size, 1)
                continue
            try:
                if (type_ is SINT):
                    value = self.readInteger(size, True)
                elif (type_ is UINT):
                    value = self.readInteger(size, False)
                elif (type_ is FLOAT):
                    value = self.readFloat(size)
                elif (type_ is STRING):
                    value = self.read(size).decode('ascii')
                elif (type_ is UTF8):
                    value = self.read(size).decode('utf-8')
                elif (type_ is DATE):
                    # Stored relative to the Matroska epoch (2001-01-01).
                    us = (self.readInteger(size, True) / 1000.0)
                    from datetime import datetime, timedelta
                    value = (datetime(2001, 1, 1) + timedelta(microseconds=us))
                elif (type_ is MASTER):
                    tell = self.tell()
                    value = self.parse(tell, (tell + size))
                elif (type_ is BINARY):
                    value = BinaryData(self.read(size))
                else:
                    assert False, type_
            except (EbmlException, UnicodeDecodeError) as e:
                warn(EbmlWarning(e))
            else:
                try:
                    parentval = node[key]
                except KeyError:
                    parentval = node[key] = []
                parentval.append(value)
        return node
def extractElementalCobalt(item):
    """Map an Elemental Cobalt feed `item` onto a release message.

    Returns None for previews or items without chapter info, a release
    message for recognized series (by title prefix or tag), else False.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    lower_title = item['title'].lower()
    if not (chp or vol or frag) or 'preview' in lower_title:
        return None
    tags = item['tags']
    if lower_title.startswith('arifureta chapter') or ('Arifureta Translation' in tags):
        return buildReleaseMessageWithType(item, 'Arifureta Shokugyou de Sekai Saikyou', vol, chp, frag=frag, postfix=postfix)
    if lower_title.startswith('reincarnated as a villager strongest slow-life'):
        return buildReleaseMessageWithType(item, 'Reincarnated as a Villager ~ Strongest Slow-life', vol, chp, frag=frag, postfix=postfix)
    if lower_title.startswith('requiem to the stars'):
        return buildReleaseMessageWithType(item, 'Requiem to the Stars', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    # OEL series are recognized by tag rather than title prefix; the tag text
    # doubles as the series name.
    for tag_name in ('Hawtness', 'Time and Place', 'Tales of an Enchantress'):
        if tag_name in tags:
            return buildReleaseMessageWithType(item, tag_name, vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    return False
def filter_limit_to_closed_periods(submission_query_path: str='') -> Q:
    """Build a Django Q limiting results to final (closed) submission periods.

    One OR-ed clause is produced per final submission; when there are none,
    a deliberately empty match (pk is null) is returned instead.
    """
    combined = Q()
    prefix = submission_query_path
    for sub in final_submissions_for_all_fy():
        clause = Q(**{f'{prefix}submission__reporting_fiscal_year': sub.fiscal_year})
        clause &= Q(**{f'{prefix}submission__quarter_format_flag': sub.is_quarter})
        clause &= Q(**{f'{prefix}submission__reporting_fiscal_period__lte': sub.fiscal_period})
        combined |= clause
    if not combined:
        # No closed periods at all: match nothing.
        combined = Q(pk__isnull=True)
    return combined
class vetor():
    """2D vector supporting addition and scalar multiplication.

    Note: the arithmetic operators intentionally return plain (x, y) tuples,
    not new vetor instances.
    """

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __add__(self, other):
        return (self.x + other.x, self.y + other.y)

    def __mul__(self, n):
        return (self.x * n, self.y * n)

    # Right multiplication is identical to left multiplication.
    __rmul__ = __mul__
class ModelNotInLocal(object):
    """Message helper shown when a model id is absent from the local device."""

    def __init__(self, model_id):
        self.model_id = model_id

    def echo(self):
        """Print the not-found message plus a fetch hint, then exit."""
        mid = self.model_id
        echo('Model {0} could not be found in local device'.format(mid), fg='red')
        echo('Please fetch the model from the Ersilia Model Hub: ersilia fetch {0}'.format(mid))
        sys.exit(0)
def _add_preset_from_firmware(plugin_dict, fw: Firmware):
preset_name = fw.get_hid()
previously_processed_plugins = list(fw.processed_analysis.keys())
with suppress(ValueError):
plugin_dict.pop('unpacker')
previously_processed_plugins.remove('unpacker')
for plugin in previously_processed_plugins:
if (plugin in plugin_dict):
plugin_dict[plugin][2][preset_name] = True
else:
logging.warning(f'Previously used analysis plugin {plugin} not found for update preset')
return preset_name |
class FaucetConfigReloadTestBase(FaucetTest):
    """Base class for FAUCET config-reload tests.

    Builds a four-host untagged topology whose main config includes a
    separately written ACL file; the ACL cookie is randomized per test run.
    """
    # Topology shape: four untagged hosts, no tagged hosts, one link each.
    N_UNTAGGED = 4
    N_TAGGED = 0
    LINKS_PER_HOST = 1
    CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "untagged"\n 200:\n description: "untagged"\n'
    CONFIG = '\n interfaces:\n %(port_1)d:\n native_vlan: 100\n acl_in: allow\n %(port_2)d:\n native_vlan: 100\n %(port_3)d:\n native_vlan: 100\n %(port_4)d:\n native_vlan: 100\n tagged_vlans: [200]\n'
    ACL = '\nacls:\n 1:\n - rule:\n description: "rule 1"\n cookie: COOKIE\n dl_type: 0x800\n ip_proto: 6\n tcp_dst: 5001\n actions:\n allow: 0\n - rule:\n cookie: COOKIE\n dl_type: 0x800\n ip_proto: 6\n tcp_dst: 5002\n actions:\n allow: 1\n - rule:\n cookie: COOKIE\n actions:\n allow: 1\n 2:\n - rule:\n cookie: COOKIE\n dl_type: 0x800\n ip_proto: 6\n tcp_dst: 5001\n actions:\n allow: 1\n - rule:\n cookie: COOKIE\n dl_type: 0x800\n ip_proto: 6\n tcp_dst: 5002\n actions:\n allow: 0\n - rule:\n cookie: COOKIE\n actions:\n allow: 1\n 3:\n - rule:\n cookie: COOKIE\n dl_type: 0x800\n ip_proto: 6\n tcp_dst: 5003\n actions:\n allow: 0\n 4:\n - rule:\n cookie: COOKIE\n dl_type: 0x800\n ip_proto: 6\n tcp_dst: 5002\n actions:\n allow: 1\n - rule:\n cookie: COOKIE\n dl_type: 0x800\n ip_proto: 6\n tcp_dst: 5001\n actions:\n allow: 0\n deny:\n - rule:\n cookie: COOKIE\n dl_type: 0x800\n ip_proto: 6\n tcp_dst: 65535\n actions:\n allow: 0\n - rule:\n cookie: COOKIE\n actions:\n allow: 0\n allow:\n - rule:\n cookie: COOKIE\n dl_type: 0x800\n ip_proto: 6\n tcp_dst: 65535\n actions:\n allow: 1\n - rule:\n cookie: COOKIE\n actions:\n allow: 1\n'
    # Randomized in setUp and substituted for the COOKIE placeholder in ACL.
    ACL_COOKIE = None
    def setUp(self):
        super().setUp()
        self.ACL_COOKIE = random.randint(1, ((2 ** 16) - 1))
        self.ACL = self.ACL.replace('COOKIE', str(self.ACL_COOKIE))
        self.acl_config_file = ('%s/acl.yaml' % self.tmpdir)
        with open(self.acl_config_file, 'w', encoding='utf-8') as config_file:
            config_file.write(self.ACL)
        # Main config pulls in the generated ACL file via an include stanza.
        self.CONFIG = '\n'.join((self.CONFIG, ('include:\n - %s' % self.acl_config_file)))
        self.topo = self.topo_class(self.OVS_TYPE, self.ports_sock, self._test_name(), [self.dpid], n_tagged=self.N_TAGGED, n_untagged=self.N_UNTAGGED, links_per_host=self.LINKS_PER_HOST, hw_dpid=self.hw_dpid)
        self.start_net()
class OptionPlotoptionsSolidgaugeAccessibility(Options):
    """Wrapper for Highcharts `plotOptions.solidgauge.accessibility` options.

    NOTE(review): each option appears twice (getter then setter with the same
    name); @property / @<name>.setter decorators appear stripped by the
    extraction -- confirm against the upstream generator before refactoring.
    """

    def description(self):
        # getter: accessibility description; no default configured
        return self._config_get(None)

    def description(self, text: str):
        # setter: stored as a plain (non-JS) value
        self._config(text, js_type=False)

    def descriptionFormat(self):
        # getter: format string for the description; no default
        return self._config_get(None)

    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # getter: whether accessibility is enabled for this series type
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def exposeAsGroupOnly(self):
        # getter: expose the series as a single group instead of per-point
        return self._config_get(None)

    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)

    def keyboardNavigation(self) -> 'OptionPlotoptionsSolidgaugeAccessibilityKeyboardnavigation':
        # nested sub-option object (lazily created by _config_sub_data)
        return self._config_sub_data('keyboardNavigation', OptionPlotoptionsSolidgaugeAccessibilityKeyboardnavigation)

    def point(self) -> 'OptionPlotoptionsSolidgaugeAccessibilityPoint':
        # nested per-point accessibility sub-options
        return self._config_sub_data('point', OptionPlotoptionsSolidgaugeAccessibilityPoint)
def get_relevant_parameters(feature_data, iob_type):
    """Collect features relevant to *iob_type* across all IO settings.

    A feature is relevant when at least one IO-settings entry with a matching
    differential flag lists the base IOB type for that feature.

    :param feature_data: mapping of iosettings (objects with an ``is_diff``
        attribute) -> {feature name -> container of IOB types}
    :param iob_type: IOB type name; a 'DIFF_' prefix selects differential
        settings and is stripped before the membership check
    :return: list of (feature, sanitized_name) pairs, where sanitized_name has
        '.' replaced by '_'
    """
    all_features = {f for data in feature_data.values() for f in data.keys()}
    parameters = []
    is_diff = 'DIFF_' in iob_type
    base_iob_type = iob_type.replace('DIFF_', '')  # hoisted: loop-invariant
    for feature in all_features:
        for iosettings in feature_data:
            if iosettings.is_diff != is_diff:
                continue
            # Not every iosettings entry has every feature; the original
            # indexed feature_data[iosettings][feature] directly and raised
            # KeyError for features present only in other entries.
            if base_iob_type in feature_data[iosettings].get(feature, ()):
                parameters.append((feature, feature.replace('.', '_')))
                break  # feature confirmed relevant; next feature
    return parameters
_dict
def normalize_bytecode_object(obj: Dict[str, Any]) -> Iterable[Tuple[str, Any]]:
    """Normalize a compiler-output bytecode object into manifest fields.

    Yields ('linkReferences', ...) and ('bytecode', ...) pairs when link
    references are present, otherwise a single ('bytecode', ...) pair with a
    0x-prefixed bytecode string.

    :raises ManifestBuildingError: if the compiler output lacks 'object'.
    """
    link_references = obj.get('linkReferences')
    try:
        bytecode = obj['object']
    except KeyError:
        raise ManifestBuildingError("'object' key not found in bytecode data from compiler output. Please make sure your solidity compiler output is valid.")
    if not link_references:
        # No placeholders to resolve: just normalize the hex prefix.
        yield ('bytecode', add_0x_prefix(bytecode))
        return
    yield ('linkReferences', process_link_references(link_references, bytecode))
    yield ('bytecode', process_bytecode(link_references, bytecode))
def data_tree_map(func: Callable[([TypeStr, Any], Tuple[(TypeStr, Any)])], data_tree: Any) -> 'ABITypedData':
    """Map *func* over every typed node of *data_tree*.

    Each ABITypedData node with a non-None abi_type is unpacked into
    ``func(abi_type, data)`` and the result rewrapped as ABITypedData; every
    other value passes through untouched.
    """
    def _rewrap(node: Any) -> 'ABITypedData':
        # Leave plain values and untyped nodes alone.
        if not (isinstance(node, ABITypedData) and (node.abi_type is not None)):
            return node
        return ABITypedData(func(*node))
    return recursive_map(_rewrap, data_tree)
def downgrade():
    """Alembic downgrade: collapse the Project_Repositories association table
    back into a single Projects.repository_id column."""
    with op.batch_alter_table('Projects', schema=None) as batch_op:
        batch_op.add_column(sa.Column('repository_id', sa.INTEGER(), autoincrement=False, nullable=True))
    # Preserve one repository per project (an arbitrary first row, via
    # LIMIT 1) before the association table is dropped -- any additional
    # repository links are lost by design on downgrade.
    op.execute('update "Projects" set repository_id = ( select repo_id from "Project_Repositories" where project_id = "Projects".id limit 1 )')
    op.drop_table('Project_Repositories')
def test_workflow_activity_exception():
    """End-to-end check that an activity exception surfaces to the client as
    WorkflowFailureException -> ActivityFailureException ->
    ComposeGreetingException, with source tracebacks preserved at each hop.

    NOTE(review): integration test -- requires a workflow service on
    localhost:7933 plus module-level fixtures (DOMAIN, TASK_LIST,
    GreetingActivitiesImpl, TestActivityExceptionWorkflowImpl,
    exception_caught) not visible here.
    """
    # Spin up a worker hosting both the activity impl and the workflow impl.
    factory = WorkerFactory('localhost', 7933, DOMAIN)
    worker = factory.new_worker(TASK_LIST)
    activities_impl = GreetingActivitiesImpl()
    worker.register_activities_implementation(activities_impl, 'GreetingActivities')
    worker.register_workflow_implementation_type(TestActivityExceptionWorkflowImpl)
    factory.start()
    client = WorkflowClient.new_client(domain=DOMAIN)
    workflow: TestActivityExceptionWorkflow = client.new_workflow_stub(TestActivityExceptionWorkflow)
    workflow_ex = None
    try:
        workflow.get_greetings('Bob')
    except Exception as ex:
        workflow_ex = ex
    # Client-side chain: workflow failure wrapping activity failure wrapping
    # the original exception raised inside the activity.
    assert isinstance(workflow_ex, WorkflowFailureException)
    assert isinstance(workflow_ex.__cause__, ActivityFailureException)
    assert isinstance(workflow_ex.__cause__.__cause__, ComposeGreetingException)
    # exception_caught is presumably a module-level variable recorded by the
    # workflow/activity implementation -- confirm against the fixture module.
    assert exception_caught
    assert isinstance(exception_caught, ActivityFailureException)
    assert isinstance(exception_caught.get_cause(), ComposeGreetingException)
    # set_cause() with no args apparently materializes the stored cause onto
    # __cause__ -- NOTE(review): confirm against ActivityFailureException API.
    exception_caught.set_cause()
    cause = exception_caught.__cause__
    assert isinstance(cause, ComposeGreetingException)
    assert (cause.args == ('Failed to compose greeting',))
    # Marker strings planted in the fixture code verify that each formatted
    # traceback reaches back to the original raise site.
    tb = ''.join(traceback.format_exception(type(cause), cause, cause.__traceback__))
    assert ('SOURCE OF EXCEPTION' in tb)
    tb = ''.join(traceback.format_exception(type(exception_caught), exception_caught, exception_caught.__traceback__))
    assert ('SOURCE OF EXCEPTION' in tb)
    assert ('WORKFLOW METHOD INVOKING ACTIVITY' in tb)
    tb = ''.join(traceback.format_exception(type(workflow_ex), workflow_ex, workflow_ex.__traceback__))
    assert ('SOURCE OF EXCEPTION' in tb)
    assert ('WORKFLOW METHOD INVOKING ACTIVITY' in tb)
    print('Stopping workers')
    worker.stop()
class Rodata(unittest.TestCase):
    """Tests for splat's CommonSegRodata segment handling.

    NOTE(review): relies on module-level helpers test_init(), get_yaml() and
    the splat symbols module, none of which are visible here.
    """
    def test_disassemble_data(self):
        """Disassembling 256 sequential bytes must create a spim section and
        register a symbol at the segment's vram start."""
        test_init()
        common_seg_rodata = CommonSegRodata(rom_start=0, rom_end=256, type='.rodata', name='MyRodata', vram_start=1024, args=None, yaml=None)
        rom_data = []
        for i in range(256):
            rom_data.append(i)
        common_seg_rodata.disassemble_data(bytes(rom_data))
        assert (common_seg_rodata.spim_section is not None)
        # first word is big-endian 0x00010203 == 66051
        assert (common_seg_rodata.spim_section.get_section().words[0] == 66051)
        assert (symbols.get_all_symbols()[0].vram_start == 1024)
        assert (symbols.get_all_symbols()[0].segment == common_seg_rodata)
        assert (symbols.get_all_symbols()[0].linker_section == '.rodata')
    def test_get_possible_text_subsegment_for_symbol(self):
        """A force-migrated rodata symbol referenced by a function must map to
        the C text subsegment containing that function's address."""
        context = spimdisasm.common.Context()
        result_symbol_addr = 732
        rodata_sym = spimdisasm.mips.symbols.SymbolRodata(context=context, vromStart=256, vromEnd=512, inFileOffset=0, vram=256, words=[0, 1, 2, 3, 4, 5, 6, 7], segmentVromStart=0, overlayCategory=None)
        rodata_sym.contextSym.forceMigration = True
        # fabricate a referencing function symbol at the expected address
        context_sym = spimdisasm.common.ContextSymbol(address=0)
        context_sym.address = result_symbol_addr
        rodata_sym.contextSym.referenceFunctions = {context_sym}
        test_init()
        common_seg_rodata = CommonSegRodata(rom_start=0, rom_end=256, type='.rodata', name='MyRodata', vram_start=1024, args=None, yaml=None)
        common_seg_rodata.parent = CommonSegCode(rom_start=0, rom_end=512, type='code', name='MyCode', vram_start=256, args=[], yaml=get_yaml())
        result = common_seg_rodata.get_possible_text_subsegment_for_symbol(rodata_sym)
        assert (result is not None)
        assert (type(result[0]) == CommonSegC)
        assert (result[1].address == result_symbol_addr)
def build_data_provider(data_config, drop_last: bool=False):
    """Construct a DataProvider over the LEAF sent140 JSON splits.

    The test split doubles as the eval split. Returns a tuple of
    (data_provider, num_letters) where num_letters is the vocabulary size
    reported by the training dataset.

    :param data_config: config object with max_seq_len and local_batch_size
    :param drop_last: forwarded to the dataloader (drop incomplete batches)
    """
    max_seq_len = data_config.max_seq_len
    train_set = Sent140Dataset(data_root='leaf/data/sent140/data/train/all_data_0_01_keep_1_train_9.json', max_seq_len=max_seq_len)
    eval_test_set = Sent140Dataset(data_root='leaf/data/sent140/data/test/all_data_0_01_keep_1_test_9.json', max_seq_len=max_seq_len)
    loader = LEAFDataLoader(train_set, eval_test_set, eval_test_set, batch_size=data_config.local_batch_size, drop_last=drop_last)
    return (DataProvider(loader), train_set.num_letters)
def _generate_private_key(type_: str, file: Optional[str]=None, password: Optional[str]=None) -> Dict[(str, str)]:
    """Generate one private key per requested crypto type.

    :param type_: a registered crypto id, or 'all' for every supported id
    :param file: explicit output path (incompatible with type_='all')
    :param password: optional encryption password for the key file
    :return: mapping of crypto type -> path of the key file actually written
    :raises click.ClickException: when 'all' is combined with an explicit file
    """
    if (type_ == 'all') and (file is not None):
        raise click.ClickException('Type all cannot be used in combination with file.')
    if type_ == 'all':
        requested_types = list(crypto_registry.supported_ids)
    else:
        requested_types = [type_]
    keys: Dict[str, str] = {}
    for key_type in requested_types:
        # fall back to the per-type default path when no file was given
        target_file = file if file is not None else PRIVATE_KEY_PATH_SCHEMA.format(key_type)
        # skip silently when the destination is not writable
        if _can_write(target_file):
            create_private_key(key_type, target_file, password)
            keys[key_type] = target_file
    return keys
class TTGlyphSetTest(object):
    """Tests for fontTools ttGlyphSet: outlines from static, variable,
    VarComposite, and cubic-glyf fonts.

    NOTE(review): decorators appear stripped by extraction -- the bare
    `.parametrize(...)` lines below are @pytest.mark.parametrize residue, and
    getpath (called as self.getpath) was presumably a @staticmethod.
    """
    def getpath(testfile):
        """Resolve *testfile* relative to this module's data/ directory."""
        path = os.path.dirname(__file__)
        return os.path.join(path, 'data', testfile)
    # stripped decorator residue: originally @pytest.mark.parametrize(...)
    .parametrize('fontfile, location, expected', [('I.ttf', None, [('moveTo', ((175, 0),)), ('lineTo', ((367, 0),)), ('lineTo', ((367, 1456),)), ('lineTo', ((175, 1456),)), ('closePath', ())]), ('I.ttf', {}, [('moveTo', ((175, 0),)), ('lineTo', ((367, 0),)), ('lineTo', ((367, 1456),)), ('lineTo', ((175, 1456),)), ('closePath', ())]), ('I.ttf', {'wght': 100}, [('moveTo', ((175, 0),)), ('lineTo', ((271, 0),)), ('lineTo', ((271, 1456),)), ('lineTo', ((175, 1456),)), ('closePath', ())]), ('I.ttf', {'wght': 1000}, [('moveTo', ((128, 0),)), ('lineTo', ((550, 0),)), ('lineTo', ((550, 1456),)), ('lineTo', ((128, 1456),)), ('closePath', ())]), ('I.ttf', {'wght': 1000, 'wdth': 25}, [('moveTo', ((140, 0),)), ('lineTo', ((553, 0),)), ('lineTo', ((553, 1456),)), ('lineTo', ((140, 1456),)), ('closePath', ())]), ('I.ttf', {'wght': 1000, 'wdth': 50}, [('moveTo', ((136, 0),)), ('lineTo', ((552, 0),)), ('lineTo', ((552, 1456),)), ('lineTo', ((136, 1456),)), ('closePath', ())]), ('I.otf', {'wght': 1000}, [('moveTo', ((179, 74),)), ('lineTo', ((28, 59),)), ('lineTo', ((28, 0),)), ('lineTo', ((367, 0),)), ('lineTo', ((367, 59),)), ('lineTo', ((212, 74),)), ('lineTo', ((179, 74),)), ('closePath', ()), ('moveTo', ((179, 578),)), ('lineTo', ((212, 578),)), ('lineTo', ((367, 593),)), ('lineTo', ((367, 652),)), ('lineTo', ((28, 652),)), ('lineTo', ((28, 593),)), ('lineTo', ((179, 578),)), ('closePath', ()), ('moveTo', ((98, 310),)), ('curveTo', ((98, 205), (98, 101), (95, 0))), ('lineTo', ((299, 0),)), ('curveTo', ((296, 103), (296, 207), (296, 311))), ('lineTo', ((296, 342),)), ('curveTo', ((296, 447), (296, 551), (299, 652))), ('lineTo', ((95, 652),)), ('curveTo', ((98, 549), (98, 445), (98, 342))), ('lineTo', ((98, 310),)), ('closePath', ())]), ('issue2824.ttf', None, [('moveTo', ((309, 180),)), ('qCurveTo', ((274, 151), (187, 136), (104, 166), (74, 201))), ('qCurveTo', ((45, 236), (30, 323), (59, 407), (95, 436))), ('qCurveTo', ((130, 466), (217, 480), (301, 451), (330, 415))), ('qCurveTo', 
((360, 380), (374, 293), (345, 210), (309, 180))), ('closePath', ())])])
    def test_glyphset(self, fontfile, location, expected):
        """Drawing glyph 'I' at each variation location must produce the
        expected pen records; also exercises the glyphset mapping API."""
        font = TTFont(self.getpath(fontfile))
        glyphset = font.getGlyphSet(location=location)
        assert isinstance(glyphset, ttGlyphSet._TTGlyphSet)
        assert (list(glyphset.keys()) == ['.notdef', 'I'])
        assert ('I' in glyphset)
        # has_key is deprecated and must warn
        with pytest.deprecated_call():
            assert glyphset.has_key('I')
        assert (len(glyphset) == 2)
        pen = RecordingPen()
        glyph = glyphset['I']
        assert (glyphset.get('foobar') is None)
        assert isinstance(glyph, ttGlyphSet._TTGlyph)
        # glyf-flavoured vs CFF-flavoured fonts yield different glyph classes
        is_glyf = fontfile.endswith('.ttf')
        glyphType = (ttGlyphSet._TTGlyphGlyf if is_glyf else ttGlyphSet._TTGlyphCFF)
        assert isinstance(glyph, glyphType)
        glyph.draw(pen)
        actual = pen.value
        assert (actual == expected), (location, actual, expected)
    # stripped decorator residue: originally @pytest.mark.parametrize(...)
    .parametrize('fontfile, locations, factor, expected', [('I.ttf', ({'wght': 400}, {'wght': 1000}), 0.5, [('moveTo', ((151.5, 0.0),)), ('lineTo', ((458.5, 0.0),)), ('lineTo', ((458.5, 1456.0),)), ('lineTo', ((151.5, 1456.0),)), ('closePath', ())]), ('I.ttf', ({'wght': 400}, {'wght': 1000}), 0.25, [('moveTo', ((163.25, 0.0),)), ('lineTo', ((412.75, 0.0),)), ('lineTo', ((412.75, 1456.0),)), ('lineTo', ((163.25, 1456.0),)), ('closePath', ())])])
    def test_lerp_glyphset(self, fontfile, locations, factor, expected):
        """LerpGlyphSet must interpolate outlines between two locations by
        the given factor."""
        font = TTFont(self.getpath(fontfile))
        glyphset1 = font.getGlyphSet(location=locations[0])
        glyphset2 = font.getGlyphSet(location=locations[1])
        glyphset = LerpGlyphSet(glyphset1, glyphset2, factor)
        assert ('I' in glyphset)
        pen = RecordingPen()
        glyph = glyphset['I']
        assert (glyphset.get('foobar') is None)
        glyph.draw(pen)
        actual = pen.value
        assert (actual == expected), (locations, actual, expected)
    def test_glyphset_varComposite_components(self):
        """Drawing a VarComposite glyph without decomposition must emit
        addVarComponent records with transforms and axis locations."""
        font = TTFont(self.getpath('varc-ac00-ac01.ttf'))
        glyphset = font.getGlyphSet()
        pen = RecordingPen()
        glyph = glyphset['uniAC00']
        glyph.draw(pen)
        actual = pen.value
        expected = [('addVarComponent', ('glyph00003', DecomposedTransform(460.0, 676.0, 0, 1, 1, 0, 0, 0, 0), {'0000': 0., '0001': 0., '0002': 0., '0003': 0.})), ('addVarComponent', ('glyph00004', DecomposedTransform(932.0, 382.0, 0, 1, 1, 0, 0, 0, 0), {'0000': 0., '0001': 0., '0002': 0., '0003': 0., '0004': 0.}))]
        assert (actual == expected), (actual, expected)
    def test_glyphset_varComposite1(self):
        """Decomposing a VarComposite glyph at wght=600 must yield the expected
        outline; repeating the draw must be deterministic."""
        font = TTFont(self.getpath('varc-ac00-ac01.ttf'))
        glyphset = font.getGlyphSet(location={'wght': 600})
        pen = DecomposingRecordingPen(glyphset)
        glyph = glyphset['uniAC00']
        glyph.draw(pen)
        actual = pen.value
        expected = [('moveTo', ((432, 678),)), ('lineTo', ((432, 620),)), ('qCurveTo', ((419, 620), (374, 621), (324, 619), (275, 618), (237, 617), (228, 616))), ('qCurveTo', ((218, 616), (188, 612), (160, 605), (149, 601))), ('qCurveTo', ((127, 611), (83, 639), (67, 654))), ('qCurveTo', ((64, 657), (63, 662), (64, 666))), ('lineTo', ((72, 678),)), ('qCurveTo', ((93, 674), (144, 672), (164, 672))), ('qCurveTo', ((173, 672), (213, 672), (266, 673), (323, 674), (377, 675), (421, 678), (432, 678))), ('closePath', ()), ('moveTo', ((525, 619),)), ('lineTo', ((412, 620),)), ('lineTo', ((429, 678),)), ('lineTo', ((466, 697),)), ('qCurveTo', ((470, 698), (482, 698), (486, 697))), ('qCurveTo', ((494, 693), (515, 682), (536, 670), (541, 667))), ('qCurveTo', ((545, 663), (545, 656), (543, 652))), ('lineTo', ((525, 619),)), ('closePath', ()), ('moveTo', ((63, 118),)), ('lineTo', ((47, 135),)), ('qCurveTo', ((42, 141), (48, 146))), ('qCurveTo', ((135, 213), (278, 373), (383, 541), (412, 620))), ('lineTo', ((471, 642),)), ('lineTo', ((525, 619),)), ('qCurveTo', ((496, 529), (365, 342), (183, 179), (75, 121))), ('qCurveTo', ((72, 119), (65, 118), (63, 118))), ('closePath', ()), ('moveTo', ((925, 372),)), ('lineTo', ((739, 368),)), ('lineTo', ((739, 427),)), ('lineTo', ((822, 430),)), ('lineTo', ((854, 451),)), ('qCurveTo', ((878, 453), (930, 449), (944, 445))), ('qCurveTo', ((961, 441), (962, 426))), ('qCurveTo', ((964, 411), (956, 386), (951, 381))), ('qCurveTo', ((947, 376), (931, 372), (925, 372))), ('closePath', ()), ('moveTo', ((729, (- 113)),)), ('lineTo', ((674, (- 113)),)), ('qCurveTo', ((671, (- 98)), (669, (- 42)), (666, 22), (665, 83), (665, 102))), ('lineTo', ((665, 763),)), ('qCurveTo', ((654, 780), (608, 810), (582, 820))), ('lineTo', ((593, 850),)), ('qCurveTo', ((594, 852), (599, 856), (607, 856))), ('qCurveTo', ((628, 855), (684, 846), (736, 834), (752, 827))), ('qCurveTo', ((766, 818), (766, 802))), ('lineTo', ((762, 745),)), ('lineTo', ((762, 134),)), ('qCurveTo', 
((762, 107), (757, 43), (749, (- 25)), (737, (- 87)), (729, (- 113)))), ('closePath', ())]
        # otRound the drawn points before comparing against integer expectations
        actual = [(op, tuple(((otRound(pt[0]), otRound(pt[1])) for pt in args))) for (op, args) in actual]
        assert (actual == expected), (actual, expected)
        # second draw must produce identical output (no hidden state)
        pen = DecomposingRecordingPen(glyphset)
        glyph.draw(pen)
        actual = pen.value
        actual = [(op, tuple(((otRound(pt[0]), otRound(pt[1])) for pt in args))) for (op, args) in actual]
        assert (actual == expected), (actual, expected)
        # drawPoints must also work and produce something
        pen = RecordingPointPen()
        glyph.drawPoints(pen)
        assert pen.value
    def test_glyphset_varComposite2(self):
        """Same as varComposite1 but for the varc-6868 test font."""
        font = TTFont(self.getpath('varc-6868.ttf'))
        glyphset = font.getGlyphSet(location={'wght': 600})
        pen = DecomposingRecordingPen(glyphset)
        glyph = glyphset['uni6868']
        glyph.draw(pen)
        actual = pen.value
        expected = [('moveTo', ((460, 565),)), ('qCurveTo', ((482, 577), (526, 603), (568, 632), (607, 663), (644, 698), (678, 735), (708, 775), (721, 796))), ('lineTo', ((632, 835),)), ('qCurveTo', ((621, 817), (595, 784), (566, 753), (534, 724), (499, 698), (462, 675), (423, 653), (403, 644))), ('closePath', ()), ('moveTo', ((616, 765),)), ('lineTo', ((590, 682),)), ('lineTo', ((830, 682),)), ('lineTo', ((833, 682),)), ('lineTo', ((828, 693),)), ('qCurveTo', ((817, 671), (775, 620), (709, 571), (615, 525), (492, 490), (413, 480))), ('lineTo', ((454, 386),)), ('qCurveTo', ((544, 403), (687, 455), (798, 519), (877, 590), (926, 655), (937, 684))), ('lineTo', ((937, 765),)), ('closePath', ()), ('moveTo', ((723, 555),)), ('qCurveTo', ((713, 563), (693, 579), (672, 595), (651, 610), (629, 625), (606, 638), (583, 651), (572, 657))), ('lineTo', ((514, 590),)), ('qCurveTo', ((525, 584), (547, 572), (568, 559), (589, 545), (609, 531), (629, 516), (648, 500), (657, 492))), ('closePath', ()), ('moveTo', ((387, 375),)), ('lineTo', ((387, 830),)), ('lineTo', ((289, 830),)), ('lineTo', ((289, 375),)), ('closePath', ()), ('moveTo', ((96, 383),)), ('qCurveTo', ((116, 390), (156, 408), (194, 427), (231, 449), (268, 472), (302, 497), (335, 525), (351, 539))), ('lineTo', ((307, 610),)), ('qCurveTo', ((291, 597), (257, 572), (221, 549), (185, 528), (147, 509), (108, 492), (69, 476), (48, 469))), ('closePath', ()), ('moveTo', ((290, 653),)), ('qCurveTo', ((281, 664), (261, 687), (240, 708), (219, 729), (196, 749), (173, 768), (148, 786), (136, 794))), ('lineTo', ((69, 727),)), ('qCurveTo', ((81, 719), (105, 702), (129, 684), (151, 665), (173, 645), (193, 625), (213, 604), (222, 593))), ('closePath', ()), ('moveTo', ((913, (- 57)),)), ('lineTo', ((953, 30),)), ('qCurveTo', ((919, 41), (854, 67), (790, 98), (729, 134), (671, 173), (616, 217), (564, 264), (540, 290))), ('lineTo', ((522, 286),)), ('qCurveTo', ((511, 267), (498, 235), (493, 213), (492, 206))), ('lineTo', ((515, 209),)), 
('qCurveTo', ((569, 146), (695, 44), (835, (- 32)), (913, (- 57)))), ('closePath', ()), ('moveTo', ((474, 274),)), ('lineTo', ((452, 284),)), ('qCurveTo', ((428, 260), (377, 214), (323, 172), (266, 135), (206, 101), (144, 71), (80, 46), (47, 36))), ('lineTo', ((89, (- 53)),)), ('qCurveTo', ((163, (- 29)), (299, 46), (423, 142), (476, 201))), ('lineTo', ((498, 196),)), ('qCurveTo', ((498, 203), (494, 225), (482, 255), (474, 274))), ('closePath', ()), ('moveTo', ((450, 250),)), ('lineTo', ((550, 250),)), ('lineTo', ((550, 379),)), ('lineTo', ((450, 379),)), ('closePath', ()), ('moveTo', ((68, 215),)), ('lineTo', ((932, 215),)), ('lineTo', ((932, 305),)), ('lineTo', ((68, 305),)), ('closePath', ()), ('moveTo', ((450, (- 71)),)), ('lineTo', ((550, (- 71)),)), ('lineTo', ((550, (- 71)),)), ('lineTo', ((550, 267),)), ('lineTo', ((450, 267),)), ('lineTo', ((450, (- 71)),)), ('closePath', ())]
        actual = [(op, tuple(((otRound(pt[0]), otRound(pt[1])) for pt in args))) for (op, args) in actual]
        assert (actual == expected), (actual, expected)
        pen = RecordingPointPen()
        glyph.drawPoints(pen)
        assert pen.value
    def test_cubic_glyf(self):
        """Cubic curves stored in glyf must draw identically regardless of
        which point starts the contour ('one'..'four' variants)."""
        font = TTFont(self.getpath('dot-cubic.ttf'))
        glyphset = font.getGlyphSet()
        expected = [('moveTo', ((76, 181),)), ('curveTo', ((103, 181), (125, 158), (125, 131))), ('curveTo', ((125, 104), (103, 82), (76, 82))), ('curveTo', ((48, 82), (26, 104), (26, 131))), ('curveTo', ((26, 158), (48, 181), (76, 181))), ('closePath', ())]
        pen = RecordingPen()
        glyphset['one'].draw(pen)
        assert (pen.value == expected)
        expectedPoints = [('beginPath', (), {}), ('addPoint', ((76, 181), 'curve', False, None), {}), ('addPoint', ((103, 181), None, False, None), {}), ('addPoint', ((125, 158), None, False, None), {}), ('addPoint', ((125, 104), None, False, None), {}), ('addPoint', ((103, 82), None, False, None), {}), ('addPoint', ((76, 82), 'curve', False, None), {}), ('addPoint', ((48, 82), None, False, None), {}), ('addPoint', ((26, 104), None, False, None), {}), ('addPoint', ((26, 158), None, False, None), {}), ('addPoint', ((48, 181), None, False, None), {}), ('endPath', (), {})]
        pen = RecordingPointPen()
        glyphset['one'].drawPoints(pen)
        assert (pen.value == expectedPoints)
        pen = RecordingPen()
        glyphset['two'].draw(pen)
        assert (pen.value == expected)
        expectedPoints = [('beginPath', (), {}), ('addPoint', ((26, 158), None, False, None), {}), ('addPoint', ((48, 181), None, False, None), {}), ('addPoint', ((76, 181), 'curve', False, None), {}), ('addPoint', ((103, 181), None, False, None), {}), ('addPoint', ((125, 158), None, False, None), {}), ('addPoint', ((125, 104), None, False, None), {}), ('addPoint', ((103, 82), None, False, None), {}), ('addPoint', ((76, 82), 'curve', False, None), {}), ('addPoint', ((48, 82), None, False, None), {}), ('addPoint', ((26, 104), None, False, None), {}), ('endPath', (), {})]
        pen = RecordingPointPen()
        glyphset['two'].drawPoints(pen)
        assert (pen.value == expectedPoints)
        pen = RecordingPen()
        glyphset['three'].draw(pen)
        assert (pen.value == expected)
        expectedPoints = [('beginPath', (), {}), ('addPoint', ((48, 82), None, False, None), {}), ('addPoint', ((26, 104), None, False, None), {}), ('addPoint', ((26, 158), None, False, None), {}), ('addPoint', ((48, 181), None, False, None), {}), ('addPoint', ((76, 181), 'curve', False, None), {}), ('addPoint', ((103, 181), None, False, None), {}), ('addPoint', ((125, 158), None, False, None), {}), ('addPoint', ((125, 104), None, False, None), {}), ('addPoint', ((103, 82), None, False, None), {}), ('addPoint', ((76, 82), 'curve', False, None), {}), ('endPath', (), {})]
        pen = RecordingPointPen()
        glyphset['three'].drawPoints(pen)
        assert (pen.value == expectedPoints)
        pen = RecordingPen()
        # 'four' has no on-curve points; implied on-curve midpoint is 75.5
        glyphset['four'].draw(pen)
        assert (pen.value == [('moveTo', ((75.5, 181),)), ('curveTo', ((103, 181), (125, 158), (125, 131))), ('curveTo', ((125, 104), (103, 82), (75.5, 82))), ('curveTo', ((48, 82), (26, 104), (26, 131))), ('curveTo', ((26, 158), (48, 181), (75.5, 181))), ('closePath', ())])
        expectedPoints = [('beginPath', (), {}), ('addPoint', ((103, 181), None, False, None), {}), ('addPoint', ((125, 158), None, False, None), {}), ('addPoint', ((125, 104), None, False, None), {}), ('addPoint', ((103, 82), None, False, None), {}), ('addPoint', ((48, 82), None, False, None), {}), ('addPoint', ((26, 104), None, False, None), {}), ('addPoint', ((26, 158), None, False, None), {}), ('addPoint', ((48, 181), None, False, None), {}), ('endPath', (), {})]
        pen = RecordingPointPen()
        glyphset['four'].drawPoints(pen)
        # NOTE(review): leftover debug print -- consider removing
        print(pen.value)
        assert (pen.value == expectedPoints)
class Time(TVal):
    """An absolute point in time, stored as (secs, nsecs).

    Times must be non-negative; arithmetic follows ROS conventions:
    Time +/- Duration -> Time, Time - Time -> Duration. Comparison with
    non-Time objects raises TypeError (except __eq__, which returns False).
    """
    __slots__ = ['secs', 'nsecs']

    def __init__(self, secs=0, nsecs=0):
        super(Time, self).__init__(secs, nsecs)
        # Times are absolute; negative values are only legal for Duration.
        if self.secs < 0:
            raise TypeError('time values must be positive')

    def __getstate__(self):
        # pickle support: serialize as a two-element list
        return [self.secs, self.nsecs]

    def __setstate__(self, state):
        self.secs, self.nsecs = state

    def to_time(self):
        """Alias for to_sec(): the time as a float number of seconds."""
        return self.to_sec()

    def __hash__(self):
        return super(Time, self).__hash__()

    def __repr__(self):
        return 'genpy.Time[%d]' % self.to_nsec()

    def __add__(self, other):
        # Time + Duration -> Time; anything else is unsupported.
        if isinstance(other, Duration):
            return self.__class__(self.secs + other.secs, self.nsecs + other.nsecs)
        return NotImplemented
    __radd__ = __add__

    def __sub__(self, other):
        # Time - Time -> Duration (elapsed span);
        # Time - Duration -> Time (earlier instant).
        if isinstance(other, Time):
            return Duration(self.secs - other.secs, self.nsecs - other.nsecs)
        if isinstance(other, Duration):
            return self.__class__(self.secs - other.secs, self.nsecs - other.nsecs)
        return NotImplemented

    def __cmp__(self, other):
        if not isinstance(other, Time):
            raise TypeError('cannot compare to non-Time')
        a = self.to_nsec()
        b = other.to_nsec()
        return (a > b) - (a < b)

    def __eq__(self, other):
        # Unlike __cmp__, equality with a non-Time is simply False.
        return isinstance(other, Time) and (self.secs == other.secs) and (self.nsecs == other.nsecs)
def test_pathlib_path(asgi, patch_open):
    """create_sr must accept a pathlib.Path root the same way as a str root."""
    patch_open()
    static_route = create_sr(asgi, '/static/', pathlib.Path('/var/www/statics'))
    request_path = '/static/css/test.css'
    req = _util.create_req(asgi, host='test.com', path=request_path, root_path='statics')
    resp = _util.create_resp(asgi)
    if asgi:
        async def _invoke():
            await static_route(req, resp)
            return await resp.stream.read()
        body = falcon.async_to_sync(_invoke)
    else:
        static_route(req, resp)
        body = resp.stream.read()
    # presumably the patched open() echoes the resolved filesystem path back
    # as the file body, so this checks path joining/normalization
    assert body.decode() == os.path.normpath('/var/www/statics/css/test.css')
def check_has_ids(node, raise_error):
    """Recursively verify id placement on a docutils tree.

    Paragraphs directly under sections, list items, or the first cell of a
    table row must carry an id; later cells and all other node types must not.
    Sections themselves must have at least one 'fls_'-prefixed name.
    """
    node_type = type(node)
    if node_type is nodes.paragraph:
        parent_type = type(node.parent)
        if parent_type is nodes.section:
            should_have_id(node, 'paragraph', raise_error)
        elif parent_type is nodes.list_item:
            should_have_id(node, 'list item', raise_error)
        elif parent_type is nodes.entry:
            # position of this cell within its row decides the rule
            if node.parent.parent.index(node.parent) == 0:
                should_have_id(node, 'first cell of a table row', raise_error)
            else:
                should_not_have_id(node, 'second or later cell of a table row', raise_error)
        else:
            should_not_have_id(node, node_type.__name__, raise_error)
    elif node_type is nodes.section:
        # sections are identified by fls_* names rather than plain ids
        if not any(name.startswith('fls_') for name in node['names']):
            raise_error('section should have an id', location=node)
    else:
        should_not_have_id(node, node_type.__name__, raise_error)
    for child in node.children:
        check_has_ids(child, raise_error)
class DNSClientLoggerTestCase(HTTPProxyTestCase):
    """Logger-wiring tests for dohproxy's DNSClient.

    NOTE(review): this block is damaged by extraction -- the mock.patch
    decorators and several string literals below are truncated mid-token
    (e.g. the unterminated 'dohproxy. line and the aio... return value).
    Restore from the upstream repository before changing any logic.
    """
    def setUp(self):
        super().setUp()
    # garbled decorator residue (originally mock.patch / mock.patch.object):
    .object(server_protocol.DNSClient, 'query')
    ( 'on_answer')
    ('dohproxy.
    _run_loop
    async def test_mock_dnsclient_assigned_logger(self, MockedDNSClient, Mockedon_answer, Mockedquery):
        ' Test that when MockedDNSClient is created with the doh- logger and DEBUG level\n '
        Mockedquery.return_value = self.dnsq
        Mockedon_answer.return_value = aio body=b'Done')
        params = utils.build_query_params(self.dnsq.to_wire())
        request = (await self.client.request('GET', self.endpoint, params=params))
        request.remote = '127.0.0.1'
        app = (await self.get_application())
        (await app.resolve(request, self.dnsq))
        mylogger = utils.configure_logger(name='doh- level='DEBUG')
        # the app must construct its DNSClient with the configured logger
        MockedDNSClient.assert_called_with(app.upstream_resolver, app.upstream_port, logger=mylogger)
    def test_dnsclient_none_logger(self):
        """Without an explicit logger, DNSClient defaults to its own
        'DNSClient' logger at DEBUG (numeric level 10)."""
        dnsclient = DNSClient('', 80)
        self.assertEqual(dnsclient.logger.level, 10)
        self.assertEqual(dnsclient.logger.name, 'DNSClient')
    def test_dnsclient_assigned_logger(self):
        """An explicitly passed logger must be used as-is (ERROR == 40)."""
        mylogger = logging.getLogger('mylogger')
        level = 'ERROR'
        mylogger.setLevel(level)
        dnsclient = DNSClient('', 80, logger=mylogger)
        self.assertEqual(dnsclient.logger.level, 40)
        self.assertEqual(dnsclient.logger.name, 'mylogger')
class OptionSeriesLollipopMarker(Options):
    """Wrapper for Highcharts `series.lollipop.marker` options.

    NOTE(review): each option appears twice (getter then setter with the same
    name); @property / @<name>.setter decorators appear stripped by the
    extraction -- confirm against the upstream generator before refactoring.
    """

    def enabled(self):
        # getter: whether point markers are drawn; no default configured
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        # getter: series-size threshold below which markers auto-enable; default 2
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        # getter: marker fill color; no default
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        # getter: explicit marker height in pixels; no default
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # getter: marker border color; defaults to white
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        # getter: marker border width; defaults to 0
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        # getter: marker radius in pixels; defaults to 4
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def states(self) -> 'OptionSeriesLollipopMarkerStates':
        # nested hover/select state sub-options (lazily created)
        return self._config_sub_data('states', OptionSeriesLollipopMarkerStates)

    def symbol(self):
        # getter: marker symbol name; no default
        return self._config_get(None)

    def symbol(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        # getter: explicit marker width in pixels; no default
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
# stripped decorator residue: originally @pytest.mark.parametrize(...)
.parametrize('tx_params,contract_name,expected', (({'gas': 210000}, 'no_receive', 'fallback'), ({'gas': 210000, 'value': 2}, 'no_receive', ''), ({'value': 2, 'gas': 210000, 'data': '0x477a5c98'}, 'no_receive', ''), ({'gas': 210000, 'data': '0x477a5c98'}, 'no_receive', 'fallback'), ({'data': '0x477a5c98'}, 'receive', 'fallback'), ({'value': 2}, 'receive', 'receive')))
def test_call_receive_fallback_function(w3, tx_params, expected, call, receive_function_contract, no_receive_function_contract, contract_name):
    """Send a bare transaction to the contract and verify which Solidity
    handler ran, by reading the marker ('receive'/'fallback'/'') that the
    handler stored in the contract's text slot."""
    if (contract_name == 'receive'):
        contract = receive_function_contract
    elif (contract_name == 'no_receive'):
        contract = no_receive_function_contract
    else:
        raise AssertionError('contract must be either receive or no_receive')
    # sanity: the text slot starts empty
    initial_value = call(contract=contract, contract_function='getText')
    assert (initial_value == '')
    # merge the destination with the parametrized tx fields and send
    to = {'to': contract.address}
    merged = {**to, **tx_params}
    w3.eth.send_transaction(merged)
    final_value = call(contract=contract, contract_function='getText')
    assert (final_value == expected)
class FidesUser(Base):
    """SQLAlchemy model for a Fides application user.

    Stores salted password hashes and manages the user's system-manager
    relationships. NOTE(review): several decorators appear stripped by
    extraction -- system_ids reads like a @property, and hash_password/create
    take cls (@classmethod); confirm upstream before refactoring.
    """
    # unique, case-insensitive login name
    username = Column(CIText, unique=True, index=True)
    first_name = Column(String, nullable=True)
    last_name = Column(String, nullable=True)
    # password is stored only as salt + hash, never in clear text
    hashed_password = Column(String, nullable=False)
    salt = Column(String, nullable=False)
    last_login_at = Column(DateTime(timezone=True), nullable=True)
    password_reset_at = Column(DateTime(timezone=True), nullable=True)
    # audit log rows referencing this user (no FK cascade: passive_deletes)
    audit_logs = relationship(AuditLog, backref='fides_user', lazy='dynamic', passive_deletes='all', primaryjoin='foreign(AuditLog.user_id)==FidesUser.id')
    # one-to-one OAuth client owned by this user
    client = relationship('ClientDetail', backref='user', cascade='all, delete', uselist=False)
    # systems this user manages (M2M via systemmanager)
    systems = relationship('System', secondary='systemmanager', back_populates='data_stewards')
    def system_ids(self) -> List[str]:
        """Ids of all systems this user manages."""
        return [system.id for system in self.systems]
    def hash_password(cls, password: str, encoding: str='UTF-8') -> tuple[(str, str)]:
        """Hash *password* with a freshly generated salt; returns (hash, salt)."""
        salt = generate_salt()
        hashed_password = hash_with_salt(password.encode(encoding), salt.encode(encoding))
        return (hashed_password, salt)
    def create(cls, db: Session, data: dict[(str, Any)], check_name: bool=True) -> FidesUser:
        """Create a user, storing only the salted hash of data['password']."""
        (hashed_password, salt) = FidesUser.hash_password(data['password'])
        user = super().create(db, data={'salt': salt, 'hashed_password': hashed_password, 'username': data['username'], 'first_name': data.get('first_name'), 'last_name': data.get('last_name')}, check_name=check_name)
        return user
    def credentials_valid(self, password: str, encoding: str='UTF-8') -> bool:
        """True when *password* hashes (with the stored salt) to the stored hash."""
        provided_password_hash = hash_with_salt(password.encode(encoding), self.salt.encode(encoding))
        return (provided_password_hash == self.hashed_password)
    def update_password(self, db: Session, new_password: str) -> None:
        """Re-salt and re-hash the password and record the reset timestamp."""
        (hashed_password, salt) = FidesUser.hash_password(new_password)
        self.hashed_password = hashed_password
        self.salt = salt
        self.password_reset_at = datetime.utcnow()
        self.save(db)
    def set_as_system_manager(self, db: Session, system: System) -> None:
        """Add *system* to this user's managed systems.

        :raises SystemManagerException: if *system* is not a System instance or
            the user already manages it.
        """
        # name-based type check (avoids importing System here?) -- NOTE(review):
        # isinstance would also match subclasses; confirm this strictness is wanted
        if (not (type(system).__name__ == 'System')):
            raise SystemManagerException('Must pass in a system to set user as system manager.')
        if (self in system.data_stewards):
            raise SystemManagerException(f"User '{self.username}' is already a system manager of '{system.name}'.")
        self.systems.append(system)
        self.save(db=db)
        # keep the OAuth client's system scope in sync
        if self.client:
            self.client.update(db=db, data={'systems': self.system_ids})
    def remove_as_system_manager(self, db: Session, system: System) -> None:
        """Remove *system* from this user's managed systems.

        :raises SystemManagerException: if *system* is not a System instance or
            the user does not manage it.
        """
        if (not (type(system).__name__ == 'System')):
            raise SystemManagerException('Must pass in a system to remove user as system manager.')
        try:
            self.systems.remove(system)
            self.save(db=db)
        except ValueError:
            # list.remove failed -> user was not a manager of this system
            raise SystemManagerException(f"User '{self.username}' is not a manager of system '{system.name}'.")
        # keep the OAuth client's system scope in sync
        if self.client:
            self.client.update(db=db, data={'systems': self.system_ids})
class CopyStyleToDialog(QDialog):
    """Dialog that copies the plot style of *current_key* onto a user-selected,
    filterable set of other keys.

    Apply (accept) triggers the copy via the caller; Close (reject) hides the
    dialog without applying.
    """
    def __init__(self, parent, current_key, key_defs):
        # NOTE(review): initializes via QWidget.__init__ rather than
        # super().__init__ / QDialog.__init__ -- works in Qt bindings but
        # bypasses the QDialog constructor; confirm this is intentional.
        QWidget.__init__(self, parent)
        self.setMinimumWidth(450)
        self.setMinimumHeight(200)
        self._dynamic = False
        self.setWindowTitle(f'Copy the style of {current_key} to other keys')
        self.activateWindow()
        layout = QFormLayout(self)
        # popup for filtering the key list by metadata; wired to a toolbar button
        self._filter_popup = FilterPopup(self, key_defs)
        self._filter_popup.filterSettingsChanged.connect(self.filterSettingsChanged)
        filter_popup_button = QToolButton()
        filter_popup_button.setIcon(QIcon('img:filter_list.svg'))
        filter_popup_button.clicked.connect(self._filter_popup.show)
        # checkable list of target keys; starts with nothing selected
        self._list_model = FilterableKwListModel(key_defs)
        self._list_model.unselectAll()
        self._cl = CheckList(self._list_model, custom_filter_button=filter_popup_button)
        layout.addWidget(self._cl)
        apply_button = QPushButton('Apply')
        apply_button.clicked.connect(self.accept)
        apply_button.setDefault(True)
        close_button = QPushButton('Close')
        close_button.setToolTip('Hide this dialog')
        close_button.clicked.connect(self.reject)
        button_layout = QHBoxLayout()
        button_layout.addStretch()
        button_layout.addWidget(apply_button)
        button_layout.addWidget(close_button)
        layout.addRow(button_layout)
    def getSelectedKeys(self):
        """Return the keys currently checked in the list."""
        return self._list_model.getSelectedItems()
    def filterSettingsChanged(self, item):
        """Apply the popup's {data_origin value: visible} settings to the list."""
        for (value, visible) in item.items():
            self._list_model.setFilterOnMetadata('data_origin', value, visible)
        self._cl.modelChanged()
class GameData():
    """Mutable holder for per-session game state.

    The my* attributes are class-level placeholders (0 means "not yet
    resolved") that callers overwrite with pointers/handles at runtime;
    instances additionally track a list of known soldiers.
    """
    # Placeholder handles, populated externally (0 = unresolved).
    myplayer = 0
    mysoldier = 0
    myteamid = 0
    myvehicle = 0
    myviewmatrix = 0
    mytransform = 0
    mydrag = 0
    mygravity = 0
    myinitialspeed = 0
    mypositionoffset = 0

    def __init__(self):
        # Per-instance soldier registry.
        self.soldiers = []

    def AddSoldier(self, soldier):
        """Register *soldier* in the tracked list."""
        # append() instead of `+= [soldier]`: same effect, no throwaway list.
        self.soldiers.append(soldier)

    def ClearSoldiers(self):
        """Forget all tracked soldiers."""
        self.soldiers = []
class RubygemsBackendtests(DatabaseTestCase):
    """Tests for the Anitya Rubygems backend.

    NOTE(review): several URL string literals in this chunk appear to have
    been scrubbed/truncated (e.g. `homepage=' backend=BACKEND)`), leaving
    unterminated quotes — the affected lines are not valid Python as shown
    and the original rubygems.org URLs need to be restored.
    """
    def setUp(self):
        """Create the distro and the two test projects used by every test."""
        super().setUp()
        create_distro(self.session)
        self.create_project()
    def create_project(self):
        """Insert one real gem ('bio') and one non-existent gem ('biofoobar')."""
        # NOTE(review): homepage URL literal scrubbed — restore before running.
        project = models.Project(name='bio', homepage=' backend=BACKEND)
        self.session.add(project)
        self.session.commit()
        # NOTE(review): homepage URL literal scrubbed — restore before running.
        project = models.Project(name='biofoobar', homepage=' backend=BACKEND)
        self.session.add(project)
        self.session.commit()
    def test_get_version(self):
        """get_version returns the latest version for a real gem and raises
        AnityaPluginException for a non-existent one."""
        pid = 1
        project = models.Project.get(self.session, pid)
        exp = '1.5.1'
        obs = backend.RubygemsBackend.get_version(project)
        self.assertEqual(obs, exp)
        pid = 2
        project = models.Project.get(self.session, pid)
        self.assertRaises(AnityaPluginException, backend.RubygemsBackend.get_version, project)
    def test_get_version_url(self):
        """get_version_url builds the rubygems API URL from the project name."""
        # NOTE(review): homepage and expected-URL literals scrubbed below.
        project = models.Project(name='test', homepage=' backend=BACKEND)
        exp = '
        obs = backend.RubygemsBackend.get_version_url(project)
        self.assertEqual(obs, exp)
    def test_get_versions(self):
        """get_ordered_versions returns the known versions for a real gem."""
        pid = 1
        project = models.Project.get(self.session, pid)
        exp = ['1.5.1']
        obs = backend.RubygemsBackend.get_ordered_versions(project)
        self.assertEqual(obs, exp)
        pid = 2
        project = models.Project.get(self.session, pid)
        self.assertRaises(AnityaPluginException, backend.RubygemsBackend.get_version, project)
    def test_rubygems_get_versions_not_modified(self):
        """A 304 Not Modified response yields an empty version list."""
        pid = 1
        project = models.Project.get(self.session, pid)
        # NOTE(review): expected-URL literal scrubbed.
        exp_url = '
        with mock.patch('anitya.lib.backends.BaseBackend.call_url') as m_call:
            m_call.return_value = mock.Mock(status_code=304)
            versions = backend.RubygemsBackend.get_versions(project)
            m_call.assert_called_with(exp_url, last_change=None)
            self.assertEqual(versions, [])
    def test_rubygems_check_feed(self):
        """check_feed yields (name, homepage, backend, version) tuples from
        the rubygems 'just updated' feed."""
        generator = backend.RubygemsBackend.check_feed()
        items = list(generator)
        # NOTE(review): homepage URL literals scrubbed inside the tuples.
        self.assertEqual(items[0], ('mathrix-rails', ' 'Rubygems', '1.0.0'))
        self.assertEqual(items[1], ('zipcoder', ' 'Rubygems', '0.2.0'))
def check_backend():
    """Verify that a usable traitsui UI backend is importable.

    Raises ImportError when a UI would be required (backend is not 'test'
    and not offscreen) but no toolkit could be resolved.
    """
    from traitsui.toolkit import toolkit
    from traits.etsconfig.api import ETSConfig
    from mayavi.tools.engine_manager import options
    # Force toolkit resolution so ETSConfig.toolkit reflects reality.
    toolkit()
    ui_required = (options.backend != 'test') and (not options.offscreen)
    # env_toolkit is a module-level setting read from the environment.
    no_toolkit = (ETSConfig.toolkit in ('null', '')) and (env_toolkit != 'null')
    if ui_required and no_toolkit:
        raise ImportError('Could not import backend for traitsui. Make sure you\n have a suitable UI toolkit like PyQt/PySide or wxPython\n installed.')
def describe_note(x, machine):
    """Render a one-line human-readable description of an ELF note *x*.

    Returns '<type> (<type description>)<detail>' where the detail part
    depends on the GNU note type; unrecognised payloads are hex-dumped.
    """
    payload = x['n_desc']
    raw_type = x['n_type']
    owner = x['n_name']
    # Build the type-specific detail text.
    if raw_type == 'NT_GNU_ABI_TAG':
        if owner == 'Android':
            # Android reuses the ABI-tag type number for its version note.
            detail = '\n description data: %s ' % bytes2hex(x['n_descdata'])
        else:
            detail = '\n OS: %s, ABI: %d.%d.%d' % (
                _DESCR_NOTE_ABI_TAG_OS.get(payload['abi_os'], _unknown),
                payload['abi_major'], payload['abi_minor'], payload['abi_tiny'])
    elif raw_type == 'NT_GNU_BUILD_ID':
        detail = '\n Build ID: %s' % payload
    elif raw_type == 'NT_GNU_GOLD_VERSION':
        detail = '\n Version: %s' % payload
    elif raw_type == 'NT_GNU_PROPERTY_TYPE_0':
        detail = '\n Properties: ' + describe_note_gnu_properties(x['n_desc'], machine)
    else:
        detail = '\n description data: {}'.format(bytes2hex(payload))
    # Resolve the displayed type name / type description.
    if raw_type == 'NT_GNU_ABI_TAG' and owner == 'Android':
        shown_type = 'NT_VERSION'
        shown_desc = 'version'
    elif isinstance(raw_type, str):
        # Known symbolic type: show it directly, describe via the table.
        shown_type = raw_type
        shown_desc = _DESCR_NOTE_N_TYPE.get(raw_type, _unknown)
    else:
        # Numeric (unrecognised) type: show its hex value.
        shown_type = 'Unknown note type:'
        shown_desc = ('0x%.8x' % raw_type) if isinstance(raw_type, int) else _DESCR_NOTE_N_TYPE.get(raw_type, _unknown)
    return '%s (%s)%s' % (shown_type, shown_desc, detail)
class OptionSeriesNetworkgraphSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Highcharts `playDelay` mapping options for networkgraph sonification
    default speech tracks.

    NOTE(review): each getter/setter pair was emitted as two plain methods,
    so the setter silently shadowed the getter and the getter was
    unreachable; restored the intended @property / @<name>.setter pattern
    (decorators appear to have been stripped from this generated file).
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def submit_slurm_job(config_dict: Optional[Dict]=None, parser: Optional[ArgumentParser]=None) -> None:
    """Build an sbatch script from the current CLI invocation and submit it.

    List-valued entries in *config_dict* become a SLURM job-array sweep
    (one array task per element of the cartesian product). The current
    command line is sanitized (--slurm* removed, agent path rewritten to
    a copy under the output dir) and embedded into the generated script.
    """
    if (config_dict is not None) and ('slurm' in config_dict):
        raise RuntimeError('--slurm is only available as a CLI argument')
    if config_dict is None:
        # Bug fix: the original dereferenced config_dict.items() below and
        # crashed when the argument was omitted despite being Optional.
        config_dict = {}
    # Each list-valued option contributes one sweep dimension of [key, value] pairs.
    sweep_options = [[[key, v] for v in value] for (key, value) in config_dict.items() if isinstance(value, list)]
    sweep_config_dict_list = []
    if len(sweep_options) > 0:
        # Cartesian product over all sweep dimensions -> one dict per array task.
        for option_list in itertools.product(*sweep_options):
            sweep_config_dict_list.append({k: v for (k, v) in option_list})
        # Swept keys are handled via the job array, not the base config.
        for x in sweep_options:
            if x[0][0] in config_dict:
                del config_dict[x[0][0]]
    cli_arguments = cli_argument_list(config_dict)
    parser = options.general_parser(config_dict, parser)
    options.add_evaluator_args(parser)
    options.add_scorer_args(parser, cli_arguments)
    options.add_slurm_args(parser)
    options.add_dataloader_args(parser, cli_arguments)
    system_class = get_agent_class(config_dict)
    system_class.add_args(parser)
    args = parser.parse_args(cli_argument_list(config_dict))
    args.output = os.path.abspath(args.output)
    assert mkdir_output_dir(args.output)
    if args.agent is None:
        args.agent = sys.argv[0]
    # Snapshot the agent file into the output dir. Use the list form (no
    # shell) so paths with spaces/metacharacters are safe; the original
    # used os.system(f'cp {...}').
    subprocess.run(['cp', args.agent, f'{args.output}/agent.py'])
    # Re-quote non-flag, non-numeric argv entries when reconstructing the command.
    _args = [sys.argv[0]]
    for arg in sys.argv[1:]:
        if str(arg).isdigit() or str(arg).startswith('--'):
            _args.append(arg)
        else:
            _args.append(f'"{arg}"')
    command = ' '.join(_args).strip()
    # Drop --slurm* options so the submitted job does not resubmit itself.
    command = re.sub('(--slurm\\S*(\\s+[^-]\\S+)*)', '', command).strip()
    # Point the command at the snapshotted agent.py.
    if subprocess.check_output(['which', 'simuleval']).decode().strip() in command:
        command = re.sub('--agent\\s+\\S+', f'--agent {args.output}/agent.py', command).strip()
    else:
        command = re.sub('[^\\"\'\\s]+\\.py', f'{os.path.abspath(args.output)}/agent.py', command).strip()
    sweep_command = ''
    sbatch_job_array_head = ''
    job_array_configs = ''
    if len(sweep_config_dict_list) > 0:
        # Emit bash associative arrays mapping array index -> extra CLI args
        # and array index -> a dotted string used for the per-task output dir.
        job_array_configs = 'declare -A JobArrayConfigs\n'
        for (i, sub_config_dict) in enumerate(sweep_config_dict_list):
            sub_config_string = ' '.join([f"--{k.replace('_', '-')} {v}" for (k, v) in sub_config_dict.items()])
            job_array_configs += f'JobArrayConfigs[{i}]="{sub_config_string}"\n'
        job_array_configs += '\ndeclare -A JobArrayString\n'
        for (i, sub_config_dict) in enumerate(sweep_config_dict_list):
            sub_config_string = '.'.join([str(v) for (k, v) in sub_config_dict.items()])
            job_array_configs += f'JobArrayString[{i}]="{sub_config_string}"\n'
        sweep_command = '${JobArrayConfigs[$SLURM_ARRAY_TASK_ID]}'
        sbatch_job_array_head = f'#SBATCH --array=0-{(len(sweep_config_dict_list) - 1)}'
        output_dir = (f'{args.output}' + '/results/${JobArrayString[$SLURM_ARRAY_TASK_ID]}')
        log_path = f'{args.output}/logs/slurm-%A_%a.log'
    else:
        output_dir = args.output
        log_path = f'{args.output}/slurm-%j.log'
    # Redirect --output to the (per-task) output directory.
    if '--output' in command:
        command = re.sub('--output\\s+\\S+', f'--output {output_dir}', command).strip()
    else:
        command += f' --output {output_dir}'
    # Cosmetic: one option per line in the generated script.
    command = command.replace('--', '\\\n\t--')
    script = f'''#!/bin/bash
#SBATCH --time={args.slurm_time}
#SBATCH --partition={args.slurm_partition}
#SBATCH --nodes=1
#SBATCH --gpus-per-node=1
#SBATCH --ntasks-per-node=8
#SBATCH --output="{log_path}"
#SBATCH --job-name="{args.slurm_job_name}"
{sbatch_job_array_head}
{job_array_configs}
mkdir -p {args.output}/logs
cd {os.path.abspath(args.output)}
GPU_ID=$SLURM_LOCALID
# Change to local a gpu id for debugging, e.g.
# GPU_ID=0
CUDA_VISIBLE_DEVICES=$GPU_ID {command} {sweep_command}
'''
    script_file = os.path.join(args.output, 'script.sh')
    with open(script_file, 'w') as f:
        f.writelines(script)
    process = subprocess.Popen(['sbatch', script_file], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    (stdout, stderr) = process.communicate()
    logger.info('Using slurm.')
    logger.info(f"sbatch stdout: {stdout.decode('utf-8').strip()}")
    stderr = stderr.decode('utf-8').strip()
    if len(stderr) > 0:
        logger.info(f'sbatch stderr: {stderr.strip()}')
class OptionSeriesSankeyDataDatalabelsTextpath(Options):
    """Highcharts `textPath` options for sankey data-label rendering.

    NOTE(review): the getter/setter pairs were plain methods, so setters
    shadowed getters; restored the intended @property / @<name>.setter
    pattern (decorators appear to have been stripped).
    """

    @property
    def attributes(self):
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def enabled(self):
        # Defaults to False when unset.
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
def test_cube_attr_rms_two_surfaces_compare_window(load_cube_rsgy1):
    """RMS attribute between two surfaces 30 units apart must equal the RMS
    over a +/-15 window centred midway between them."""
    top = xtgeo.surface_from_file(RTOP1)
    base = top.copy()
    base.values += 30
    cube = load_cube_rsgy1
    # Variant 1: slice between top and a surface 30 units below it.
    between = top.copy()
    between.slice_cube_window(cube, other=base, other_position='below', attribute='rms', sampling='trilinear')
    # Variant 2: a symmetric 15-unit window around the midpoint surface.
    windowed = top.copy()
    windowed.values += 15
    windowed.slice_cube_window(cube, zrange=15, attribute='rms', sampling='trilinear')
    assert between.values.mean() == windowed.values.mean()
def gen_function(func_attrs, problem_args_template, exec_cond_template, dim_info_dict, problem_args_template_cutlass_3x=None, extra_code=''):
    """Generate the CUDA source for a gemm+bias function.

    Renders the problem-args templates with the input/output element types
    and delegates to common.gen_function with split-k support enabled.
    """
    spec = CUDASpec()
    in_type = spec.dtype_to_lib_type(func_attrs['inputs'][0]._attrs['dtype'])
    out_type = spec.dtype_to_lib_type(func_attrs['outputs'][0]._attrs['dtype'])
    render_kwargs = {'elem_input_type': in_type, 'elem_output_type': out_type}
    rendered_problem_args = problem_args_template.render(**render_kwargs)
    # The CUTLASS 3.x variant is optional; empty when no template is given.
    if problem_args_template_cutlass_3x is None:
        rendered_problem_args_3x = ''
    else:
        rendered_problem_args_3x = problem_args_template_cutlass_3x.render(**render_kwargs)
    header = EXTRA_CODE_HEADER.render(**render_kwargs)
    return common.gen_function(
        func_attrs=func_attrs,
        src_template=common_bias.SRC_TEMPLATE,
        exec_cond_template=exec_cond_template,
        problem_args=rendered_problem_args,
        problem_args_cutlass_3x=rendered_problem_args_3x,
        input_ndims=len(func_attrs['input_accessors'][0].original_shapes),
        weight_ndims=len(func_attrs['input_accessors'][1].original_shapes),
        output_ndims=len(func_attrs['output_accessors'][0].original_shapes),
        dim_info_dict=dim_info_dict,
        support_split_k=True,
        output_addr_calculator=common.OUTPUT_ADDR_CALCULATOR.render(stride_dim='N', output_accessor=func_attrs['output_accessors'][0]),
        extra_code='\n\n'.join([header, extra_code]),
    )
def answer(question):
    """Evaluate a 'What is ...?' word problem left-to-right and return the result.

    Raises ValueError('syntax error') for malformed input and
    ValueError('unknown operation') for operations missing from the
    module-level VALID_OPERATIONS mapping (presumably word -> binary
    callable; defined outside this view — TODO confirm).
    """
    # question[8:(- 1)] strips the leading 'What is ' and the trailing '?'.
    if (not bool(question[8:(- 1)].strip().lower().split())):
        # Nothing between 'What is' and '?' -> syntax error.
        raise ValueError('syntax error')
    elif (not question.startswith('What is ')):
        raise ValueError('unknown operation')
    else:
        # Reverse the tokens so pop() consumes them left to right.
        words = question[8:(- 1)].strip().lower().split()
        words.reverse()
        try:
            # The question must open with a number.
            main_value = int(words.pop())
        except ValueError as error:
            raise ValueError('syntax error') from error
        while words:
            # Collect operation words until the next number is found
            # (operations may be multi-word, e.g. 'multiplied by').
            operation = [words.pop()]
            while words:
                try:
                    next_to_evaluate = words.pop()
                    second_value = int(next_to_evaluate)
                    break
                except ValueError as error:
                    # Two identical operation words in a row is malformed.
                    if (next_to_evaluate == operation[(- 1)]):
                        raise ValueError('syntax error') from error
                    else:
                        operation.append(next_to_evaluate)
            else:
                # Ran out of words without finding a second number:
                # distinguish an unknown operation from a dangling one.
                if ((operation[(- 1)] not in VALID_OPERATIONS) and (not operation[(- 1)].isdigit())):
                    raise ValueError('unknown operation')
                else:
                    raise ValueError('syntax error')
            operation = ' '.join(operation)
            try:
                # Apply the operation immediately (left-to-right, no precedence).
                main_value = VALID_OPERATIONS[operation](main_value, second_value)
            except KeyError as error:
                raise ValueError('syntax error') from error
        return main_value
def test_account_level_failure(client, download_test_data):
    """An unrecognised account_level must yield HTTP 400."""
    download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string())
    payload = {'account_level': 'not_tas_or_fa', 'filters': {'submission_types': ['account_balances'], 'fy': '2017', 'quarter': '4'}, 'file_format': 'csv'}
    response = client.post('/api/v2/download/accounts/', content_type='application/json', data=json.dumps(payload))
    assert response.status_code == status.HTTP_400_BAD_REQUEST
def print_strategy(strategy: Dict[(str, Dict[(str, int)])]):
    """Pretty-print a strategy as normalised action probabilities per info set.

    Uses tqdm.write so output interleaves cleanly with progress bars.
    Each info set's action weights are normalised by their sum before
    printing, so raw (unnormalised) counts are accepted.
    """
    for (info_set, action_to_probabilities) in sorted(strategy.items()):
        # sum() consumes the values view directly; no intermediate list needed.
        norm = sum(action_to_probabilities.values())
        tqdm.write(f'{info_set}')
        for (action, probability) in action_to_probabilities.items():
            tqdm.write(f' - {action}: {(probability / norm):.2f}')
def strongly_connected_components(digraph):
    """Return the strongly connected components of *digraph* (Tarjan's algorithm).

    *digraph* must expose an iterable ``nodes``; each node exposes an
    iterable ``outgoing`` of successor nodes. Returns a list of components,
    each a list of nodes, in reverse topological order of the condensation.

    Note: recursive — very deep graphs may hit the interpreter recursion
    limit. Assumes nodes are hashable (default object hashing suffices).
    """
    next_index = 0          # next DFS discovery index (was a one-element list hack)
    stack = []              # Tarjan's node stack
    on_stack = set()        # O(1) membership test; the original scanned the list per edge
    indices = {}            # node -> discovery index
    lowlinks = {}           # node -> lowest index reachable
    components = []

    def tarjan(node):
        nonlocal next_index
        indices[node] = lowlinks[node] = next_index
        next_index += 1
        stack.append(node)
        on_stack.add(node)
        for succ in node.outgoing:
            if succ not in indices:
                tarjan(succ)
                lowlinks[node] = min(lowlinks[node], lowlinks[succ])
            elif succ in on_stack:
                # Back edge to a node still on the stack.
                lowlinks[node] = min(lowlinks[node], lowlinks[succ])
        if lowlinks[node] == indices[node]:
            # node is the root of an SCC: pop the stack down to it.
            component = []
            while stack:
                member = stack.pop()
                on_stack.discard(member)
                component.append(member)
                if member == node:
                    break
            components.append(component)

    for node in digraph.nodes:
        if node not in indices:
            tarjan(node)
    return components
class gemm(Operator):
    """Base class for GEMM-family operators with profiling support.

    Subclasses implement _infer_shapes/_extract_dims; this class handles
    exec-path extraction, profiler generation/filtering, profile-cache
    queries, split-k search, and final code generation.
    """
    def __init__(self):
        super().__init__()
        # Operator identity and profiling defaults; subclasses override as needed.
        self._attrs['op'] = 'gemm'
        self._attrs['has_profiler'] = True
        self._attrs['f_ab_alignment'] = None
        self._attrs['epilogue_alignment'] = 1
        self._attrs['epilogue'] = 'LinearCombination'
        self._attrs['workspace'] = 0
        self._attrs['split_k'] = 1
        self._attrs['num_sources'] = 0
        self._attrs['alpha'] = 1.0
        self._attrs['permute_shape'] = ''
        self.exec_cond_template = EXEC_COND_TEMPLATE
    def _extract_epilogue_alignment(self, output_shape: List[Any], dynamic_profiling_strategy=None) -> None:
        """Derive the max epilogue alignment from the output's last dim.

        For dynamic dims the bound chosen depends on the profiling strategy;
        without a strategy, dynamic dims leave the alignment untouched.
        """
        epilogue_dim = output_shape[(- 1)]
        if isinstance(epilogue_dim, int):
            shape = epilogue_dim
        elif (not isinstance(epilogue_dim, IntImm)):
            # Dynamic dim: pick the bound matching the profiling strategy.
            if (dynamic_profiling_strategy is None):
                return
            elif (dynamic_profiling_strategy == DynamicProfileStrategy.MAX):
                shape = epilogue_dim.upper_bound()
            elif (dynamic_profiling_strategy == DynamicProfileStrategy.MIN):
                shape = epilogue_dim.lower_bound()
            else:
                raise RuntimeError(f'Unsupported dynamic profiling strategy: {dynamic_profiling_strategy}')
        else:
            # Static IntImm: use its concrete value.
            shape = epilogue_dim._attrs['values'][0]
        dtype = self._attrs['inputs'][0].dtype()
        self._attrs['epilogue_alignment'] = alignment.find_max_alignment(shape, dtype)
    def _infer_shapes(self, a: Tensor, b: Tensor):
        """Subclass hook: compute the output shape from operands A and B."""
        raise NotImplementedError('_infer_shapes() is not implemented!')
    def _gen_exec_key(self, name_value_mapping):
        """Build a C-like condition string ('M == 4 && N >= 1 && ...') from
        per-dim value lists (single value -> equality, two -> range)."""
        key_strs = []
        for (name, values) in name_value_mapping.items():
            if (len(values) == 1):
                key_strs.append(f'{name} == {values[0]}')
            elif (len(values) > 1):
                key_strs.append(f'{name} >= {values[0]} && {name} <= {values[(- 1)]}')
            else:
                raise RuntimeError('Gemm input has empty dim values: {}'.format(values))
        return ' && '.join(key_strs)
    def _extract_dims(self, for_profiling: bool=False) -> Dict[(str, List[DimInfo])]:
        """Subclass hook: map dim names (M/N/K...) to their DimInfo sources."""
        raise NotImplementedError('extract_dims() is not implemented!')
    def _extract_exec_path(self, dynamic_profiling_strategy):
        """Populate self._attrs['exec_path'] with one ExecItem keyed by the
        profiling shape chosen per *dynamic_profiling_strategy* (MIN/MAX)."""
        dim_info_dict: Dict[(str, List[DimInfo])] = self._extract_dims()
        dim_dict: Dict[(str, List[IntVar])] = {}
        for (name, dim_infos) in dim_info_dict.items():
            dim_info = None
            # Prefer an INPUT-sourced DimInfo; skip placeholders.
            for d in dim_infos:
                if d.placeholder:
                    continue
                if (dim_info is None):
                    dim_info = d
                elif (d.source == Source.INPUT):
                    dim_info = d
            assert (dim_info is not None), f"Couldn't find valid dim info for dim {name}"
            tensor_list = (self._attrs['inputs'] if (dim_info.source == Source.INPUT) else self._attrs['outputs'])
            # Use accessor original shapes when available (they reflect the
            # pre-fusion view of the tensor); fall back to the tensor shape.
            if ((dim_info.source == Source.INPUT) and ('input_accessors' in self._attrs)):
                dim_dict[name] = _to_list(itemgetter(*dim_info.dim_idx)(self._attrs['input_accessors'][dim_info.tensor_idx].original_shapes))
            elif ((dim_info.source == Source.OUTPUT) and ('output_accessors' in self._attrs)):
                dim_dict[name] = _to_list(itemgetter(*dim_info.dim_idx)(self._attrs['output_accessors'][dim_info.tensor_idx].original_shapes))
            else:
                dim_dict[name] = _to_list(itemgetter(*dim_info.dim_idx)(tensor_list[dim_info.tensor_idx]._attrs['shape']))
        shape_values_dict = {}
        for (name, dims) in dim_dict.items():
            # A named dim may be a product of several IntVars; bound the product.
            min_value = math.prod([dim.lower_bound() for dim in dims])
            max_value = math.prod([dim.upper_bound() for dim in dims])
            shape_values_dict[name] = sorted({min_value, max_value})
        self._attrs['exec_path'] = OrderedDict()
        if (dynamic_profiling_strategy == DynamicProfileStrategy.MAX):
            max_values = {name: [max(shape_values)] for (name, shape_values) in shape_values_dict.items()}
            exec_item = ExecItem(profiling_key=self._gen_exec_key(max_values), exec_cond=self._gen_exec_key(shape_values_dict), algo='')
            self._attrs['exec_path'][exec_item.profiling_key] = exec_item
        elif (dynamic_profiling_strategy == DynamicProfileStrategy.MIN):
            min_values = {name: [min(shape_values)] for (name, shape_values) in shape_values_dict.items()}
            exec_item = ExecItem(profiling_key=self._gen_exec_key(min_values), exec_cond=self._gen_exec_key(shape_values_dict), algo='')
            self._attrs['exec_path'][exec_item.profiling_key] = exec_item
        else:
            raise NotImplementedError('Gemm only supports MIN or MAX dynamic profiling! Current dynamic_profiling_strategy: {}'.format(dynamic_profiling_strategy))
    def _get_profiler_filename(self):
        """Name the profiler binary from op type + hash of kernel names
        (+ cache version unless dummy profiling results are in use)."""
        target = backend.target.Target.current()
        op_type = self._attrs['op']
        all_op_names = list(self._attrs['op_instance'].keys())
        encoded_str = sha1(';'.join(all_op_names).encode('utf-8')).hexdigest()
        if target.use_dummy_profiling_results():
            return f'{op_type}_{encoded_str}'
        else:
            cache_ver = target.get_profile_cache_version('gemm')
            return f'{op_type}_{encoded_str}_{cache_ver}'
    def _should_build_profiler(self, workloads: List[str], new_op_instance: OrderedDict):
        """Return False when every workload is satisfiable from the profile
        cache (and caching is not force-disabled); True otherwise."""
        if environ.force_profiler_cache():
            # Cache is mandatory: never build a profiler binary.
            return False
        target = backend.target.Target.current()
        build_profiler = True
        if (not target.use_dummy_profiling_results()):
            tmp_key = next(iter(new_op_instance.keys()))
            tmp_op = new_op_instance[tmp_key]
            build_profiler = False
            for wkl in workloads:
                exec_entry_sha1 = sha1(wkl.encode('utf-8')).hexdigest()
                query = GemmQueryEntry(dtype_a=(tmp_op.A.element.value - 1), dtype_b=(tmp_op.B.element.value - 1), dtype_c=(tmp_op.C.element.value - 1), dtype_acc=(tmp_op.accumulator_type().value - 1), major_a=tmp_op.A.layout.value, major_b=tmp_op.B.layout.value, major_c=tmp_op.C.layout.value, op_type=self._attrs['op'], device=target._arch, epilogue=tmp_op.epilogue_functor.value, exec_entry_sha1=exec_entry_sha1, pshape=self._attrs['permute_shape'])
                cache_value = target.query_profile_cache('gemm', query.__dict__)
                if ((cache_value is not None) and (not target.force_profile())):
                    _LOGGER.info(f"Load profiling result for {self._attrs['name']} from cache: {cache_value}")
                    (best_algo, workspace, split_k) = cache_value
                    self._attrs['exec_path'][wkl].algo = best_algo
                    self._attrs['workspace'] = max(self._attrs['workspace'], workspace)
                    self._attrs['split_k'] = split_k
                else:
                    # At least one workload missed the cache: build the profiler.
                    build_profiler = True
        return build_profiler
    def gen_profiler(self, workdir: str=None, dynamic_profiling_strategy=DynamicProfileStrategy.MAX) -> None:
        """Generate (and filter) kernel instances, then emit profiler sources
        unless all results are already cached."""
        target = backend.target.Target.current()
        func_key = '{target}.{op}.config'.format(target=target.name(), op=self._attrs['op'])
        func = registry.get(func_key)
        func(self._attrs, dtype=self._attrs['inputs'][0]._attrs['dtype'])
        self._extract_exec_path(dynamic_profiling_strategy)
        workloads = list(self._attrs['exec_path'].keys())
        ab_alignments = sorted({self._get_ab_alignment(wkl) for wkl in workloads})
        assert (1 == len(ab_alignments)), f'ab_alignments should be the same among all workloads, got ab_alignments={ab_alignments!r}'
        func_key = '{target}.{op}.filter'.format(target=target.name(), op=self._attrs['op'])
        if ('layout' not in self._attrs):
            output_shape = self._attrs['output_accessors'][0].original_shapes
            self._extract_epilogue_alignment(output_shape, dynamic_profiling_strategy)
        if (not self._attrs['op_instance']):
            raise RuntimeError(f"No GEMM op instances were generated for {self._attrs['op']}.")
        # Drop kernels incompatible with the common A/B alignment.
        filter_func = registry.get(func_key)
        new_op_instance = OrderedDict(((k, v) for (k, v) in self._attrs['op_instance'].items() if filter_func(k, self._attrs, ab_alignments[0])))
        _LOGGER.debug(f"Filtered profiler kernels for {self._attrs['op']}: reduced the number of generated kernels from {len(self._attrs['op_instance'])} to {len(new_op_instance)}")
        self._attrs['op_instance'] = new_op_instance
        if (not self._attrs['op_instance']):
            raise RuntimeError(f"No GEMM op instances are left after filtering for {self._attrs['op']}. This is probably due to incompatible alignment requirements.")
        build_profiler = self._should_build_profiler(workloads, new_op_instance)
        if build_profiler:
            func_key = '{target}.{op}.gen_profiler'.format(target=target.name(), op=self._attrs['op'])
            func = registry.get(func_key)
            profiler_filename = self._get_profiler_filename()
            _LOGGER.info(f'generating profiler_filename={profiler_filename!r}')
            return func(self._attrs, workdir, profiler_filename, self._extract_dims(for_profiling=True))
    def _gen_profile_cmd(self, profiler_prefix, profiler_filename, exec_key, fbuild_cmd):
        """Assemble the profiler command line for *exec_key*.

        NOTE(review): the only visible call site (in _profile_single_workload)
        passes three arguments, omitting fbuild_cmd — that call would raise
        TypeError as written; confirm whether fbuild_cmd should have a
        default or be supplied by subclasses.
        """
        exe_path = os.path.join(profiler_prefix, profiler_filename)
        # The profiler binary may still be compiling; retry before giving up.
        if (not _check_with_retries(condition=(lambda : os.access(exe_path, os.X_OK)), max_attempts=3, delay_seconds=5)):
            raise RuntimeError(('Profiler %s is not executable' % exe_path))
        cmd_args = fbuild_cmd(exec_key)
        cmd = [exe_path]
        cmd.extend(cmd_args)
        command = [str(x) for x in cmd]
        if (self._attrs.get('shape') is not None):
            # ROCm profilers take the extra shape values as trailing args.
            if (backend.target.Target.current().name() == 'rocm'):
                for x in self._attrs['shape']:
                    command.append(str(x))
        return command
    def _split_k_search_space(self, M, N, K):
        """Candidate split-k values for a (M, N, K) problem; always includes 1.

        ROCm only supports split_k == 1 here.
        """
        space = [1]
        if (backend.target.Target.current().name() == 'rocm'):
            return set(space)
        # Heuristic range around K / max(M, N), capped at 32, stepping by 2.
        factor = (K // max(M, N))
        low_range = max(1, (factor // 4))
        high_range = min(factor, 32)
        if (low_range == 1):
            low_range += 1
        space += list(range(low_range, high_range, 2))
        _LOGGER.debug(f'profiling split-k for gemm instance M={M}, N={N}, K={K} in {set(space)}')
        return set(space)
    def _get_ab_alignment(self, exec_key):
        """Compute the A/B alignment for a workload key (min across groups
        for group_gemm); validates it against the input dtype."""
        if self._attrs['op'].startswith('group_gemm'):
            (all_m, all_n, all_k) = group_gemm_inverse_key_func(exec_key)
            all_ab_alignments = [self._attrs['f_ab_alignment'](int(m), int(n), int(k)) for (m, n, k) in zip(all_m, all_n, all_k)]
            ab_alignment = min(all_ab_alignments)
        else:
            (m, n, k) = gemm_inverse_key_func(exec_key)[(- 3):]
            ab_alignment = self._attrs['f_ab_alignment'](m, n, k)
            if (not alignment.valid_alignment(ab_alignment, self._attrs['inputs'][0].dtype())):
                raise RuntimeError(f'A / B ab_alignment={ab_alignment!r} is not valid! The last dimension of each input tensor needs to be divisible by 2.m: {m}, n: {n}, k: {k}.')
        return ab_alignment
    def _profile_single_workload(self, profiler_prefix, exec_key, profiler_runner, force_cache):
        """Profile one workload: serve it from the profile cache when possible,
        otherwise enqueue profiler runs (one per split-k candidate)."""
        target = backend.target.Target.current()
        tmp_key = next(iter(self._attrs['op_instance'].keys()))
        tmp_op = self._attrs['op_instance'][tmp_key]
        exec_entry_sha1 = sha1(exec_key.encode('utf-8')).hexdigest()
        split_k = (1 if (self._attrs['split_k'] is None) else self._attrs['split_k'])
        query = GemmQueryEntry(dtype_a=(tmp_op.A.element.value - 1), dtype_b=(tmp_op.B.element.value - 1), dtype_c=(tmp_op.C.element.value - 1), dtype_acc=(tmp_op.accumulator_type().value - 1), major_a=tmp_op.A.layout.value, major_b=tmp_op.B.layout.value, major_c=tmp_op.C.layout.value, op_type=self._attrs['op'], device=target._arch, epilogue=tmp_op.epilogue_functor.value, exec_entry_sha1=exec_entry_sha1, pshape=self._attrs['permute_shape'])
        cache_value = target.query_profile_cache('gemm', query.__dict__)
        if ((cache_value is not None) and (not target.force_profile())):
            _LOGGER.debug(f"Load profiling result for {self._attrs['name']} from cache: {cache_value}")
            self._attrs['exec_path'][exec_key].algo = cache_value[0]
            self._attrs['workspace'] = max(self._attrs['workspace'], cache_value[1])
            self._attrs['split_k'] = cache_value[2]
            return
        if ((cache_value is None) and force_cache):
            op_type = self._attrs['op']
            # NOTE(review): multiple args to RuntimeError produce a tuple-style
            # message, not a concatenated string — possibly unintended.
            raise RuntimeError('force_cache is enabled but we could not find the following cache ', f'available on device target._arch={target._arch!r}, op_type={op_type!r}, exec_entry_sha1={exec_entry_sha1!r}')
        if target.use_dummy_profiling_results():
            op_type = self._attrs['op']
            raise Exception('This is a CI run but we could not find the following cache ', f'''available on device {target._arch}
''', f'''{op_type} {exec_entry_sha1}.
''', 'Please adjust target.select_minimal_algo function.')
        profiler_filename = self._get_profiler_filename()
        def _gen_callback(split_k):
            # Bind split_k so the async result is attributed to the right run.
            def process_result_callback(result, postprocessing_delegate):
                postprocessing_delegate.add_instance((result, self._attrs, profiler_filename, exec_key, split_k))
            return process_result_callback
        # NOTE(review): _gen_profile_cmd is defined with a required fbuild_cmd
        # parameter that this call does not pass — TODO confirm intended arity.
        command = self._gen_profile_cmd(profiler_prefix, profiler_filename, exec_key)
        if (self._attrs['op'].startswith('group_gemm') or self._attrs['op'].startswith('bmm')):
            # group_gemm / bmm do not search split-k.
            profiler_runner.push(command, _gen_callback(split_k=1))
        else:
            (m, n, k) = gemm_inverse_key_func(exec_key)[(- 3):]
            if ('split_k_hints' in self._attrs):
                split_k_search_space = self._attrs['split_k_hints']
            else:
                split_k_search_space = self._split_k_search_space(m, n, k)
            for split_k in split_k_search_space:
                gemm_command = (command + [str(split_k)])
                profiler_runner.push(gemm_command, _gen_callback(split_k))
    def profile(self, profiler_runner, workdir='./'):
        """Profile all workloads in exec_path, honouring dummy-results mode
        and the forced profile cache."""
        workloads = list(self._attrs['exec_path'].keys())
        profiler_prefix = os.path.join(workdir, 'profiler', self._attrs['op'])
        if ('op_instance' not in self._attrs):
            # gen_profiler was skipped; generate kernel configs now.
            target = backend.target.Target.current()
            func_key = '{target}.{op}.config'.format(target=target.name(), op=self._attrs['op'])
            func = registry.get(func_key)
            func(self._attrs, dtype=self._attrs['inputs'][0]._attrs['dtype'])
        target = backend.target.Target.current()
        force_cache = environ.force_profiler_cache()
        for wkl in workloads:
            _LOGGER.info('Profile: {name}: {wkl}'.format(name=self._attrs['name'], wkl=wkl))
            if (target.use_dummy_profiling_results() and (not force_cache)):
                # CI mode: pick a deterministic minimal algorithm, skip profiling.
                algo = target.select_minimal_algo(list(self._attrs['op_instance'].keys()))
                _LOGGER.info(f'Select minimal algo {algo} for CI')
                self._attrs['exec_path'][wkl].algo = algo
                self._attrs['workspace'] = 102400
            elif (self._attrs['exec_path'][wkl].algo != ''):
                # NOTE(review): this `return` aborts the remaining workloads when
                # one is already profiled — `continue` may have been intended.
                return
            else:
                self._profile_single_workload(profiler_prefix, wkl, profiler_runner, force_cache)
    def gen_function(self) -> str:
        """Generate the final source for this op via the backend registry."""
        target = backend.target.Target.current()
        func_key = '{target}.{op}.gen_function'.format(target=target.name(), op=self._attrs['op'])
        func = registry.get(func_key)
        return func(self._attrs, self.exec_cond_template, self._extract_dims())
    def _signature(self) -> str:
        """Hash of op name + split_k, used to key generated artifacts."""
        op_name = (self._attrs['op'] + ('split_' + str(self._attrs['split_k'])))
        signature = sha1(op_name.encode('utf-8')).hexdigest()
        return signature
    def _align_ab(self, a: Tensor, b: Tensor):
        """Subclass hook to pad/realign operands; identity by default."""
        return (a, b)
    def _sanity_check(self, a: Tensor, b: Tensor):
        """Validate operand ranks (A >= 2-D, B exactly 2-D) and dtype match."""
        a_shapes = a._attrs['shape']
        if (len(a_shapes) < 2):
            raise RuntimeError('gemm operand A should have >= 2 dimensions! Current shape: {}.'.format(a_shapes))
        b_shapes = b._attrs['shape']
        if (len(b_shapes) != 2):
            raise RuntimeError('gemm operand B should have 2 dimensions! Current shape: {}.'.format(b_shapes))
        if (not is_same_dtype(a.dtype(), b.dtype())):
            raise RuntimeError('gemm operand A and B should have the same data type! Current A: {atype}, B: {btype}.'.format(atype=a.dtype(), btype=b.dtype()))
    def __call__(self, a: Tensor, b: Tensor) -> Tensor:
        """Wire inputs/accessors, infer the output shape, and return the
        output tensor produced by this op."""
        (a, b) = self._align_ab(a, b)
        self._attrs['inputs'] = [a, b]
        self._attrs['input_accessors'] = [TensorAccessor(a), TensorAccessor(b)]
        self._set_depth()
        self._sanity_check(a, b)
        output_shape = self._infer_shapes(a, b)
        self._extract_epilogue_alignment(output_shape)
        output = Tensor(output_shape, src_ops={self}, dtype=a.dtype())
        self._attrs['outputs'] = [output]
        self._attrs['output_accessors'] = [TensorAccessor(output)]
        return output
class OptionPlotoptionsPyramid3dDatalabelsFilter(Options):
    """Highcharts data-label `filter` options for pyramid3d plots.

    NOTE(review): the getter/setter pairs were plain methods, so setters
    shadowed getters; restored the intended @property / @<name>.setter
    pattern (decorators appear to have been stripped).
    """

    @property
    def operator(self):
        return self._config_get(None)

    @operator.setter
    def operator(self, value: Any):
        self._config(value, js_type=False)

    @property
    def property(self):
        return self._config_get(None)

    @property.setter
    def property(self, text: str):
        self._config(text, js_type=False)
class AMIClientAdapter(object):
    """Thin convenience wrapper: attribute access becomes an AMI action.

    `adapter.Ping(...)` builds and sends an Action named 'Ping' through the
    wrapped AMI client.
    """

    def __init__(self, ami_client):
        self._ami_client = ami_client

    def _action(self, name, _callback=None, variables=None, **kwargs):
        """Build an Action *name* with **kwargs and send it.

        *variables* defaults to a fresh dict per call — the previous
        mutable default ({}) would have been shared between every caller
        that omitted it (and Action may mutate it downstream).
        """
        action = Action(name, kwargs)
        action.variables = {} if variables is None else variables
        return self._ami_client.send_action(action, _callback)

    def __getattr__(self, item):
        # Any unknown attribute becomes a partial bound to that action name.
        return partial(self._action, item)
from dataclasses import dataclass


@dataclass
class CallbackData:
    """Record of a pending feature-change callback.

    Equality compares *target* by identity, *feature*/*new_data* by value,
    and *callback_function* by identity (two None callbacks are equal).

    NOTE(review): the annotated fields had no generated __init__ — the
    @dataclass decorator appears to have been stripped from this file and
    is restored here (the hand-written __eq__ takes precedence over the
    dataclass-generated one, per the dataclasses documentation).
    """
    target: Any                         # object whose feature changed (identity-compared)
    feature: str                        # name of the changed feature
    new_data: Any                       # the new value for the feature
    callback_function: callable = None  # optional callback to invoke

    def __eq__(self, other):
        """Field-wise equality; raises TypeError for non-CallbackData."""
        if not isinstance(other, CallbackData):
            raise TypeError('Can only compare against other <CallbackData> types')
        if other.target is not self.target:
            return False
        if not (other.feature == self.feature):
            return False
        if not (other.new_data == self.new_data):
            return False
        if (self.callback_function is None) and (other.callback_function is None):
            return True
        if other.callback_function is self.callback_function:
            return True
        else:
            return False
def create_dsr_policy(key: str):
    """Create (or upsert) a fidesops DSR policy named *key*.

    Returns the PATCH response JSON on success; raises RuntimeError when
    the request fails or nothing was created.
    """
    url = f'{FIDESOPS_V1_API_URL}{ops_urls.POLICY_LIST}'
    payload = [{'name': key, 'key': key}]
    response = requests.patch(url, headers=oauth_header, json=payload)
    if response.ok:
        succeeded = response.json()['succeeded']
        if succeeded:
            logger.info('Created fidesops policy with key={} via {}', key, url)
            return response.json()
    # Either a non-2xx status or an empty 'succeeded' list.
    raise RuntimeError(f'fidesops policy creation failed! response.status_code={response.status_code}, response.json()={response.json()}')
class OptionPlotoptionsScatter3dSonificationContexttracksMappingLowpassResonance(Options):
    """Highcharts lowpass-resonance mapping options for scatter3d
    sonification context tracks.

    NOTE(review): each getter/setter pair was emitted as two plain methods,
    so setters silently shadowed getters; restored the intended
    @property / @<name>.setter pattern (decorators appear stripped).
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OpenBridgeConfiguration(AbstractCrudObject):
    """Graph API CRUD wrapper for an Open Bridge configuration node.

    Follows the facebook-business SDK's auto-generated object pattern:
    a Field enumeration of Graph field names, api_* methods that build
    FacebookRequest objects, and a _field_types map for deserialization.

    NOTE(review): get_endpoint and _get_field_enum_info take `cls` — their
    @classmethod decorators appear to have been stripped in extraction.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        # Marker attribute used by the SDK to identify the node type.
        self._isOpenBridgeConfiguration = True
        super(OpenBridgeConfiguration, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # Graph API field names exposed by this node.
        access_key = 'access_key'
        active = 'active'
        endpoint = 'endpoint'
        fallback_domain = 'fallback_domain'
        fallback_domain_enabled = 'fallback_domain_enabled'
        host_business_id = 'host_business_id'
        host_external_id = 'host_external_id'
        id = 'id'
        pixel_id = 'pixel_id'

    def get_endpoint(cls):
        # Graph edge name under the parent Business node.
        return 'openbridge_configurations'

    def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        # Creation is delegated to the parent Business edge.
        from facebook_business.adobjects.business import Business
        return Business(api=self._api, fbid=parent_id).create_open_bridge_configuration(fields, params, batch, success, failure, pending)

    def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Issue (or batch/defer) a DELETE on this node."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            # Batched: queue and return the unexecuted request.
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            # Deferred: caller executes the request itself.
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Issue (or batch/defer) a GET on this node, reusing self as the parse target."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=OpenBridgeConfiguration, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Issue (or batch/defer) a POST updating this node's mutable fields."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        # Server-side type constraints for the updatable parameters.
        param_types = {'access_key': 'string', 'active': 'bool', 'endpoint': 'string', 'fallback_domain': 'string', 'fallback_domain_enabled': 'bool', 'host_business_id': 'unsigned int', 'host_external_id': 'string'}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=OpenBridgeConfiguration, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Field name -> wire type, used when parsing API responses.
    _field_types = {'access_key': 'string', 'active': 'bool', 'endpoint': 'string', 'fallback_domain': 'string', 'fallback_domain_enabled': 'bool', 'host_business_id': 'string', 'host_external_id': 'string', 'id': 'string', 'pixel_id': 'string'}

    def _get_field_enum_info(cls):
        # No enum-typed fields on this node.
        field_enum_info = {}
        return field_enum_info
def test_deepcopy_memo():
    """Deep-copying with a pre-populated memo must return the memoized object,
    while the underlying original value stays shared."""
    factory = providers.Factory(object)
    original = providers.NamedInjection('name', factory)
    memoized = providers.NamedInjection('name', factory)
    memo = {id(original): memoized}
    copied = providers.deepcopy(original, memo)
    assert copied is memoized
    assert copied.get_original_value() is original.get_original_value()
def test_keepalive_after_agent_exit(isolated_client):
    """Remote execution should keep working after a remote process hard-exits.

    NOTE(review): the bare `_client('virtualenv')` calls preceding each `def`
    look like decorators (@_client('virtualenv')) whose '@' prefix was lost in
    extraction — confirm against the original file before relying on this test.
    """
    _client('virtualenv')
    def regular_function():
        return 42
    # Sanity check: the agent answers before we crash it.
    assert (regular_function() == 42)
    # Crash the remote side with os._exit(0); suppress whatever the bridge raises.
    with suppress(BaseException):
        _client('virtualenv')
        def process_crash_without_catching():
            import os
            os._exit(0)
        process_crash_without_catching()
    # The client must transparently respawn the agent and serve calls again.
    _client('virtualenv')
    def regular_function():
        return 42
    assert (regular_function() == 42)
class WCAG21Contrast(ColorContrast):
    """WCAG 2.1 contrast ratio: (L_lighter + 0.05) / (L_darker + 0.05)."""

    NAME = 'wcag21'

    def contrast(self, color1: 'Color', color2: 'Color', **kwargs: Any) -> float:
        """Return the contrast ratio between two colors (>= 1.0)."""
        # Clamp negative luminance to zero, then order darker-first so the
        # ratio is always lighter over darker.
        darker, lighter = sorted((max(0, color1.luminance()),
                                  max(0, color2.luminance())))
        return (lighter + 0.05) / (darker + 0.05)
def patch_info_in_place(fpatch):
    """Summarize an in-place patch file without applying it.

    Reads the in-place header, then walks each output segment collecting its
    dfpatch size, data-format name and inner sequential-patch info.

    Returns a tuple: (patch_size, compression, compression_info, memory_size,
    segment_size, shift_size, from_size, to_size, segments).
    """
    patch_size = file_size(fpatch)
    (compression, memory_size, segment_size,
     shift_size, from_size, to_size) = read_header_in_place(fpatch)
    segments = []
    patch_reader = None
    if to_size > 0:
        patch_reader = PatchReader(fpatch, compression)
        for to_pos in range(0, to_size, segment_size):
            # The last segment may be shorter than segment_size.
            segment_to_size = min(segment_size, to_size - to_pos)
            dfpatch_size = unpack_size(patch_reader)
            if dfpatch_size > 0:
                data_format = data_format_number_to_string(unpack_size(patch_reader))
                patch_reader.decompress(dfpatch_size)
            else:
                data_format = None
            info = patch_info_sequential_inner(patch_reader, segment_to_size)
            segments.append((dfpatch_size, data_format, info))
    return (patch_size, compression, _compression_info(patch_reader),
            memory_size, segment_size, shift_size, from_size, to_size, segments)
def deploy(w3, contract_factory, apply_func=identity, args=None):
    """Deploy *contract_factory* and return a contract bound to the new address.

    *apply_func* lets callers post-process the deployed address (e.g. change
    its checksum casing) before the contract object is constructed.
    """
    ctor_args = args or []
    txn_hash = contract_factory.constructor(*ctor_args).transact()
    receipt = w3.eth.wait_for_transaction_receipt(txn_hash)
    assert receipt is not None
    address = apply_func(receipt['contractAddress'])
    deployed = contract_factory(address=address)
    assert deployed.address == address
    # Code must actually be present at the address for a successful deploy.
    assert len(w3.eth.get_code(deployed.address)) > 0
    return deployed
def match_ethernet_src_address(self, of_ports, priority=None):
    """Install a flow matching only on a fixed Ethernet source address.

    Returns (probe packet, installed match) so callers can verify forwarding.
    """
    probe_pkt = simple_eth_packet(eth_src='00:01:01:01:01:01')
    match = parse.packet_to_flow_match(probe_pkt)
    self.assertTrue(match is not None, 'Could not generate flow match from pkt')
    # Wildcard every field except the datalink source address.
    match.wildcards = ofp.OFPFW_ALL ^ ofp.OFPFW_DL_SRC
    match_send_flowadd(self, match, priority, of_ports[1])
    return (probe_pkt, match)
class BuildrootOverrideMessage(BodhiMessage):
    """Base class for Bodhi buildroot-override bus messages.

    NOTE(review): these accessors read like @property methods (e.g. `summary`
    uses `self.submitter.name`, not `self.submitter().name`) — the decorators
    appear to have been stripped in extraction; confirm against the original.
    """

    def build(self) -> BuildV1:
        # The override's build (NVR) from the message body.
        return BuildV1(self.body['override']['nvr'])

    def submitter(self) -> UserV1:
        # The user who submitted the override.
        return UserV1(self.body['override']['submitter']['name'])

    def summary(self) -> str:
        # Human-readable one-line summary rendered from the subclass template.
        return self._summary_tmpl.format(submitter=self.submitter.name, build=self.build.nvr)

    def __str__(self) -> str:
        return self.summary

    def url(self) -> str:
        # NOTE(review): the f-string literal below was truncated during
        # extraction (unterminated). It presumably returned the override's web
        # URL — restore from the original source before use.
        return f'

    def packages(self) -> typing.Iterable[str]:
        # Package(s) affected by this override.
        return [self.build.package]

    def agent_name(self) -> str:
        # The acting user is the override's submitter.
        return self.submitter.name

    def usernames(self) -> typing.List[str]:
        # Users who should be notified about this message.
        return [self.agent_name]
class FaqTypeListPost(ResourceList):
    """POST-only list endpoint for FAQ types, restricted to event co-organizers."""

    def before_post(self, args, kwargs, data):
        """Validate the event relationship and the caller's authorization."""
        require_relationship(['event'], data)
        authorized = has_access('is_coorganizer', event_id=data['event'])
        if not authorized:
            raise ForbiddenError({'source': ''}, 'Co-organizer access is required.')

    methods = ['POST']
    schema = FaqTypeSchema
    data_layer = {'session': db.session, 'model': FaqType}
class InstanceEditorDemo(HasTraits):
    """Traits UI demo showing the same Instance trait in every editor style."""

    # The instance being edited; default-constructed SampleClass.
    sample_instance = Instance(SampleClass, ())

    # One Item per editor style, separated by horizontal rules ('_').
    inst_group = Group(Item('sample_instance', style='simple', label='Simple', id='simple'), Item('_'), Item('sample_instance', style='custom', label='Custom', id='custom'), Item('_'), Item('sample_instance', style='text', label='Text'), Item('_'), Item('sample_instance', style='readonly', label='ReadOnly'))

    # Default view for the demo window.
    traits_view = View(inst_group, title='InstanceEditor', buttons=['OK'], resizable=True)
def test_custom_specs(app, cli_runner):
    """The CLI schema generator must emit the same spec as Swagger.get_apispecs."""
    endpoint_name = 'custom_endpoint'
    cfg = dict(Swagger.DEFAULT_CONFIG)
    cfg['specs'][0]['endpoint'] = endpoint_name
    swagger = Swagger(app, config=cfg)
    outcome = cli_runner.invoke(generate_api_schema, ['-e', endpoint_name])
    assert outcome.exit_code == 0
    generated = json.loads(outcome.output)
    assert generated == swagger.get_apispecs(endpoint_name)
# NOTE(review): the two parenthesized lines below look like hypothesis
# decorators (@settings(max_examples=250) / @given(string_value=...)) whose
# '@'-prefixed names were lost in extraction; as written they are not valid
# Python — restore from the original source.
(max_examples=250)
(string_value=st.one_of(st.none(), st.text(min_size=0, max_size=256), st.binary(min_size=0, max_size=256)))
def test_encode_text_string(string_value):
    """TextStringEncoder must ABI-encode text and reject non-text input."""
    encoder = TextStringEncoder()
    # Non-text input (None/bytes) must raise with the encoder name in the message.
    if (not is_text(string_value)):
        with pytest.raises(EncodingTypeError) as exception_info:
            encoder(string_value)
        assert ('TextStringEncoder' in str(exception_info.value))
        return
    string_value_as_bytes = codecs.encode(string_value, 'utf8')
    # ABI layout: 32-byte length word, then the UTF-8 payload right-padded to a
    # 32-byte boundary; an empty string encodes as a single zero word.
    expected_value = ((encode_uint_256(len(string_value_as_bytes)) + (zpad_right(string_value_as_bytes, ceil32(len(string_value_as_bytes))) if string_value else b'')) if string_value else (b'\x00' * 32))
    encoded_value = encoder(string_value)
    assert (encoded_value == expected_value)
def item_create(item_model: CollectionItemIn, collection: models.Collection, validate_etag: bool):
    """Create (or append a revision to) a collection item inside one transaction.

    When *validate_etag* is set, the client-supplied etag must match the
    item's current etag or a 409 is raised. Returns the item instance.
    """
    etag = item_model.etag
    revision_data = item_model.content
    uid = item_model.uid
    Model = models.CollectionItem
    with transaction.atomic():
        (instance, created) = Model.objects.get_or_create(uid=uid, collection=collection, defaults=item_model.dict(exclude={'uid', 'etag', 'content'}))
        # A brand-new item has no previous etag to validate against.
        cur_etag = (instance.etag if (not created) else None)
        # Idempotency: the exact same revision is already stored — nothing to do.
        if (cur_etag == revision_data.uid):
            return instance
        if (validate_etag and (cur_etag != etag)):
            raise ValidationError('wrong_etag', 'Wrong etag. Expected {} got {}'.format(cur_etag, etag), status_code=status.HTTP_409_CONFLICT, field=uid)
        if (not created):
            # Lock and retire the current revision before attaching the new one.
            current_revision = instance.revisions.filter(current=True).select_for_update()[0]
            assert (current_revision is not None)
            current_revision.current = None
            current_revision.save()
        try:
            process_revisions_for_item(instance, revision_data)
        except django_exceptions.ValidationError as e:
            # Re-raise as an API-level validation error on the 'content' field.
            transform_validation_error('content', e)
        return instance
def print_error(failedCall, jsErrorline, jsStackTrace, jsErrorMessage, pyErrorline, pyStacktrace):
    """Build a combined Python/JavaScript bridge error report.

    Returns the report as a list of pre-formatted lines (Python frames first,
    then the JavaScript side of the bridge).
    """
    lines = []

    def log(*parts):
        lines.append(' '.join(parts))

    log('', chalk.bold(chalk.bgred(' JavaScript Error ')),
        f"Call to '{failedCall.replace('~~', '')}' failed:")
    # Python frames, skipping bridge-internal and IPython noise.
    for at, line in pyStacktrace:
        if ('javascript' in at) or ('IPython' in at):
            continue
        if not line:
            log(' ', chalk.gray(at))
        else:
            log(chalk.gray('>'), format_line(line))
            log(' ', chalk.gray(at))
    log(chalk.gray('>'), format_line(pyErrorline))
    log('\n... across the bridge ...\n')
    # JavaScript frames, innermost last.
    for traceline in reversed(jsStackTrace):
        log(' ', chalk.gray(traceline))
    log(chalk.gray('>'), format_line(jsErrorline))
    log('', chalk.bold(jsErrorMessage))
    return lines
class TestLabels(TestCase):
    """Mayavi Labels module test: configure labels, then verify the settings
    survive save/load, child reordering and deep-copy of the source.

    NOTE(review): indentation was stripped from this chunk; the grouping of the
    configuration lines under `if (not saved)` follows mayavi's original
    test_labels layout — confirm against upstream.
    """

    def check(self, saved=False):
        """Assert the Labels module state; configure it first when *saved* is False."""
        script = self.script
        e = script.engine
        scene = e.current_scene
        # Pipeline: source -> module manager -> [cut plane, labels].
        src = scene.children[0]
        mm = src.children[0]
        l = mm.children[1]
        if (not saved):
            # Fresh pipeline: check the default visible-point count, then
            # configure the labels so the saved state can be verified later.
            np = l.visible_points.get_output_dataset().number_of_points
            assert ((np < 35) and (np > 20))
            l.visible_points.enabled = True
            l.mapper.label_mode = 'label_scalars'
            l.label_format = '%.1f'
            l.number_of_labels = 45
            l.property.color = (0, 0, 0)
            l.property.italic = False
        np = l.visible_points.get_output_dataset().number_of_points
        assert ((np < 60) and (np > 25))
        assert l.visible_points.enabled
        assert (l.visible_points.get_output_dataset() == l.visible_points.filter.filter.output)
        assert (l.property.color == (0, 0, 0))
        assert (not l.property.italic)
        assert (l.mapper.label_mode == 'label_scalars')
        assert (l.label_format == '%.1f')

    def test(self):
        self.main()

    def do(self):
        """Build the pipeline and exercise save/load, reorder and deepcopy paths."""
        from mayavi.modules.api import ScalarCutPlane
        from mayavi.modules.labels import Labels
        from mayavi.sources.vtk_xml_file_reader import VTKXMLFileReader
        s = self.new_scene()
        script = mayavi = self.script
        r = VTKXMLFileReader()
        r.initialize(get_example_data('fire_ug.vtu'))
        script.add_source(r)
        cp = ScalarCutPlane()
        script.add_module(cp)
        l = Labels(object=cp)
        script.add_module(l)
        s.scene.isometric_view()
        GUI.process_events()
        self.check(saved=False)
        # Round-trip the visualization through an in-memory file.
        f = BytesIO()
        f.name = abspath('test.mv2')
        script.save_visualization(f)
        f.seek(0)
        engine = script.engine
        engine.close_scene(s)
        script.load_visualization(f)
        s = engine.current_scene
        s.scene.isometric_view()
        # Force a pipeline update after reload.
        s.children[0].pipeline_changed = True
        GUI.process_events()
        self.check(saved=True)
        # Remove and re-add the source; state must persist.
        source = s.children.pop()
        s.children.append(source)
        GUI.process_events()
        s.scene.isometric_view()
        self.check(saved=True)
        # Deep-copied source must reproduce the same state.
        source1 = copy.deepcopy(source)
        s.children[0] = source1
        GUI.process_events()
        s.scene.isometric_view()
        self.check(saved=True)
        GUI.process_events()
def extractRaedmoreCom(item):
    """Map a raedmore.com feed item to a release message.

    Returns None for previews/untyped items, False when no known tag matches,
    otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag to match, series name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag,
                                               postfix=postfix, tl_type=tl_type)
    return False
class UnderSample(elmdptt.TaskOneToOne):
    """Luigi task that rebalances a DataFrame by under-sampling the majority
    class(es) of ``label_column`` with the configured imbalanced-learn sampler.
    """

    label_column = luigi.Parameter()
    # random_seed == -1 means "no fixed seed".
    random_seed = luigi.IntParameter()
    # BUG FIX: this parameter previously declared enum=elmdpenum.OverSamplingTypes
    # even though every comparison below uses UnderSamplingTypes — a copy/paste
    # error that made the intended values unrepresentable.
    under_sampling_type = luigi.EnumParameter(enum=elmdpenum.UnderSamplingTypes)

    def actual_task_code(self, df: pd.DataFrame):
        """Return *df* under-sampled so the label distribution is balanced.

        Raises ValueError for an unsupported ``under_sampling_type`` (the old
        code fell through with ``under_sampler = None`` and crashed with an
        opaque AttributeError on ``fit_sample``).
        """
        rseed = self.random_seed if self.random_seed != -1 else None
        T = elmdpenum.UnderSamplingTypes
        # Lazily-constructed samplers, keyed by enum member.
        factories = {
            T.RANDOM_UNDER_SAMPLING: lambda: imbl_us.RandomUnderSampler(random_state=rseed),
            T.TOMEK_LINKS: lambda: imbl_us.TomekLinks(random_state=rseed),
            T.NEIGHBOURHOOD_CLEANING_RULE: lambda: imbl_us.NeighbourhoodCleaningRule(random_state=rseed),
            T.CONDENSED_NEAREST_NEIGHBOUR: lambda: imbl_us.CondensedNearestNeighbour(random_state=rseed),
            T.EDITED_NEAREST_NEIGHBOUR: lambda: imbl_us.EditedNearestNeighbours(random_state=rseed),
            T.REPEATED_EDITED_NEAREST_NEIGHBOUR: lambda: imbl_us.RepeatedEditedNearestNeighbours(random_state=rseed),
            T.ALL_KNN: lambda: imbl_us.AllKNN(random_state=rseed),
            T.CLUSTER_CENTROIDS_SOFT_VOTING: lambda: imbl_us.ClusterCentroids(random_state=rseed),
            T.CLUSTER_CENTROIDS_HARD_VOTING: lambda: imbl_us.ClusterCentroids(random_state=rseed, voting='hard'),
            T.NEARMISS_V1: lambda: imbl_us.NearMiss(random_state=rseed, version=1),
            T.NEARMISS_V2: lambda: imbl_us.NearMiss(random_state=rseed, version=2),
            T.NEARMISS_V3: lambda: imbl_us.NearMiss(random_state=rseed, version=3),
            T.ONE_SIDED_SELECTION: lambda: imbl_us.OneSidedSelection(random_state=rseed),
        }
        try:
            under_sampler = factories[self.under_sampling_type]()
        except KeyError:
            raise ValueError(
                'Unsupported under-sampling type: {!r}'.format(self.under_sampling_type)
            ) from None
        (x, y) = under_sampler.fit_sample(df.drop(self.label_column, axis=1), df[self.label_column])
        # Re-attach the (resampled) labels to the feature frame.
        x[self.label_column] = y
        return x
def set_ends(table):
    """Fill in missing ``end`` coordinates (marked -1), mutating *table* in place.

    A missing end becomes ``start + max(len(alt) - len(ref), 0)``: insertions
    extend the interval by the inserted length; SNVs/deletions add nothing.
    """
    missing = table.end == -1
    if missing.any():
        ref_len = table.loc[missing, 'ref'].str.len()
        alt_len = table.loc[missing, 'alt'].str.len()
        growth = (alt_len - ref_len).clip(lower=0)
        table.loc[missing, 'end'] = table.loc[missing, 'start'] + growth
def iter_model_data_with_reset_instruction_iterable(model_data_or_instruction_iterable: Iterable[Union[(LayoutModelData, ExtractInstruction)]]) -> Iterable[Union[(LayoutModelData, ExtractInstruction)]]:
    """Re-yield the input stream, inserting a ResetExtractInstruction before the
    first token of each author entity after the first one.

    This lets the training-data generator close and reopen the enclosing XML
    element between consecutive author names.
    """
    extractor = NameSemanticExtractor()
    # Materialize the stream: it is iterated twice (entity detection + re-emit).
    model_data_or_instruction_list = list(model_data_or_instruction_iterable)
    entity_tokens = iter_entity_layout_blocks_for_labeled_layout_tokens([LabeledLayoutToken(label=(get_model_data_label(model_data) or ''), layout_token=model_data.layout_token) for model_data in model_data_or_instruction_list if (isinstance(model_data, LayoutModelData) and (model_data.layout_token is not None))])
    LOGGER.debug('entity_tokens: %r', entity_tokens)
    # Identity (id()) of the FIRST layout token of every author after the first.
    reset_token_ids: Set[int] = set()
    for (index, semantic_content) in enumerate(extractor.iter_semantic_content_for_entity_blocks(entity_tokens=entity_tokens, name_type=SemanticAuthor)):
        if (index == 0):
            # No reset needed before the very first author.
            continue
        for semantic_token in semantic_content.iter_tokens():
            reset_token_ids.add(id(semantic_token))
            # Only the first token of the entity marks the reset point.
            break
    for model_data_or_instruction in model_data_or_instruction_list:
        if isinstance(model_data_or_instruction, LayoutModelData):
            model_data = model_data_or_instruction
            if (id(model_data.layout_token) in reset_token_ids):
                # Reset to the parent of the root training element path.
                (yield ResetExtractInstruction(ROOT_TRAINING_XML_ELEMENT_PATH[:(- 1)]))
        (yield model_data_or_instruction)
def chunk_sequence(data, indices, *, names=None, max_length=100, min_length=1,
                   max_delay_s=None, max_inter_dist=None, max_total_dist=None):
    """Split *indices* into temporally/spatially coherent chunks.

    Indices are first ordered by capture time (falling back to 'index', then to
    the indices/names themselves). A new chunk starts whenever the step
    distance, accumulated distance, time delay, or chunk length exceeds its
    limit. Chunks shorter than *min_length* are dropped; the result is sorted
    longest-first.
    """
    sort_key = data.get('capture_time', data.get('index'))
    if sort_key is None:
        sort_key = indices if names is None else names
    ordered = sorted(indices, key=lambda i: sort_key[i].tolist())
    # Camera centers (x, y) in world frame, in the ordered sequence.
    centers = torch.stack([data['t_c2w'][i][:2] for i in ordered]).numpy()
    step_dists = np.linalg.norm(np.diff(centers, axis=0), axis=-1)
    if 'capture_time' in data:
        # Milliseconds -> seconds.
        stamps = torch.stack([data['capture_time'][i] for i in ordered]).double() / 1000.0
        step_delays = np.diff(stamps, axis=0)
    else:
        step_delays = np.zeros_like(step_dists)
    chunks = [[ordered[0]]]
    run_dist = 0
    for dist, delay, idx in zip(step_dists, step_delays, ordered[1:]):
        run_dist += dist
        start_new = (
            (max_inter_dist is not None and dist > max_inter_dist)
            or (max_total_dist is not None and run_dist > max_total_dist)
            or (max_delay_s is not None and delay > max_delay_s)
            or len(chunks[-1]) >= max_length
        )
        if start_new:
            chunks.append([])
            run_dist = 0
        chunks[-1].append(idx)
    chunks = [c for c in chunks if len(c) >= min_length]
    return sorted(chunks, key=len, reverse=True)
class Ingest(object):
    """Buffers readings from a south service and batch-inserts them into storage.

    Readings are spread across ``_max_concurrent_readings_inserts`` lists; a
    background task flushes each list when it reaches the batch size or times
    out, and statistics (READINGS/DISCARDED/per-asset) are written after each
    flush.

    NOTE(review): every method takes ``cls`` — the @classmethod decorators
    appear to have been stripped in extraction; the class is used as a
    process-wide singleton. Also note ``_insert_readings_task`` (singular,
    assigned in start()) vs the declared ``_insert_readings_tasks`` (plural) —
    confirm against the original.
    """
    _parent_service = None          # owning south service
    readings_storage_async = None   # async readings storage client
    storage_async = None            # async storage client (statistics)
    _readings_stats = 0             # readings inserted since last stats write
    _discarded_readings_stats = 0   # readings dropped since last stats write
    _sensor_stats = {}              # per-asset reading counts
    _stop = False                   # True while shutting down
    _started = False
    _readings_lists = None          # one buffer list per concurrent inserter
    _current_readings_list_index = 0
    _insert_readings_tasks = None
    _readings_list_batch_size_reached = None  # asyncio.Event per list
    _readings_list_not_empty = None           # asyncio.Event per list
    _readings_lists_not_full = None
    _insert_readings_wait_tasks = None        # cancellable wait task per list
    _last_insert_time = 0
    _readings_list_size = 0
    # Defaults; overridden from the service's Advanced configuration category.
    _readings_buffer_size = 4096
    _max_concurrent_readings_inserts = 4
    _readings_insert_batch_size = 1024
    _readings_insert_batch_timeout_seconds = 1
    _max_readings_insert_batch_connection_idle_seconds = 60
    _max_readings_insert_batch_reconnect_wait_seconds = 10
    _payload_events = []            # asset-tracker events already reported
    stats = None                    # statistics helper

    async def _read_config(cls):
        """Create/load the '<service>Advanced' config category and apply it."""
        category = '{}Advanced'.format(cls._parent_service._name)
        default_config = {'readings_buffer_size': {'description': 'Maximum number of readings to buffer in memory', 'displayName': 'Buffer Size', 'type': 'integer', 'default': str(cls._readings_buffer_size)}, 'max_concurrent_readings_inserts': {'description': 'Maximum number of concurrent processes that send batches of readings to storage', 'displayName': 'Max Concurrent Inserts', 'type': 'integer', 'default': str(cls._max_concurrent_readings_inserts)}, 'readings_insert_batch_size': {'description': 'Maximum number of readings in a batch of inserts', 'displayName': 'Batch Size Per Queue', 'type': 'integer', 'default': str(cls._readings_insert_batch_size)}, 'readings_insert_batch_timeout_seconds': {'description': 'Number of seconds to wait for a readings list to reach the minimum batch size', 'displayName': 'Batch Timeout', 'type': 'integer', 'default': str(cls._readings_insert_batch_timeout_seconds)}, 'max_readings_insert_batch_connection_idle_seconds': {'description': 'Close storage connections used to insert readings when idle for this number of seconds', 'displayName': 'Max Idle Time To Close Connection', 'type': 'integer', 'default': str(cls._max_readings_insert_batch_connection_idle_seconds)}, 'max_readings_insert_batch_reconnect_wait_seconds': {'description': 'Maximum number of seconds to wait before reconnecting to storage when inserting readings', 'displayName': 'Max Batch Reconnect Wait Time', 'type': 'integer', 'default': str(cls._max_readings_insert_batch_reconnect_wait_seconds)}}
        config_payload = json.dumps({'key': category, 'description': '{} South Service Ingest configuration'.format(cls._parent_service._name), 'value': default_config, 'keep_original_items': True})
        cls._parent_service._core_microservice_management_client.create_configuration_category(config_payload)
        if ('filter' in cls._parent_service.config):
            _LOGGER.warning('South Service [%s] does not support the use of a filter pipeline.', cls._parent_service._name)
        config = cls._parent_service._core_microservice_management_client.get_configuration_category(category_name=category)
        cls._parent_service._core_microservice_management_client.create_child_category(parent=cls._parent_service._name, children=[category])
        cls._readings_buffer_size = int(config['readings_buffer_size']['value'])
        cls._max_concurrent_readings_inserts = int(config['max_concurrent_readings_inserts']['value'])
        cls._readings_insert_batch_size = int(config['readings_insert_batch_size']['value'])
        cls._readings_insert_batch_timeout_seconds = int(config['readings_insert_batch_timeout_seconds']['value'])
        cls._max_readings_insert_batch_connection_idle_seconds = int(config['max_readings_insert_batch_connection_idle_seconds']['value'])
        cls._max_readings_insert_batch_reconnect_wait_seconds = int(config['max_readings_insert_batch_reconnect_wait_seconds']['value'])
        cls._payload_events = []

    async def start(cls, parent):
        """Initialize buffers/events and launch the background insert loop."""
        if cls._started:
            return
        cls._parent_service = parent
        cls.readings_storage_async = cls._parent_service._readings_storage_async
        cls.storage_async = cls._parent_service._storage_async
        (await cls._read_config())
        # Guard against zero/empty configured values.
        cls._readings_insert_batch_size = (1024 if (not cls._readings_insert_batch_size) else cls._readings_insert_batch_size)
        cls._max_concurrent_readings_inserts = (4 if (not cls._max_concurrent_readings_inserts) else cls._max_concurrent_readings_inserts)
        cls._readings_list_size = int((cls._readings_buffer_size / cls._max_concurrent_readings_inserts))
        # Each per-inserter list must hold at least one full batch.
        if (cls._readings_list_size < cls._readings_insert_batch_size):
            cls._readings_list_size = cls._readings_insert_batch_size
            _LOGGER.warning('Readings buffer size as configured (%s) is too small; increasing to %s', cls._readings_buffer_size, (cls._readings_list_size * cls._max_concurrent_readings_inserts))
        cls._last_insert_time = 0
        cls._insert_readings_wait_tasks = []
        cls._readings_list_batch_size_reached = []
        cls._readings_list_not_empty = []
        cls._readings_lists = []
        for _ in range(cls._max_concurrent_readings_inserts):
            cls._readings_lists.append([])
            cls._insert_readings_wait_tasks.append(None)
            cls._readings_list_batch_size_reached.append(asyncio.Event())
            cls._readings_list_not_empty.append(asyncio.Event())
        cls._insert_readings_task = asyncio.ensure_future(cls._insert_readings())
        cls._readings_lists_not_full = asyncio.Event()
        cls._payload_events = cls._parent_service._core_microservice_management_client.get_asset_tracker_events()['track']
        cls.stats = (await statistics.create_statistics(cls.storage_async))
        (await cls.stats.register('READINGS', 'Readings received by Fledge'))
        (await cls.stats.register('DISCARDED', 'Readings discarded at the input side by Fledge, i.e. discarded before being placed in the buffer. This may be due to some error in the readings themselves.'))
        cls._stop = False
        cls._started = True

    async def stop(cls):
        """Stop ingest: cancel waiters, drain the insert loop, release buffers."""
        if (cls._stop or (not cls._started)):
            return
        cls._stop = True
        # Wake the insert loop out of its timed waits.
        for task in cls._insert_readings_wait_tasks:
            if (task is not None):
                try:
                    task.cancel()
                except asyncio.CancelledError:
                    pass
        try:
            (await cls._insert_readings_task)
            cls._insert_readings_task = None
        except Exception:
            _LOGGER.exception('An exception was raised by Ingest._insert_readings')
        cls._insert_readings_wait_tasks = None
        cls._insert_readings_tasks = None
        cls._readings_lists = None
        cls._readings_list_batch_size_reached = None
        cls._readings_list_not_empty = None
        cls._readings_lists_not_full = None
        cls._started = False

    def increment_discarded_readings(cls):
        """Count one reading discarded before reaching the buffer."""
        cls._discarded_readings_stats += 1

    async def _insert_readings(cls):
        """Background loop: round-robin the lists, flushing batches to storage."""
        _LOGGER.info('Insert readings loop started')
        list_index = 0
        while (list_index <= (cls._max_concurrent_readings_inserts - 1)):
            if cls._stop:
                # Shutting down: exit only once all lists and counters drained.
                readings = 0
                for i in range(cls._max_concurrent_readings_inserts):
                    readings += len(cls._readings_lists[i])
                if ((cls._discarded_readings_stats + readings) == 0):
                    break
            # Advance round-robin, wrapping back to list 0.
            list_index += 1
            if (list_index > (cls._max_concurrent_readings_inserts - 1)):
                list_index = 0
            readings_list = cls._readings_lists[list_index]
            min_readings_reached = cls._readings_list_batch_size_reached[list_index]
            lists_not_full = cls._readings_lists_not_full
            # Wait until a full batch accumulates or the batch timeout fires.
            while (not cls._stop):
                if (len(readings_list) >= cls._readings_insert_batch_size):
                    break
                min_readings_reached.clear()
                waiter = asyncio.ensure_future(min_readings_reached.wait())
                cls._insert_readings_wait_tasks[list_index] = waiter
                try:
                    (await asyncio.wait_for(waiter, cls._readings_insert_batch_timeout_seconds))
                except asyncio.CancelledError:
                    break
                except asyncio.TimeoutError:
                    break
                finally:
                    cls._insert_readings_wait_tasks[list_index] = None
            if (not len(readings_list)):
                continue
            # Undersized batch and the timeout hasn't elapsed yet: keep waiting.
            if ((not cls._stop) and (len(readings_list) < cls._readings_insert_batch_size) and ((time.time() - cls._last_insert_time) < cls._readings_insert_batch_timeout_seconds)):
                continue
            attempt = 0
            cls._last_insert_time = time.time()
            # Retry loop: retryable storage errors raise and loop; fatal errors
            # discard the batch.
            while True:
                try:
                    batch_size = len(readings_list)
                    payload = json.dumps({'readings': readings_list[:batch_size]})
                    try:
                        (await cls.readings_storage_async.append(payload))
                        cls._readings_stats += batch_size
                    except StorageServerError as ex:
                        err_response = ex.error
                        if err_response['retryable']:
                            _LOGGER.warning('Got %s error, retrying ...', err_response['source'])
                            raise
                        else:
                            # Non-retryable: the whole batch is lost.
                            _LOGGER.error('%s, %s', err_response['source'], err_response['message'])
                            batch_size = len(readings_list)
                            cls._discarded_readings_stats += batch_size
                    break
                except Exception as ex:
                    attempt += 1
                    _LOGGER.exception(ex, 'Insert failed on attempt #{}, list index: {}'.format(attempt, list_index))
                    if (cls._stop or (attempt >= _MAX_ATTEMPTS)):
                        batch_size = len(readings_list)
                        cls._discarded_readings_stats += batch_size
                        _LOGGER.warning('Insert failed: Queue index: %s Batch size: %s', list_index, batch_size)
                        break
            (await cls._write_statistics())
            # Remove only the readings that were part of this batch.
            del readings_list[:batch_size]
            if (not lists_not_full.is_set()):
                lists_not_full.set()
        _LOGGER.info('Insert readings loop stopped')

    async def _write_statistics(cls):
        """Flush accumulated reading counters to the statistics store.

        Counters are decremented first and restored on failure so concurrent
        increments during the write are not lost.
        """
        updates = {}
        readings = cls._readings_stats
        cls._readings_stats -= readings
        updates.update({'READINGS': readings})
        discarded_readings = cls._discarded_readings_stats
        cls._discarded_readings_stats -= discarded_readings
        updates.update({'DISCARDED': discarded_readings})
        sensor_readings = cls._sensor_stats.copy()
        for key in sensor_readings:
            description = 'Readings received by Fledge since startup for sensor {}'.format(key)
            (await cls.stats.register(key, description))
            cls._sensor_stats[key] -= sensor_readings[key]
            updates.update({key: sensor_readings[key]})
        try:
            (await cls.stats.update_bulk(updates))
        except Exception as ex:
            # Roll the counters back so the readings are re-reported next time.
            cls._readings_stats += readings
            cls._discarded_readings_stats += discarded_readings
            for key in sensor_readings:
                cls._sensor_stats[key] += sensor_readings[key]
            _LOGGER.exception(ex, 'An error occurred while writing sensor statistics')

    def is_available(cls) -> bool:
        """Return True when at least one readings list can accept a reading."""
        if cls._stop:
            return False
        list_index = cls._current_readings_list_index
        if (len(cls._readings_lists[list_index]) < cls._readings_list_size):
            return True
        # Current list is full: look for any other list with room.
        if (cls._max_concurrent_readings_inserts > 1):
            for list_index in range(cls._max_concurrent_readings_inserts):
                if (len(cls._readings_lists[list_index]) < cls._readings_list_size):
                    cls._current_readings_list_index = list_index
                    return True
        _LOGGER.warning('The ingest service is unavailable %s', list_index)
        return False

    async def add_readings(cls, asset: str, timestamp: Union[(str, datetime.datetime)], readings: dict=None) -> None:
        """Validate and buffer one reading; discarded (and counted) on error/overflow."""
        if cls._stop:
            _LOGGER.warning('The South Service is stopping')
            return
        if (not cls._started):
            raise RuntimeError('The South Service was not started')
        try:
            if (asset is None):
                raise ValueError('asset can not be None')
            if (not isinstance(asset, str)):
                raise TypeError('asset must be a string')
            if (timestamp is None):
                raise ValueError('timestamp can not be None')
            if (readings is None):
                readings = dict()
            elif (not isinstance(readings, dict)):
                raise TypeError('readings must be a dictionary')
        except Exception:
            cls.increment_discarded_readings()
            raise
        if (not cls.is_available()):
            # All buffers full: drop the reading.
            cls.increment_discarded_readings()
            return
        list_index = cls._current_readings_list_index
        readings_list = cls._readings_lists[list_index]
        read = dict()
        read['asset_code'] = asset
        read['reading'] = readings
        read['user_ts'] = timestamp
        readings_list.append(read)
        list_size = len(readings_list)
        # Per-asset statistics keyed on the upper-cased asset code.
        if (asset.upper() in cls._sensor_stats):
            cls._sensor_stats[asset.upper()] += 1
        else:
            cls._sensor_stats[asset.upper()] = 1
        # Report each (asset, plugin) pair to the asset tracker once.
        payload = {'asset': asset, 'event': 'Ingest', 'service': cls._parent_service._name, 'plugin': cls._parent_service._plugin_info['config']['plugin']['default']}
        if (payload not in cls._payload_events):
            cls._parent_service._core_microservice_management_client.create_asset_tracker_event(payload)
            cls._payload_events.append(payload)
        if (list_size == 1):
            cls._readings_list_not_empty[list_index].set()
        if (list_size == cls._readings_insert_batch_size):
            # Wake the insert loop: a full batch is ready.
            cls._readings_list_batch_size_reached[list_index].set()
        # Spread subsequent readings onto a less-full list if available.
        if ((cls._max_concurrent_readings_inserts > 1) and (list_size >= cls._readings_insert_batch_size)):
            for list_index in range(cls._max_concurrent_readings_inserts):
                if (len(cls._readings_lists[list_index]) < cls._readings_insert_batch_size):
                    cls._current_readings_list_index = list_index
                    break
class TestParsingBenchmarks():
    """Exercises alternative pit_id/sort parsing strategies on canned pages.

    NOTE(review): numeric literals were stripped from this chunk in extraction
    (e.g. `[, '2']`, `"timestamp": ,`) — the asserts and page fixtures below are
    not valid Python as written; restore the literals from the original source.
    """

    def test_all_candidates(self):
        # Each candidate parser must agree on the pit_id and final sort key.
        pit_id = pit_id_parsing_candidate_runner_parse(self.small_page.encode())
        assert (pit_id == 'fedcba')
        sort = sort_parsing_candidate_reverse_and_regexp(self.small_page)
        assert (sort == [, '2'])
        sort = sort_parsing_candidate_rfind_and_regexp(self.large_page)
        assert (sort == [, '2'])
        sort = sort_parsing_candidate_end_anchor_regexp(self.small_page)
        assert (sort == [, '2'])
        sort = sort_parsing_candidate_find_all(self.large_page)
        assert (sort == [, '2'])
        pit_id = pit_id_parsing_candidate_regexp(self.large_page)
        assert (pit_id == 'fedcba')
        (pit_id, sort) = combined_parsing_candidate_json_loads(self.small_page)
        assert (sort == [, '2'])
        assert (pit_id == 'fedcba')

    # Minified JSON fixtures: a small page and a large page with 100 repeated hits.
    small_page = '\n {\n "pit_id": "fedcba",\n "took": 10,\n "timed_out": false,\n "hits": {\n "total": 2,\n "hits": [\n {\n "_id": "1",\n "timestamp": ,\n "sort": [, "1"]\n },\n {\n "_id": "2",\n "timestamp": ,\n "sort": [, "2"]\n }\n ]\n }\n }\n '.replace('\n', '').replace(' ', '')
    large_page = (('\n {\n "pit_id": "fedcba",\n "took": 10,\n "timed_out": false,\n "hits": {\n "total": 2,\n "hits": [' + ('\n {\n "_id": "1",\n "timestamp": ,\n "sort": [, "1"]\n },' * 100)) + '\n {\n "_id": "2",\n "timestamp": ,\n "sort": [, "2"]\n }\n ]\n }\n }\n ').replace('\n', '').replace(' ', '')
# NOTE(review): the two leading lines appear to be pytest marks
# (@pytest.mark.skip(...) / @pytest.mark.parametrize(...)) whose '@'-prefixed
# names were lost in extraction; as written they are not valid Python.
.skip('These tests take a very long time to compute')
.parametrize('mode', [0, 1, 6])
def test_fwd(mode):
    """Gradcheck the 2-level DWT forward analysis filter bank for each padding mode."""
    with set_double_precision():
        x = torch.randn(1, 3, 16, 16, device=dev, requires_grad=True)
        xfm = DWTForward(J=2).to(dev)
        input = (x, xfm.h0_row, xfm.h1_row, xfm.h0_col, xfm.h1_col, mode)
        gradcheck(AFB2D.apply, input, eps=EPS, atol=ATOL)
class OptionPlotoptionsPyramidSonificationTracksMappingTremoloSpeed(Options):
    """Auto-generated wrapper for the tremoloSpeed mapping sonification options.

    NOTE(review): each option appears twice (getter then setter) — the
    @property/@x.setter decorators were stripped in extraction; confirm
    against the generated original. js_type=False stores plain Python values
    rather than raw JavaScript.
    """

    def mapFunction(self):
        # Getter: custom mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: custom mapping function.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the option maps to.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter: data property the option maps to.
        self._config(text, js_type=False)

    def max(self):
        # Getter: mapped maximum value.
        return self._config_get(None)

    def max(self, num: float):
        # Setter: mapped maximum value.
        self._config(num, js_type=False)

    def min(self):
        # Getter: mapped minimum value.
        return self._config_get(None)

    def min(self, num: float):
        # Setter: mapped minimum value.
        self._config(num, js_type=False)

    def within(self):
        # Getter: range the mapping is constrained within.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter: range the mapping is constrained within.
        self._config(value, js_type=False)
# NOTE(review): the four leading lines appear to be pytest marks
# (@pytest.mark.external / @pytest.mark.skipif / @pytest.mark.parametrize)
# whose '@'-prefixed names were lost in extraction; as written they are not
# valid Python.
.external
.skipif((has_openai_key is False), reason='OpenAI API key not available')
.parametrize('task', ['binary', 'multilabel_nonexcl', 'multilabel_excl'])
.parametrize('cfg_string', ['zeroshot_cfg_string', 'fewshot_cfg_string', 'ext_template_cfg_string', 'zeroshot_cfg_string_v3_lds'])
def test_textcat_io(task, cfg_string, request):
    """An LLM textcat pipeline must survive a to_disk/spacy.load round trip."""
    task = request.getfixturevalue(task)
    (text, labels, gold_cats, exclusive_classes, examples) = task
    overrides = {'components.llm.task.labels': labels, 'components.llm.task.exclusive_classes': exclusive_classes}
    if (cfg_string == 'fewshot_cfg_string'):
        # Few-shot config additionally needs the examples file.
        overrides['components.llm.task.examples.path'] = examples
    cfg_string = request.getfixturevalue(cfg_string)
    orig_config = Config().from_str(cfg_string, overrides=overrides)
    nlp = spacy.util.load_model_from_config(orig_config, auto_fill=True)
    assert (nlp.pipe_names == ['llm'])
    with make_tempdir() as tmpdir:
        nlp.to_disk(tmpdir)
        nlp2 = spacy.load(tmpdir)
    assert (nlp2.pipe_names == ['llm'])
    doc = nlp2(text)
    assert (len(doc.cats) >= 0)
    # Every predicted category must be one of the gold labels.
    for cat in list(doc.cats.keys()):
        assert (cat in gold_cats)
def copy_template_file(fips_dir, proj_dir, filename, values, silent=False):
    """Copy a template file from the fips templates dir into a project,
    substituting ``$name``-style placeholders via string.Template.

    :param fips_dir:    absolute path of the fips directory
    :param proj_dir:    absolute path of the target project directory
    :param filename:    name of the template file
    :param values:      dict of placeholder substitutions
    :param silent:      suppress logging and the overwrite confirmation
    :returns:           True if the file was written, False otherwise
    """
    src_path = fips_dir + '/templates/' + filename
    dst_path = proj_dir + '/' + filename
    if not os.path.isfile(src_path):
        log.error("template src file '{}' doesn't exist".format(src_path))
        # bail out explicitly: if log.error doesn't abort, falling through
        # would crash on the open() below
        return False
    if not silent:
        if os.path.isfile(dst_path):
            if not util.confirm("overwrite '{}'?".format(dst_path)):
                log.info("skipping '{}'".format(dst_path))
                return False
    with open(src_path, 'r') as f:
        content = f.read()
    content = Template(content).substitute(values)
    with open(dst_path, 'w') as f:
        f.write(content)
    if not silent:
        log.info("wrote '{}'".format(dst_path))
    return True
def _get_active_overrides(request):
    """Return every unexpired buildroot override submitted by the
    authenticated user, most recent submission first."""
    submitter = models.User.get(request.identity.name)
    return (
        models.BuildrootOverride.query
        .filter(models.BuildrootOverride.expired_date.is_(None))
        .filter(models.BuildrootOverride.submitter == submitter)
        .order_by(models.BuildrootOverride.submission_date.desc())
        .all()
    )
class DECMC(DeltaE):
    """Delta E CMC (l:c) color difference.

    Computed in CIE Lab, with lightness/chroma/hue weighting functions
    derived from the *first* color (``color``), so the metric is
    asymmetric.  ``l`` and ``c`` set the l:c ratio (e.g. 2:1 for
    acceptability, 1:1 for perceptibility).
    """
    NAME = 'cmc'
    def __init__(self, l: float=2, c: float=1):
        # Default 2:1 lightness-to-chroma weighting.
        self.l = l
        self.c = c
    def distance(self, color: 'Color', sample: 'Color', l: Optional[float]=None, c: Optional[float]=None, **kwargs: Any) -> float:
        """Return the CMC(l:c) distance between ``color`` and ``sample``.

        Per-call ``l``/``c`` values override the instance defaults.
        """
        if (l is None):
            l = self.l
        if (c is None):
            c = self.c
        # Lab coordinates with the alpha channel sliced off.
        (l1, a1, b1) = alg.no_nans(color.convert('lab')[:(- 1)])
        (l2, a2, b2) = alg.no_nans(sample.convert('lab')[:(- 1)])
        # Chroma of each color.
        c1 = math.sqrt(((a1 ** 2) + (b1 ** 2)))
        c2 = math.sqrt(((a2 ** 2) + (b2 ** 2)))
        dc = (c1 - c2)
        dl = (l1 - l2)
        da = (a1 - a2)
        db = (b1 - b2)
        # dh is the *squared* hue difference: dH^2 = da^2 + db^2 - dc^2.
        dh = (((da ** 2) + (db ** 2)) - (dc ** 2))
        # Lightness weighting SL (constant below L* = 16).
        if (l1 < 16):
            sl = 0.511
        else:
            sl = ((0.040975 * l1) / (1 + (0.01765 * l1)))
        # Chroma weighting SC.
        sc = (((0.0638 * c1) / (1 + (0.0131 * c1))) + 0.638)
        # Hue angle of the reference color, normalized to [0, 360).
        h = math.degrees(math.atan2(b1, a1))
        if (h >= 0):
            h1 = h
        else:
            h1 = (h + 360)
        # Hue-dependent factor T (different fit inside/outside 164..345 deg).
        if (164 <= h1 <= 345):
            t = (0.56 + abs((0.2 * math.cos(math.radians((h1 + 168))))))
        else:
            t = (0.36 + abs((0.4 * math.cos(math.radians((h1 + 35))))))
        c1_4 = (c1 ** 4)
        # Blend factor F and hue weighting SH.
        f = math.sqrt((c1_4 / (c1_4 + 1900)))
        sh = (sc * (((f * t) + 1) - f))
        # dh is already squared, hence dh / sh^2 rather than (dh / sh)^2.
        return math.sqrt(((((dl / (l * sl)) ** 2) + ((dc / (c * sc)) ** 2)) + (dh / (sh ** 2))))
def get_notes_in_collection():
    """Fetch all notes from the Anki collection, optionally restricted
    to the configured decks, as (id, flds, tags, did, str(mid), '')
    tuples ready for indexing."""
    deck_q = _deck_query()
    if not deck_q:
        rows = mw.col.db.all('select notes.id, flds, tags, did, mid from notes left join cards on notes.id = cards.nid group by notes.id')
    else:
        # deck_q comes from the internal deck-query helper, not user input
        rows = mw.col.db.all(('select notes.id, flds, tags, did, mid from notes left join cards on notes.id = cards.nid where did in %s group by notes.id' % deck_q))
    return [(nid, flds, tags, did, str(mid), '') for (nid, flds, tags, did, mid) in rows]
class FastZipFile(HasPrivateTraits):
    """Thread-safe, lazily-opened wrapper around a ZipFile that closes
    the underlying archive after ~2 seconds of inactivity.

    Reading the ``zf`` Property opens the archive on demand, refreshes
    ``time_stamp`` and starts a background reaper thread.  All public
    operations serialize on the ``access`` lock.

    NOTE(review): ``_zf`` and ``_running`` are presumably auto-created
    private traits supplied by HasPrivateTraits — confirm against the
    traits documentation.
    """
    # Path to the zip archive on disk.
    path = File()
    # Lazily-opened ZipFile handle (computed via _get_zf below).
    zf = Property
    # time.time() of the most recent access; the reaper closes the
    # archive once this is more than 2 seconds old.
    time_stamp = Float()
    # Lock guarding all use of the underlying ZipFile.
    access = Any()
    def namelist(self):
        """Return the archive's member names (thread-safe)."""
        self.access.acquire()
        try:
            return self.zf.namelist()
        finally:
            self.access.release()
    def read(self, file_name):
        """Return the raw bytes of ``file_name`` from the archive (thread-safe)."""
        self.access.acquire()
        try:
            return self.zf.read(file_name)
        finally:
            self.access.release()
    def close(self):
        """Explicitly close the underlying archive, if open (thread-safe)."""
        self.access.acquire()
        try:
            if (self._zf is not None):
                self._zf.close()
                self._zf = None
        finally:
            self.access.release()
    def _access_default(self):
        # Default initializer for the ``access`` trait: a fresh lock.
        return allocate_lock()
    def _get_zf(self):
        # Property getter for ``zf``: open the archive on first use and
        # make sure exactly one reaper thread is running.
        self.time_stamp = time.time()
        if (self._zf is None):
            self._zf = ZipFile(self.path, 'r')
        if (self._running is None):
            Thread(target=self._process).start()
            self._running = True
        return self._zf
    def _process(self):
        # Reaper loop: once the archive has been idle for > 2 seconds,
        # close it, clear the running flag and exit the thread.
        while True:
            time.sleep(1)
            self.access.acquire()
            if (time.time() > (self.time_stamp + 2.0)):
                if (self._zf is not None):
                    self._zf.close()
                    self._zf = None
                self._running = None
                # Release before break so the lock is never left held.
                self.access.release()
                break
            self.access.release()
class Cell(MixHtmlState.HtmlStates, Html.Html):
    """A single table cell component, rendered as <td> (or <th> for
    header cells).

    NOTE(review): ``__str__`` reads ``self.tag`` as a value, which only
    works when ``tag`` is a property — the ``@property`` decorators
    (apparently stripped) were restored on ``tag``, ``dom`` and
    ``options``; confirm against the original file.
    """
    name = 'Cell'
    _option_cls = OptTable.OptionsTableCell
    builder_name = 'Text'

    def __init__(self, page: primitives.PageModel, text, is_header, options=None):
        super(Cell, self).__init__(page, text, options=options)
        # Drop any CSS classes inherited from the base component.
        self.attr['class'].clear()
        self.is_header = is_header

    @property
    def tag(self) -> str:
        """HTML tag for this cell: 'th' for header cells, 'td' otherwise."""
        return ('th' if self.is_header else 'td')

    @property
    def dom(self) -> JsHtml.JsHtmlRich:
        """Lazily-created JavaScript DOM wrapper for this component."""
        if (self._dom is None):
            self._dom = JsHtml.JsHtmlRich(self, page=self.page)
        return self._dom

    @property
    def options(self) -> OptTable.OptionsTableCell:
        """Cell-specific options object."""
        return super().options

    def set_html_content(self, component: primitives.HtmlModel):
        """Embed another component inside this cell; returns self for chaining."""
        component.options.managed = False
        self.innerPyHTML = component
        return self

    def __str__(self):
        return ('<%s %s>%s</%s>' % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), self.content, self.tag))
class OptionPlotoptionsScatterSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
    """Mapping options for the scatter-series sonification note duration.

    Each option reads the configured value (``None`` when unset) and
    assignment stores it in the component configuration.

    NOTE(review): the original chunk contained getter/setter pairs with
    identical names and no decorators, so each setter shadowed its getter
    and the getters were unreachable; the ``@property``/``@x.setter``
    pairs were restored — confirm against the generator's output.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.