def test_read_md_file():
doc_path = os.path.join(ROOT_DIR, MD_FILE)
code_blocks = extract_code_blocks(filepath=doc_path, filter_='python')
test_code_path = os.path.join(CUR_PATH, PY_FILE)
python_file = extract_python_code(test_code_path)
    assert code_blocks[-1] == python_file, 'Files must be exactly the same.'

class BaseBytes(TraitType):
default_value_type = DefaultValue.constant
default_value = b''
info_text = 'a bytes string'
encoding = None
def validate(self, object, name, value):
if isinstance(value, bytes):
return value
self.error(object, name, value)
def create_editor(self):
from .traits import bytes_editor
auto_set = self.auto_set
if (auto_set is None):
auto_set = True
enter_set = (self.enter_set or False)
        return bytes_editor(auto_set, enter_set, self.encoding)

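# Hedged usage sketch for the trait above, assuming the usual Traits machinery
# (HasTraits, TraitError) from the same package:
#
#     class Packet(HasTraits):
#         payload = BaseBytes()
#
#     Packet(payload=b'\x00\x01')   # accepted by validate()
#     Packet(payload='not bytes')   # rejected: self.error() raises TraitError
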
def channel_ignore(channel, material):
    # Ensure the material uses a node tree, then detach whatever feeds the given
    # Principled BSDF input and pin the channel to 1.0.
    if not material.use_nodes:
        material.use_nodes = True
    tree = material.node_tree
    bsdf_node = None
    for n in tree.nodes:
        if n.bl_idname == 'ShaderNodeBsdfPrincipled':
            bsdf_node = n
            break
    if bsdf_node is None:
        return
    if bsdf_node.inputs[channel].links:
        tree.links.remove(bsdf_node.inputs[channel].links[0])
    bsdf_node.inputs[channel].default_value = 1.0

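# Hedged usage sketch (only meaningful inside Blender's bpy runtime; the
# material lookup below is an assumption about the caller's scene):
#
#     import bpy
#     mat = bpy.context.active_object.active_material
#     channel_ignore('Metallic', mat)  # unlink the Metallic input, pin it to 1.0
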
class TestARMSupport(unittest.TestCase):
def test_hello(self):
with open(os.path.join('test', 'testfiles_for_unittests', 'simple_gcc.elf.arm'), 'rb') as f:
elf = ELFFile(f)
self.assertEqual(elf.get_machine_arch(), 'ARM')
self.assertEqual(elf['e_entry'], 32792)
self.assertEqual(elf.num_sections(), 14)
self.assertEqual(elf.num_segments(), 2)
def test_build_attributes(self):
with open(os.path.join('test', 'testfiles_for_unittests', 'simple_gcc.elf.arm'), 'rb') as f:
elf = ELFFile(f)
sec = elf.get_section_by_name('.ARM.attributes')
self.assertEqual(sec['sh_type'], 'SHT_ARM_ATTRIBUTES')
self.assertEqual(sec.num_subsections, 1)
subsec = sec.subsections[0]
self.assertEqual(subsec.header['vendor_name'], 'aeabi')
self.assertEqual(subsec.num_subsubsections, 1)
subsubsec = subsec.subsubsections[0]
self.assertEqual(subsubsec.header.tag, 'TAG_FILE')
for i in subsubsec.iter_attributes('TAG_CPU_NAME'):
self.assertEqual(i.value, 'ARM7TDMI-S')
for i in subsubsec.iter_attributes('TAG_CPU_ARCH'):
self.assertEqual(i.value, 2)
def test_DWARF_indirect_forms(self):
with open(os.path.join('test', 'testfiles_for_unittests', 'arm_with_form_indirect.elf'), 'rb') as f:
elffile = ELFFile(f)
self.assertTrue(elffile.has_dwarf_info())
dwarfinfo = elffile.get_dwarf_info()
all_CUs = list(dwarfinfo.iter_CUs())
            self.assertEqual(len(all_CUs), 9)

def add_QueryServicer_to_server(servicer, server):
rpc_method_handlers = {'Proposal': grpc.unary_unary_rpc_method_handler(servicer.Proposal, request_deserializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryProposalRequest.FromString, response_serializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryProposalResponse.SerializeToString), 'Proposals': grpc.unary_unary_rpc_method_handler(servicer.Proposals, request_deserializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryProposalsRequest.FromString, response_serializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryProposalsResponse.SerializeToString), 'Vote': grpc.unary_unary_rpc_method_handler(servicer.Vote, request_deserializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryVoteRequest.FromString, response_serializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryVoteResponse.SerializeToString), 'Votes': grpc.unary_unary_rpc_method_handler(servicer.Votes, request_deserializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryVotesRequest.FromString, response_serializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryVotesResponse.SerializeToString), 'Params': grpc.unary_unary_rpc_method_handler(servicer.Params, request_deserializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryParamsRequest.FromString, response_serializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryParamsResponse.SerializeToString), 'Deposit': grpc.unary_unary_rpc_method_handler(servicer.Deposit, request_deserializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryDepositRequest.FromString, response_serializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryDepositResponse.SerializeToString), 'Deposits': grpc.unary_unary_rpc_method_handler(servicer.Deposits, request_deserializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryDepositsRequest.FromString, response_serializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryDepositsResponse.SerializeToString), 'TallyResult': grpc.unary_unary_rpc_method_handler(servicer.TallyResult, request_deserializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryTallyResultRequest.FromString, response_serializer=cosmos_dot_gov_dot_v1beta1_dot_query__pb2.QueryTallyResultResponse.SerializeToString)}
generic_handler = grpc.method_handlers_generic_handler('cosmos.gov.v1beta1.Query', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))

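# Hedged usage sketch for the generated registration helper above. The servicer
# argument is whatever QueryServicer subclass the caller provides; the server
# wiring itself is the standard grpcio API.
from concurrent import futures

import grpc

def serve(servicer):
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    add_QueryServicer_to_server(servicer, server)
    server.add_insecure_port('[::]:50051')  # port choice is illustrative
    server.start()
    server.wait_for_termination()
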
def match_rtn_dates(rtns, bench):
if (not (isinstance(rtns, pd.Series) or isinstance(rtns, pd.DataFrame))):
return bench
if _is_pandas(bench):
bench = bench.reindex(rtns.index)
expected = len(rtns)
check = bench.count()
if (expected != check):
warnings.warn('Returns and benchmark length not matching, {} vs {}'.format(expected, check))
return bench
    return bench

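# Minimal sketch of the alignment behaviour above; dates and values are
# illustrative. The shorter benchmark is reindexed onto the returns index, and
# the length-mismatch warning fires because count() skips the introduced NaN.
import pandas as pd

rtns = pd.Series([0.01, -0.02, 0.03], index=pd.date_range('2021-01-01', periods=3))
bench = pd.Series([0.02, 0.01], index=rtns.index[:2])
aligned = match_rtn_dates(rtns, bench)  # warns '3 vs 2'; aligned ends with NaN
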
def genesis(chain_class: ChainAPI, db: AtomicDatabaseAPI = None, params: Dict[str, HeaderParams] = None, state: GeneralState = None) -> ChainAPI:
if (state is None):
genesis_state: AccountState = {}
else:
genesis_state = _fill_and_normalize_state(state)
genesis_params_defaults = _get_default_genesis_params(genesis_state)
if (params is None):
genesis_params = genesis_params_defaults
else:
genesis_params = merge(genesis_params_defaults, params)
if (db is None):
base_db: AtomicDatabaseAPI = AtomicDB()
else:
base_db = db
    return chain_class.from_genesis(base_db, genesis_params, genesis_state)

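# Reading of the defaults cascade above, as a hedged sketch (MiningChain is an
# assumption about the caller's chain class): every argument falls back to a
# fresh default, and caller-supplied params win via merge().
#
#     chain = genesis(MiningChain)                             # empty state, in-memory AtomicDB
#     chain = genesis(MiningChain, params={'difficulty': 1})   # overrides the computed default
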
class Group(SoftDeletionModel):
__tablename__ = 'groups'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String, nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete='CASCADE'), nullable=False)
social_links = db.Column(db.JSON)
logo_url = db.Column(db.String)
banner_url = db.Column(db.String)
thumbnail_image_url = db.Column(db.String)
is_promoted = db.Column(db.Boolean, default=False, nullable=False)
about = db.Column(db.Text)
created_at: datetime = db.Column(db.DateTime(timezone=True), default=datetime.utcnow)
modified_at: datetime = db.Column(db.DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow)
    # decorator reconstructed (assumption): the bare tuple in the source matches
    # sqlalchemy_utils' @aggregated column pattern
    @aggregated('followers', db.Column(db.Integer, default=0, server_default='0', nullable=False))
    def follower_count(self):
        return func.count('1')
user = db.relationship('User', backref='groups')
roles = db.relationship('UsersGroupsRoles', backref='group')
    # @property decorators restored (assumption): both are no-argument accessors
    @property
    def follower(self):
        if not current_user:
            return None
        return UserFollowGroup.query.filter_by(user=current_user, group=self).first()

    @property
    def view_page_link(self):
        frontend_url = get_settings()['frontend_url']
        return f'{frontend_url}/g/{self.id}'

def get_logger(name: str='main') -> logging.Logger:
log_level = os.environ.get('LOG_LEVEL', 'INFO')
log_level = logging.getLevelName(log_level)
root_logger = logging.getLogger()
if root_logger.handlers:
root_logger.handlers = []
root_logger.setLevel(log_level)
logger = logging.getLogger(name)
if (not logger.handlers):
handler = logging.StreamHandler()
formatter = JsonFormatter()
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(log_level)
    return logger

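# Usage sketch: the level comes from LOG_LEVEL (INFO by default) and records go
# through the module's JsonFormatter to stderr. The logger name is illustrative.
import os

os.environ.setdefault('LOG_LEVEL', 'DEBUG')
logger = get_logger('worker')
logger.debug('job started')  # emitted as a JSON record
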
class LegacyTestMeta(type):
def __new__(cls, name, bases, dct):
def generate_test(infile, outfile, normalize, kwargs):
def test(self):
with open(infile, encoding='utf-8') as f:
input = f.read()
with open(outfile, encoding='utf-8') as f:
expected = f.read().replace('\r\n', '\n')
output = markdown(input, **kwargs)
if (tidylib and normalize):
expected = _normalize_whitespace(expected)
output = _normalize_whitespace(output)
elif normalize:
self.skipTest('Tidylib not available.')
self.assertMultiLineEqual(output, expected)
return test
location = dct.get('location', '')
exclude = dct.get('exclude', [])
normalize = dct.get('normalize', False)
input_ext = dct.get('input_ext', '.txt')
output_ext = dct.get('output_ext', '.html')
kwargs = dct.get('default_kwargs', Kwargs())
if os.path.isdir(location):
for file in os.listdir(location):
infile = os.path.join(location, file)
if os.path.isfile(infile):
(tname, ext) = os.path.splitext(file)
if (ext == input_ext):
outfile = os.path.join(location, (tname + output_ext))
tname = tname.replace(' ', '_').replace('-', '_')
kws = kwargs.copy()
if (tname in dct):
kws.update(dct[tname])
test_name = ('test_%s' % tname)
if (tname not in exclude):
dct[test_name] = generate_test(infile, outfile, normalize, kws)
else:
dct[test_name] = unittest.skip('Excluded')((lambda : None))
        return type.__new__(cls, name, bases, dct)

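# Sketch of how a metaclass like this is consumed: class attributes drive test
# discovery, and one test_* method is generated per input file found under
# `location`. The path and exclusion below are illustrative.
class TestBasicSyntax(unittest.TestCase, metaclass=LegacyTestMeta):
    location = 'tests/basic'
    normalize = True
    exclude = ['flaky_case']
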
class Lt(Node):
def forward(self, *args, **kwargs):
if any([(a is None) for a in args]):
return None
return (args[0] < args[1])
def follow(self, *args, **kwargs):
if any([(a is None) for a in args]):
return None
return (args[0] < args[1])
def final(self, ops, operands=None, result=None, **kwargs):
final_transition_indices = {'inc': 0, 'dec': 1, 'fin': 2, 'var': 3}
op1 = final_transition_indices[ops[0]]
op2 = final_transition_indices[ops[1]]
transition_table = [['var', 'fin', 'fin', 'var'], ['var', 'var', 'var', 'var'], ['var', 'fin', 'fin', 'var'], ['var', 'var', 'var', 'var']]
if result:
r = transition_table[op2][op1]
else:
r = transition_table[op1][op2]
return r
def token_hint(self):
num = [n for n in self.predecessors if (type(n) is int)]
len_op = [n for n in self.predecessors if isinstance(n, LenOp)]
if ((len(num) != 1) or (len(len_op) != 1)):
return super().token_hint()
limit = num[0]
if (limit != self.predecessors[1]):
return super().token_hint()
tokens_op = [n for n in len_op[0].predecessors if isinstance(n, TokensOp)]
if (len(tokens_op) != 1):
return super().token_hint()
var = [n for n in tokens_op[0].predecessors if isinstance(n, Var)]
if (len(var) != 1):
return super().token_hint()
        return {var[0].name: (limit - 1)}

class OFPAction(OFPActionHeader):
_ACTION_TYPES = {}
    # @staticmethod restored (assumption): defined without self and invoked on the class
    @staticmethod
    def register_action_type(type_, len_):
def _register_action_type(cls):
cls.cls_action_type = type_
cls.cls_action_len = len_
OFPAction._ACTION_TYPES[cls.cls_action_type] = cls
return cls
return _register_action_type
def __init__(self):
cls = self.__class__
super(OFPAction, self).__init__(cls.cls_action_type, cls.cls_action_len)
    @classmethod
    def parser(cls, buf, offset):
(type_, len_) = struct.unpack_from(ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset)
cls_ = cls._ACTION_TYPES.get(type_)
assert (cls_ is not None)
return cls_.parser(buf, offset)
def serialize(self, buf, offset):
        msg_pack_into(ofproto.OFP_ACTION_HEADER_PACK_STR, buf, offset, self.type, self.len)

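# Sketch of the registration pattern above, modelled on Ryu's OpenFlow actions;
# the constants and the subclass body are assumptions for illustration.
@OFPAction.register_action_type(ofproto.OFPAT_OUTPUT, ofproto.OFP_ACTION_OUTPUT_SIZE)
class OFPActionOutput(OFPAction):
    def __init__(self, port, max_len=0):
        super(OFPActionOutput, self).__init__()
        self.port = port
        self.max_len = max_len
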
class DropdownHtmlTransform(SphinxPostTransform):
default_priority = 199
formats = ('html',)
def run(self):
document: nodes.document = self.document
for node in findall(document)((lambda node: is_component(node, 'dropdown'))):
use_card = True
open_marker = create_component('dropdown-open-marker', classes=['sd-summary-up'], children=[nodes.raw('', nodes.Text(get_octicon('chevron-up', height='1.5em')), format='html')])
closed_marker = create_component('dropdown-closed-marker', classes=['sd-summary-down'], children=[nodes.raw('', nodes.Text(get_octicon('chevron-down', height='1.5em')), format='html')])
newnode = dropdown_main(opened=node['opened'], classes=((['sd-sphinx-override', 'sd-dropdown'] + (['sd-card'] if use_card else ['sd-d-flex-column'])) + node['container_classes']))
if node['has_title']:
title_children = node[0].children
if node[0].get('ids'):
newnode['ids'] += node[0]['ids']
body_children = node[1:]
else:
title_children = [nodes.raw('...', nodes.Text(get_octicon('kebab-horizontal', height='1.5em')), format='html')]
body_children = node.children
if node['icon']:
title_children.insert(0, nodes.raw('', nodes.Text(get_octicon(node['icon'], height='1em')), classes=['sd-summary-icon'], format='html'))
newnode += dropdown_title('', '', *title_children, closed_marker, open_marker, classes=((['sd-summary-title'] + (['sd-card-header'] if use_card else [])) + node['title_classes']))
body_node = create_component('dropdown-body', classes=((['sd-summary-content'] + (['sd-card-body'] if use_card else [])) + node['body_classes']), children=body_children)
if use_card:
for para in findall(body_node)(nodes.paragraph):
                    # append the card-text class without clobbering existing classes
                    para['classes'] = para.get('classes', []) + ['sd-card-text']
newnode += body_node
            node.replace_self(newnode)

def write_text_header(f, mmap):
if mmap:
f.mmap()
f.puttext(0, '')
textheader = f.gettext(0)
assert (textheader == bytearray(3200))
f.puttext(0, ('yolo' * 800))
textheader = f.gettext(0)
textheader = textheader.decode('ascii')
assert (textheader == ('yolo' * 800))
    f.close()

class Company(HasTraits):
employees = List(Employee)
employee = Instance(Employee)
increase = Float()
give_raise = Button('Give raise')
traits_view = View(Item('employees', show_label=False, editor=TabularEditor(adapter=EmployeeAdapter(), selected='employee', auto_resize=True, auto_update=True)), HGroup(spring, Item('increase'), Item('give_raise', show_label=False, enabled_when='employee is not None')), title='Auto Update Tabular Editor demo', height=0.25, width=0.3, resizable=True)
def _give_raise_changed(self):
self.employee.salary += self.increase
        self.employee = None

def test_is_briefcase_package():
assert (not system_info.is_briefcase_package())
temp_app_packages = (Path(__file__).parent.parent.parent.parent / 'app_packages')
is_briefcase = False
try:
temp_app_packages.mkdir()
is_briefcase = system_info.is_briefcase_package()
finally:
temp_app_packages.rmdir()
    assert is_briefcase

class _PrivateKeyAccount(PublicKeyAccount):
def __init__(self, addr: str) -> None:
self._lock = threading.Lock()
super().__init__(addr)
def _pending_nonce(self) -> int:
tx_from_sender = sorted(history.from_sender(self.address), key=(lambda k: k.nonce))
if (len(tx_from_sender) == 0):
return self.nonce
last_tx = tx_from_sender[(- 1)]
if (last_tx.status == (- 1)):
return (last_tx.nonce + 1)
nonce = self.nonce
while (nonce == last_tx.nonce):
time.sleep(0.5)
nonce = self.nonce
return nonce
def _gas_limit(self, to: Optional['Account'], amount: int, gas_price: Optional[int], gas_buffer: Optional[float], data: Optional[str]=None) -> int:
gas_limit = CONFIG.active_network['settings']['gas_limit']
if (gas_limit == 'max'):
return Chain().block_gas_limit
if (isinstance(gas_limit, bool) or (gas_limit in (None, 'auto'))):
gas_buffer = (gas_buffer or CONFIG.active_network['settings']['gas_buffer'])
gas_limit = self.estimate_gas(to, amount, 0, (data or ''))
if ((gas_limit > 21000) and (gas_buffer != 1)):
gas_limit = Wei((gas_limit * gas_buffer))
return min(gas_limit, Chain().block_gas_limit)
return Wei(gas_limit)
    def _gas_price(self, gas_price: Any = None) -> Tuple[Wei, Optional[GasABC], Optional[Iterator]]:
if (gas_price is None):
gas_price = CONFIG.active_network['settings']['gas_price']
if isinstance(gas_price, GasABC):
value = gas_price.get_gas_price()
if isinstance(value, Iterator):
return (Wei(next(value)), gas_price, value)
else:
return (Wei(value), None, None)
if isinstance(gas_price, Wei):
return (gas_price, None, None)
if (isinstance(gas_price, bool) or (gas_price in (None, 'auto'))):
return (web3.eth.generate_gas_price(), None, None)
return (Wei(gas_price), None, None)
def _check_for_revert(self, tx: Dict) -> None:
try:
skip_keys = {'gasPrice', 'maxFeePerGas', 'maxPriorityFeePerGas'}
web3.eth.call({k: v for (k, v) in tx.items() if ((k not in skip_keys) and v)})
except ValueError as exc:
msg = (exc.args[0]['message'] if isinstance(exc.args[0], dict) else str(exc))
raise ValueError(f"Execution reverted during call: '{msg}'. This transaction will likely revert. If you wish to broadcast, include `allow_revert:True` as a transaction parameter.") from None
def deploy(self, contract: Any, *args: Tuple, amount: int=0, gas_limit: Optional[int]=None, gas_buffer: Optional[float]=None, gas_price: Optional[int]=None, max_fee: Optional[int]=None, priority_fee: Optional[int]=None, nonce: Optional[int]=None, required_confs: int=1, allow_revert: bool=None, silent: bool=None, publish_source: bool=False) -> Any:
data = contract.deploy.encode_input(*args)
(receipt, exc) = self._make_transaction(None, amount, gas_limit, gas_buffer, gas_price, max_fee, priority_fee, data, nonce, (contract._name + '.constructor'), required_confs, allow_revert, silent)
add_thread = threading.Thread(target=contract._add_from_tx, args=(receipt,), daemon=True)
add_thread.start()
if rpc.is_active():
undo_thread = threading.Thread(target=Chain()._add_to_undo_buffer, args=(receipt, self.deploy, (contract, *args), {'amount': amount, 'gas_limit': gas_limit, 'gas_buffer': gas_buffer, 'gas_price': gas_price, 'max_fee': max_fee, 'priority_fee': priority_fee}), daemon=True)
undo_thread.start()
if (receipt.status != 1):
receipt._raise_if_reverted(exc)
return receipt
add_thread.join()
try:
deployed_contract = contract.at(receipt.contract_address)
if publish_source:
contract.publish_source(deployed_contract, silent=silent)
return deployed_contract
except ContractNotFound:
return receipt
def estimate_gas(self, to: 'Account'=None, amount: int=0, gas_price: int=None, data: str=None) -> int:
tx: Dict = {'from': self.address, 'to': (to_address(str(to)) if to else None), 'value': Wei(amount), 'data': HexBytes((data or ''))}
if (gas_price is not None):
tx['gasPrice'] = web3.toHex(gas_price)
try:
return web3.eth.estimate_gas(tx)
except ValueError as exc:
revert_gas_limit = CONFIG.active_network['settings']['reverting_tx_gas_limit']
if (revert_gas_limit == 'max'):
revert_gas_limit = web3.eth.get_block('latest')['gasLimit']
CONFIG.active_network['settings']['reverting_tx_gas_limit'] = revert_gas_limit
if revert_gas_limit:
return revert_gas_limit
msg = (exc.args[0]['message'] if isinstance(exc.args[0], dict) else str(exc))
raise ValueError(f"Gas estimation failed: '{msg}'. This transaction will likely revert. If you wish to broadcast, you must set the gas limit manually.")
def transfer(self, to: 'Account'=None, amount: int=0, gas_limit: Optional[int]=None, gas_buffer: Optional[float]=None, gas_price: Optional[int]=None, max_fee: Optional[int]=None, priority_fee: Optional[int]=None, data: str=None, nonce: Optional[int]=None, required_confs: int=1, allow_revert: bool=None, silent: bool=None) -> TransactionReceipt:
(receipt, exc) = self._make_transaction(to, amount, gas_limit, gas_buffer, gas_price, max_fee, priority_fee, (data or ''), nonce, '', required_confs, allow_revert, silent)
if rpc.is_active():
undo_thread = threading.Thread(target=Chain()._add_to_undo_buffer, args=(receipt, self.transfer, [], {'to': to, 'amount': amount, 'gas_limit': gas_limit, 'gas_buffer': gas_buffer, 'gas_price': gas_price, 'max_fee': max_fee, 'priority_fee': priority_fee, 'data': data}), daemon=True)
undo_thread.start()
receipt._raise_if_reverted(exc)
return receipt
    def _make_transaction(self, to: Optional['Account'], amount: int, gas_limit: Optional[int], gas_buffer: Optional[float], gas_price: Optional[int], max_fee: Optional[int], priority_fee: Optional[int], data: str, nonce: Optional[int], fn_name: str, required_confs: int, allow_revert: Optional[bool], silent: Optional[bool]) -> Tuple[TransactionReceipt, Optional[Exception]]:
if (gas_limit and gas_buffer):
raise ValueError('Cannot set gas_limit and gas_buffer together')
if (silent is None):
silent = bool(((CONFIG.mode == 'test') or CONFIG.argv['silent']))
if (gas_price is None):
if (max_fee is None):
max_fee = (CONFIG.active_network['settings']['max_fee'] or None)
if (priority_fee is None):
priority_fee = (CONFIG.active_network['settings']['priority_fee'] or None)
if (priority_fee == 'auto'):
priority_fee = Chain().priority_fee
try:
if ((max_fee is None) and (priority_fee is None)):
(gas_price, gas_strategy, gas_iter) = self._gas_price(gas_price)
else:
(gas_strategy, gas_iter) = (None, None)
gas_limit = (Wei(gas_limit) or self._gas_limit(to, amount, (gas_price or max_fee), gas_buffer, data))
except ValueError as e:
raise VirtualMachineError(e) from None
with self._lock:
tx = {'from': self.address, 'value': Wei(amount), 'nonce': (nonce if (nonce is not None) else self._pending_nonce()), 'gas': web3.toHex(gas_limit), 'data': HexBytes(data)}
if to:
tx['to'] = to_address(str(to))
tx = _apply_fee_to_tx(tx, gas_price, max_fee, priority_fee)
txid = None
while True:
try:
response = self._transact(tx, allow_revert)
(exc, revert_data) = (None, None)
if (txid is None):
txid = HexBytes(response).hex()
if (not silent):
                            print(f"\nTransaction sent: {color('bright blue')}{txid}{color}")
except ValueError as e:
if (txid is None):
exc = VirtualMachineError(e)
if (not hasattr(exc, 'txid')):
raise exc from None
txid = exc.txid
                        print(f"\nTransaction sent: {color('bright blue')}{txid}{color}")
revert_data = (exc.revert_msg, exc.pc, exc.revert_type)
try:
receipt = TransactionReceipt(txid, self, silent=silent, required_confs=required_confs, is_blocking=False, name=fn_name, revert_data=revert_data)
break
except (TransactionNotFound, ValueError):
if (not silent):
                        sys.stdout.write(f' Awaiting transaction in the mempool... {_marker[0]}\n')
sys.stdout.flush()
_marker.rotate(1)
time.sleep(1)
receipt = self._await_confirmation(receipt, required_confs, gas_strategy, gas_iter)
if ((receipt.status != 1) and (exc is None)):
error_data = {'message': f'VM Exception while processing transaction: revert {receipt.revert_msg}', 'code': (- 32000), 'data': {receipt.txid: {'error': 'revert', 'program_counter': receipt._revert_pc, 'return': receipt.return_value, 'reason': receipt.revert_msg}}}
exc = VirtualMachineError(ValueError(error_data))
return (receipt, exc)
def _await_confirmation(self, receipt: TransactionReceipt, required_confs: int, gas_strategy: Optional[GasABC], gas_iter: Optional[Iterator]) -> TransactionReceipt:
history._add_tx(receipt)
if (gas_strategy is not None):
gas_strategy.run(receipt, gas_iter)
if (required_confs == 0):
receipt._silent = True
return receipt
try:
receipt._confirmed.wait()
except KeyboardInterrupt as exc:
receipt._silent = True
for receipt in history.filter(sender=self, nonce=receipt.nonce, key=(lambda k: (k.status != (- 2)))):
receipt._silent = True
raise exc.with_traceback(None)
if (receipt.status != (- 2)):
return receipt
replacements = history.filter(sender=self, nonce=receipt.nonce, key=(lambda k: (k.status != (- 2))))
while True:
if (not replacements):
raise TransactionError(f'Tx dropped without known replacement: {receipt.txid}')
if (len(replacements) > 1):
replacements = [i for i in replacements if (i.status != 2)]
time.sleep(0.5)
else:
receipt = replacements[0]
receipt._await_confirmation(required_confs=required_confs)
                return receipt

class ApiTests(N26TestBase):
def test_create_request_url(self):
from n26.util import create_request_url
        expected = ''  # expected URL literal was scrubbed from the source
result = create_request_url(BASE_URL_DE, {'foo': 'bar', 'bar': 'baz'})
self.assertEqual(result, expected)
    @mock_requests(method=GET, response_file='refresh_token.json')  # '@mock' prefix lost in the source; name assumed
def test_do_request(self):
result = self._underTest._do_request(GET, '/something')
self.assertIsNotNone(result)
    @mock_auth_token  # '@mock' prefix lost in the source; name assumed
def test_get_token(self):
expected = '-1234-1234-1234-'
api_client = api.Api(self.config)
result = api_client.get_token()
self.assertEqual(result, expected)
    @mock_requests(url_regex='.*/token', method=POST, response_file='refresh_token.json')  # '@mock' prefix lost in the source; name assumed
def test_refresh_token(self):
refresh_token = '-1234-abcd-abcd-ab'
expected = '-1234-abcd-abcd-ab'
result = self._underTest._refresh_token(refresh_token)
self.assertEqual(result['access_token'], expected)
def test_init_without_config(self):
api_client = api.Api()
self.assertIsNotNone(api_client.config)
def test_init_with_config(self):
from container_app_conf.source.yaml_source import YamlSource
conf = config.Config(singleton=False, data_sources=[YamlSource('test_creds', './tests/')])
api_client = api.Api(conf)
self.assertIsNotNone(api_client.config)
        self.assertEqual(api_client.config, conf)

def fastq_strand(argv, working_dir=None):
p = argparse.ArgumentParser(description='Generate strandedness statistics for FASTQ or FASTQpair, by running STAR using one or more genome indexes')
p.add_argument('--version', action='version', version=get_version())
p.add_argument('r1', metavar='READ1', default=None, help='R1 Fastq file')
p.add_argument('r2', metavar='READ2', default=None, nargs='?', help='R2 Fastq file')
p.add_argument('-g', '--genome', dest='star_genomedirs', metavar='GENOMEDIR', default=None, action='append', help='path to directory with STAR index for genome to use (use as an alternative to -c/--conf; can be specified multiple times to include additional genomes)')
p.add_argument('--subset', type=int, default=10000, help='use a random subset of read pairs from the input Fastqs; set to zero to use all reads (default: 10000)')
p.add_argument('-o', '--outdir', default=None, help='specify directory to write final outputs to (default: current directory)')
    p.add_argument('-c', '--conf', metavar='FILE', default=None, help="specify delimited 'conf' file with list of NAME and STAR index directory pairs. NB if a conf file is supplied then any indices specified on the command line will be ignored")
p.add_argument('-n', type=int, default=1, help='number of threads to run STAR with (default: 1)')
p.add_argument('--counts', action='store_true', help='include the count sums for unstranded, 1st read strand aligned and 2nd read strand aligned in the output file (default: only include percentages)')
p.add_argument('--keep-star-output', action='store_true', help='keep the output from STAR (default: delete outputs on completion)')
args = p.parse_args(argv)
print(('READ1\t: %s' % args.r1))
print(('READ2\t: %s' % args.r2))
star_exe = find_program('STAR')
if (star_exe is None):
logger.critical('STAR not found')
return 1
print(('STAR\t: %s' % star_exe))
genome_names = {}
if (args.conf is not None):
print(('Conf file\t: %s' % args.conf))
star_genomedirs = []
with io.open(args.conf, 'rt') as fp:
for line in fp:
if line.startswith('#'):
continue
(name, star_genomedir) = line.rstrip().split('\t')
star_genomedirs.append(star_genomedir)
genome_names[star_genomedir] = name
else:
star_genomedirs = args.star_genomedirs
if (not star_genomedirs):
logger.critical('No genome indices specified')
return 1
print('Genomes:')
for genome in star_genomedirs:
print(('- %s' % genome))
if (args.outdir is None):
outdir = os.getcwd()
else:
outdir = os.path.abspath(args.outdir)
if (not os.path.exists(outdir)):
logger.critical(("Output directory doesn't exist: %s" % outdir))
return 1
outfile = ('%s_fastq_strand.txt' % os.path.join(outdir, os.path.basename(strip_ngs_extensions(args.r1))))
if os.path.exists(outfile):
logger.warning(("Removing existing output file '%s'" % outfile))
os.remove(outfile)
prefix = 'fastq_strand_'
if (working_dir is None):
working_dir = os.getcwd()
else:
working_dir = os.path.abspath(working_dir)
if (not os.path.isdir(working_dir)):
raise Exception(('Bad working directory: %s' % working_dir))
print(('Working directory: %s' % working_dir))
nreads = sum((1 for i in getreads(os.path.abspath(args.r1))))
print(('%d reads' % nreads))
if (args.subset == 0):
print('Using all read pairs in Fastq files')
subset = nreads
elif (args.subset > nreads):
print('Actual number of read pairs smaller than requested subset')
subset = nreads
else:
subset = args.subset
print(('Using random subset of %d read pairs' % subset))
if (subset == nreads):
subset_indices = [i for i in range(nreads)]
else:
subset_indices = random.sample(range(nreads), subset)
fqs_in = filter((lambda fq: (fq is not None)), (args.r1, args.r2))
fastqs = []
for fq in fqs_in:
fq_subset = os.path.join(working_dir, os.path.basename(fq))
if fq_subset.endswith('.gz'):
fq_subset = '.'.join(fq_subset.split('.')[:(- 1)])
fq_subset = ('%s.subset.fq' % '.'.join(fq_subset.split('.')[:(- 1)]))
with io.open(fq_subset, 'wt') as fp:
for read in getreads_subset(os.path.abspath(fq), subset_indices):
fp.write((u'\n'.join(read) + '\n'))
fastqs.append(fq_subset)
if args.keep_star_output:
star_output_dir = os.path.join(outdir, ('STAR.%s.outputs' % os.path.basename(strip_ngs_extensions(args.r1))))
print(('Output from STAR will be copied to %s' % star_output_dir))
if os.path.exists(star_output_dir):
i = 0
backup_dir = ('%s.bak' % star_output_dir)
while os.path.exists(backup_dir):
i += 1
backup_dir = ('%s.bak%s' % (star_output_dir, i))
logger.warning(('Moving existing output directory to %s' % backup_dir))
os.rename(star_output_dir, backup_dir)
os.mkdir(star_output_dir)
with tempfile.TemporaryFile(mode='w+t') as fp:
for star_genomedir in star_genomedirs:
try:
name = genome_names[star_genomedir]
except KeyError:
name = star_genomedir
star_cmd = [star_exe]
star_cmd.extend(['--runMode', 'alignReads', '--genomeLoad', 'NoSharedMemory', '--genomeDir', os.path.abspath(star_genomedir)])
star_cmd.extend(['--readFilesIn', fastqs[0]])
if (len(fastqs) > 1):
star_cmd.append(fastqs[1])
star_cmd.extend(['--quantMode', 'GeneCounts', '--outSAMtype', 'BAM', 'Unsorted', '--outSAMstrandField', 'intronMotif', '--outFileNamePrefix', prefix, '--runThreadN', str(args.n)])
print(('Running %s' % ' '.join(star_cmd)))
try:
subprocess.check_output(star_cmd, cwd=working_dir)
except subprocess.CalledProcessError as ex:
raise Exception(('STAR returned non-zero exit code: %s' % ex.returncode))
if args.keep_star_output:
genome_dir = os.path.join(star_output_dir, name.replace(os.sep, '_'))
print(('Copying STAR outputs to %s' % genome_dir))
os.mkdir(genome_dir)
for f in os.listdir(working_dir):
if f.startswith(prefix):
shutil.copy(os.path.join(working_dir, f), os.path.join(genome_dir, f))
star_tab_file = os.path.join(working_dir, ('%sReadsPerGene.out.tab' % prefix))
if (not os.path.exists(star_tab_file)):
raise Exception(('Failed to find .out file: %s' % star_tab_file))
sum_col2 = 0
sum_col3 = 0
sum_col4 = 0
with io.open(star_tab_file, 'rt') as out:
for (i, line) in enumerate(out):
if (i < 4):
continue
cols = line.rstrip('\n').split('\t')
sum_col2 += int(cols[1])
sum_col3 += int(cols[2])
sum_col4 += int(cols[3])
print('Sums:')
print(('- col2: %d' % sum_col2))
print(('- col3: %d' % sum_col3))
print(('- col4: %d' % sum_col4))
if (sum_col2 > 0.0):
forward_1st = ((float(sum_col3) / float(sum_col2)) * 100.0)
reverse_2nd = ((float(sum_col4) / float(sum_col2)) * 100.0)
else:
logger.warning('Sum of mapped reads is zero!')
forward_1st = 0.0
reverse_2nd = 0.0
print('Strand percentages:')
print(('- 1st forward: %.2f%%' % forward_1st))
print(('- 2nd reverse: %.2f%%' % reverse_2nd))
data = [name, ('%.2f' % forward_1st), ('%.2f' % reverse_2nd)]
if args.counts:
data.extend([sum_col2, sum_col3, sum_col4])
fp.write((u'%s\n' % '\t'.join([str(d) for d in data])))
fp.seek(0)
with io.open(outfile, 'wt') as out:
out.write((u'#fastq_strand version: %s\t#Aligner: %s\t#Reads in subset: %s\n' % (get_version(), 'STAR', subset)))
columns = ['Genome', '1st forward', '2nd reverse']
if args.counts:
columns.extend(['Unstranded', '1st read strand aligned', '2nd read strand aligned'])
out.write((u'#%s\n' % '\t'.join(columns)))
for line in fp:
out.write(str(line))
    return 0

def test_verify_message_command(base_message):
msg = base_message
msg.type = MsgType.Text
command = MessageCommand(name='Command 1', callable_name='command_1')
msg.commands = MessageCommands([command])
msg.verify()
with pytest.raises(ValueError):
        MessageCommand('name', 'callable_name', args='args', kwargs='kwargs')

def prune_existing_sys_deps(deps_list):
remove_deps = []
for dep in deps_list:
if (('node' in dep) and which('node')):
remove_deps.append(dep)
elif (('npm' in dep) and which('npm')):
remove_deps.append(dep)
    return list(set(deps_list) - set(remove_deps))

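# Sketch: on a machine where `node` and `npm` already resolve on PATH, matching
# entries are pruned (note the substring checks, so 'nodejs' counts as 'node').
deps = prune_existing_sys_deps(['nodejs', 'npm', 'libssl-dev'])
# -> ['libssl-dev'] when both tools are installed; order is unspecified (set difference)
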
def test_indirect_symlink(workdir):
paths = set_up(workdir)
os.symlink(paths['dir1'], paths['link1'])
with push_dir('projectA'):
status = mu_repo.main(config_file='.bar_file', args=['register', '--recursive'])
assert status.succeeded
        _compare_repos(status.config.repos, ['sectionX/repo1'])

class Config(BaseConfig):
def __init__(self, env: str=None, dbutils=None):
log.info(f"ENV: {os.environ.get('ENV')}")
self.env = (env or os.environ.get('ENV') or ('local' if BaseYmlConfigReader(env='local').exists() else None) or 'default')
self.__cfg = BaseYmlConfigReader(self.env, dbutils=dbutils).config
log.info(f'Full config: {self.__cfg}')
log.info(f'Configuration loaded for env: {self.env}')
        file_system_type_map: Mapping[str, type(FileSystemConfig)] = {FileSystemType.blob: AzureBlobFileSystemConfig, FileSystemType.local: LocalFileSystemConfig}
if (self.file_system_type not in file_system_type_map):
raise ValueError(f'Unsupported file system type {self.file_system_type}')
self.file_system: FileSystemConfig = file_system_type_map[self.file_system_type](self.__cfg)
self.web_config: WebConfig = parse_web_config(self.__cfg['web'])
def __getitem__(self, key):
return self.__cfg.get(key)
    # @property decorators restored: file_system_type and bq are read as plain
    # attributes inside __init__ above, so these accessors must be properties
    @property
    def file_system_type(self) -> str:
        return self.__cfg.get('file_system', dict()).get('type', '')

    @property
    def bq(self) -> dict:
        return self.__cfg.get('bq', dict())

    @property
    def bq_secret(self) -> dict:
        secret = self.bq.get('secret')
        if secret:
            return json.loads(secret)
        return dict()

    @property
    def bq_project(self) -> str:
        return self.bq.get('project', '')

    @property
    def github_token(self) -> str:
        return self.__cfg.get('github', dict()).get('token', '')

    @property
    def default_company(self) -> str:
        return self.__cfg.get('company', dict()).get('default', '')

    @property
    def spark_conf(self) -> Dict[str, str]:
        return self.__cfg.get('spark', dict())

class RearrangementProjector():
def __init__(self, old_scaffolds, new_scaffolds, conservative):
self.old_scaffolds = old_scaffolds
self.new_scaffolds = new_scaffolds
self._build_bp_graph()
self._build_adj_graph()
self.conservative = conservative
def connected_component_subgraphs(self, G):
for c in nx.connected_components(G):
(yield G.subgraph(c))
def project(self):
num_kbreaks = 0
subgraphs = list(self.connected_component_subgraphs(self.bp_graph))
for subgr in subgraphs:
if any(((len(subgr[node]) != 2) for node in subgr.nodes)):
continue
red_edges = []
black_edges = []
for (u, v, data) in subgr.edges(data=True):
if (data['scf_set'] == 'old'):
red_edges.append((u, v))
else:
black_edges.append((u, v))
if (not self._good_k_break(red_edges, black_edges)):
continue
num_kbreaks += 1
for (u, v) in red_edges:
self.bp_graph.remove_edge(u, v)
self.adj_graph.remove_edge(u, v)
for (u, v) in black_edges:
link = self.bp_graph[u][v][0]['link']
infinity = self.bp_graph[u][v][0]['infinity']
self.bp_graph.add_edge(u, v, scf_set='old', link=link, infinity=infinity)
self.adj_graph.add_edge(u, v)
logger.debug('Made %d k-breaks', num_kbreaks)
adjacencies = {}
for (u, v, data) in self.bp_graph.edges(data=True):
if (data['scf_set'] == 'old'):
(gap, support) = (0, [])
if (not data['infinity']):
gap = data['link'].gap
support = data['link'].supporting_genomes
adjacencies[u] = Adjacency(v, gap, support, data['infinity'])
adjacencies[v] = Adjacency(u, gap, support, data['infinity'])
return adjacencies
def _good_k_break(self, old_edges, new_edges):
MIN_OVLP_SCORE = 0.9
MAX_K_BREAK = 4
if (len(old_edges) > MAX_K_BREAK):
return False
new_adj_graph = self.adj_graph.copy()
for (u, v) in old_edges:
new_adj_graph.remove_edge(u, v)
for (u, v) in new_edges:
new_adj_graph.add_edge(u, v)
old_sets = [set(g.nodes) for g in self.connected_component_subgraphs(self.adj_graph)]
new_sets = [set(g.nodes) for g in self.connected_component_subgraphs(new_adj_graph)]
if (len(old_sets) != len(new_sets)):
return False
for old_set in old_sets:
max_overlap = 0
best_score = 0
for new_set in new_sets:
overlap = len((old_set & new_set))
if (overlap > max_overlap):
max_overlap = overlap
best_score = (float(overlap) / len((old_set | new_set)))
if (best_score < MIN_OVLP_SCORE):
return False
return True
def _build_bp_graph(self):
old_contigs = set()
for scf in self.old_scaffolds:
for cnt in scf.contigs:
old_contigs.add(cnt.name())
bp_graph = nx.MultiGraph()
for scf in self.old_scaffolds:
for (cnt_1, cnt_2) in zip(scf.contigs[:(- 1)], scf.contigs[1:]):
bp_graph.add_edge(cnt_1.right_end(), cnt_2.left_end(), scf_set='old', link=copy(cnt_1.link), scf_name=scf.name, infinity=False)
bp_graph.add_edge(scf.contigs[(- 1)].right_end(), scf.contigs[0].left_end(), scf_set='old', infinity=True)
for scf in self.new_scaffolds:
prev_cont = None
first_ctg = None
pos = 0
for (pos, contig) in enumerate(scf.contigs):
if (contig.name() in old_contigs):
prev_cont = deepcopy(contig)
first_ctg = prev_cont
break
if (prev_cont is None):
continue
for next_cont in scf.contigs[(pos + 1):]:
if (next_cont.name() not in old_contigs):
prev_cont.link.gap += (next_cont.length() + next_cont.link.gap)
common_genomes = (set(prev_cont.link.supporting_genomes) & set(next_cont.link.supporting_genomes))
prev_cont.link.supporting_genomes = list(common_genomes)
else:
bp_graph.add_edge(prev_cont.right_end(), next_cont.left_end(), scf_set='new', link=copy(prev_cont.link), scf_name=scf.name, infinity=False)
prev_cont = deepcopy(next_cont)
bp_graph.add_edge(prev_cont.right_end(), first_ctg.left_end(), scf_set='new', infinity=True, link=None)
self.bp_graph = bp_graph
def _build_adj_graph(self):
adj_graph = nx.Graph()
for scf in self.old_scaffolds:
for (cnt_1, cnt_2) in zip(scf.contigs[:(- 1)], scf.contigs[1:]):
adj_graph.add_edge(cnt_1.right_end(), cnt_2.left_end())
for cnt in scf.contigs:
adj_graph.add_edge(cnt.left_end(), cnt.right_end())
adj_graph.add_edge(scf.contigs[(- 1)].right_end(), scf.contigs[0].left_end())
        self.adj_graph = adj_graph

class TrackingJSONHandler(media.JSONHandler):
def __init__(self):
super().__init__()
self.deserialize_count = 0
def deserialize(self, *args, **kwargs):
result = super().deserialize(*args, **kwargs)
self.deserialize_count += 1
return result
async def deserialize_async(self, *args, **kwargs):
result = (await super().deserialize_async(*args, **kwargs))
self.deserialize_count += 1
        return result

class FaucetUntaggedACLTest(FaucetUntaggedTest):
CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "untagged"\nacls:\n 1:\n - rule:\n dl_type: 0x800\n ip_proto: 6\n tcp_dst: 5002\n actions:\n allow: 1\n - rule:\n dl_type: 0x800\n ip_proto: 6\n tcp_dst: 5001\n actions:\n allow: 0\n - rule:\n actions:\n allow: 1\n'
CONFIG = '\n interfaces:\n %(port_1)d:\n native_vlan: 100\n acl_in: 1\n %(port_2)d:\n native_vlan: 100\n %(port_3)d:\n native_vlan: 100\n %(port_4)d:\n native_vlan: 100\n'
def test_port5001_blocked(self):
self.ping_all_when_learned()
(first_host, second_host) = self.hosts_name_ordered()[0:2]
self.verify_tp_dst_blocked(5001, first_host, second_host)
def test_port5002_notblocked(self):
self.ping_all_when_learned()
(first_host, second_host) = self.hosts_name_ordered()[0:2]
        self.verify_tp_dst_notblocked(5002, first_host, second_host)

def summarize_info(records, arg_names, be_groupby='te_top1', larger_is_better=True, get_final=False):
headers = (arg_names + [be_groupby])
records = reorder_records(records, based_on='n_clients')
test_records = [_summarize_info(record, arg_names, be_groupby, larger_is_better, get_final=get_final) for record in records]
aggregated_records = pd.DataFrame(test_records, columns=headers)
averaged_records = aggregated_records.fillna((- 1)).groupby(headers[:(- 1)], as_index=False).agg({be_groupby: ['mean', 'std', 'max', 'min', 'count']}).sort_values((be_groupby, 'mean'), ascending=(not larger_is_better))
    return (aggregated_records, averaged_records)

class StorageConversation(OnceConversation, StorageItem):
    # @property restored (assumption): `identifier` is read as an attribute,
    # mirroring MessageStorageItem.identifier used below
    @property
    def identifier(self) -> ConversationIdentifier:
        return self._id
def to_dict(self) -> Dict:
dict_data = self._to_dict()
messages: Dict = dict_data.pop('messages')
message_ids = []
index = 0
for message in messages:
if ('index' in message):
message_idx = message['index']
else:
message_idx = index
index += 1
message_ids.append(MessageIdentifier(self.conv_uid, message_idx).str_identifier)
dict_data['conv_uid'] = self.conv_uid
dict_data['message_ids'] = message_ids
dict_data['save_message_independent'] = self.save_message_independent
return dict_data
def merge(self, other: 'StorageItem') -> None:
if (not isinstance(other, StorageConversation)):
raise ValueError(f'Can not merge {other} to {self}')
self.from_conversation(other)
def __init__(self, conv_uid: str, chat_mode: str=None, user_name: str=None, sys_code: str=None, message_ids: List[str]=None, summary: str=None, save_message_independent: Optional[bool]=True, conv_storage: StorageInterface=None, message_storage: StorageInterface=None, **kwargs):
super().__init__(chat_mode, user_name, sys_code, summary, **kwargs)
self.conv_uid = conv_uid
self._message_ids = message_ids
self._has_stored_message_index = ((len(kwargs['messages']) - 1) if ('messages' in kwargs) else (- 1))
self.save_message_independent = save_message_independent
self._id = ConversationIdentifier(conv_uid)
if (conv_storage is None):
conv_storage = InMemoryStorage()
if (message_storage is None):
message_storage = InMemoryStorage()
self.conv_storage = conv_storage
self.message_storage = message_storage
self.load_from_storage(self.conv_storage, self.message_storage)
    @property
    def message_ids(self) -> List[str]:
        return self._message_ids if self._message_ids else []
def end_current_round(self) -> None:
self.save_to_storage()
def _get_message_items(self) -> List[MessageStorageItem]:
return [MessageStorageItem(self.conv_uid, message.index, message.to_dict()) for message in self.messages]
def save_to_storage(self) -> None:
message_list = self._get_message_items()
self._message_ids = [message.identifier.str_identifier for message in message_list]
messages_to_save = message_list[(self._has_stored_message_index + 1):]
self._has_stored_message_index = (len(message_list) - 1)
self.message_storage.save_list(messages_to_save)
self.conv_storage.save_or_update(self)
def load_from_storage(self, conv_storage: StorageInterface, message_storage: StorageInterface) -> None:
conversation: StorageConversation = conv_storage.load(self._id, StorageConversation)
if (conversation is None):
return
message_ids = (conversation._message_ids or [])
message_list = message_storage.load_list([MessageIdentifier.from_str_identifier(message_id) for message_id in message_ids], MessageStorageItem)
messages = [message.to_message() for message in message_list]
conversation.messages = messages
conversation._message_index = len(messages)
self._message_ids = message_ids
self._has_stored_message_index = (len(messages) - 1)
        self.from_conversation(conversation)

class OptionNavigationEvents(Options):
    # property/setter decorators restored (assumption): without them each second definition would shadow the first
    @property
    def closePopup(self):
        return self._config_get(None)

    @closePopup.setter
    def closePopup(self, value: Any):
        self._config(value, js_type=False)

    @property
    def deselectButton(self):
        return self._config_get(None)

    @deselectButton.setter
    def deselectButton(self, value: Any):
        self._config(value, js_type=False)

    @property
    def selectButton(self):
        return self._config_get(None)

    @selectButton.setter
    def selectButton(self, value: Any):
        self._config(value, js_type=False)

    @property
    def showPopup(self):
        return self._config_get(None)

    @showPopup.setter
    def showPopup(self, value: Any):
        self._config(value, js_type=False)

class TestTransformStatsRunner:
    # decorators reconstructed (assumption): the bare ('elasticsearch.Elasticsearch')
    # and .asyncio residues match mock.patch and pytest.mark.asyncio
    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
async def test_transform_stats_with_timeout_and_headers(self, es):
es.transform.get_transform_stats = mock.AsyncMock(return_value={})
transform_stats = runner.TransformStats()
transform_id = 'a-transform'
result = (await transform_stats(es, params={'transform-id': transform_id, 'request-timeout': 3.0, 'headers': {'header1': 'value1'}, 'opaque-id': 'test-id1'}))
assert (result == {'weight': 1, 'unit': 'ops', 'success': True})
es.transform.get_transform_stats.assert_awaited_once_with(transform_id=transform_id, headers={'header1': 'value1'}, opaque_id='test-id1', request_timeout=3.0)
    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
async def test_transform_stats_with_failed_condition(self, es):
transform_id = 'a-transform'
es.transform.get_transform_stats = mock.AsyncMock(return_value={'count': 3, 'transforms': [{'id': transform_id, 'state': 'started', 'stats': {}, 'checkpointing': {'last': {}, 'operations_behind': 10000}}]})
transform_stats = runner.TransformStats()
result = (await transform_stats(es, params={'transform-id': transform_id, 'condition': {'path': 'checkpointing.operations_behind', 'expected-value': None}}))
assert (result == {'weight': 1, 'unit': 'ops', 'success': False, 'condition': {'path': 'checkpointing.operations_behind', 'actual-value': '10000', 'expected-value': None}})
es.transform.get_transform_stats.assert_awaited_once_with(transform_id=transform_id)
    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
async def test_transform_stats_with_successful_condition(self, es):
transform_id = 'a-transform'
es.transform.get_transform_stats = mock.AsyncMock(return_value={'count': 3, 'transforms': [{'id': transform_id, 'state': 'started', 'stats': {}, 'checkpointing': {'last': {}}}]})
transform_stats = runner.TransformStats()
result = (await transform_stats(es, params={'transform-id': transform_id, 'condition': {'path': 'checkpointing.operations_behind', 'expected-value': None}}))
assert (result == {'weight': 1, 'unit': 'ops', 'success': True, 'condition': {'path': 'checkpointing.operations_behind', 'actual-value': None, 'expected-value': None}})
es.transform.get_transform_stats.assert_awaited_once_with(transform_id=transform_id)
    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
async def test_transform_stats_with_non_existing_path(self, es):
transform_id = 'a-transform'
es.transform.get_transform_stats = mock.AsyncMock(return_value={'count': 3, 'transforms': [{'id': transform_id, 'state': 'started', 'stats': {}, 'checkpointing': {'last': {}}}]})
transform_stats = runner.TransformStats()
result = (await transform_stats(es, params={'transform-id': transform_id, 'condition': {'path': 'checkpointing.last.checkpoint', 'expected-value': 42}}))
assert (result == {'weight': 1, 'unit': 'ops', 'success': False, 'condition': {'path': 'checkpointing.last.checkpoint', 'actual-value': None, 'expected-value': '42'}})
        es.transform.get_transform_stats.assert_awaited_once_with(transform_id=transform_id)

def test_check_schema_version_false_when_wrong_version(session):
_setup_schema(session)
assert (_check_schema_version(session) is True)
schema_version = session.query(SchemaVersion).one()
schema_version.version = 'unknown'
session.add(schema_version)
session.commit()
    assert (_check_schema_version(session) is False)

class _TraceableClassificationModel(_TransformerTraceableModel, ABC):
def classification_labels(self) -> Optional[List[str]]:
id_label_items = self._model.config.id2label.items()
labels = [v for (_, v) in sorted(id_label_items, key=(lambda kv: kv[0]))]
        return [label.replace('-', '_') for label in labels]

class Solution():
def assignBikes(self, workers: List[List[int]], bikes: List[List[int]]) -> List[int]:
distances = []
for (i, worker) in enumerate(workers):
for (j, bike) in enumerate(bikes):
distances.append(((abs((worker[0] - bike[0])) + abs((worker[1] - bike[1]))), i, j))
distances.sort()
assigned = set()
taken = set()
track = ([(- 1)] * len(workers))
for (dist, i, j) in distances:
if ((i in assigned) or (j in taken)):
continue
assigned.add(i)
taken.add(j)
track[i] = j
if (len(assigned) == len(workers)):
break
        return track

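# Worked example for the greedy assignment above: sorting all
# (distance, worker, bike) triples makes ties resolve by smaller worker index,
# then smaller bike index. Worker 1 at (2, 1) grabs bike 0 first (distance 2),
# leaving worker 0 with bike 1.
print(Solution().assignBikes([[0, 0], [2, 1]], [[1, 2], [3, 3]]))  # -> [1, 0]
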
class BasicLLM():
prompt: PromptTemplate
llm: LLMChain
def __init__(self, base_prompt: str, model: str='gpt-4-1106-preview') -> None:
llm = get_model(model)
self.llm = LLMChain(llm=llm, prompt=PromptTemplate(template=base_prompt, input_variables=extract_variable_names(base_prompt)))
def run(self, **kwargs):
kwargs['feedback'] = kwargs.get('feedback', '')
        return self.llm.predict(**kwargs)

class TmpEnv():
def __init__(self, **kwargs):
self.kwargs = kwargs
self.previous = {}
def __enter__(self):
for (key, value) in self.kwargs.items():
self.previous[key] = os.environ.get(key)
os.environ[key] = str(value)
def __exit__(self, type, value, traceback):
for key in self.kwargs.keys():
if (self.previous.get(key) is not None):
os.environ[key] = self.previous[key]
else:
                del os.environ[key]

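# Usage sketch: values are stringified on entry and the prior environment is
# restored (or the key deleted) on exit. The variable name is illustrative.
import os

with TmpEnv(API_TIMEOUT=30):
    assert os.environ['API_TIMEOUT'] == '30'
assert 'API_TIMEOUT' not in os.environ  # assuming it was unset beforehand
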
class TriggerPhraseDataset(TextEmbeddingLatentsDataset):
def __init__(self, trainer: 'LoraLatentDiffusionTrainer') -> None:
super().__init__(trainer=trainer)
self.trigger_phrase = trainer.config.lora.trigger_phrase
self.use_only_trigger_probability = trainer.config.lora.use_only_trigger_probability
logger.info(f'Trigger phrase: {self.trigger_phrase}')
def process_caption(self, caption: str) -> str:
caption = super().process_caption(caption=caption)
if self.trigger_phrase:
            # use only the trigger with probability `use_only_trigger_probability`
            caption = f'{self.trigger_phrase} {caption}' if random.random() > self.use_only_trigger_probability else self.trigger_phrase
        return caption

class ManagerMock(object):
def __init__(self):
self.blueprints = []
self.mimetype_functions = []
self.actions = []
self.widgets = []
def style_class(endpoint, **kwargs):
return ('style', endpoint, kwargs)
def button_class(*args, **kwargs):
return ('button', args, kwargs)
def javascript_class(endpoint, **kwargs):
return ('javascript', endpoint, kwargs)
def link_class(*args, **kwargs):
return ('link', args, kwargs)
def register_blueprint(self, blueprint):
self.blueprints.append(blueprint)
def register_mimetype_function(self, fnc):
self.mimetype_functions.append(fnc)
def register_widget(self, widget):
self.widgets.append(widget)
def register_action(self, blueprint, widget, mimetypes=(), **kwargs):
        self.actions.append((blueprint, widget, mimetypes, kwargs))

class OptionSeriesDependencywheelSonificationDefaultspeechoptionsMappingPlaydelay(Options):
    # property/setter decorators restored (assumption): without them each second definition would shadow the first
    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)

class FlaskBBRenderer(mistune.HTMLRenderer):
def __init__(self, **kwargs):
super(FlaskBBRenderer, self).__init__(**kwargs)
def block_code(self, code, lang=None):
if lang:
try:
lexer = get_lexer_by_name(lang, stripall=True)
except ClassNotFound:
lexer = None
else:
lexer = None
if (not lexer):
return ('\n<pre><code>%s</code></pre>\n' % mistune.escape(code))
formatter = HtmlFormatter()
        return highlight(code, lexer, formatter)

class OptionPlotoptionsSplineSonificationTracksMappingPitch(Options):
    # property/setter decorators restored (assumption): without them each second definition would shadow the first
    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)

class HmacMd5(base.AbstractAuthenticationService):
SERVICE_ID = (1, 3, 6, 1, 6, 3, 10, 1, 1, 2)
IPAD = ([54] * 64)
OPAD = ([92] * 64)
def hashPassphrase(self, authKey):
return localkey.hashPassphraseMD5(authKey)
def localizeKey(self, authKey, snmpEngineID):
return localkey.localizeKeyMD5(authKey, snmpEngineID)
def digestLength(self):
return 12
def authenticateOutgoingMsg(self, authKey, wholeMsg):
ln = wholeMsg.find(TWELVE_ZEROS)
if (ln == (- 1)):
raise error.ProtocolError('Cant locate digest placeholder')
wholeHead = wholeMsg[:ln]
wholeTail = wholeMsg[(ln + 12):]
extendedAuthKey = (authKey.asNumbers() + FORTY_EIGHT_ZEROS)
k1 = univ.OctetString(map((lambda x, y: (x ^ y)), extendedAuthKey, self.IPAD))
k2 = univ.OctetString(map((lambda x, y: (x ^ y)), extendedAuthKey, self.OPAD))
d1 = md5((k1.asOctets() + wholeMsg)).digest()
d2 = md5((k2.asOctets() + d1)).digest()
mac = d2[:12]
return ((wholeHead + mac) + wholeTail)
def authenticateIncomingMsg(self, authKey, authParameters, wholeMsg):
if (len(authParameters) != 12):
raise error.StatusInformation(errorIndication=errind.authenticationError)
ln = wholeMsg.find(authParameters.asOctets())
if (ln == (- 1)):
raise error.ProtocolError('Cant locate digest in wholeMsg')
wholeHead = wholeMsg[:ln]
wholeTail = wholeMsg[(ln + 12):]
authenticatedWholeMsg = ((wholeHead + TWELVE_ZEROS) + wholeTail)
extendedAuthKey = (authKey.asNumbers() + FORTY_EIGHT_ZEROS)
k1 = univ.OctetString(map((lambda x, y: (x ^ y)), extendedAuthKey, self.IPAD))
k2 = univ.OctetString(map((lambda x, y: (x ^ y)), extendedAuthKey, self.OPAD))
d1 = md5((k1.asOctets() + authenticatedWholeMsg)).digest()
d2 = md5((k2.asOctets() + d1)).digest()
mac = d2[:12]
if (mac != authParameters):
raise error.StatusInformation(errorIndication=errind.authenticationFailure)
        return authenticatedWholeMsg

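# For reference: the ipad/opad construction above is RFC 2104 HMAC, so the
# 12-byte USM tag equals the stdlib's HMAC-MD5 truncated to 96 bits, given the
# same localized key and a message whose digest field holds twelve zero bytes.
# Names below are illustrative.
import hashlib
import hmac

def reference_mac(localized_key: bytes, msg_with_zeroed_digest: bytes) -> bytes:
    return hmac.new(localized_key, msg_with_zeroed_digest, hashlib.md5).digest()[:12]
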
class IntervalTests(unittest.TestCase):
combiner = {'gene': (lambda a: ''.join(a))}
region_coords_1 = ((1, 20, 'A'), (3, 8, 'B'), (5, 6, 'C'), (11, 15, 'D'), (19, 23, 'E'))
region_coords_2 = ((3, 32, 'A'), (5, 8, 'B'), (11, 14, 'C'), (17, 42, 'D'), (19, 22, 'E'), (19, 22, 'F'), (25, 28, 'G'), (25, 28, 'H'), (36, 39, 'I'))
    # @staticmethod restored: defined without self and called via self with a single positional argument
    @staticmethod
    def _from_intervals(coords):
garr = GA(pd.DataFrame(list(coords), columns=['start', 'end', 'gene']).assign(chromosome='chr0'))
garr.sort_columns()
return garr
def _compare_regions(self, result, expect):
self.assertEqual(expect.data.shape, result.data.shape, '\n'.join(['Got:', str(result.data), 'Expected:', str(expect.data)]))
for col in expect.data.columns:
self.assertTrue((expect[col].values == result[col].values).all(), "Col '{}' differs:\nExpect:\n{}\nGot:\n{}".format(col, expect.data, result.data))
def setUp(self):
self.regions_1 = self._from_intervals(self.region_coords_1)
self.regions_2 = self._from_intervals(self.region_coords_2)
def test_flatten(self):
flat_coords_1 = [(1, 3, 'A'), (3, 5, 'AB'), (5, 6, 'ABC'), (6, 8, 'AB'), (8, 11, 'A'), (11, 15, 'AD'), (15, 19, 'A'), (19, 20, 'AE'), (20, 23, 'E')]
flat_coords_2 = [(3, 5, 'A'), (5, 8, 'AB'), (8, 11, 'A'), (11, 14, 'AC'), (14, 17, 'A'), (17, 19, 'AD'), (19, 22, 'ADEF'), (22, 25, 'AD'), (25, 28, 'ADGH'), (28, 32, 'AD'), (32, 36, 'D'), (36, 39, 'DI'), (39, 42, 'D')]
for (regions, flat_coords) in [(self.regions_1, flat_coords_1), (self.regions_2, flat_coords_2)]:
result = regions.flatten(combine=self.combiner)
expect = self._from_intervals(flat_coords)
self._compare_regions(result, expect)
def test_merge(self):
merged_coords_1 = [(1, 23, 'ABCDE')]
merged_coords_2 = [(3, 42, 'ABCDEFGHI')]
for (regions, merged_coords) in [(self.regions_1, merged_coords_1), (self.regions_2, merged_coords_2)]:
result = regions.merge(combine=self.combiner)
expect = self._from_intervals(merged_coords)
self._compare_regions(result, expect)
def test_intersect(self):
selections1 = self._from_intervals([(1, 8, ''), (4, 10, ''), (8, 19, ''), (11, 20, ''), (21, 22, '')])
expectations1 = {'outer': ([(1, 20, 'A'), (3, 8, 'B'), (5, 6, 'C')], [(1, 20, 'A'), (3, 8, 'B'), (5, 6, 'C')], [(1, 20, 'A'), (11, 15, 'D')], [(1, 20, 'A'), (11, 15, 'D'), (19, 23, 'E')], [(19, 23, 'E')]), 'trim': ([(1, 8, 'A'), (3, 8, 'B'), (5, 6, 'C')], [(4, 10, 'A'), (4, 8, 'B'), (5, 6, 'C')], [(8, 19, 'A'), (11, 15, 'D')], [(11, 20, 'A'), (11, 15, 'D'), (19, 20, 'E')], [(21, 22, 'E')]), 'inner': ([(3, 8, 'B'), (5, 6, 'C')], [(5, 6, 'C')], [(11, 15, 'D')], [(11, 15, 'D')], [])}
selections2 = self._from_intervals([(0, 1, ''), (5, 14, ''), (16, 45, ''), (18, 37, ''), (19, 25, ''), (29, 31, ''), (34, 39, '')])
expectations2 = {'outer': ([], [(3, 32, 'A'), (5, 8, 'B'), (11, 14, 'C')], [(3, 32, 'A'), (17, 42, 'D'), (19, 22, 'E'), (19, 22, 'F'), (25, 28, 'G'), (25, 28, 'H'), (36, 39, 'I')], [(3, 32, 'A'), (17, 42, 'D'), (19, 22, 'E'), (19, 22, 'F'), (25, 28, 'G'), (25, 28, 'H'), (36, 39, 'I')], [(3, 32, 'A'), (17, 42, 'D'), (19, 22, 'E'), (19, 22, 'F')], [(3, 32, 'A'), (17, 42, 'D')], [(17, 42, 'D'), (36, 39, 'I')]), 'trim': ([], [(5, 14, 'A'), (5, 8, 'B'), (11, 14, 'C')], [(16, 32, 'A'), (17, 42, 'D'), (19, 22, 'E'), (19, 22, 'F'), (25, 28, 'G'), (25, 28, 'H'), (36, 39, 'I')], [(18, 32, 'A'), (18, 37, 'D'), (19, 22, 'E'), (19, 22, 'F'), (25, 28, 'G'), (25, 28, 'H'), (36, 37, 'I')], [(19, 25, 'A'), (19, 25, 'D'), (19, 22, 'E'), (19, 22, 'F')], [(29, 31, 'A'), (29, 31, 'D')], [(34, 39, 'D'), (36, 39, 'I')]), 'inner': ([], [(5, 8, 'B'), (11, 14, 'C')], [(17, 42, 'D'), (19, 22, 'E'), (19, 22, 'F'), (25, 28, 'G'), (25, 28, 'H'), (36, 39, 'I')], [(19, 22, 'E'), (19, 22, 'F'), (25, 28, 'G'), (25, 28, 'H')], [(19, 22, 'E'), (19, 22, 'F')], [], [(36, 39, 'I')])}
for (regions, selections, expectations) in ((self.regions_1, selections1, expectations1), (self.regions_2, selections2, expectations2)):
for mode in ('outer', 'trim', 'inner'):
grouped_results = regions.by_ranges(selections, mode=mode)
for ((_coord, result), expect) in zip(grouped_results, expectations[mode]):
self._compare_regions(result, self._from_intervals(expect))
result = regions.intersection(selections, mode=mode)
expect = self._from_intervals(sum(expectations[mode], []))
self._compare_regions(result, expect)
def test_subtract(self):
access = self._from_intervals([(1, 5, ''), (8, 12, ''), (15, 19, ''), (23, 33, '')])
target = self._from_intervals([(1, 5, ''), (7, 13, ''), (14, 17, ''), (20, 21, ''), (22, 24, ''), (26, 28, ''), (31, 34, '')])
expect = self._from_intervals([(17, 19, ''), (24, 26, ''), (28, 31, '')])
invert = self._from_intervals([(7, 8, ''), (12, 13, ''), (14, 15, ''), (20, 21, ''), (22, 23, ''), (33, 34, '')])
result = access.subtract(target)
self._compare_regions(result, expect)
iresult = target.subtract(access)
self._compare_regions(iresult, invert) |
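# TraitsUI demo: pressing 'Start Threads' spawns three counter threads, each driving its own read-only Int trait; 'running' gates the button via enabled_when.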
class ThreadDemo(HasTraits):
thread_0 = Int()
thread_1 = Int()
thread_2 = Int()
start = Button('Start Threads')
running = Int()
view = View(VGroup(Item('thread_0', style='readonly'), Item('thread_1', style='readonly'), Item('thread_2', style='readonly')), '_', Item('start', show_label=False, enabled_when='running == 0'), resizable=True, width=250, title='Monitoring threads')
def _start_changed(self):
for i in range(3):
Thread(target=self.counter, args=(('thread_%d' % i), (((i * 10) + 10) / 1000.0))).start()
def counter(self, name, interval):
self.running += 1
count = 0
for i in range(200):
setattr(self, name, count)
count += 1
sleep(interval)
self.running -= 1 |
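# loxi list unpacking: each element is length-prefixed, so the deserializer peeks one byte and reads that many bytes total (prefix included).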
class TestUnpackList(unittest.TestCase):
def test_simple(self):
def deserializer(reader):
(length,) = reader.peek('!B')
return reader.read(('!%ds' % length))[0]
reader = loxi.generic_util.OFReader('\x04abc\x03de\x02f\x01')
a = loxi.generic_util.unpack_list(reader, deserializer)
self.assertEqual(['\x04abc', '\x03de', '\x02f', '\x01'], a) |
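# Checks that over-long exception type, module, and culprit strings are truncated via encoding.keyword_field before being reported.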
def test_error_keyword_truncation(sending_elasticapm_client):
too_long = ('x' * (KEYWORD_MAX_LENGTH + 1))
expected = encoding.keyword_field(too_long)
WayTooLongException = type(too_long.upper(), (Exception,), {})
WayTooLongException.__module__ = too_long
try:
raise WayTooLongException()
except WayTooLongException:
with mock.patch('elasticapm.events.get_culprit') as mock_get_culprit:
mock_get_culprit.return_value = too_long
sending_elasticapm_client.capture_exception(handled=False)
sending_elasticapm_client.close()
error = sending_elasticapm_client.httpserver.payloads[0][1]['error']  # assumed payload indexing; the expression was truncated in the source
assert (error['exception']['type'] == expected.upper())
assert (error['exception']['module'] == expected)
assert (error['culprit'] == expected) |
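# Subprocess-based check that eventlet's patcher swaps stdlib modules for their eventlet.green equivalents across the patched import chain.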
class ImportPatched(ProcessBase):
def test_patch_a_module(self):
self.write_to_tempfile('base', base_module_contents)
self.write_to_tempfile('patching', patching_module_contents)
self.write_to_tempfile('importing', import_module_contents)
(output, lines) = self.launch_subprocess('importing.py')
assert lines[0].startswith('patcher'), repr(output)
assert lines[1].startswith('base'), repr(output)
assert lines[2].startswith('importing'), repr(output)
assert ('eventlet.green.socket' in lines[1]), repr(output)
assert ('eventlet.green.urllib' in lines[1]), repr(output)
assert ('eventlet.green.socket' in lines[2]), repr(output)
assert ('eventlet.green.urllib' in lines[2]), repr(output)
# 'httplib' below is an assumption; the module name was truncated in the source
assert ('eventlet.green.httplib' not in lines[2]), repr(output) |
def _generate_next_value_(name, start, count, last_values, *args, **kwds):
for last_value in reversed(last_values):
try:
new_value = (last_value + 1)
break
except TypeError:
pass
else:
new_value = start
if args:
return ((new_value,) + args)
else:
return new_value |
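# REPL-style driver: collects a multi-line instruction (a blank line ends input), builds a Selenium prompt, and exec()s the model's reply against the live env.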
@hydra.main(version_base=None, config_path='conf', config_name='config')
def main(cfg: DictConfig):
openai.api_key = cfg.OPENAI_API_KEY
env = actGPTEnv(cfg.executable_path, user_data_dir=cfg.user_data_dir, headless=False)
ldict = {'env': env}
while True:
inp = ''
print('\nenter instruction:')
while True:
dummy = (input() + '\n')
if (dummy == '\n'):
break
inp += dummy
prompt = get_prompt_selenium(inp, code=False)
text = env.get_openai_response(prompt, model='text-davinci-003')
text = text.replace('```', '')
exec(text, globals(), ldict) |
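# Locates the Tesseract binary: platform-specific bundled paths when running as a Briefcase package, otherwise shutil.which on PATH.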
def get_tesseract_path() -> Path:
if is_briefcase_package():
if (sys.platform == 'linux'):
binary_path = (Path(__file__).parent.parent.parent.parent / 'bin')
elif (sys.platform == 'win32'):
binary_path = ((Path(__file__).parent.parent / 'resources') / 'tesseract')
elif (sys.platform == 'darwin'):
binary_path = ((Path(__file__).parent.parent.parent.parent / 'app_packages') / 'bin')
else:
raise ValueError(f'Platform {sys.platform} is not supported')
extension = ('.exe' if (sys.platform == 'win32') else '')
tesseract_path = (binary_path / f'tesseract{extension}')
if (not tesseract_path.exists()):
raise RuntimeError(f'Could not locate Tesseract binary {tesseract_path}!')
return tesseract_path
if (tesseract_bin := shutil.which('tesseract')):
tesseract_path = Path(tesseract_bin)
if tesseract_path.exists():
return tesseract_path
raise RuntimeError('No Tesseract binary found! Tesseract has to be installed and added to PATH environment variable.') |
class CRUDMenu(CRUDBase[(Menu, CreateMenu, UpdateMenu)]):
async def get(self, db, menu_id: int) -> (Menu | None):
return (await self.get_(db, pk=menu_id))
async def get_by_title(self, db, title: str) -> (Menu | None):
result = (await db.execute(select(self.model).where((self.model.title == title))))
return result.scalars().first()
async def get_all(self, db, title: (str | None)=None, status: (int | None)=None) -> Sequence[Menu]:
se = select(self.model).order_by(asc(self.model.sort))
where_list = []
if title:
where_list.append(self.model.title.like(f'%{title}%'))
if (status is not None):
where_list.append((self.model.status == status))
if where_list:
se = se.where(and_(*where_list))
menu = (await db.execute(se))
return menu.scalars().all()
async def get_role_menus(self, db, superuser: bool, menu_ids: list[int]) -> Sequence[Menu]:
se = select(self.model).order_by(asc(self.model.sort))
where_list = [self.model.menu_type.in_([0, 1])]
if (not superuser):
where_list.append(self.model.id.in_(menu_ids))
se = se.where(and_(*where_list))
menu = (await db.execute(se))
return menu.scalars().all()
async def create(self, db, obj_in: CreateMenu) -> None:
(await self.create_(db, obj_in))
async def update(self, db, menu_id: int, obj_in: UpdateMenu) -> int:
return (await self.update_(db, menu_id, obj_in))
async def delete(self, db, menu_id: int) -> int:
return (await self.delete_(db, menu_id))
async def get_children(self, db, menu_id: int) -> list[Menu]:
result = (await db.execute(select(self.model).options(selectinload(self.model.children)).where((self.model.id == menu_id))))
menu = result.scalars().first()
return menu.children |
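# Firedrake parallel test: marks the meshes as parallel-compatible, then checks that projecting between them preserves the integral of the field.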
@pytest.mark.parallel(nprocs=3)
def test_project_parallel(coarse, fine):
distribution_parameters = {'partition': True, 'overlap_type': (DistributedMeshOverlapType.VERTEX, 10)}
cmesh = RectangleMesh(2, 2, 1, 1, diagonal='left', distribution_parameters=distribution_parameters)
fmesh = RectangleMesh(5, 5, 1, 1, diagonal='right', distribution_parameters=distribution_parameters)
fmesh._parallel_compatible = {weakref.ref(cmesh)}
Vc = FunctionSpace(cmesh, *coarse)
Vf = FunctionSpace(fmesh, *fine)
c = Function(Vc)
c.interpolate((SpatialCoordinate(cmesh) ** 2))
expect = assemble((c * dx))
actual = project(c, Vf)
actual = assemble((actual * dx))
assert numpy.allclose(expect, actual) |
class OptionSeriesArcdiagramSonificationDefaultinstrumentoptionsMappingVolume(Options):
@property
def mapFunction(self):
return self._config_get(None)
@mapFunction.setter
def mapFunction(self, value: Any):
self._config(value, js_type=False)
@property
def mapTo(self):
return self._config_get(None)
@mapTo.setter
def mapTo(self, text: str):
self._config(text, js_type=False)
@property
def max(self):
return self._config_get(None)
@max.setter
def max(self, num: float):
self._config(num, js_type=False)
@property
def min(self):
return self._config_get(None)
@min.setter
def min(self, num: float):
self._config(num, js_type=False)
@property
def within(self):
return self._config_get(None)
@within.setter
def within(self, value: Any):
self._config(value, js_type=False) |
class TestMinMaxIntegerField(FieldValues):
valid_inputs = {'1': 1, '3': 3, 1: 1, 3: 3}
invalid_inputs = {0: ['Ensure this value is greater than or equal to 1.'], 4: ['Ensure this value is less than or equal to 3.'], '0': ['Ensure this value is greater than or equal to 1.'], '4': ['Ensure this value is less than or equal to 3.']}
outputs = {}
field = serializers.IntegerField(min_value=1, max_value=3) |
@pytest.fixture(scope='function')
def timescale_integration_db(timescale_integration_session):
timescale_integration_session = seed_postgres_data(timescale_integration_session, './docker/sample_data/timescale_example.sql')
(yield timescale_integration_session)
drop_database(timescale_integration_session.bind.url) |
@PETSc.Log.EventDecorator()
def norm(v, norm_type='L2', mesh=None):
typ = norm_type.lower()
p = 2
if (typ == 'l2'):
expr = inner(v, v)
elif typ.startswith('l'):
try:
p = int(typ[1:])
if (p < 1):
raise ValueError
except ValueError:
raise ValueError(("Don't know how to interpret %s-norm" % norm_type))
expr = inner(v, v)
elif (typ == 'h1'):
expr = (inner(v, v) + inner(grad(v), grad(v)))
elif (typ == 'hdiv'):
expr = (inner(v, v) + inner(div(v), div(v)))
elif (typ == 'hcurl'):
expr = (inner(v, v) + inner(curl(v), curl(v)))
else:
raise RuntimeError(("Unknown norm type '%s'" % norm_type))
return (assemble(((expr ** (p / 2)) * dx)) ** (1 / p)) |
@pytest.mark.parametrize('num_txns, expected_base_fee', ((0, ), (1, ), (2, ), (3, ), (4, )))
def test_base_fee_evolution(london_plus_miner, funded_address, funded_address_private_key, num_txns, expected_base_fee):
chain = london_plus_miner
assert (chain.header.gas_limit == FOUR_TXN_GAS_LIMIT)
vm = chain.get_vm()
txns = [new_transaction(vm, funded_address, (b'\x00' * 20), private_key=funded_address_private_key, gas=21000, nonce=nonce) for nonce in range(num_txns)]
(block_import, _, _) = chain.mine_all(txns, gas_limit=FOUR_TXN_GAS_LIMIT)
mined_header = block_import.imported_block.header
assert (mined_header.gas_limit == FOUR_TXN_GAS_LIMIT)
assert (mined_header.gas_used == (21000 * num_txns))
assert (mined_header.base_fee_per_gas == (10 ** 9))
(block_import, _, _) = chain.mine_all([], gas_limit=FOUR_TXN_GAS_LIMIT)
mined_header = block_import.imported_block.header
assert (mined_header.gas_limit == FOUR_TXN_GAS_LIMIT)
assert (mined_header.gas_used == 0)
assert (mined_header.base_fee_per_gas == expected_base_fee) |
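# Training loop with per-step learning-rate decay, gradient clipping, optional EMA of parameters, periodic checkpoints, and per-epoch validation recall.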
def train(model, train_loader, val_loader, optimizer, init_lr=0.002, checkpoint_dir=None, checkpoint_interval=None, nepochs=None, clip_thresh=1.0):
if use_cuda:
model = model.cuda()
criterion = nn.CrossEntropyLoss()
global global_step, global_epoch
if (hparams.exponential_moving_average is not None):
ema = ExponentialMovingAverage(hparams.ema_decay)
for (name, param) in model.named_parameters():
if param.requires_grad:
ema.register(name, param.data)
else:
ema = None
while (global_epoch < nepochs):
model.train()
h = open(logfile_name, 'a')
running_loss = 0.0
for (step, (latents, lid, lengths, fnames)) in tqdm(enumerate(train_loader)):
current_lr = learning_rate_decay(init_lr, global_step)
for param_group in optimizer.param_groups:
param_group['lr'] = current_lr
optimizer.zero_grad()
(sorted_lengths, indices) = torch.sort(lengths.view((- 1)), dim=0, descending=True)
sorted_lengths = sorted_lengths.long().numpy()
(latents, lid) = (latents[indices], lid[indices])
(latents, lid) = (Variable(latents), Variable(lid))
if use_cuda:
(latents, lid) = (latents.cuda().long(), lid.cuda().long())
logits = model(latents, lengths=sorted_lengths)
loss = criterion(logits, lid)
loss.backward(retain_graph=False)
grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), clip_thresh)
optimizer.step()
if (ema is not None):
for (name, param) in model.named_parameters():
if (name in ema.shadow):
ema.update(name, param.data)
if ((global_step % checkpoint_interval) == 0):
save_checkpoint(model, optimizer, global_step, checkpoint_dir, global_epoch, ema=ema)
log_value('Training Loss', float(loss.item()), global_step)
log_value('gradient norm', grad_norm, global_step)
log_value('learning rate', current_lr, global_step)
global_step += 1
running_loss += loss.item()
averaged_loss = (running_loss / len(train_loader))
log_value('loss (per epoch)', averaged_loss, global_epoch)
h.write((((('Loss after epoch ' + str(global_epoch)) + ': ') + format((running_loss / len(train_loader)))) + '\n'))
h.close()
(recall, model) = validate_model(model, val_loader)
log_value('Unweighted Recall per epoch', recall, global_epoch)
global_epoch += 1
return (model, ema) |
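# Exporter entry point: JSON logging, env-driven config, qBittorrent metrics collector registration, then a sleep loop until a shutdown signal arrives.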
def main():
logHandler = logging.StreamHandler()
formatter = jsonlogger.JsonFormatter('%(asctime) %(levelname) %(message)', datefmt='%Y-%m-%d %H:%M:%S')
logHandler.setFormatter(formatter)
logger.addHandler(logHandler)
logger.setLevel('INFO')
config = get_config()
logger.setLevel(config['log_level'])
signal_handler = ShutdownSignalHandler()
if (not config['host']):
logger.error('No host specified, please set QBITTORRENT_HOST environment variable')
sys.exit(1)
if (not config['port']):
logger.error('No port specified, please set QBITTORRENT_PORT environment variable')
sys.exit(1)
logger.info('Exporter is starting up')
REGISTRY.register(QbittorrentMetricsCollector(config))
start_http_server(config['exporter_port'])  # assumed: prometheus_client's start_http_server; the call was truncated in the source
logger.info(f"Exporter listening on port {config['exporter_port']}")
while (not signal_handler.is_shutting_down()):
time.sleep(1)
logger.info('Exporter has shutdown') |
def test_rate_limiting_on_form_posts(client, msend):
client.post('/', headers={'referer': 'http://somewhere.com'}, data={'name': 'john'})  # referer URL assumed from the 'somewhere.com' host filter below
form = Form.query.filter_by(host='somewhere.com', email='').first()
form.confirmed = True
DB.session.add(form)
DB.session.commit()
replies = []
for _ in range(1000):
r = client.post('/', headers={'referer': 'http://somewhere.com'}, data={'name': 'attacker'})
replies.append(r.status_code)
limit = int(settings.RATE_LIMIT.split(' ')[0])
form = Form.query.filter_by(host='somewhere.com', email='').first()
assert (form.counter < limit)
assert (replies.count(302) <= limit)
assert (replies.count(429) >= (900 - limit)) |
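# Thin wrapper over the _hexdump generator: print the dump, hand back the generator, or join it into a single string.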
def hexdump(bytes_hex, palette=None, offset=0, prefix='', output='print'):
generator = _hexdump(bytes_hex, offset=offset, prefix=prefix, palette=palette)
if (output == 'print'):
print('\n'.join(generator))
elif (output == 'generator'):
return generator
elif (output == 'string'):
return '\n'.join(list(generator))
else:
raise ValueError("Invalid output argument: '{:s}' (should be 'print', 'generator' or 'string').".format(output)) |
def test_link_in_body_empty_string(response_with_empty_string_link):
config = LinkPaginationConfiguration(source='body', path='links.next')
request_params: SaaSRequestParams = SaaSRequestParams(method=HTTPMethod.GET, path='/customers', query_params={'page': 'abc'})
paginator = LinkPaginationStrategy(config)
next_request: Optional[SaaSRequestParams] = paginator.get_next_request(request_params, {}, response_with_empty_string_link, 'customers')
assert (next_request is None) |
class Lamp(Boxes):
webinterface = False
def __init__(self) -> None:
Boxes.__init__(self)
self.addSettingsArgs(edges.FingerJointSettings)
self.buildArgParser(x=220, y=75, h=70)
self.argparser.add_argument('--radius', action='store', type=float, default='105', help='radius of the lamp')
self.argparser.add_argument('--width', action='store', type=float, default='10', help='width of the ring')
def side(self, y, h):
return
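# NOTE: the bare return above makes the remaining drawing commands unreachable (apparently disabled on purpose).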
self.edges['f'](y)
self.corner(90)
self.edges['f'](h)
self.roundedTriangle(y, 75, 25)
self.edges['f'](h)
self.corner(90)
def render(self):
(x, y, h) = (self.x, self.y, self.h)
(r, w) = (self.radius, self.width)
s = RoundedTriangleSettings(self.thickness, angle=72, r_hole=2)
self.addPart(RoundedTriangle(self, s))
self.flexSettings = (3, 5.0, 20.0)
self.edges['f'].settings.setValues(self.thickness, finger=5, space=5, relative=False)
d = (2 * (r + w))
self.roundedPlate(d, d, r, move='right', callback=[(lambda : self.hole(w, (r + w), r))])
self.roundedPlate(d, d, r, holesMargin=(w / 2.0))
self.roundedPlate(d, d, r, move='only left up')
hole = (lambda : self.hole(w, 70, 2))
self.surroundingWall(d, d, r, 120, top='h', bottom='h', callback=[None, hole, None, hole], move='up')
with self.saved_context():
self.rectangularWall(x, y, edges='fFfF', holesMargin=5, move='right')
self.rectangularWall(x, y, edges='fFfF', holesMargin=5, move='right')
self.rectangularWall(y, h, 'fftf', move='right')
self.rectangularWall(y, h, 'fftf')
self.rectangularWall(x, y, edges='fFfF', holesMargin=5, move='up only')
self.rectangularWall(x, h, edges='hFFF', holesMargin=5, move='right')
self.rectangularWall(x, h, edges='hFFF', holesMargin=5) |
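# Deletes all but the newest n checkpoints per prefix (G/D/WD), ordered by mtime or by the step number parsed from the filename; *_0.pth files are always kept.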
def clean_checkpoints(path_to_models='logs/44k/', n_ckpts_to_keep=2, sort_by_time=True):
import re
ckpts_files = [f for f in os.listdir(path_to_models) if os.path.isfile(os.path.join(path_to_models, f))]
def name_key(_f):
return int(re.compile('._(\\d+)\\.pth').match(_f).group(1))
def time_key(_f):
return os.path.getmtime(os.path.join(path_to_models, _f))
sort_key = (time_key if sort_by_time else name_key)
def x_sorted(_x):
return sorted([f for f in ckpts_files if (f.startswith(_x) and (not f.endswith('_0.pth')))], key=sort_key)
to_del = [os.path.join(path_to_models, fn) for fn in ((x_sorted('G')[:(- n_ckpts_to_keep)] + x_sorted('D')[:(- n_ckpts_to_keep)]) + x_sorted('WD')[:(- n_ckpts_to_keep)])]
def del_info(fn):
return logger.info(f'.. Free up space by deleting ckpt {fn}')
def del_routine(x):
return [os.remove(x), del_info(x)]
[del_routine(fn) for fn in to_del] |
def filter_log_memory_setting_data(json):
option_list = ['diskfull', 'status']
json = remove_invalid_fields(json)
dictionary = {}
for attribute in option_list:
if ((attribute in json) and (json[attribute] is not None)):
dictionary[attribute] = json[attribute]
return dictionary |
@pytest.mark.usefixtures('dbus_service')  # assumed fixture name; the original decorator was garbled
@pytest.mark.parametrize('powerconfig', [{'battery_name': 'BAT1', 'percentage_low': 0.7, 'percentage_critical': 0.55}], indirect=True)
def test_upower_critical_battery(manager_nospawn, powerconfig):
manager_nospawn.start(powerconfig)
battery_found(manager_nospawn)
assert (len(manager_nospawn.c.widget['upowerwidget'].info()['batteries']) == 1)
assert (manager_nospawn.c.widget['upowerwidget'].info()['batteries'][0]['status'] == 'Critical') |
class Image():
def __init__(self, config, image_id, size):
self._config = config
self.image_id = image_id
self.size = size
self.modified = datetime.datetime.utcnow()
@property
def path(self):
return (self._config.storage_path / self.image_id)
@property
def uri(self):
return f'/images/{self.image_id}.jpeg'
def serialize(self):
return {'id': self.image_id, 'image': self.uri, 'modified': falcon.dt_to_http(self.modified), 'size': self.size, 'thumbnails': self.thumbnails()}
def thumbnails(self):
def reductions(size, min_size):
(width, height) = size
factor = 2
while (((width // factor) >= min_size) and ((height // factor) >= min_size)):
(yield ((width // factor), (height // factor)))
factor *= 2
return [f'/thumbnails/{self.image_id}/{width}x{height}.jpeg' for (width, height) in reductions(self.size, self._config.min_thumb_size)] |
def test_line_coordinates_spacing_larger_than_twice_interval():
(start, stop) = (0, 1)
spacing = 3
coordinates = line_coordinates(start, stop, spacing=spacing)
npt.assert_allclose(coordinates, [0, 1])
coordinates = line_coordinates(start, stop, spacing=spacing, pixel_register=True)
npt.assert_allclose(coordinates, [0.5])
coordinates = line_coordinates(start, stop, spacing=spacing, adjust='region')
npt.assert_allclose(coordinates, [0, 3])
coordinates = line_coordinates(start, stop, spacing=spacing, pixel_register=True, adjust='region')
npt.assert_allclose(coordinates, [1.5]) |
class UserResults():
def __init__(self):
save_dir = os.path.expanduser('~/.poker')
os.makedirs(save_dir, exist_ok=True)
self._file_path = os.path.join(save_dir, 'results.yaml')
try:
with open(self._file_path, 'r') as stream:
self._results: Dict[(str, Any)] = yaml.safe_load(stream=stream)
except FileNotFoundError:
self._results: Dict[(str, Any)] = {'stats': {}, 'results': []}
def add_result(self, strategy_path: str, agent: str, state: ShortDeckPokerState, og_name_to_name: Dict[(str, str)]):
ai_key = f'{agent}_{os.path.basename(strategy_path)}'
players = []
for (player_i, player) in enumerate(state.players):
name = og_name_to_name[player.name]
player_info_dict = dict(name=name, args=dict(cards=[c.to_dict() for c in player.cards], value=state.payout[player_i], is_big_blind=player.is_big_blind, is_small_blind=player.is_small_blind, is_dealer=player.is_dealer))
players.append(player_info_dict)
result_entry = dict(ai_key=ai_key, players=players, community_cards=[c.to_dict() for c in state.community_cards])
self._results['results'].append(result_entry)
self._compute_human_stats()
self._write_to_file()
def _compute_human_stats(self):
values = collections.defaultdict((lambda : collections.defaultdict(list)))
for result_entry in self._results['results']:
ai_key = result_entry['ai_key']
for player in result_entry['players']:
if (player['name'].lower() == 'human'):
if player['args']['is_big_blind']:
key = 'BB'
elif player['args']['is_small_blind']:
key = 'SB'
elif player['args']['is_dealer']:
key = 'D'
else:
raise NotImplementedError('')
values[ai_key][key].append(player['args']['value'])
break
self._results['stats'] = {ai_key: {p: {'mean': float(np.mean(v)), 'std': float(np.std(v))} for (p, v) in positions_to_values.items()} for (ai_key, positions_to_values) in values.items()}
def _write_to_file(self):
with open(self._file_path, 'w') as stream:
yaml.safe_dump(self._results, stream=stream, default_flow_style=False) |
def get_fbgemm_info(n, rows_per_table):
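# Reconstructs per-table embedding dims from recorded shapes: start every table at a 4-aligned average, then fold the remainder into trailing tables, capping each at 1024.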
num_tables = (n.input_shapes[6][0] - 1)
rows = ([rows_per_table] * num_tables)
batch_size = int(((n.input_shapes[12][0] - 1) / num_tables))
assert (batch_size == n.output_shapes[0][0])
avg_dim = (int(((n.inputs[7] / num_tables) / 4)) * 4)
dims = [avg_dim for _ in range(num_tables)]
addition = (n.inputs[7] - (avg_dim * num_tables))
pos = (len(dims) - 1)
while ((addition > 0) and (pos >= 0)):
if (addition >= (1024 - dims[pos])):
addition -= (1024 - dims[pos])
dims[pos] += (1024 - dims[pos])
else:
dims[pos] += addition
addition = 0
pos -= 1
pooling_mode = ([n.inputs[13]] * num_tables)
weighted = ('Float' not in n.input_types[1])
weights_precision = c10_type_to_str(n.input_types[1])
optimizer = get_optimizer_from_fbgemm_function_name(n.name)
if (optimizer == 'exact_sgd'):
lr = n.inputs[20]
elif (optimizer == 'exact_row_wise_adagrad'):
lr = n.inputs[25]
else:
lr = 0.01
if (optimizer == 'exact_row_wise_adagrad'):
eps = n.inputs[24]
weight_decay = n.inputs[26]
if (n.inputs[27] == 0):
weight_decay_mode = WeightDecayMode.NONE
elif (n.inputs[27] == 1):
weight_decay_mode = WeightDecayMode.L2
else:
weight_decay_mode = WeightDecayMode.DECOUPLE
else:
eps = 1e-08
weight_decay = 0.0
weight_decay_mode = WeightDecayMode.NONE
return (rows, num_tables, dims, batch_size, pooling_mode, weighted, weights_precision, optimizer, lr, eps, weight_decay, weight_decay_mode) |
class OptionPlotoptionsBellcurveSonificationDefaultinstrumentoptionsMappingGapbetweennotes(Options):
@property
def mapFunction(self):
return self._config_get(None)
@mapFunction.setter
def mapFunction(self, value: Any):
self._config(value, js_type=False)
@property
def mapTo(self):
return self._config_get(None)
@mapTo.setter
def mapTo(self, text: str):
self._config(text, js_type=False)
@property
def max(self):
return self._config_get(None)
@max.setter
def max(self, num: float):
self._config(num, js_type=False)
@property
def min(self):
return self._config_get(None)
@min.setter
def min(self, num: float):
self._config(num, js_type=False)
@property
def within(self):
return self._config_get(None)
@within.setter
def within(self, value: Any):
self._config(value, js_type=False) |
def test_before_first_request_functions_concurrent(app, client):
got = []
@app.before_first_request
def foo():
time.sleep(0.2)
got.append(42)
def get_and_assert():
client.get('/')
assert (got == [42])
t = Thread(target=get_and_assert)
t.start()
get_and_assert()
t.join()
assert app.got_first_request |
@pytest.mark.PostProcessingTools
class TestBDM2Mesh8(object):
def setup_class(cls):
pass
def teardown_class(cls):
pass
def setup_method(self, method):
reload(bt)
self.transport_obj = bt.ns.modelList[0].levelModelList[0]
self.bdm2_obj = self.transport_obj.velocityPostProcessor.vpp_algorithms[0]
self._setRelativePath()
def _setRelativePath(self):
self.scriptdir = os.path.dirname(__file__)
def teardown_method(self, method):
filenames = ['poisson_bdm1_test.h5', 'poisson_bdm1_test.xmf', 'reference_triangle.ele', 'reference_triangle.node', 'reference_triangle.poly', 'proteus.log', 'blockDomain.poly']
for file in filenames:
if os.path.exists(file):
try:
os.remove(file)
except OSError as e:
print(('Error: %s - %s.' % (e.filename, e.strerror)))
else:
pass
def test_BDM2_reference_triangle_full_in_space(self):
rel_path_1 = 'comparison_files/bdm_bdy_func_values_mesh_8.npy'
rel_path_2 = 'comparison_files/bdm_func_values_mesh_8.npy'
bdm_bdy_values = np.load(os.path.join(self.scriptdir, rel_path_1))
bdm_values = np.load(os.path.join(self.scriptdir, rel_path_2))
self.bdm2_obj.ebq[('velocity', 0)] = bdm_bdy_values.copy()
self.bdm2_obj.q[('velocity', 0)] = bdm_values.copy()
self.bdm2_obj.evaluateLocalVelocityRepresentation(0, True)
np.testing.assert_almost_equal(self.bdm2_obj.q[('velocity', 0)], bdm_values, decimal=6) |
class TestExceptionHandling(CoprsTestCase):
def test_json_only_for_api(self):
app.config['SESSION_COOKIE_DOMAIN'] = 'localhost.localdomain'
r1 = self.tc.get('/nonexisting/endpoint/')
assert (r1.status_code == 404)
with pytest.raises(json.JSONDecodeError):
json.loads(r1.data)
r2 = self.tc.get('/api_3/nonexisting/endpoint/')
assert (r2.status_code == 404)
data = json.loads(r2.data)
assert ('error' in data)
def test_both_nonexisting_page_and_object(self):
r1 = self.tc.get('/nonexisting/endpoint/')
assert (r1.status_code == 404)
assert ('<h1> Error 404: Page Not Found</h1>' in str(r1.data))
r2 = self.tc.get('/coprs/nonexisting/nonexisting/')
assert (r2.status_code == 404)
def test_both_nonexisting_page_and_object_api(self):
r1 = self.tc.get('/api_3/nonexisting/endpoint/')
assert (r1.status_code == 404)
d1 = json.loads(r1.data)
assert ('API endpoint' in d1['error'])
r2 = self.tc.get('/api_3/project?ownername=nonexisting&projectname=nonexisting')
assert (r2.status_code == 404)
d2 = json.loads(r2.data)
assert ('Project nonexisting/nonexisting does not exist' in d2['error'])
def test_api_401(self):
r1 = self.tc.post('api_3/project/add/someone')
assert (r1.status_code == 401)
data = json.loads(r1.data)
assert ('Login invalid/expired' in data['error'])
def test_api_403(self, f_users, f_coprs, f_mock_chroots, f_users_api, f_db):
request_data = {'chroots': None, 'description': 'Changed description'}
r1 = self.post_api3_with_auth('api_3/project/edit/user2/foocopr', request_data, self.u3)
assert (r1.status_code == 403)
data = json.loads(r1.data)
assert ('Only owners and admins may update their projects' in data['error'])
@pytest.mark.usefixtures('f_users', 'f_users_api', 'f_coprs', 'f_mock_chroots', 'f_builds', 'f_db')
def test_api_409(self):
r1 = self.post_api3_with_auth('api_3/build/cancel/1', {}, self.u1)
assert (r1.status_code == 409)
data = json.loads(r1.data)
assert ('Cannot cancel build 1' in data['error'])
@pytest.mark.usefixtures('f_users', 'f_users_api', 'f_coprs', 'f_mock_chroots', 'f_builds', 'f_db')
def test_api_400(self):
data = {'builder': True}
url = 'api_3/project/permissions/request/user1/foocopr'
r1 = self.post_api3_with_auth(url, data, self.u1)
assert (r1.status_code == 400)
data = json.loads(r1.data)
assert ("is owner of the 'user1/foocopr' project" in data['error'])
def test_api_504(self):
def raise_exception():
raise GatewayTimeout()
app.view_functions['apiv3_ns.home'] = raise_exception
r1 = self.tc.get('/api_3', follow_redirects=True)
assert (r1.status_code == 504)
data = json.loads(r1.data)
assert ('The API request timeouted' in data['error'])
def test_api_500(self):
def raise_exception():
raise CoprHttpException('Whatever unspecified error')
app.view_functions['apiv3_ns.home'] = raise_exception
r1 = self.tc.get('/api_3', follow_redirects=True)
assert (r1.status_code == 500)
data = json.loads(r1.data)
assert ('Whatever unspecified error' in data['error'])
def test_api_500_default_message(self):
def raise_exception():
raise CoprHttpException
app.view_functions['apiv3_ns.home'] = raise_exception
r1 = self.tc.get('/api_3/', follow_redirects=True)
assert (r1.status_code == 500)
data = json.loads(r1.data)
assert ('Generic copr exception' in data['error'])
def test_api_500_runtime_error(self):
def raise_exception():
raise RuntimeError('Whatever unspecified error')
app.view_functions['apiv3_ns.home'] = raise_exception
r1 = self.tc.get('/api_3', follow_redirects=True)
assert (r1.status_code == 500)
data = json.loads(r1.data)
assert ("Request wasn't successful, there is probably a bug in the Copr code." in data['error'])
def test_api_500_storage(self):
def raise_exception():
raise InsufficientStorage
app.view_functions['apiv3_ns.home'] = raise_exception
r1 = self.tc.get('/api_3', follow_redirects=True)
assert (r1.status_code == 500)
data = json.loads(r1.data)
assert ('Not enough space left' in data['error'])
def test_api_500_in_progress(self):
def raise_exception():
raise ActionInProgressException('Hey! Action in progress', None)
app.view_functions['apiv3_ns.home'] = raise_exception
r1 = self.tc.get('/api_3', follow_redirects=True)
assert (r1.status_code == 500)
data = json.loads(r1.data)
assert ('Hey! Action in progress' in data['error']) |
def test_traverse_types_filtering():
provider1 = providers.Resource(dict)
provider2 = providers.Resource(dict)
provider3 = providers.Provider()
provider = providers.Provider()
provider.override(provider1)
provider.override(provider2)
provider.override(provider3)
all_providers = list(providers.traverse(provider, types=[providers.Resource]))
assert (len(all_providers) == 2)
assert (provider1 in all_providers)
assert (provider2 in all_providers) |
def test_impute_with_string_missing_and_automatically_find_variables(df_na):
imputer = CategoricalImputer(imputation_method='missing', variables=None)
X_transformed = imputer.fit_transform(df_na)
X_reference = df_na.copy()
X_reference['Name'] = X_reference['Name'].fillna('Missing')
X_reference['City'] = X_reference['City'].fillna('Missing')
X_reference['Studies'] = X_reference['Studies'].fillna('Missing')
assert (imputer.imputation_method == 'missing')
assert (imputer.variables is None)
assert (imputer.variables_ == ['Name', 'City', 'Studies'])
assert (imputer.n_features_in_ == 6)
assert (imputer.imputer_dict_ == {'Name': 'Missing', 'City': 'Missing', 'Studies': 'Missing'})
assert (X_transformed[['Name', 'City', 'Studies']].isnull().sum().sum() == 0)
assert (X_transformed[['Age', 'Marks']].isnull().sum().sum() > 0)
pd.testing.assert_frame_equal(X_transformed, X_reference) |
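# Returns the byte range spanning the HP Signed File Fingerprint Begin/End markers, or None when either marker is missing.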
def _get_file_fingerprint(input_data):
prefix = re.search(b'\\-\\-\\=\\<\\/Begin HP Signed File Fingerprint\\\\\\>\\=\\-\\-', input_data)
if prefix:
suffix = re.search(b'\\-\\-\\=\\<\\/End HP Signed File Fingerprint\\\\\\>\\=\\-\\-', input_data)
if suffix:
return input_data[prefix.start():suffix.end()]
return None |
def adc_info(esp, efuses, args):
print('')
if (efuses['BLK_VERSION_MAJOR'].get() == 1):
print('Temperature Sensor Calibration = {}C'.format(efuses['TEMP_SENSOR_CAL'].get()))
print('')
print('ADC1 readings stored in efuse BLOCK2:')
print(' MODE0 D1 reading (250mV): {}'.format(efuses['ADC1_MODE0_D1'].get()))
print(' MODE0 D2 reading (600mV): {}'.format(efuses['ADC1_MODE0_D2'].get()))
print(' MODE1 D1 reading (250mV): {}'.format(efuses['ADC1_MODE1_D1'].get()))
print(' MODE1 D2 reading (800mV): {}'.format(efuses['ADC1_MODE1_D2'].get()))
print(' MODE2 D1 reading (250mV): {}'.format(efuses['ADC1_MODE2_D1'].get()))
print(' MODE2 D2 reading (1000mV): {}'.format(efuses['ADC1_MODE2_D2'].get()))
print(' MODE3 D1 reading (250mV): {}'.format(efuses['ADC1_MODE3_D1'].get()))
print(' MODE3 D2 reading (2000mV): {}'.format(efuses['ADC1_MODE3_D2'].get()))
print('')
print('ADC2 readings stored in efuse BLOCK2:')
print(' MODE0 D1 reading (250mV): {}'.format(efuses['ADC2_MODE0_D1'].get()))
print(' MODE0 D2 reading (600mV): {}'.format(efuses['ADC2_MODE0_D2'].get()))
print(' MODE1 D1 reading (250mV): {}'.format(efuses['ADC2_MODE1_D1'].get()))
print(' MODE1 D2 reading (800mV): {}'.format(efuses['ADC2_MODE1_D2'].get()))
print(' MODE2 D1 reading (250mV): {}'.format(efuses['ADC2_MODE2_D1'].get()))
print(' MODE2 D2 reading (1000mV): {}'.format(efuses['ADC2_MODE2_D2'].get()))
print(' MODE3 D1 reading (250mV): {}'.format(efuses['ADC2_MODE3_D1'].get()))
print(' MODE3 D2 reading (2000mV): {}'.format(efuses['ADC2_MODE3_D2'].get()))
else:
print('BLK_VERSION_MAJOR = {}'.format(efuses['BLK_VERSION_MAJOR'].get_meaning())) |
class Solution():
def alienOrder(self, words):
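# Kahn's topological sort: an edge a->b comes from the first differing letters of each adjacent word pair; letters with no remaining predecessors are emitted, and a leftover cycle (or a word longer than its following prefix) yields ''.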
(pre, suc) = (collections.defaultdict(set), collections.defaultdict(set))
for pair in zip(words, words[1:]):
for (a, b) in zip(*pair):
if (a != b):
suc[a].add(b)
pre[b].add(a)
break
if (len(pair[0]) > len(pair[1])):
return ''
chars = set(suc).union(set(pre))
free = (chars - set(pre))
order = ''
while free:
a = free.pop()
order += a
for b in suc[a]:
pre[b].discard(a)
if (not pre[b]):
free.add(b)
return (order * (set(order) == chars)) |
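# Synchronous facade over AsyncMultiplexer: the event loop runs in a helper thread and connect/disconnect block on it with a 240s timeout.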
class Multiplexer(AsyncMultiplexer):
_thread_was_started: bool
_is_connected: bool
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self._sync_lock = threading.Lock()
self._init()
def _init(self) -> None:
self._thread_was_started = False
self._is_connected = False
def set_loop(self, loop: AbstractEventLoop) -> None:
super().set_loop(loop)
self._thread_runner = ThreadedAsyncRunner(self._loop)
def connect(self) -> None:
with self._sync_lock:
if (not self._loop.is_running()):
self._thread_runner.start()
self._thread_was_started = True
self._thread_runner.call(super().connect()).result(240)
self._is_connected = True
def disconnect(self) -> None:
self.logger.debug('Disconnect called')
with self._sync_lock:
if (not self._loop.is_running()):
return
if self._is_connected:
self._thread_runner.call(super().disconnect()).result(240)
self._is_connected = False
self.logger.debug('Disconnect async method executed')
if (self._thread_runner.is_alive() and self._thread_was_started):
self._thread_runner.stop()
self.logger.debug('Thread stopped')
self.logger.debug('Disconnected')
self._init()
self.set_loop(self._loop)
def put(self, envelope: Envelope) -> None:
self._thread_runner.call(super()._put(envelope)) |
class OptionLangAccessibilityTable(Options):
@property
def tableSummary(self):
return self._config_get('Table representation of chart.')
@tableSummary.setter
def tableSummary(self, text: str):
self._config(text, js_type=False)
@property
def viewAsDataTableButtonText(self):
return self._config_get('View as data table, {chartTitle}')
@viewAsDataTableButtonText.setter
def viewAsDataTableButtonText(self, text: str):
self._config(text, js_type=False) |
@unittest.skipIf((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
class GroupOpTestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(GroupOpTestCase, self).__init__(*args, **kwargs)
self._test_id = 0
def _test_group_layernorm_sigmoid_mul_cat_fusion(self, input_shapes, gamma_is_none=False, beta_is_none=False, add_size_op=False, fuse_sigmoid_mul=True, num_group_ops=1, should_fail=False, dtype='float16'):
if (gamma_is_none or beta_is_none or (len(input_shapes) <= 1)):
should_fail = True
testname = ('group_layernorm_sigmoid_mul_fusion' if fuse_sigmoid_mul else 'group_layernorm_fusion')
_LOGGER.info(f'{testname}: input_shapes={input_shapes}, gamma_is_none={gamma_is_none}, beta_is_none={beta_is_none}')
inputs = []
gammas = []
betas = []
normalized_shapes = []
for (i, shape) in enumerate(input_shapes):
inputs.append(Tensor(shape=[IntImm(shape[0]), IntImm(shape[1])], dtype=dtype, name=('X_' + str(i)), is_input=True))
gamma = (None if gamma_is_none else Tensor(shape=[IntImm(shape[1])], dtype=dtype, name=('gamma_' + str(i)), is_input=True))
gammas.append(gamma)
beta = (None if beta_is_none else Tensor(shape=[IntImm(shape[1])], dtype=dtype, name=('beta_' + str(i)), is_input=True))
betas.append(beta)
if add_size_op:
size = ops.size()(inputs[(- 1)], 1)
normalized_shapes.append([size])
else:
normalized_shapes.append([IntImm(shape[1])])
Ys = []
for i in range(len(input_shapes)):
Y0 = ops.layernorm()(inputs[i], gammas[i], betas[i], normalized_shapes[i])
if fuse_sigmoid_mul:
Y1 = ops.elementwise(FuncEnum.SIGMOID)(Y0)
Y2 = ops.elementwise(FuncEnum.MUL)(inputs[i], Y1)
Ys.append(Y2)
else:
Ys.append(Y0)
for (i, Y) in enumerate(Ys):
Y._attrs['is_output'] = True
Y._attrs['name'] = f'output_{i}'
target = detect_target()
module = compile_model(Ys, target, './tmp', f'{testname}_{self._test_id}')
self._test_id += 1
sorted_graph = module.debug_sorted_graph
sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
group_op = ('group_layernorm_sigmoid_mul' if fuse_sigmoid_mul else 'group_layernorm')
if should_fail:
assert (not has_op(sorted_ops, group_op))
return
else:
assert has_op(sorted_ops, group_op)
assert (count_ops(sorted_ops, group_op) == num_group_ops), f'expecting {num_group_ops} {group_op} ops, found {count_ops(sorted_ops, group_op)}'
B = len(input_shapes)
_LOGGER.info(f'Run test group_layernorm_sigmoid_mul. Input shapes: {input_shapes}')
xs_pt = []
gammas_pt = []
betas_pt = []
for shape in input_shapes:
xs_pt.append(get_random_torch_tensor(shape, dtype))
gamma_pt = (None if gamma_is_none else get_random_torch_tensor([shape[1]], dtype))
gammas_pt.append(gamma_pt)
beta_pt = (None if beta_is_none else get_random_torch_tensor([shape[1]], dtype))
betas_pt.append(beta_pt)
ys_pt = []
for i in range(B):
y0 = torch.nn.functional.layer_norm(xs_pt[i], xs_pt[i].size()[1:], gammas_pt[i], betas_pt[i])
if fuse_sigmoid_mul:
y = torch.mul(xs_pt[i], torch.sigmoid(y0))
ys_pt.append(y)
else:
ys_pt.append(y0)
input_name_to_index = module.get_input_name_to_index_map()
num_inputs = (len(input_shapes) * 3)
inputs = [0 for i in range(num_inputs)]
for i in range(len(input_shapes)):
inputs[input_name_to_index[f'X_{i}']] = xs_pt[i]
if (not gamma_is_none):
inputs[input_name_to_index[f'gamma_{i}']] = gammas_pt[i]
if (not beta_is_none):
inputs[input_name_to_index[f'beta_{i}']] = betas_pt[i]
ys = []
for y_pt in ys_pt:
ys.append(get_torch_empty_tensor(y_pt.size(), dtype))
module.run_with_tensors(inputs, ys)
for (y_pt, y) in zip(ys_pt, ys):
self.assertTrue(torch.allclose(y_pt, y, atol=0.01, rtol=0.01), f'max diff: {torch.max((y_pt - y))}, min diff: {torch.min((y_pt - y))}')
def test_group_layernorm_sigmoid_mul_fusion_float16(self):
self._test_group_layernorm_sigmoid_mul_cat_fusion([[128, 256]], fuse_sigmoid_mul=True)
self._test_group_layernorm_sigmoid_mul_cat_fusion(([[128, 256]] * 4), fuse_sigmoid_mul=True)
self._test_group_layernorm_sigmoid_mul_cat_fusion([[128, 128], [128, 256], [128, 125]], fuse_sigmoid_mul=True)
self._test_group_layernorm_sigmoid_mul_cat_fusion([[10, 64], [10, 64], [10, 64]], beta_is_none=True, fuse_sigmoid_mul=True)
self._test_group_layernorm_sigmoid_mul_cat_fusion([[128, 1025], [128, 1276], [128, 1023]], gamma_is_none=True, fuse_sigmoid_mul=True)
self._test_group_layernorm_sigmoid_mul_cat_fusion(([[128, 256]] * 4), fuse_sigmoid_mul=False)
self._test_group_layernorm_sigmoid_mul_cat_fusion([[64, 64], [128, 256], [1, 125]], fuse_sigmoid_mul=True, should_fail=True)
self._test_group_layernorm_sigmoid_mul_cat_fusion([[128, 128], [128, 256], [128, 125]], fuse_sigmoid_mul=True, add_size_op=True)
self._test_group_layernorm_sigmoid_mul_cat_fusion([[128, 128], [128, 256], [128, 125], [128, 125]], fuse_sigmoid_mul=True, num_group_ops=2)
self._test_group_layernorm_sigmoid_mul_cat_fusion([[128, 120], [128, 1], [128, 256], [128, 1024]], fuse_sigmoid_mul=True, num_group_ops=1)
self._test_group_layernorm_sigmoid_mul_cat_fusion((([[128, 64]] * 39) + ([[128, 256]] * 10)), fuse_sigmoid_mul=True, num_group_ops=2)
self._test_group_layernorm_sigmoid_mul_cat_fusion(([[128, 64]] * 50), fuse_sigmoid_mul=True, num_group_ops=2)
self._test_group_layernorm_sigmoid_mul_cat_fusion([[2048, 256], [2048, 256], [2048, 128], [2048, 128], [2048, 128], [2048, 128], [2048, 128], [2048, 1024]], fuse_sigmoid_mul=True, num_group_ops=1)
self._test_group_layernorm_sigmoid_mul_cat_fusion([[2048, 256], [2048, 256], [2048, 1024]], fuse_sigmoid_mul=True, num_group_ops=1)
def test_group_layernorm_sigmoid_mul_fusion_float32(self):
self._test_group_layernorm_sigmoid_mul_cat_fusion(([[128, 256]] * 4), fuse_sigmoid_mul=True, dtype='float32')
self._test_group_layernorm_sigmoid_mul_cat_fusion([[10, 64], [10, 64], [10, 64]], beta_is_none=True, fuse_sigmoid_mul=True, dtype='float32')
self._test_group_layernorm_sigmoid_mul_cat_fusion(([[128, 256]] * 4), fuse_sigmoid_mul=False, dtype='float32')
self._test_group_layernorm_sigmoid_mul_cat_fusion([[64, 64], [128, 256], [1, 125]], fuse_sigmoid_mul=True, should_fail=True, dtype='float32')
self._test_group_layernorm_sigmoid_mul_cat_fusion([[128, 128], [128, 256], [128, 125]], fuse_sigmoid_mul=True, add_size_op=True, dtype='float32')
self._test_group_layernorm_sigmoid_mul_cat_fusion([[128, 128], [128, 256], [128, 125], [128, 125]], fuse_sigmoid_mul=True, num_group_ops=2, dtype='float32')
self._test_group_layernorm_sigmoid_mul_cat_fusion((([[128, 64]] * 39) + ([[128, 256]] * 10)), fuse_sigmoid_mul=True, num_group_ops=2, dtype='float32')
self._test_group_layernorm_sigmoid_mul_cat_fusion([[2048, 256], [2048, 256], [2048, 128], [2048, 128], [2048, 128], [2048, 128], [2048, 128], [2048, 1024]], fuse_sigmoid_mul=True, num_group_ops=1, dtype='float32')
def test_layernorm_with_cycles(self):
torch.manual_seed(0)
testname = 'layernorm_with_cycles_0'
dtype = 'float16'
batch_sizes = [1, 2048]
eps = 1e-05
Input0 = Tensor(shape=[IntVar(values=batch_sizes, name='batch'), IntImm(value=1024)], dtype=dtype, name='input0', is_input=True)
reshape_to_shape_0 = [(- 1), 32, 32]
reshape_0 = ops.reshape()(Input0, reshape_to_shape_0)
W0 = Tensor(shape=[IntImm(16), IntImm(32)], name='w0', is_input=True)
gemm_rcr_0 = ops.gemm_rcr()(reshape_0, W0)
reshape_to_shape_1 = [(- 1), 512]
reshape_1 = ops.reshape()(gemm_rcr_0, reshape_to_shape_1)
Input1 = Tensor(shape=[IntVar(values=batch_sizes, name='batch'), IntImm(value=512)], dtype=dtype, name='input1', is_input=True)
elementwise_0 = ops.elementwise(func_enum=FuncEnum.MUL)(reshape_1, Input1)
W1 = Tensor(shape=[IntImm(3821), IntImm(512)], name='w1', is_input=True)
gemm_rcr_1 = ops.gemm_rcr()(elementwise_0, W1)
concat_dim = 1
concatenate_0 = ops.concatenate()([Input1, gemm_rcr_1], concat_dim)
Gamma0 = Tensor(shape=[IntImm(4333)], name='gamma0', is_input=True)
Beta0 = Tensor(shape=[IntImm(4333)], name='beta0', is_input=True)
layernorm_0 = ops.layernorm(normalized_shape=None)(concatenate_0, Gamma0, Beta0, [IntImm(4333)], eps)
Input2 = Tensor(shape=[IntVar(values=batch_sizes, name='batch'), IntImm(value=256)], dtype=dtype, name='input2', is_input=True)
W2 = Tensor(shape=[IntImm(256), IntImm(256)], name='w2', is_input=True)
gemm_rcr_2 = ops.gemm_rcr()(Input2, W2)
Gamma1 = Tensor(shape=[IntImm(256)], name='gamma1', is_input=True)
Beta1 = Tensor(shape=[IntImm(256)], name='beta1', is_input=True)
layernorm_1 = ops.layernorm(normalized_shape=None)(gemm_rcr_2, Gamma1, Beta1, [IntImm(256)], eps)
elementwise_1 = ops.elementwise(func_enum=FuncEnum.SIGMOID)(layernorm_1)
elementwise_2 = ops.elementwise(func_enum=FuncEnum.MUL)(gemm_rcr_2, elementwise_1)
W3 = Tensor(shape=[IntImm(2048), IntImm(256)], name='w3', is_input=True)
gemm_rcr_3 = ops.gemm_rcr()(elementwise_2, W3)
Gamma2 = Tensor(shape=[IntImm(2048)], name='gamma2', is_input=True)
Beta2 = Tensor(shape=[IntImm(2048)], name='beta2', is_input=True)
layernorm_2 = ops.layernorm(normalized_shape=None)(gemm_rcr_3, Gamma2, Beta2, [IntImm(2048)], eps)
Input3 = Tensor(shape=[IntVar(values=batch_sizes, name='batch'), IntImm(value=1320)], dtype=dtype, name='input3', is_input=True)
Gamma3 = Tensor(shape=[IntImm(1320)], name='gamma3', is_input=True)
Beta3 = Tensor(shape=[IntImm(1320)], name='beta3', is_input=True)
layernorm_3 = ops.layernorm(normalized_shape=None)(Input3, Gamma3, Beta3, [IntImm(1320)], eps)
W4 = Tensor(shape=[IntImm(128), IntImm(1320)], name='w4', is_input=True)
gemm_rcr_4 = ops.gemm_rcr()(layernorm_3, W4)
Gamma4 = Tensor(shape=[IntImm(128)], name='gamma4', is_input=True)
Beta4 = Tensor(shape=[IntImm(128)], name='beta4', is_input=True)
layernorm_4 = ops.layernorm(normalized_shape=None)(gemm_rcr_4, Gamma4, Beta4, [IntImm(128)], eps)
elementwise_3 = ops.elementwise(func_enum=FuncEnum.SIGMOID)(layernorm_4)
elementwise_4 = ops.elementwise(func_enum=FuncEnum.MUL)(gemm_rcr_4, elementwise_3)
output_0 = ops.concatenate()([elementwise_4, layernorm_3, layernorm_0, layernorm_2], concat_dim)
output_0._attrs['name'] = 'output_0'
output_0._attrs['is_output'] = True
target = detect_target()
module = compile_model([output_0], target, './tmp', testname)
for batch in batch_sizes:
input0_pt = get_random_torch_tensor([batch, 1024], dtype)
reshape_0_pt = torch.reshape(input0_pt, reshape_to_shape_0)
w0_pt = get_random_torch_tensor([16, 32], dtype)
gemm_rcr_0_pt = torch.nn.functional.linear(reshape_0_pt, w0_pt)
reshape_1_pt = torch.reshape(gemm_rcr_0_pt, reshape_to_shape_1)
input1_pt = get_random_torch_tensor([batch, 512], dtype)
elementwise_0_pt = (reshape_1_pt * input1_pt)
w1_pt = get_random_torch_tensor([3821, 512], dtype)
gemm_rcr_1_pt = torch.nn.functional.linear(elementwise_0_pt, w1_pt)
concatenate_0_pt = torch.cat([input1_pt, gemm_rcr_1_pt], concat_dim)
gamma0_pt = get_random_torch_tensor([4333], dtype)
beta0_pt = get_random_torch_tensor([4333], dtype)
layernorm_0_pt = torch.nn.functional.layer_norm(concatenate_0_pt, concatenate_0_pt.size()[1:], gamma0_pt, beta0_pt, eps=eps)
input2_pt = get_random_torch_tensor([batch, 256], dtype)
w2_pt = get_random_torch_tensor([256, 256], dtype)
gemm_rcr_2_pt = torch.nn.functional.linear(input2_pt, w2_pt)
gamma1_pt = get_random_torch_tensor([256], dtype)
beta1_pt = get_random_torch_tensor([256], dtype)
layernorm_1_pt = torch.nn.functional.layer_norm(gemm_rcr_2_pt, gemm_rcr_2_pt.size()[1:], gamma1_pt, beta1_pt, eps=eps)
elementwise_1_pt = torch.sigmoid(layernorm_1_pt)
elementwise_2_pt = torch.mul(gemm_rcr_2_pt, elementwise_1_pt)
w3_pt = get_random_torch_tensor([2048, 256], dtype)
gemm_rcr_3_pt = torch.nn.functional.linear(elementwise_2_pt, w3_pt)
gamma2_pt = get_random_torch_tensor([2048], dtype)
beta2_pt = get_random_torch_tensor([2048], dtype)
layernorm_2_pt = torch.nn.functional.layer_norm(gemm_rcr_3_pt, gemm_rcr_3_pt.size()[1:], gamma2_pt, beta2_pt, eps=eps)
input3_pt = get_random_torch_tensor([batch, 1320], dtype)
gamma3_pt = get_random_torch_tensor([1320], dtype)
beta3_pt = get_random_torch_tensor([1320], dtype)
layernorm_3_pt = torch.nn.functional.layer_norm(input3_pt, input3_pt.size()[1:], gamma3_pt, beta3_pt, eps=eps)
w4_pt = get_random_torch_tensor([128, 1320], dtype)
gemm_rcr_4_pt = torch.nn.functional.linear(layernorm_3_pt, w4_pt)
gamma4_pt = get_random_torch_tensor([128], dtype)
beta4_pt = get_random_torch_tensor([128], dtype)
layernorm_4_pt = torch.nn.functional.layer_norm(gemm_rcr_4_pt, gemm_rcr_4_pt.size()[1:], gamma4_pt, beta4_pt, eps=eps)
elementwise_3_pt = torch.sigmoid(layernorm_4_pt)
elementwise_4_pt = torch.mul(gemm_rcr_4_pt, elementwise_3_pt)
output_0_pt = torch.cat([elementwise_4_pt, layernorm_3_pt, layernorm_0_pt, layernorm_2_pt], concat_dim)
inputs = {'input0': input0_pt, 'input1': input1_pt, 'input2': input2_pt, 'input3': input3_pt, 'w0': w0_pt, 'w1': w1_pt, 'w2': w2_pt, 'w3': w3_pt, 'w4': w4_pt, 'gamma0': gamma0_pt, 'beta0': beta0_pt, 'gamma1': gamma1_pt, 'beta1': beta1_pt, 'gamma2': gamma2_pt, 'beta2': beta2_pt, 'gamma3': gamma3_pt, 'beta3': beta3_pt, 'gamma4': gamma4_pt, 'beta4': beta4_pt}
y = torch.empty_like(output_0_pt)
module.run_with_tensors(inputs, [y])
self.assertTrue(torch.allclose(output_0_pt, y, atol=0.03, rtol=0.03))
def _test_group_gemm_fusion(self, m, nk_groups, has_bias=True, has_relu=False, has_sigmoid=False, should_fail=False, dtype='float16'):
_LOGGER.info(f'Running _test_group_gemm_fusion, m = {m}, nk_groups = {nk_groups}, has_bias = {has_bias}, has_relu = {has_relu}, has_sigmoid = {has_sigmoid}, should_fail = {should_fail}')
if (len(nk_groups) == 1):
should_fail = True
op_type = None
if has_bias:
if has_relu:
op = ops.gemm_rcr_bias_relu
op_type = 'group_gemm_rcr_bias_relu'
elif has_sigmoid:
op = ops.gemm_rcr_bias_sigmoid
op_type = 'group_gemm_rcr_bias_sigmoid'
else:
op = ops.gemm_rcr_bias
op_type = 'group_gemm_rcr_bias'
else:
op = ops.gemm_rcr
op_type = 'group_gemm_rcr'
group_input_tensors = _prepare_input_tensors(m, nk_groups, dtype, has_bias=has_bias)
graph = []
for (i, group) in enumerate(group_input_tensors):
Y = op()(*group)
graph.append(Y)
Y._attrs['name'] = 'y_{}'.format(i)
Y._attrs['is_output'] = True
target = detect_target()
with target:
graph = compiler.transform.toposort(graph)
compiler.transform.name_graph(graph)
compiler.transform.mark_param_tensor(graph)
graph = compiler.transform.fuse_ops(graph)
graph = compiler.transform.fuse_group_gemm_ops(graph)
sorted_ops = graph_utils.get_sorted_ops(graph)
if (not should_fail):
assert has_op(sorted_ops, op_type)
else:
assert (not has_op(sorted_ops, op_type))
def test_group_gemm_fusion_float16(self):
self._test_group_gemm_fusion(1024, [[16, 64], [32, 32]])
self._test_group_gemm_fusion(1024, [[16, 64], [32, 40]], has_bias=False)
self._test_group_gemm_fusion(1024, [[16, 64], [32, 40], [75, 128]], has_relu=True)
self._test_group_gemm_fusion(1024, [[16, 64], [32, 40], [75, 128]], has_sigmoid=True)
self._test_group_gemm_fusion(1024, [[16, 44], [32, 32]], should_fail=True)
self._test_group_gemm_fusion(1024, [[16, 13], [32, 1]], should_fail=True)
def test_group_gemm_fusion_float32_sm80(self):
self._test_group_gemm_fusion(32, [[16, 64], [32, 32]], dtype='float32')
self._test_group_gemm_fusion(32, [[16, 64], [32, 40]], has_bias=False, dtype='float32')
self._test_group_gemm_fusion(32, [[16, 64], [32, 40], [75, 128]], has_relu=True, dtype='float32')
self._test_group_gemm_fusion(32, [[16, 13], [32, 1]], should_fail=True, dtype='float32')
def _test_split_group_gemm_fusion(self, m, nk_groups_1, nk_groups_2, split_dim=1, should_fail=False, num_group_ops=2, dtype='float16'):
_LOGGER.info(f'Running _test_split_group_gemm_fusion, m = {m}, nk_groups_1 = {nk_groups_1}, nk_groups_2 = {nk_groups_2}, split_dim = {split_dim}, should_fail: {should_fail}, num_group_ops = {num_group_ops}')
op_type = 'group_gemm_rcr_bias'
inputs1 = _prepare_input_tensors(m, nk_groups_1, dtype, has_bias=True, only_params=True)
inputs2 = _prepare_input_tensors(m, nk_groups_2, dtype, start=len(nk_groups_1), has_bias=True, only_params=False)
if (split_dim == 1):
split_sizes = [k for (n, k) in nk_groups_1]
K = sum(split_sizes)
X = Tensor(shape=[m, K], dtype=dtype, name='input', is_input=True)
else:
split_sizes = m
X = Tensor(shape=[(m * len(nk_groups_1)), nk_groups_1[0][1]], dtype=dtype, name='input', is_input=True)
Y1s = ops.split()(X, split_sizes, split_dim)
graph = []
for (i, inputs) in enumerate(inputs1):
inputs = ([Y1s[i]] + inputs)
Y = ops.gemm_rcr_bias()(*inputs)
graph.append(Y)
Y._attrs['name'] = 'y_{}'.format(i)
Y._attrs['is_output'] = True
for (i, inputs) in enumerate(inputs2):
Y = ops.gemm_rcr_bias()(*inputs)
graph.append(Y)
Y._attrs['name'] = 'y_{}'.format((len(nk_groups_1) + i))
Y._attrs['is_output'] = True
target = detect_target()
with target:
graph = compiler.transform.toposort(graph)
compiler.transform.name_graph(graph)
compiler.transform.mark_param_tensor(graph)
graph = compiler.transform.fuse_ops(graph)
graph = compiler.transform.fuse_group_gemm_ops(graph)
graph = compiler.transform.transform_strided_ops(graph)
sorted_ops = graph_utils.get_sorted_ops(graph)
if should_fail:
assert has_op(sorted_ops, 'split')
assert (count_ops(sorted_ops, op_type) == num_group_ops)
else:
assert (not has_op(sorted_ops, 'split'))
assert (count_ops(sorted_ops, op_type) == num_group_ops)
def test_split_group_gemm_fusion_float16(self):
self._test_split_group_gemm_fusion(1024, [[16, 64], [16, 40], [16, 128]], [[1, 16], [3, 48]], num_group_ops=2)
self._test_split_group_gemm_fusion(48, [[16, 64], [16, 64], [16, 64]], [[1, 16], [3, 48]], split_dim=0, should_fail=True, num_group_ops=1)
self._test_split_group_gemm_fusion(48, [[16, 63], [16, 64], [16, 64]], [[1, 16], [3, 48]], should_fail=True, num_group_ops=1)
def test_split_group_gemm_fusion_float32_sm80(self):
self._test_split_group_gemm_fusion(32, [[16, 64], [16, 40], [16, 128]], [[1, 16], [3, 48]], num_group_ops=2, dtype='float32')
self._test_split_group_gemm_fusion(48, [[16, 64], [16, 64], [16, 64]], [[1, 16], [3, 48]], split_dim=0, should_fail=True, num_group_ops=1, dtype='float32') |
class TraitsUIWidgetAction(Action):
model = Instance(HasTraits)
style = Constant('widget')
def create_control(self, parent):
ui = self.edit_traits(kind='subpanel', parent=parent)
control = ui.control
control._ui = ui
return control
def trait_context(self):
if (self.model is not None):
context = {'object': self.model, 'action': self}
return context
return super().trait_context() |
def extractFondlefishWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
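# Wraps one dockerized unpacking worker: starts/stops its container, recovers when the mapped port is already taken, and talks to it over HTTP with retries.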
class ExtractionContainer():
def __init__(self, id_: int, tmp_dir: TemporaryDirectory, value: multiprocessing.managers.ValueProxy):
self.id_ = id_
self.tmp_dir = tmp_dir
self.port = (config.backend.unpacking.base_port + id_)
self.container_id = None
self.exception = value
self._adapter = HTTPAdapter(max_retries=Retry(total=3, backoff_factor=0.1))
def start(self):
if (self.container_id is not None):
raise RuntimeError('Already running.')
try:
self._start_container()
except APIError as exception:
if ('port is already allocated' in str(exception)):
self._recover_from_port_in_use(exception)
def _start_container(self):
volume = Mount('/tmp/extractor', self.tmp_dir.name, read_only=False, type='bind')
container = DOCKER_CLIENT.containers.run(image=EXTRACTOR_DOCKER_IMAGE, ports={'5000/tcp': self.port}, mem_limit=f'{config.backend.unpacking.memory_limit}m', mounts=[volume], volumes={'/dev': {'bind': '/dev', 'mode': 'rw'}}, privileged=True, detach=True, remove=True, environment={'CHMOD_OWNER': f'{getuid()}:{getgid()}'}, entrypoint='gunicorn --timeout 600 -w 1 -b 0.0.0.0:5000 server:app')
self.container_id = container.id
logging.info(f'Started unpack worker {self.id_}')
def stop(self):
if (self.container_id is None):
raise RuntimeError('Container is not running.')
logging.info(f'Stopping unpack worker {self.id_}')
self._remove_container()
def set_exception(self):
return self.exception.set(1)
def exception_occurred(self) -> bool:
return (self.exception.get() == 1)
def _remove_container(self, container: (Container | None)=None):
if (not container):
container = self._get_container()
container.stop(timeout=5)
with suppress(DockerException):
container.kill()
with suppress(DockerException):
container.remove()
def _get_container(self) -> Container:
return DOCKER_CLIENT.containers.get(self.container_id)
def restart(self):
self.stop()
self.exception.set(0)
self.container_id = None
self.start()
def _recover_from_port_in_use(self, exception: Exception):
logging.warning('Extractor port already in use -> trying to remove old container...')
for running_container in DOCKER_CLIENT.containers.list():
if (self._is_extractor_container(running_container) and self._has_same_port(running_container)):
self._remove_container(running_container)
self._start_container()
return
logging.error('Could not free extractor port')
raise RuntimeError('Could not create extractor container') from exception
@staticmethod
def _is_extractor_container(container: Container) -> bool:
return any(((tag == EXTRACTOR_DOCKER_IMAGE) for tag in container.image.attrs['RepoTags']))
def _has_same_port(self, container: Container) -> bool:
return any(((entry['HostPort'] == str(self.port)) for entry in container.ports.get('5000/tcp', [])))
def get_logs(self) -> str:
container = self._get_container()
return container.logs().decode(errors='replace')
def start_unpacking(self, tmp_dir: str, timeout: (int | None)=None) -> Response:
response = self._check_connection()
if (response.status_code != HTTPStatus.OK):
return response
url = f'http://localhost:{self.port}/start/{tmp_dir}'  # assumed endpoint; the URL literal was stripped from the source
return requests.get(url, timeout=timeout)
def _check_connection(self) -> Response:
url = f'http://localhost:{self.port}/'  # assumed base URL; the literal was stripped from the source
with requests.Session() as session:
session.mount('http://', self._adapter)
return session.get(url, timeout=5) |
class TestCu2QuPointPen(unittest.TestCase, _TestPenMixin):
def __init__(self, *args, **kwargs):
super(TestCu2QuPointPen, self).__init__(*args, **kwargs)
self.Glyph = DummyPointGlyph
self.Pen = DummyPointPen
self.Cu2QuPen = Cu2QuPointPen
self.pen_getter_name = 'getPointPen'
self.draw_method_name = 'drawPoints'
def test_super_bezier_curve(self):
pen = DummyPointPen()
quadpen = Cu2QuPointPen(pen, MAX_ERR)
quadpen.beginPath()
quadpen.addPoint((0, 0), segmentType='move')
quadpen.addPoint((1, 1))
quadpen.addPoint((2, 2))
quadpen.addPoint((3, 3))
quadpen.addPoint((4, 4), segmentType='curve', smooth=False, name='up', selected=1)
quadpen.endPath()
self.assertEqual(str(pen).splitlines(), (
"pen.beginPath()\n"
"pen.addPoint((0, 0), name=None, segmentType='move', smooth=False)\n"
"pen.addPoint((0.75, 0.75), name=None, segmentType=None, smooth=False)\n"
"pen.addPoint((1.625, 1.625), name=None, segmentType=None, smooth=False)\n"
"pen.addPoint((2, 2), name=None, segmentType='qcurve', smooth=True)\n"
"pen.addPoint((2.375, 2.375), name=None, segmentType=None, smooth=False)\n"
"pen.addPoint((3.25, 3.25), name=None, segmentType=None, smooth=False)\n"
"pen.addPoint((4, 4), name='up', segmentType='qcurve', selected=1, smooth=False)\n"
"pen.endPath()").splitlines())
def test__flushContour_restore_starting_point(self):
pen = DummyPointPen()
quadpen = Cu2QuPointPen(pen, MAX_ERR)
new_segments = []
def _drawPoints(segments):
new_segments.extend(segments)
Cu2QuPointPen._drawPoints(quadpen, segments)
quadpen._drawPoints = _drawPoints
quadpen._flushContour([('curve', [((2, 2), False, None, {}), ((1, 1), False, None, {}), ((0, 0), False, None, {})]), ('curve', [((1, 1), False, None, {}), ((2, 2), False, None, {}), ((3, 3), False, None, {})])])
self.assertEqual(new_segments[0][1][(- 1)][0], (3, 3))
self.assertEqual(new_segments[(- 1)][1][(- 1)][0], (0, 0))
new_segments = []
quadpen._flushContour([('move', [((0, 0), False, None, {})]), ('curve', [((1, 1), False, None, {}), ((2, 2), False, None, {}), ((3, 3), False, None, {})])])
self.assertEqual(new_segments[0][1][(- 1)][0], (0, 0))
self.assertEqual(new_segments[(- 1)][1][(- 1)][0], (3, 3))
def test_quad_no_oncurve(self):
self.maxDiff = None
pen = DummyPointPen()
quadpen = Cu2QuPointPen(pen, MAX_ERR)
quadpen.beginPath()
quadpen.addPoint((1, 1))
quadpen.addPoint((2, 2))
quadpen.addPoint((3, 3))
quadpen.endPath()
self.assertEqual(str(pen), dedent(' pen.beginPath()\n pen.addPoint((1, 1), name=None, segmentType=None, smooth=False)\n pen.addPoint((2, 2), name=None, segmentType=None, smooth=False)\n pen.addPoint((3, 3), name=None, segmentType=None, smooth=False)\n pen.endPath()')) |
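# --- fortios antivirus dispatcher ---
# Generated Ansible-module glue: routes the task body to the
# antivirus_notification endpoint and returns the conventional
# (is_error, has_changed, result, diff) tuple, treating a changed revision
# (or a response without a 'revision_changed' key) as a change.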
def fortios_antivirus(data, fos, check_mode):
fos.do_member_operation('antivirus', 'notification')
if data['antivirus_notification']:
resp = antivirus_notification(data, fos, check_mode)
else:
fos._module.fail_json(msg=('missing task body: %s' % 'antivirus_notification'))
if check_mode:
return resp
return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {}) |
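# --- Sym ---
# A symbol pairs a human-readable name with a monotonically increasing unique
# id, so two Syms with the same name remain distinct identifiers. Note that
# __eq__ compares (name, id) pairs while __hash__ uses object identity, and
# copy() allocates a fresh id rather than cloning the old one:
#
#   x = Sym('x')
#   y = x.copy()
#   assert str(x) == str(y) and x != y   # same name, different identity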
class Sym():
_unq_count = 1
def __init__(self, nm):
if (not is_valid_name(nm)):
raise TypeError(f"expected an alphanumeric name string, but got '{nm}'")
self._nm = nm
self._id = Sym._unq_count
Sym._unq_count += 1
def __str__(self):
return self._nm
def __repr__(self):
return f'{self._nm}_{self._id}'
def __hash__(self):
return id(self)
def __lt__(self, rhs):
assert isinstance(rhs, Sym)
return ((self._nm, self._id) < (rhs._nm, rhs._id))
def __eq__(self, rhs):
if (not isinstance(rhs, Sym)):
return False
return ((self._nm == rhs._nm) and (self._id == rhs._id))
def __ne__(self, rhs):
return (not (self == rhs))
def name(self):
return self._nm
def copy(self):
return Sym(self._nm) |
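# --- Lookahead ---
# A PEG lookahead: it matches zero-width (the returned Node spans pos..pos)
# when the member's success disagrees with `negativity`, i.e. `&expr`
# (negative=False) succeeds iff expr matches here and `!expr` (negative=True)
# succeeds iff expr does not, consuming no input in either case.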
class Lookahead(Compound):
__slots__ = ['negativity']
def __init__(self, member, *, negative=False, **kwargs):
super().__init__(member, **kwargs)
self.negativity = bool(negative)
def _uncached_match(self, text, pos, cache, error):
node = self.members[0].match_core(text, pos, cache, error)
if ((node is None) == self.negativity):
return Node(self, text, pos, pos)
def _as_rhs(self):
return ('%s%s' % (('!' if self.negativity else '&'), self._unicode_members()[0]))
def _eq_check_cycles(self, other, checked):
return (super()._eq_check_cycles(other, checked) and (self.negativity == other.negativity)) |
def test_cluster_fields():
yaml = '\n---\napiVersion: getambassador.io/v3alpha1\nkind: AuthService\nmetadata:\n name: mycoolauthservice\n namespace: default\nspec:\n auth_service: someservice\n protocol_version: "v3"\n proto: grpc\n stats_name: authservice\n'
econf = _get_envoy_config(yaml)
conf = econf.as_dict()
ext_auth_config = _get_ext_auth_config(conf)
cluster_name = 'cluster_extauth_someservice_default'
assert ext_auth_config
assert (ext_auth_config['typed_config']['grpc_service']['envoy_grpc']['cluster_name'] == cluster_name)
def check_fields(cluster):
assert (cluster['alt_stat_name'] == 'authservice')
econf_foreach_cluster(econf.as_dict(), check_fields, name=cluster_name) |
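# --- downside_risk ---
# Downside risk (semi-deviation) penalizes only returns below the target
# rate: sqrt(mean(min(0, r_t - r_f)**2)) over the weighted daily portfolio
# returns r_t, so upside volatility does not inflate the risk figure.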
def downside_risk(data: pd.DataFrame, weights: ARRAY_OR_SERIES[FLOAT], risk_free_rate: FLOAT=0.005) -> FLOAT:
type_validation(data=data, weights=weights, risk_free_rate=risk_free_rate)
wtd_daily_mean = weighted_mean_daily_returns(data, weights)
return float(np.sqrt(np.mean((np.minimum(0, (wtd_daily_mean - risk_free_rate)) ** 2)))) |
@common.requires_os(*metadata.platforms)  # decorator reconstructed from the truncated '_os(*metadata.platforms)'; 'common.requires_os' is an assumption
def main():
mount_dir = '/tmp/.exploit'
disk_file = 'disk.dmg'
common.execute(['hdiutil', 'create', '-size', '50b', '-volname', '.exploit', '-ov', disk_file], kill=True)
common.log('Launching hdiutil commands to mount dummy dmg')
common.execute(['hdiutil', 'attach', '-mountpoint', mount_dir, disk_file], kill=True)
common.execute(['hdiutil', 'eject', '/tmp/.exploit'], timeout=10, kill=True)
common.remove_file(disk_file) |
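# --- esptool merge_bin tests ---
# read_image() and byte() are helpers assumed to come from the surrounding
# test module. test_update_bootloader_params relies on the ESP image header
# layout: byte 2 holds the flash mode (0=qio, 1=qout, 2=dio, 3=dout) and the
# high nibble of byte 3 holds the flash size code (for ESP32, 0x1 == 2 MB),
# which is why '--flash_mode dout --flash_size 2MB' is verified as
# byte(hdr, 2) == 3 and (byte(hdr, 3) & 0xF0) == 0x10.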
@pytest.mark.host_test  # marker reconstructed from the truncated '.host_test'; the 'pytest.mark' prefix is an assumption
class TestMergeBin():
def run_merge_bin(self, chip, offsets_names, options=[]):
output_file = tempfile.NamedTemporaryFile(delete=False)
try:
output_file.close()
cmd = ([sys.executable, '-m', 'esptool', '--chip', chip, 'merge_bin', '-o', output_file.name] + options)
for (offset, name) in offsets_names:
cmd += [hex(offset), name]
print('\nExecuting {}'.format(' '.join(cmd)))
output = subprocess.check_output(cmd, cwd=IMAGES_DIR, stderr=subprocess.STDOUT)
output = output.decode('utf-8')
print(output)
assert ('warning' not in output.lower()), 'merge_bin should not output warnings'
with open(output_file.name, 'rb') as f:
return f.read()
except subprocess.CalledProcessError as e:
print(e.output)
raise
finally:
os.unlink(output_file.name)
def assertAllFF(self, some_bytes):
assert ((b'\xff' * len(some_bytes)) == some_bytes)
def test_simple_merge(self):
merged = self.run_merge_bin('esp8266', [(0, 'one_kb.bin'), (4096, 'one_kb.bin'), (65536, 'one_kb.bin')])
one_kb = read_image('one_kb.bin')
assert (len(one_kb) == 1024)
assert (len(merged) == 66560)
assert (merged[:1024] == one_kb)
assert (merged[4096:5120] == one_kb)
assert (merged[65536:] == one_kb)
self.assertAllFF(merged[1024:4096])
self.assertAllFF(merged[5120:65536])
def test_args_out_of_order(self):
args = [(0, 'one_kb.bin'), (4096, 'one_kb.bin'), (65536, 'one_kb.bin')]
merged_orders = [self.run_merge_bin('esp8266', perm_args) for perm_args in itertools.permutations(args)]
for m in merged_orders:
assert (m == merged_orders[0])
def test_error_overlap(self, capsys):
args = [(4096, 'one_mb.bin'), (131072, 'one_kb.bin')]
for perm_args in itertools.permutations(args):
with pytest.raises(subprocess.CalledProcessError):
self.run_merge_bin('esp32', perm_args)
output = capsys.readouterr().out
assert ('overlap' in output)
def test_leading_padding(self):
merged = self.run_merge_bin('esp32c3', [(1048576, 'one_mb.bin')])
self.assertAllFF(merged[:1048576])
assert (read_image('one_mb.bin') == merged[1048576:])
def test_update_bootloader_params(self):
merged = self.run_merge_bin('esp32', [(4096, 'bootloader_esp32.bin'), (65536, 'ram_helloworld/helloworld-esp32.bin')], ['--flash_size', '2MB', '--flash_mode', 'dout'])
self.assertAllFF(merged[:4096])
bootloader = read_image('bootloader_esp32.bin')
helloworld = read_image('ram_helloworld/helloworld-esp32.bin')
assert (merged[4112:(4096 + len(bootloader))] == bootloader[16:])
merged_hdr = merged[4096:4112]
bootloader_hdr = bootloader[:16]
assert (bootloader_hdr[:2] == merged_hdr[:2])
assert (byte(merged_hdr, 2) == 3)
assert ((byte(merged_hdr, 3) & 240) == 16)
assert ((byte(bootloader_hdr, 3) & 15) == (byte(merged_hdr, 3) & 15))
assert (bootloader_hdr[4:] == merged_hdr[4:])
self.assertAllFF(merged[(4096 + len(bootloader)):65536])
assert (merged[65536:(65536 + len(helloworld))] == helloworld)
def test_target_offset(self):
merged = self.run_merge_bin('esp32', [(4096, 'bootloader_esp32.bin'), (65536, 'ram_helloworld/helloworld-esp32.bin')], ['--target-offset', '0x1000'])
bootloader = read_image('bootloader_esp32.bin')
helloworld = read_image('ram_helloworld/helloworld-esp32.bin')
assert (bootloader == merged[:len(bootloader)])
assert (helloworld == merged[61440:(61440 + len(helloworld))])
self.assertAllFF(merged[(4096 + len(bootloader)):61440])
def test_fill_flash_size(self):
merged = self.run_merge_bin('esp32c3', [(0, 'bootloader_esp32c3.bin')], ['--fill-flash-size', '4MB'])
bootloader = read_image('bootloader_esp32c3.bin')
assert (len(merged) == 4194304)
assert (bootloader == merged[:len(bootloader)])
self.assertAllFF(merged[len(bootloader):])
def test_fill_flash_size_w_target_offset(self):
merged = self.run_merge_bin('esp32', [(4096, 'bootloader_esp32.bin'), (65536, 'ram_helloworld/helloworld-esp32.bin')], ['--target-offset', '0x1000', '--fill-flash-size', '2MB'])
assert (len(merged) == (2097152 - 4096))
bootloader = read_image('bootloader_esp32.bin')
helloworld = read_image('ram_helloworld/helloworld-esp32.bin')
assert (bootloader == merged[:len(bootloader)])
assert (helloworld == merged[61440:(61440 + len(helloworld))])
self.assertAllFF(merged[(61440 + len(helloworld)):])
def test_merge_mixed(self):
hex = self.run_merge_bin('esp32', [(4096, 'bootloader_esp32.bin')], options=['--format', 'hex'])
with tempfile.NamedTemporaryFile(suffix='.hex', delete=False) as f:
f.write(hex)
try:
merged = self.run_merge_bin('esp32', [(4096, f.name), (65536, 'ram_helloworld/helloworld-esp32.bin')], ['--target-offset', '0x1000', '--fill-flash-size', '2MB'])
finally:
os.unlink(f.name)
assert (len(merged) == (2097152 - 4096))
bootloader = read_image('bootloader_esp32.bin')
helloworld = read_image('ram_helloworld/helloworld-esp32.bin')
assert (bootloader == merged[:len(bootloader)])
assert (helloworld == merged[61440:(61440 + len(helloworld))])
self.assertAllFF(merged[(61440 + len(helloworld)):])
def test_merge_bin2hex(self):
merged = self.run_merge_bin('esp32', [(4096, 'bootloader_esp32.bin')], options=['--format', 'hex'])
lines = merged.splitlines()
assert (lines[0][3:7] == b'1000')
line = lines[random.randrange(0, len(lines))]
assert (line[0] == ord(':'))
data_len = int((b'0x' + line[1:3]), 16)
assert (len(line) == (((((1 + 2) + 4) + 2) + (data_len * 2)) + 2))
assert (lines[(- 1)] == b':00000001FF')  # Intel HEX EOF record (reconstructed; the truncated b':FF' cannot terminate a valid HEX file)
with tempfile.NamedTemporaryFile(suffix='.hex', delete=False) as hex:
hex.write(merged)
merged_bin = self.run_merge_bin('esp32', [(4096, hex.name)], options=['--format', 'raw'])
source = read_image('bootloader_esp32.bin')
assert ((b'\xff' * 4096) == merged_bin[:4096])
assert (source == merged_bin[4096:]) |
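# --- links_to_dicts ---
# De-duplicates crawl candidates through the module-level SEEN_CACHE and
# turns each unseen absolute HTTP(S) URL into a row dict carrying its crawl
# metadata (start URL, network location, link distance, priority, fetch
# state, add time, and epoch).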
def links_to_dicts(links_in, starturl, distance, priority):
ret = []
for link in links_in:
if (link in SEEN_CACHE):
continue
SEEN_CACHE[link] = True
netloc = urllib.parse.urlsplit(link).netloc
assert link.startswith('http'), ("Link %s doesn't seem to be HTTP content?" % link)
assert netloc
data = {'url': link, 'starturl': starturl, 'netloc': netloc, 'distance': distance, 'priority': priority, 'state': 'new', 'addtime': datetime.datetime.now(), 'epoch': raw_misc.get_epoch_for_url(link, netloc)}
ret.append(data)
return ret |
def test_correct_matrix():
outfile = NamedTemporaryFile(suffix='.h5', delete=False)
outfile.close()
args = '--matrix {} --numBins 5 --outFileName {}'.format((ROOT + 'small_test_matrix.h5'), outfile.name).split()
compute(hicMergeMatrixBins.main, args, 5)
test = hm.hiCMatrix((ROOT + 'hicMergeMatrixBins/result.h5'))
new = hm.hiCMatrix(outfile.name)
nt.assert_equal(test.matrix.data, new.matrix.data)
nt.assert_equal(test.cut_intervals, new.cut_intervals)
os.unlink(outfile.name) |
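# --- sonification point-grouping options ---
# Property/setter pairs over a shared config store: each getter reads the
# configured value back with its default ('minmax' algorithm, enabled=True,
# groupTimespan=15, prop='y'), and each setter writes the new value through
# _config. Sketch of assumed usage (constructor arguments elided):
#
#   opts = OptionSeriesCylinderSonificationContexttracksPointgrouping(...)
#   opts.groupTimespan = 30   # overrides the default of 15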
class OptionSeriesCylinderSonificationContexttracksPointgrouping(Options):
# NOTE: the @property/@<name>.setter decorators are reconstructed; without
# them the duplicated method names would silently shadow each other.
@property
def algorithm(self):
return self._config_get('minmax')
@algorithm.setter
def algorithm(self, text: str):
self._config(text, js_type=False)
@property
def enabled(self):
return self._config_get(True)
@enabled.setter
def enabled(self, flag: bool):
self._config(flag, js_type=False)
@property
def groupTimespan(self):
return self._config_get(15)
@groupTimespan.setter
def groupTimespan(self, num: float):
self._config(num, js_type=False)
@property
def prop(self):
return self._config_get('y')
@prop.setter
def prop(self, text: str):
self._config(text, js_type=False) |
@pytest.mark.orca_irc  # decorator reconstructed from the truncated '.orca_irc'; the 'pytest.mark' prefix is an assumption
def tmp():
xyz_fn = '07_hfabstraction_hf422gsp_ts.xyz'
keywords = 'HF 4-22GSP TightSCF'
(geometry, this_dir) = prepare_geometry(keywords, xyz_fn)
irc = Euler(geometry, max_steps=1, step_length=0.025, forward=True, mass_weight=False)
irc.run()
irc.write_trj(this_dir, 'hf_422gsp_mw') |
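# --- FixtureRegistry ---
# Loads every file under tests/fixtures once at construction and exposes each
# one as an attribute via __getattr__: `registry.<name>` returns the raw text
# and `registry.<name>__split` the same text as a list of lines (names here
# are illustrative; the actual fixture names come from the files on disk).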
class FixtureRegistry(object):
__SPLIT_SUFFIX = '__split'
def __init__(self):
path = os.path.dirname(os.path.abspath(__file__))
items = {}
for fixture in glob(os.path.join(path, 'fixtures', '*')):
(name, _) = os.path.splitext(os.path.basename(fixture))
if (name in items):
raise RuntimeError('Duplicate fixture name for {} in {}'.format(fixture, path))
with open(fixture, 'r', encoding=FINAM_CHARSET) as f:
data = f.read()
items[name] = data
items[(name + self.__SPLIT_SUFFIX)] = data.split('\n')
self._fixtures = items
def __getattr__(self, key):
return self._fixtures[key] |
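# --- FaqListPost ---
# POST-only JSON:API list resource: before_post requires the payload to
# reference an event and the poster to be at least a co-organizer of it,
# reporting forbidden events as 404 (ObjectNotFound) rather than 403.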
class FaqListPost(ResourceList):
def before_post(self, args, kwargs, data):
require_relationship(['event'], data)
if (not has_access('is_coorganizer', event_id=data['event'])):
raise ObjectNotFound({'parameter': 'event_id'}, 'Event: {} not found'.format(data['event']))
schema = FaqSchema
methods = ['POST']
data_layer = {'session': db.session, 'model': Faq} |
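# --- _get_ragged ---
# Normalizes any of thinc's sequence containers to a single Ragged: Ragged
# passes through, Padded is unpadded back to per-sequence lists and
# flattened, raw ragged (data, lengths) tuples are wrapped directly, and
# plain lists of 2d arrays are flattened with their lengths recorded.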
def _get_ragged(model: Model[(SeqT, SeqT)], seq: SeqT) -> Ragged:
if isinstance(seq, Ragged):
return seq
elif isinstance(seq, Padded):
lists = model.ops.padded2list(seq)
lengths = model.ops.asarray1i([len(x) for x in lists])
flat = model.ops.flatten(lists)
return Ragged(flat, lengths)
elif _is_ragged_data(seq):
return Ragged(*seq)
else:
list2d_seq = cast(List2d, seq)
lengths = model.ops.asarray1i([len(x) for x in list2d_seq])
return Ragged(model.ops.flatten(list2d_seq), lengths) |
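# --- periodic Helmholtz convergence check ---
# Manufactured solution: with u = sin(4*pi*x/5) * sin(2*pi*y/3) on the 5 x 3
# rectangle, -div(grad(u)) + u = ((4*pi/5)**2 + (2*pi/3)**2 + 1) * u
# = (244*pi**2/225 + 1) * u, which is exactly the source term f assembled
# below; the test then requires the relative L2 error of the FEM solution to
# stay under 0.4%.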
def run_periodic_helmholtz(direction, quadrilateral, diagonal):
if quadrilateral:
if (diagonal == 'left'):
diagonal = None
else:
return
mesh = PeriodicRectangleMesh(100, 60, 5, 3, quadrilateral=quadrilateral, diagonal=diagonal, direction=direction)
x = SpatialCoordinate(mesh)
V = FunctionSpace(mesh, 'CG', 1)
u_exact = Function(V)
u_exact.interpolate((sin((((4.0 * pi) * x[0]) / 5.0)) * sin((((2.0 * pi) * x[1]) / 3.0))))
f = Function(V).assign((((((244.0 * pi) * pi) / 225.0) + 1.0) * u_exact))
if (direction in ('x', 'y')):
bcs = DirichletBC(V, Constant(0), (1, 2))
elif (direction == 'both'):
bcs = []
u = TrialFunction(V)
v = TestFunction(V)
a = ((inner(grad(u), grad(v)) * dx) + (inner(u, v) * dx))
L = (inner(f, v) * dx)
out = Function(V)
solve((a == L), out, solver_parameters={'ksp_type': 'cg'}, bcs=bcs)
l2err = sqrt(assemble((inner((out - u_exact), (out - u_exact)) * dx)))
l2norm = sqrt(assemble((inner(u_exact, u_exact) * dx)))
assert ((l2err / l2norm) < 0.004) |