code stringlengths 281 23.7M |
|---|
class OptionSeriesSolidgaugeSonificationContexttracksMappingHighpass(Options):
    """Highpass-filter mapping options for solidgauge sonification context tracks."""

    def frequency(self) -> 'OptionSeriesSolidgaugeSonificationContexttracksMappingHighpassFrequency':
        """Return the sub-configuration object for the highpass filter frequency."""
        sub_config = self._config_sub_data('frequency', OptionSeriesSolidgaugeSonificationContexttracksMappingHighpassFrequency)
        return sub_config

    def resonance(self) -> 'OptionSeriesSolidgaugeSonificationContexttracksMappingHighpassResonance':
        """Return the sub-configuration object for the highpass filter resonance."""
        sub_config = self._config_sub_data('resonance', OptionSeriesSolidgaugeSonificationContexttracksMappingHighpassResonance)
        return sub_config
class DBTTask(Task):
    """Task that runs dbt for a set of models and records each model's result."""

    # Unique ids of the dbt models this task is responsible for.
    model_ids: List[str]

    def execute(self, args: argparse.Namespace, fal_dbt: FalDbt) -> int:
        """Run dbt over this task's models, mark node statuses, and return dbt's return code."""
        from fal.dbt.cli.dbt_runner import dbt_run_through_python

        names = _unique_ids_to_model_names(self.model_ids)
        run_output = dbt_run_through_python(args, names, fal_dbt.target_path, self.run_index)
        for node, status, adapter_response in _map_cli_output_model_results(run_output.run_results):
            _mark_dbt_nodes_status_and_response(fal_dbt, status, node, adapter_response)
        return run_output.return_code
def _start():
    """Initialize the plottrigger module: read ini settings, start one trigger
    thread per configured gate channel, and build the Qt plot window and timer.
    """
    global patch, name, path, monitor
    global delay, window, value, winx, winy, winwidth, winheight, data, lock, trigger, number, i, this, thread, app, win, timer, plot
    # timing/display settings from the patch (ini) configuration
    delay = patch.getfloat('general', 'delay')
    window = patch.getfloat('general', 'window')
    value = patch.getint('general', 'value', default=0)
    winx = patch.getint('display', 'xpos')
    winy = patch.getint('display', 'ypos')
    winwidth = patch.getint('display', 'width')
    winheight = patch.getint('display', 'height')
    data = {}
    # lock presumably guards `data` shared with the trigger threads -- confirm in TriggerThread
    lock = threading.Lock()
    trigger = []
    number = []
    # start one TriggerThread per configured gate channel (channel1..channel16)
    for i in range(1, 17):
        name = 'channel{}'.format(i)
        if patch.config.has_option('gate', name):
            number.append(i)
            data[i] = []
            this = TriggerThread(patch.get('gate', name), i)
            trigger.append(this)
            monitor.info((name + ' trigger configured'))
    if (len(trigger) == 0):
        monitor.warning('no gates were specified in the ini file')
    for thread in trigger:
        thread.start()
    # Qt application and window setup
    app = QtGui.QApplication(sys.argv)
    app.setWindowIcon(QtGui.QIcon(os.path.join(path, '../../doc/figures/logo-128.ico')))
    app.aboutToQuit.connect(_stop)
    signal.signal(signal.SIGINT, _stop)
    sys.excepthook = _exception_hook
    win = pg.GraphicsWindow(title=patch.getstring('display', 'title', default='EEGsynth plottrigger'))
    win.setWindowTitle(patch.getstring('display', 'title', default='EEGsynth plottrigger'))
    win.setGeometry(winx, winy, winwidth, winheight)
    pg.setConfigOptions(antialias=True)
    plot = win.addPlot()
    plot.setLabel('left', text='Channel')
    plot.setLabel('bottom', text='Time (s)')
    # x axis shows the trailing `window` seconds; one y unit per trigger channel
    plot.setXRange((- window), 0)
    plot.setYRange(0.5, (len(trigger) + 0.5))
    # periodic redraw via _loop_once
    # NOTE(review): setInterval(10) is immediately overridden by start(delay*1000) -- confirm intent
    timer = QtCore.QTimer()
    timer.timeout.connect(_loop_once)
    timer.setInterval(10)
    timer.start(int(round((delay * 1000))))
def test_kconfig_construct_result():
    """construct_result should tag MIPS/ARM kernel configs and never emit 64-bit keys."""
    cases = [
        (_mock_kernel_config_analysis_mips, 'mips_v2'),
        (_mock_kernel_config_analysis_arm, 'armv7'),
    ]
    for mock_analysis, expected_token in cases:
        fo = FileObject()
        fo.processed_analysis.update(mock_analysis)
        result = kconfig.construct_result(fo)
        for key in result:
            assert expected_token in key
            assert '64-bit' not in key
class OefSearchMessage(Message):
    """A protocol message for OEF search/registration interactions.

    NOTE(review): the accessor methods below (valid_performatives,
    dialogue_reference, message_id, performative, target and the content
    accessors) read like @property accessors; the decorators appear to have
    been stripped in this copy -- confirm against the original source.
    """
    # protocol identifiers used for routing/versioning
    protocol_id = PublicId.from_str('fetchai/oef_search:1.1.7')
    protocol_specification_id = PublicId.from_str('fetchai/oef_search:1.0.0')
    # aliases for the protocol's custom content types
    AgentsInfo = CustomAgentsInfo
    Description = CustomDescription
    OefErrorOperation = CustomOefErrorOperation
    Query = CustomQuery

    class Performative(Message.Performative):
        """Performatives of the oef_search protocol."""
        OEF_ERROR = 'oef_error'
        REGISTER_SERVICE = 'register_service'
        SEARCH_RESULT = 'search_result'
        SEARCH_SERVICES = 'search_services'
        SUCCESS = 'success'
        UNREGISTER_SERVICE = 'unregister_service'

        def __str__(self) -> str:
            """Get the string representation of the performative."""
            return str(self.value)
    # all performative values accepted by this protocol
    _performatives = {'oef_error', 'register_service', 'search_result', 'search_services', 'success', 'unregister_service'}
    __slots__: Tuple[(str, ...)] = tuple()

    class _SlotsCls():
        # every content name this message type may carry
        __slots__ = ('agents', 'agents_info', 'dialogue_reference', 'message_id', 'oef_error_operation', 'performative', 'query', 'service_description', 'target')

    def __init__(self, performative: Performative, dialogue_reference: Tuple[(str, str)]=('', ''), message_id: int=1, target: int=0, **kwargs: Any):
        """Initialise an instance of OefSearchMessage with the given performative and contents."""
        super().__init__(dialogue_reference=dialogue_reference, message_id=message_id, target=target, performative=OefSearchMessage.Performative(performative), **kwargs)

    def valid_performatives(self) -> Set[str]:
        """Get the set of valid performative values."""
        return self._performatives

    def dialogue_reference(self) -> Tuple[(str, str)]:
        """Get the dialogue_reference of the message."""
        enforce(self.is_set('dialogue_reference'), 'dialogue_reference is not set.')
        return cast(Tuple[(str, str)], self.get('dialogue_reference'))

    def message_id(self) -> int:
        """Get the message_id of the message."""
        enforce(self.is_set('message_id'), 'message_id is not set.')
        return cast(int, self.get('message_id'))

    def performative(self) -> Performative:
        """Get the performative of the message."""
        enforce(self.is_set('performative'), 'performative is not set.')
        return cast(OefSearchMessage.Performative, self.get('performative'))

    def target(self) -> int:
        """Get the target of the message."""
        enforce(self.is_set('target'), 'target is not set.')
        return cast(int, self.get('target'))

    def agents(self) -> Tuple[(str, ...)]:
        """Get the 'agents' content of the message."""
        enforce(self.is_set('agents'), "'agents' content is not set.")
        return cast(Tuple[(str, ...)], self.get('agents'))

    def agents_info(self) -> CustomAgentsInfo:
        """Get the 'agents_info' content of the message."""
        enforce(self.is_set('agents_info'), "'agents_info' content is not set.")
        return cast(CustomAgentsInfo, self.get('agents_info'))

    def oef_error_operation(self) -> CustomOefErrorOperation:
        """Get the 'oef_error_operation' content of the message."""
        enforce(self.is_set('oef_error_operation'), "'oef_error_operation' content is not set.")
        return cast(CustomOefErrorOperation, self.get('oef_error_operation'))

    def query(self) -> CustomQuery:
        """Get the 'query' content of the message."""
        enforce(self.is_set('query'), "'query' content is not set.")
        return cast(CustomQuery, self.get('query'))

    def service_description(self) -> CustomDescription:
        """Get the 'service_description' content of the message."""
        enforce(self.is_set('service_description'), "'service_description' content is not set.")
        return cast(CustomDescription, self.get('service_description'))

    def _is_consistent(self) -> bool:
        """Check that the message follows the oef_search protocol; log and return False on violation."""
        try:
            # generic envelope checks, identical for every performative
            enforce(isinstance(self.dialogue_reference, tuple), "Invalid type for 'dialogue_reference'. Expected 'tuple'. Found '{}'.".format(type(self.dialogue_reference)))
            enforce(isinstance(self.dialogue_reference[0], str), "Invalid type for 'dialogue_reference[0]'. Expected 'str'. Found '{}'.".format(type(self.dialogue_reference[0])))
            enforce(isinstance(self.dialogue_reference[1], str), "Invalid type for 'dialogue_reference[1]'. Expected 'str'. Found '{}'.".format(type(self.dialogue_reference[1])))
            enforce((type(self.message_id) is int), "Invalid type for 'message_id'. Expected 'int'. Found '{}'.".format(type(self.message_id)))
            enforce((type(self.target) is int), "Invalid type for 'target'. Expected 'int'. Found '{}'.".format(type(self.target)))
            enforce(isinstance(self.performative, OefSearchMessage.Performative), "Invalid 'performative'. Expected either of '{}'. Found '{}'.".format(self.valid_performatives, self.performative))
            # performative-specific content checks
            actual_nb_of_contents = (len(self._body) - DEFAULT_BODY_SIZE)
            expected_nb_of_contents = 0
            if (self.performative == OefSearchMessage.Performative.REGISTER_SERVICE):
                expected_nb_of_contents = 1
                enforce(isinstance(self.service_description, CustomDescription), "Invalid type for content 'service_description'. Expected 'Description'. Found '{}'.".format(type(self.service_description)))
            elif (self.performative == OefSearchMessage.Performative.UNREGISTER_SERVICE):
                expected_nb_of_contents = 1
                enforce(isinstance(self.service_description, CustomDescription), "Invalid type for content 'service_description'. Expected 'Description'. Found '{}'.".format(type(self.service_description)))
            elif (self.performative == OefSearchMessage.Performative.SEARCH_SERVICES):
                expected_nb_of_contents = 1
                enforce(isinstance(self.query, CustomQuery), "Invalid type for content 'query'. Expected 'Query'. Found '{}'.".format(type(self.query)))
            elif (self.performative == OefSearchMessage.Performative.SEARCH_RESULT):
                expected_nb_of_contents = 2
                enforce(isinstance(self.agents, tuple), "Invalid type for content 'agents'. Expected 'tuple'. Found '{}'.".format(type(self.agents)))
                enforce(all((isinstance(element, str) for element in self.agents)), "Invalid type for tuple elements in content 'agents'. Expected 'str'.")
                enforce(isinstance(self.agents_info, CustomAgentsInfo), "Invalid type for content 'agents_info'. Expected 'AgentsInfo'. Found '{}'.".format(type(self.agents_info)))
            elif (self.performative == OefSearchMessage.Performative.SUCCESS):
                expected_nb_of_contents = 1
                enforce(isinstance(self.agents_info, CustomAgentsInfo), "Invalid type for content 'agents_info'. Expected 'AgentsInfo'. Found '{}'.".format(type(self.agents_info)))
            elif (self.performative == OefSearchMessage.Performative.OEF_ERROR):
                expected_nb_of_contents = 1
                enforce(isinstance(self.oef_error_operation, CustomOefErrorOperation), "Invalid type for content 'oef_error_operation'. Expected 'OefErrorOperation'. Found '{}'.".format(type(self.oef_error_operation)))
            enforce((expected_nb_of_contents == actual_nb_of_contents), 'Incorrect number of contents. Expected {}. Found {}'.format(expected_nb_of_contents, actual_nb_of_contents))
            # light protocol rule: the first message of a dialogue targets nothing
            if (self.message_id == 1):
                enforce((self.target == 0), "Invalid 'target'. Expected 0 (because 'message_id' is 1). Found {}.".format(self.target))
        except (AEAEnforceError, ValueError, KeyError) as e:
            _default_logger.error(str(e))
            return False
        return True
def update_c_files():
    """Scan ./logs recursively for config.json* files and refresh the dropdown choices.

    Returns a (count label, Dropdown update) tuple for the UI callback.
    """
    c_files = []
    for root, _dirs, files in os.walk(os.path.abspath('./logs')):
        c_files.extend(os.path.join(root, filename) for filename in files if filename.startswith('config.json'))
    cnt = len(c_files)
    print(c_files)
    return (f', {cnt}', gr.Dropdown.update(choices=c_files))
def add_MsgServicer_to_server(servicer, server):
    """Register the cosmos.evidence.v1beta1 Msg service on a gRPC server.

    Generated-style registration: wraps the servicer's SubmitEvidence method
    with the protobuf request deserializer / response serializer and installs
    it as a generic RPC handler.
    """
    rpc_method_handlers = {'SubmitEvidence': grpc.unary_unary_rpc_method_handler(servicer.SubmitEvidence, request_deserializer=cosmos_dot_evidence_dot_v1beta1_dot_tx__pb2.MsgSubmitEvidence.FromString, response_serializer=cosmos_dot_evidence_dot_v1beta1_dot_tx__pb2.MsgSubmitEvidenceResponse.SerializeToString)}
    generic_handler = grpc.method_handlers_generic_handler('cosmos.evidence.v1beta1.Msg', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
def edit_project(session, project, name, homepage, backend, version_scheme, version_pattern, version_url, version_prefix, pre_release_filter, version_filter, regex, insecure, releases_only, user_id, check_release=False, archived=False, dry_run=False):
    """Apply the given field edits to *project*, persist them, and publish a
    'project.edit' message describing what changed.

    Returns the ``changes`` dict mapping field name -> {'old': ..., 'new': ...}.
    Raises AnityaException when the database commit fails.

    NOTE(review): string fields are stripped before assignment.  For name,
    homepage and backend a change is recorded whenever the *raw* input differs,
    while the later string fields re-check equality after stripping -- confirm
    this asymmetry is intentional.
    """
    changes = {}
    if (name != project.name):
        old = project.name
        project.name = (name.strip() if name else None)
        changes['name'] = {'old': old, 'new': project.name}
    if (homepage != project.homepage):
        old = project.homepage
        project.homepage = (homepage.strip() if homepage else None)
        changes['homepage'] = {'old': old, 'new': project.homepage}
    if (backend != project.backend):
        old = project.backend
        project.backend = backend
        changes['backend'] = {'old': old, 'new': project.backend}
    if (version_scheme != project.version_scheme):
        old = project.version_scheme
        project.version_scheme = version_scheme
        changes['version_scheme'] = {'old': old, 'new': project.version_scheme}
    if (version_pattern != project.version_pattern):
        old = project.version_pattern
        project.version_pattern = (version_pattern.strip() if version_pattern else None)
        # only record a change when the stripped value actually differs
        if (old != project.version_pattern):
            changes['version_pattern'] = {'old': old, 'new': project.version_pattern}
    if (version_url != project.version_url):
        old = project.version_url
        project.version_url = (version_url.strip() if version_url else None)
        if (old != project.version_url):
            changes['version_url'] = {'old': old, 'new': project.version_url}
    if (version_prefix != project.version_prefix):
        old = project.version_prefix
        project.version_prefix = (version_prefix.strip() if version_prefix else None)
        if (old != project.version_prefix):
            changes['version_prefix'] = {'old': old, 'new': project.version_prefix}
    if (pre_release_filter != project.pre_release_filter):
        old = project.pre_release_filter
        project.pre_release_filter = (pre_release_filter.strip() if pre_release_filter else None)
        if (old != project.pre_release_filter):
            changes['pre_release_filter'] = {'old': old, 'new': project.pre_release_filter}
    if (version_filter != project.version_filter):
        old = project.version_filter
        project.version_filter = (version_filter.strip() if version_filter else None)
        if (old != project.version_filter):
            changes['version_filter'] = {'old': old, 'new': project.version_filter}
    if (regex != project.regex):
        old = project.regex
        project.regex = (regex.strip() if regex else None)
        if (old != project.regex):
            changes['regex'] = {'old': old, 'new': project.regex}
    if (insecure != project.insecure):
        old = project.insecure
        project.insecure = insecure
        changes['insecure'] = {'old': old, 'new': project.insecure}
    if (releases_only != project.releases_only):
        old = project.releases_only
        project.releases_only = releases_only
        changes['releases_only'] = {'old': old, 'new': project.releases_only}
    if (archived != project.archived):
        old = project.archived
        project.archived = archived
        changes['archived'] = {'old': old, 'new': project.archived}
    try:
        if (not dry_run):
            # announce before committing; commit persists the edit
            if changes:
                publish_message(project=project.__json__(), topic='project.edit', message=dict(agent=user_id, project=project.name, fields=list(changes.keys()), changes=changes))
            session.add(project)
            session.commit()
            if (check_release is True):
                check_project_release(project, session)
        else:
            # dry run: stage the change but do not commit
            session.add(project)
            session.flush()
    except exc.SQLAlchemyError as err:
        _log.exception(err)
        session.rollback()
        raise exceptions.AnityaException('Could not edit this project. Is there already a project with these name and homepage?')
    return changes
class SyntaxTreeNode():
    """A tree node wrapping a markdown-it Token stream.

    A node represents either a single token (``self.token``), an
    opening/closing pair of nester tokens (``self.nester_tokens``), or -- when
    both are None -- the synthetic root of the tree.

    NOTE(review): many methods below (children/parent pairs, siblings, type,
    attrs, map, level, content, ...) read like @property accessors and
    ``__getitem__`` like @overload stubs; the decorators appear to have been
    stripped in this copy -- confirm against the original source.
    """

    def __init__(self, tokens: Sequence[Token]=(), *, create_root: bool=True) -> None:
        """Initialize a root node from a token stream, or an internal node from a sub-stream."""
        # exactly one of `token` / `nester_tokens` is set for non-root nodes
        self.token: (Token | None) = None
        self.nester_tokens: (_NesterTokens | None) = None
        self._parent: Any = None
        self._children: list[Any] = []
        if create_root:
            self._set_children_from_tokens(tokens)
            return
        if (not tokens):
            raise ValueError('Can only create root from empty token sequence. Set `create_root=True`.')
        elif (len(tokens) == 1):
            # a single non-nesting token, possibly carrying inline children
            inline_token = tokens[0]
            if inline_token.nesting:
                raise ValueError('Unequal nesting level at the start and end of token stream.')
            self.token = inline_token
            if inline_token.children:
                self._set_children_from_tokens(inline_token.children)
        else:
            # first/last tokens form the opening/closing pair; the rest nest inside
            self.nester_tokens = _NesterTokens(tokens[0], tokens[(- 1)])
            self._set_children_from_tokens(tokens[1:(- 1)])

    def __repr__(self) -> str:
        return f'{type(self).__name__}({self.type})'

    # NOTE(review): the two bodiless __getitem__ definitions look like stripped
    # @overload stubs; only the last definition is effective at runtime.
    def __getitem__(self: _NodeType, item: int) -> _NodeType:
        ...

    def __getitem__(self: _NodeType, item: slice) -> list[_NodeType]:
        ...

    def __getitem__(self: _NodeType, item: (int | slice)) -> (_NodeType | list[_NodeType]):
        return self.children[item]

    def to_tokens(self: _NodeType) -> list[Token]:
        """Recover the linear token stream this subtree was built from."""
        def recursive_collect_tokens(node: _NodeType, token_list: list[Token]) -> None:
            if (node.type == 'root'):
                for child in node.children:
                    recursive_collect_tokens(child, token_list)
            elif node.token:
                token_list.append(node.token)
            else:
                assert node.nester_tokens
                token_list.append(node.nester_tokens.opening)
                for child in node.children:
                    recursive_collect_tokens(child, token_list)
                token_list.append(node.nester_tokens.closing)
        tokens: list[Token] = []
        recursive_collect_tokens(self, tokens)
        return tokens

    # children getter/setter pair (likely @property / @children.setter upstream)
    def children(self: _NodeType) -> list[_NodeType]:
        return self._children

    def children(self: _NodeType, value: list[_NodeType]) -> None:
        self._children = value

    # parent getter/setter pair (likely @property / @parent.setter upstream)
    def parent(self: _NodeType) -> (_NodeType | None):
        return self._parent

    def parent(self: _NodeType, value: (_NodeType | None)) -> None:
        self._parent = value

    def is_root(self) -> bool:
        """A root node carries neither a token nor nester tokens."""
        return (not (self.token or self.nester_tokens))

    def is_nested(self) -> bool:
        """True when this node wraps an opening/closing token pair."""
        return bool(self.nester_tokens)

    def siblings(self: _NodeType) -> Sequence[_NodeType]:
        """All children of this node's parent (including self); just self when parentless."""
        if (not self.parent):
            return [self]
        return self.parent.children

    def type(self) -> str:
        """Node type: 'root', the token's type, or the nester type without its '_open' suffix."""
        if self.is_root:
            return 'root'
        if self.token:
            return self.token.type
        assert self.nester_tokens
        return _removesuffix(self.nester_tokens.opening.type, '_open')

    def next_sibling(self: _NodeType) -> (_NodeType | None):
        """The sibling after this node, or None at the end."""
        self_index = self.siblings.index(self)
        if ((self_index + 1) < len(self.siblings)):
            return self.siblings[(self_index + 1)]
        return None

    def previous_sibling(self: _NodeType) -> (_NodeType | None):
        """The sibling before this node, or None at the start."""
        self_index = self.siblings.index(self)
        if ((self_index - 1) >= 0):
            return self.siblings[(self_index - 1)]
        return None

    def _add_child(self, tokens: Sequence[Token]) -> None:
        # build a non-root node of the same concrete class and attach it
        child = type(self)(tokens, create_root=False)
        child.parent = self
        self.children.append(child)

    def _set_children_from_tokens(self, tokens: Sequence[Token]) -> None:
        """Partition a flat token stream into children, one per top-level nesting group."""
        reversed_tokens = list(reversed(tokens))
        while reversed_tokens:
            token = reversed_tokens.pop()
            if (not token.nesting):
                self._add_child([token])
                continue
            if (token.nesting != 1):
                raise ValueError('Invalid token nesting')
            # consume tokens until the matching closing token balances nesting
            nested_tokens = [token]
            nesting = 1
            while (reversed_tokens and nesting):
                token = reversed_tokens.pop()
                nested_tokens.append(token)
                nesting += token.nesting
            if nesting:
                raise ValueError(f'unclosed tokens starting {nested_tokens[0]}')
            self._add_child(nested_tokens)

    def pretty(self, *, indent: int=2, show_text: bool=False, _current: int=0) -> str:
        """Render the subtree as an indented XML-like debug string."""
        prefix = (' ' * _current)
        text = (prefix + f'<{self.type}')
        if ((not self.is_root) and self.attrs):
            text += (' ' + ' '.join((f'{k}={v!r}' for (k, v) in self.attrs.items())))
        text += '>'
        if (show_text and (not self.is_root) and (self.type in ('text', 'text_special')) and self.content):
            text += ('\n' + textwrap.indent(self.content, (prefix + (' ' * indent))))
        for child in self.children:
            text += ('\n' + child.pretty(indent=indent, show_text=show_text, _current=(_current + indent)))
        return text

    def walk(self: _NodeType, *, include_self: bool=True) -> Generator[(_NodeType, None, None)]:
        """Depth-first pre-order traversal of the subtree."""
        if include_self:
            (yield self)
        for child in self.children:
            (yield from child.walk(include_self=True))

    def _attribute_token(self) -> Token:
        """The token whose fields back this node's attribute accessors; raises on root."""
        if self.token:
            return self.token
        if self.nester_tokens:
            return self.nester_tokens.opening
        raise AttributeError('Root node does not have the accessed attribute')

    # token-field accessors, all delegating to _attribute_token()
    def tag(self) -> str:
        return self._attribute_token().tag

    def attrs(self) -> dict[(str, ((str | int) | float))]:
        return self._attribute_token().attrs

    def attrGet(self, name: str) -> (((None | str) | int) | float):
        return self._attribute_token().attrGet(name)

    def map(self) -> (tuple[(int, int)] | None):
        map_ = self._attribute_token().map
        if map_:
            return tuple(map_)
        return None

    def level(self) -> int:
        return self._attribute_token().level

    def content(self) -> str:
        return self._attribute_token().content

    def markup(self) -> str:
        return self._attribute_token().markup

    def info(self) -> str:
        return self._attribute_token().info

    def meta(self) -> dict[(Any, Any)]:
        return self._attribute_token().meta

    def block(self) -> bool:
        return self._attribute_token().block

    def hidden(self) -> bool:
        return self._attribute_token().hidden
def verify_code_verifier(verifier: str, challenge: str, method: str) -> bool:
    """Validate a PKCE code verifier against a challenge.

    Supports the 'plain' and 'S256' challenge methods; any other method fails.
    Comparisons use secrets.compare_digest to avoid timing side channels.
    """
    if method == 'S256':
        hashed = get_code_verifier_hash(verifier)
        return secrets.compare_digest(hashed, challenge)
    if method == 'plain':
        return secrets.compare_digest(verifier, challenge)
    return False
def cfg_with_single_aliased_variable_1(x, z_aliased) -> Tuple[(ControlFlowGraph, ControlFlowGraph)]:
    """Build an input CFG containing MemPhi functions plus the expected CFG
    after MemPhi-to-Phi conversion for the single aliased variable z."""
    input_cfg = ControlFlowGraph()
    m0, m1, m2, m3 = generate_mem_phi_variables(4)
    block_1 = BasicBlock(1, [Assignment(x[0], x[1])])
    block_2 = BasicBlock(2, [MemPhi(m1, [m0, m3]), Assignment(x[2], z_aliased[1])])
    block_3 = BasicBlock(3, [])
    block_4 = BasicBlock(4, [])
    block_5 = BasicBlock(5, [MemPhi(m3, [m1, m2])])
    input_cfg.add_edges_from([
        UnconditionalEdge(block_1, block_2),
        UnconditionalEdge(block_2, block_3),
        UnconditionalEdge(block_2, block_4),
        UnconditionalEdge(block_3, block_5),
        UnconditionalEdge(block_4, block_5),
        UnconditionalEdge(block_5, block_2),
    ])
    expected_cfg = ControlFlowGraph()
    # Blocks 2 and 5 are rebuilt with Phi functions over z; 1, 3 and 4 are reused.
    block_2 = BasicBlock(2, [Phi(z_aliased[1], [z_aliased[0], z_aliased[3]]), Assignment(x[2], z_aliased[1])])
    block_5 = BasicBlock(5, [Phi(z_aliased[3], [z_aliased[1], z_aliased[2]])])
    expected_cfg.add_edges_from([
        UnconditionalEdge(block_1, block_2),
        UnconditionalEdge(block_2, block_3),
        UnconditionalEdge(block_2, block_4),
        UnconditionalEdge(block_3, block_5),
        UnconditionalEdge(block_4, block_5),
        UnconditionalEdge(block_5, block_2),
    ])
    return (input_cfg, expected_cfg)
def test_epoch_length_range(tester, casper, new_epoch, epoch_length, assert_tx_failed):
    """Epoch initialization must succeed exactly on epoch-boundary blocks and revert otherwise."""
    new_epoch()
    for _step in range(epoch_length * 3):
        latest_height = tester.get_block_by_number('latest')['number']
        on_boundary = ((latest_height + 1) % epoch_length) == 0
        upcoming_epoch = casper.functions.current_epoch().call() + 1
        if on_boundary:
            casper.functions.initialize_epoch(upcoming_epoch).transact()
            assert casper.functions.current_epoch().call() == upcoming_epoch
        else:
            assert_tx_failed(lambda: casper.functions.initialize_epoch(upcoming_epoch).transact())
        tester.mine_block()
def extractHidamarisoutranslationsCom(item):
    """Map a scraped release item to a release message via the site's tag table.

    Returns None for previews/untitled chapters, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('devil wife', 'I Summoned the Devil to Grant Me a Wish, but I Married Her Instead Since She Was Adorable ~My New Devil Wife~', 'translated'),
        ('futago no ane', 'My Twin Sister Was Taken as a Miko and I Was Thrown Away but Im Probably the Miko ', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class DemandDrivenBase(EvaluatorBase, ABC):
    """Attribute-grammar evaluator that computes attributes lazily, on demand.

    For every grammar symbol a set of generated "interface" mixin classes is
    built whose attribute descriptors trigger evaluation on first access.
    Nodes of a prepared tree have their ``__class__`` swapped to the matching
    interface by :meth:`for_tree`.
    """

    def __init__(self, grammar):
        super().__init__(grammar)
        self.grammar = grammar
        self.__interfaces = self.__build_attribute_interfaces()
        self._cache = {}

    def for_tree(self, root):
        """Validate, prepare and instrument *root* so its attributes evaluate lazily."""
        self.grammar.validate_tree(root)
        root = self._prepare_tree(root)
        self.__inject_interfaces(root)
        return root

    def __inject_interfaces(self, root):
        # Collect all nodes first, then retype them, so the traversal never
        # observes half-retyped nodes.
        nodes = []

        def inject_attributes(node, _, context):
            nodes.append((node, context.is_root))
        self.grammar.traverse(root, inject_attributes)
        for (node, is_root) in nodes:
            node_type = type(node)
            (a, s, _) = self.__interfaces[node_type]
            # the root gets the synthesized-attributes interface; inner nodes get all
            node.__class__ = (s if is_root else a)

    def __build_attribute_interfaces(self):
        """Generate, per grammar symbol, the interface classes exposing evaluable attributes.

        Returns a dict: symbol -> (all-attrs class, synthesized class, inherited class).
        """
        interfaces = {}
        for symbol in self.grammar.productions:
            sym_type = symbol
            sym_name = symbol.__name__
            gen_type = sym_type.__class__

            def new_interface(name, attributes):
                # Build a subclass of the symbol type mixed with SymbolInterface,
                # with one EvaluableAttribute descriptor per grammar attribute.
                t_symbol = DemandDrivenBase.SymbolInterface
                t_attribute = DemandDrivenBase.EvaluableAttribute
                name = f'{sym_name}__{name}Mixin'
                base = (sym_type, t_symbol)
                cls = gen_type(name, base, {'__original_type__': sym_type, '__attributes__': {}})
                for attribute in attributes:
                    evaluable_attribute = t_attribute(self, attribute)
                    setattr(cls, attribute, evaluable_attribute)
                    getattr(cls, '__attributes__')[attribute] = evaluable_attribute
                return cls
            g = self.grammar
            cls_i = new_interface('InheritedAttrs', g.inherited_attributes[symbol])
            cls_s = new_interface('SyntheticAttrs', g.synthesized_attributes[symbol])
            cls_a = new_interface('AllAttrs', g.attributes[symbol])
            interfaces[symbol] = (cls_a, cls_s, cls_i)
        return interfaces

    class EvaluableAttribute():
        """Data descriptor that evaluates a grammar attribute on first access."""

        def __init__(self, evaluator, name):
            self.__evaluator = evaluator
            self.__name = name

        def __get__(self, instance, owner):
            # a value precomputed into the instance dict wins over evaluation
            if (self.__name in instance.__dict__):
                return instance.__dict__[self.__name]
            return self.__evaluator.evaluate(instance, self.__name)

        def __set__(self, instance, value):
            # BUG FIX: the descriptor protocol invokes __set__(self, obj, value);
            # the previous signature `__set__(self)` made every assignment raise
            # TypeError (wrong arity) instead of the intended AttributeError.
            raise AttributeError('Cannot override attributes')

    class SymbolInterface():
        """Mixin that marks instrumented nodes as immutable."""
        __attributes__: dict

        def __setattr__(self, name, value):
            raise self.ImmutableError(f'Cannot set {name} on {self.__class__.__qualname__} object. The object has been marked immutable.')

        def __delattr__(self, item):
            raise self.ImmutableError(f'Cannot delete {item} on {self.__class__.__qualname__} object. The object has been marked immutable.')

        def __is_intrinsic_attribute__(self, attribute):
            # intrinsic (non-evaluated) attributes live directly in the instance dict
            if (attribute in self.__dict__):
                return True
            return False

        class ImmutableError(AttributeError):
            pass
def test_json_Message_Sticker_without_thumbnail():
    """A sticker message without a "thumbnail" field must parse with thumbnail None.

    BUG FIX: the user/chat ids and the date had been scrubbed out of the JSON
    literal ("id": ,  "id": -,  "date": ,), leaving invalid JSON that made
    de_json raise before any assertion ran.  Placeholder values restore a
    parseable fixture; the assertions are unchanged.
    """
    json_string = '{"message_id": 21552, "from": {"id": 123456789, "is_bot": false, "first_name": " r ", "username": "Purya", "language_code": "en"}, "chat": {"id": -1001234567890, "title": "123", "type": "supergroup"}, "date": 1594068257, "sticker": {"type": "regular", "width": 368, "height": 368, "emoji": "", "set_name": "ipuryapack", "is_animated": false, "is_video": true, "file_id": "CAACAgQAAx0CThS-5gACVDBfA4-toaZg4aUQGL5HWerSKoqaJQACArADwPvHBfcsY4I5C3feGgQ", "file_unique_id": "AgADfAADsPvHWQ", "file_size": 14602}}'
    msg = types.Message.de_json(json_string)
    assert (msg.sticker.height == 368)
    assert (msg.sticker.thumbnail is None)
    assert (msg.content_type == 'sticker')
class FigureModel(Model):
    """Model wiring for the "figure" sub-model: pairs the generic fulltext data
    generator with figure-specific semantic extraction and TEI training classes."""

    def get_data_generator(self, document_features_context: DocumentFeaturesContext) -> FullTextDataGenerator:
        # figures reuse the generic fulltext data generator
        return FullTextDataGenerator(document_features_context=document_features_context)

    def get_semantic_extractor(self) -> FigureSemanticExtractor:
        return FigureSemanticExtractor()

    def get_tei_training_data_generator(self) -> FigureTeiTrainingDataGenerator:
        return FigureTeiTrainingDataGenerator()

    def get_training_tei_parser(self) -> FigureTrainingTeiParser:
        return FigureTrainingTeiParser()
class MockBQEncodingHandlers(StructuredDatasetEncoder):
    """Test double: encoder registered for pandas DataFrames under the "bq"
    protocol that returns a fixed BigQuery-style literal instead of writing data."""

    def __init__(self):
        super().__init__(pd.DataFrame, 'bq', '')

    def encode(self, ctx: FlyteContext, structured_dataset: StructuredDataset, structured_dataset_type: StructuredDatasetType) -> literals.StructuredDataset:
        # hard-coded URI; no actual encoding is performed
        return literals.StructuredDataset(uri='bq://bucket/key', metadata=StructuredDatasetMetadata(structured_dataset_type))
def test_load_schema_accepts_custom_repository():
    """load_schema should resolve unknown named types through a user-supplied repository."""

    class LocalSchemaRepository(AbstractSchemaRepository):
        """Minimal repository backed by an in-memory dict of subject -> schema."""

        def __init__(self, schemas):
            self.schemas = schemas

        def load(self, subject):
            return self.schemas.get(subject)

    repo = LocalSchemaRepository({
        'A': {'name': 'A', 'type': 'record', 'fields': [{'name': 'foo', 'type': 'B'}]},
        'B': {'name': 'B', 'type': 'record', 'fields': [{'name': 'bar', 'type': 'string'}]},
    })
    loaded_schema = fastavro.schema.load_schema('A', repo=repo, _write_hint=False)
    # schema 'B' must have been inlined into A's "foo" field
    expected_schema = {
        'name': 'A',
        'type': 'record',
        'fields': [{'name': 'foo', 'type': {'name': 'B', 'type': 'record', 'fields': [{'name': 'bar', 'type': 'string'}]}}],
    }
    assert loaded_schema == expected_schema
class BoardList(UserControl):
    """A single list (column) on a kanban board: holds draggable item cards and
    supports renaming, deleting, and drag-and-drop of both items and lists."""

    # class-level counter: every list gets a unique board_list_id
    id_counter = itertools.count()

    def __init__(self, board: 'Board', store: DataStore, title: str, color: str=''):
        super().__init__()
        self.board_list_id = next(BoardList.id_counter)
        self.store: DataStore = store
        self.board = board
        self.title = title
        self.color = color
        self.items = Column([], tight=True, spacing=4)
        # restore previously persisted items for this list
        self.items.controls = self.store.get_items(self.board_list_id)

    def build(self):
        """Assemble the list UI: header, new-card field, items column and drag targets."""
        self.new_item_field = TextField(label='new card name', height=50, bgcolor=colors.WHITE, on_submit=self.add_item_handler)
        # thin bar shown while an item is dragged over the end of this list
        self.end_indicator = Container(bgcolor=colors.BLACK26, border_radius=border_radius.all(30), height=3, width=200, opacity=0.0)
        self.edit_field = Row([TextField(value=self.title, width=150, height=40, content_padding=padding.only(left=10, bottom=10)), TextButton(text='Save', on_click=self.save_title)])
        self.header = Row(controls=[Text(value=self.title, style='titleMedium', text_align='left', overflow='clip', expand=True), Container(PopupMenuButton(items=[PopupMenuItem(content=Text(value='Edit', style='labelMedium', text_align='center', color=self.color), on_click=self.edit_title), PopupMenuItem(), PopupMenuItem(content=Text(value='Delete', style='labelMedium', text_align='center', color=self.color), on_click=self.delete_list), PopupMenuItem(), PopupMenuItem(content=Text(value='Move List', style='labelMedium', text_align='center', color=self.color))]), padding=padding.only(right=(- 10)))], alignment='spaceBetween')
        self.inner_list = Container(content=Column([self.header, self.new_item_field, TextButton(content=Row([Icon(icons.ADD), Text('add card', color=colors.BLACK38)], tight=True), on_click=self.add_item_handler), self.items, self.end_indicator], spacing=4, tight=True, data=self.title), width=250, border=border.all(2, colors.BLACK12), border_radius=border_radius.all(5), bgcolor=(self.color if (self.color != '') else colors.BACKGROUND), padding=padding.only(bottom=10, right=10, left=10, top=5))
        # nested drag targets: outer accepts items, inner accepts whole lists
        self.view = DragTarget(group='items', content=Draggable(group='lists', content=DragTarget(group='lists', content=self.inner_list, data=self, on_accept=self.list_drag_accept, on_will_accept=self.list_will_drag_accept, on_leave=self.list_drag_leave)), data=self, on_accept=self.item_drag_accept, on_will_accept=self.item_will_drag_accept, on_leave=self.item_drag_leave)
        return self.view

    def item_drag_accept(self, e):
        """An item card was dropped on this list: move it here from its source list."""
        src = self.page.get_control(e.src_id)
        self.add_item(src.data.item_text)
        src.data.list.remove_item(src.data)
        self.end_indicator.opacity = 0.0
        self.update()

    def item_will_drag_accept(self, e):
        # show the drop indicator while a compatible item hovers over the list
        if (e.data == 'true'):
            self.end_indicator.opacity = 1.0
        self.update()

    def item_drag_leave(self, e):
        self.end_indicator.opacity = 0.0
        self.update()

    def list_drag_accept(self, e):
        """Another list was dropped on this one: swap their positions on the board."""
        src = self.page.get_control(e.src_id)
        l = self.board.board_lists
        to_index = l.index(e.control.data)
        from_index = l.index(src.content.data)
        (l[to_index], l[from_index]) = (l[from_index], l[to_index])
        self.inner_list.border = border.all(2, colors.BLACK12)
        self.board.update()
        self.update()

    def list_will_drag_accept(self, e):
        # highlight the border while another list hovers over this one
        if (e.data == 'true'):
            self.inner_list.border = border.all(2, colors.BLACK)
        self.update()

    def list_drag_leave(self, e):
        self.inner_list.border = border.all(2, colors.BLACK12)
        self.update()

    def delete_list(self, e):
        self.board.remove_list(self, e)

    def edit_title(self, e):
        # swap the header title for the inline edit field
        self.header.controls[0] = self.edit_field
        self.header.controls[1].visible = False
        self.update()

    def save_title(self, e):
        self.title = self.edit_field.controls[0].value
        self.header.controls[0] = Text(value=self.title, style='titleMedium', text_align='left', overflow='clip', expand=True)
        self.header.controls[1].visible = True
        self.update()

    def add_item_handler(self, e):
        if (self.new_item_field.value == ''):
            return
        self.add_item()

    def add_item(self, item: str=None, chosen_control: Draggable=None, swap_control: Draggable=None):
        """Add/reorder an item card.

        Three cases: reorder within this list (both indices known), insert a
        dropped item at a position (only to_index known), or append a new card.
        NOTE(review): only the append branch calls store.add_item -- the
        insert branch never persists the new item; confirm this is intended.
        """
        controls_list = [x.controls[1] for x in self.items.controls]
        to_index = (controls_list.index(swap_control) if (swap_control in controls_list) else None)
        from_index = (controls_list.index(chosen_control) if (chosen_control in controls_list) else None)
        control_to_add = Column([Container(bgcolor=colors.BLACK26, border_radius=border_radius.all(30), height=3, alignment=alignment.center_right, width=200, opacity=0.0)])
        if ((from_index is not None) and (to_index is not None)):
            # reorder within this list
            self.items.controls.insert(to_index, self.items.controls.pop(from_index))
            self.set_indicator_opacity(swap_control, 0.0)
        elif (to_index is not None):
            # insert a dropped item at a specific position
            new_item = Item(self, self.store, item)
            control_to_add.controls.append(new_item)
            self.items.controls.insert(to_index, control_to_add)
        else:
            # append a new card (explicit text or the new-card field's value)
            new_item = (Item(self, self.store, item) if item else Item(self, self.store, self.new_item_field.value))
            control_to_add.controls.append(new_item)
            self.items.controls.append(control_to_add)
            self.store.add_item(self.board_list_id, new_item)
        self.new_item_field.value = ''
        self.view.update()
        self.page.update()

    def remove_item(self, item: Item):
        controls_list = [x.controls[1] for x in self.items.controls]
        del self.items.controls[controls_list.index(item)]
        self.store.remove_item(self.board_list_id, item.item_id)
        self.view.update()

    def set_indicator_opacity(self, item, opacity):
        controls_list = [x.controls[1] for x in self.items.controls]
        self.items.controls[controls_list.index(item)].controls[0].opacity = opacity
        self.view.update()
class ServerInfo():
def format_bytes(size) -> str:
    """Render a byte count with a binary prefix, e.g. 2048 -> '2.00 KB'."""
    step = 1024
    for prefix in ('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z'):
        if abs(size) < step:
            return f'{size:.2f} {prefix}B'
        size /= step
    # larger than 1024 ZB: report in yottabytes
    return f'{size:.2f} YB'
def fmt_seconds(seconds: int) -> str:
(days, rem) = divmod(int(seconds), 86400)
(hours, rem) = divmod(rem, 3600)
(minutes, seconds) = divmod(rem, 60)
parts = []
if days:
parts.append('{} '.format(days))
if hours:
parts.append('{} '.format(hours))
if minutes:
parts.append('{} '.format(minutes))
if seconds:
parts.append('{} '.format(seconds))
if (len(parts) == 0):
return '0 '
else:
return ' '.join(parts)
def fmt_timedelta(td: timedelta) -> str:
total_seconds = round(td.total_seconds())
return ServerInfo.fmt_seconds(total_seconds)
def get_cpu_info() -> dict:
cpu_info = {'usage': round(psutil.cpu_percent(interval=1, percpu=False), 2)}
cpu_freq = psutil.cpu_freq()
cpu_info['max_freq'] = round(cpu_freq.max, 2)
cpu_info['min_freq'] = round(cpu_freq.min, 2)
cpu_info['current_freq'] = round(cpu_freq.current, 2)
cpu_info['logical_num'] = psutil.cpu_count(logical=True)
cpu_info['physical_num'] = psutil.cpu_count(logical=False)
return cpu_info
def get_mem_info() -> dict:
mem = psutil.virtual_memory()
return {'total': round((((mem.total / 1024) / 1024) / 1024), 2), 'used': round((((mem.used / 1024) / 1024) / 1024), 2), 'free': round((((mem.available / 1024) / 1024) / 1024), 2), 'usage': round(mem.percent, 2)}
def get_sys_info() -> dict:
try:
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sk:
sk.connect(('8.8.8.8', 80))
ip = sk.getsockname()[0]
except socket.gaierror:
ip = '127.0.0.1'
return {'name': socket.gethostname(), 'ip': ip, 'os': platform.system(), 'arch': platform.machine()}
def get_disk_info() -> List[dict]:
disk_info = []
for disk in psutil.disk_partitions():
usage = psutil.disk_usage(disk.mountpoint)
disk_info.append({'dir': disk.mountpoint, 'type': disk.fstype, 'device': disk.device, 'total': ServerInfo.format_bytes(usage.total), 'free': ServerInfo.format_bytes(usage.free), 'used': ServerInfo.format_bytes(usage.used), 'usage': f'{round(usage.percent, 2)} %'})
return disk_info
def get_service_info():
process = psutil.Process(os.getpid())
mem_info = process.memory_info()
start_time = timezone.f_datetime(datetime.utcfromtimestamp(process.create_time()).replace(tzinfo=tz.utc))
return {'name': 'Python3', 'version': platform.python_version(), 'home': sys.executable, 'cpu_usage': f'{round(process.cpu_percent(interval=1), 2)} %', 'mem_vms': ServerInfo.format_bytes(mem_info.vms), 'mem_rss': ServerInfo.format_bytes(mem_info.rss), 'mem_free': ServerInfo.format_bytes((mem_info.vms - mem_info.rss)), 'startup': start_time, 'elapsed': f'{ServerInfo.fmt_timedelta((timezone.now() - start_time))}'} |
def gtk_builder_translation_hack(builder):
    """Work around missing GtkBuilder translations by routing every
    translatable widget property through gettext manually."""
    translatable_properties = ['label', 'text', 'title', 'tooltip-text']
    for widget in builder.get_objects():
        available = {prop.name for prop in widget.list_properties()}
        for prop_name in translatable_properties:
            if prop_name not in available:
                continue
            text = widget.get_property(prop_name)
            if text:
                widget.set_property(prop_name, gettext(text))
def main(style, files):
    """Load token-info YAML files and print announcements in the chosen style."""
    announcements = []
    for infile in files:
        if not os.path.isfile(infile):
            continue
        with open(infile, encoding='utf8') as f:
            token_info = yaml.safe_load(f.read())
        announcements.append(STYLE_TO_FUNC[style]['each'](token_info))
    # Fall back to the generic wrapper when the style has no custom one.
    wrap_style = STYLE_TO_FUNC[style].get('wrap', print_all_wrap)
    wrap_style(announcements)
class CmdModules(cmd.Cmd):
    """Interactive terminal built on ``cmd.Cmd``: module commands are words
    prefixed with ':' and are completed/dispatched through the standard
    ``complete_*`` / ``do_*`` machinery, with PHP-shell alias fallbacks."""
    # Allow ':' inside command words so ':module' parses as one identifier.
    identchars = (cmd.Cmd.identchars + ':')
    doc_header = 'Modules and commands (type :help <module>):'
    nohelp = '[!] No help on %s'
    def complete(self, text, state):
        """Readline completion entry point.

        Only lines starting with ':' are completed; the colon prefix is
        stripped and readline's begin/end indexes re-based before delegating
        to ``complete_<cmd>`` / ``completedefault`` / ``completenames``.
        'alias_*' matches are offered only when the PHP shell is the default,
        and are shown without their 'alias_' prefix.
        """
        if (state == 0):
            origline = readline.get_line_buffer()
            # Plain (non-':') input is left to the default shell -- no completion.
            if (origline and (not origline.startswith(':'))):
                return None
            line = origline.lstrip().lstrip(':')
            # Account for the characters stripped above when re-basing indexes.
            stripped = (len(origline) - len(line))
            begidx = (readline.get_begidx() - stripped)
            endidx = (readline.get_endidx() - stripped)
            if (begidx > 0):
                # Completing an argument: pick the command-specific completer.
                (cmd, args, foo) = self.parseline(line)
                if (cmd == ''):
                    compfunc = self.completedefault
                else:
                    try:
                        compfunc = getattr(self, ('complete_' + cmd))
                    except AttributeError:
                        compfunc = self.completedefault
            else:
                # Completing the command word itself.
                compfunc = self.completenames
            self.completion_matches = compfunc(text, line, begidx, endidx)
        try:
            if self.completion_matches[state].startswith('alias_'):
                if (self.session.get('default_shell') == 'shell_php'):
                    # Strip the 'alias_' prefix (6 chars) for display.
                    return self.completion_matches[state][6:]
                else:
                    return ''
            else:
                return self.completion_matches[state]
        except IndexError:
            return None
    def onecmd(self, line):
        """Dispatch *line*: try ``do_<cmd>`` first, then PHP-shell aliases
        (``do_alias_<cmd>``), finally ``default``. 'EOF' raises EOFError."""
        (cmd, arg, line) = self.parseline(line)
        if (not line):
            return self.emptyline()
        if (cmd in (None, '')):
            return self.default(line)
        self.lastcmd = line
        if (line == 'EOF'):
            raise EOFError()
        if cmd:
            try:
                func = getattr(self, ('do_' + cmd.lstrip(':')))
            except AttributeError:
                # Aliases apply when the PHP shell is default, and always for 'cd'.
                if ((self.session.get('default_shell') == 'shell_php') or (cmd.lstrip(':') == 'cd')):
                    try:
                        func = getattr(self, ('do_alias_' + cmd.lstrip(':')))
                    except AttributeError:
                        pass
                    else:
                        return func(arg, cmd)
            else:
                return func(arg, cmd)
        return self.default(line)
    def _print_modules(self):
        """Log a table of ':module' names and their descriptions."""
        data = []
        for (module_group, names) in modules.loaded_tree.items():
            for module_name in names:
                data.append([(':%s' % module_name), modules.loaded[module_name].info.get('description', '')])
        if data:
            log.info(utils.prettify.tablify(data, table_border=False))
    def _print_command_replacements(self):
        """Log a table mapping alias command names to their modules."""
        data = []
        for (module_name, module) in modules.loaded.items():
            if module.aliases:
                data.append([', '.join(module.aliases), module_name])
        if data:
            log.info(utils.prettify.tablify(data, table_border=False))
    def do_help(self, arg, command):
        """Print the module list; when no sh shell is running, also print the
        'no shell' hint and the alias replacement table."""
        print()
        self._print_modules()
        if (self.session['shell_sh']['status'] == Status.RUN):
            print()
            return
        log.info(messages.terminal.help_no_shell)
        self._print_command_replacements()
        print()
def analyze_last200blocks(block, blockdata):
    """Summarise the 200 blocks preceding *block*.

    Returns (hashpower, avg_timemined):
      * hashpower -- DataFrame indexed by mingasprice with per-price block
        counts, cumulative counts, and the cumulative share of hashpower (%)
        accepting at least that gas price,
      * avg_timemined -- mean inter-block mining time in seconds, falling
        back to 15 when no valid intervals exist.
    """
    # BUG FIX: 'time_mined' must be selected too -- the interval analysis
    # below reads blockinterval['time_mined']; without the column every
    # interval was NaN and the 15-second fallback always fired.
    recent_blocks = blockdata.loc[(blockdata['block_number'] > (block - 200)), ['mingasprice', 'block_number', 'time_mined']]
    hashpower = recent_blocks.groupby('mingasprice').count()
    hashpower = hashpower.rename(columns={'block_number': 'count'})
    hashpower['cum_blocks'] = hashpower['count'].cumsum()
    totalblocks = hashpower['count'].sum()
    hashpower['hashp_pct'] = ((hashpower['cum_blocks'] / totalblocks) * 100)
    # Per-block deltas; invalid intervals (gaps in block numbers, negative
    # timestamps) are masked out before averaging.
    blockinterval = recent_blocks.sort_values('block_number').diff()
    blockinterval.loc[(blockinterval['block_number'] > 1), 'time_mined'] = np.nan
    blockinterval.loc[(blockinterval['time_mined'] < 0), 'time_mined'] = np.nan
    avg_timemined = blockinterval['time_mined'].mean()
    if np.isnan(avg_timemined):
        avg_timemined = 15
    return (hashpower, avg_timemined)
def read_exchanges_config(config_dir) -> dict[(str, Any)]:
    """Read every exchanges/<zone1><sep><zone2>.yaml under *config_dir* into a
    dict keyed 'zone1->zone2'.

    Raises AssertionError when a filename does not split into exactly two
    zone keys.
    """
    exchanges_config = {}
    for exchange_path in config_dir.joinpath('exchanges').glob('*.yaml'):
        exchange_key_unicode = exchange_path.stem
        zone_keys = exchange_key_unicode.split(EXCHANGE_FILENAME_ZONE_SEPARATOR)
        assert (len(zone_keys) == 2)
        exchange_key = '->'.join(zone_keys)
        # Context manager fixes the leaked file handle of the original, and
        # safe_load replaces the unsafe bare yaml.load (which also requires a
        # Loader argument in PyYAML >= 6) -- consistent with the rest of the
        # codebase.
        with open(exchange_path, encoding='utf-8') as f:
            exchanges_config[exchange_key] = yaml.safe_load(f)
    return exchanges_config
# NOTE(review): this bare call looks like a stripped decorator -- presumably
# '@override_settings(ROOT_URLCONF=...)' applied to the test class below;
# confirm against the original module.
_settings(ROOT_URLCONF='tests.test_renderers')
class CacheRenderTest(TestCase):
    """Regression tests: rendered responses must survive a cache round-trip."""

    def _assert_response_cacheable(self, response):
        """Store *response* in the cache and verify it comes back intact."""
        cache.set('key', response)
        cached_response = cache.get('key')
        assert isinstance(cached_response, Response)
        assert (cached_response.content == response.content)
        assert (cached_response.status_code == response.status_code)

    def test_head_caching(self):
        """A HEAD response can be cached and retrieved unchanged."""
        self._assert_response_cacheable(self.client.head('/cache'))

    def test_get_caching(self):
        """A GET response can be cached and retrieved unchanged."""
        self._assert_response_cacheable(self.client.get('/cache'))
def ecrecover(evm: Evm) -> None:
    """ECRECOVER precompile: recover the signer's address from an ECDSA
    signature over a 32-byte message hash.

    Calldata layout (each field read as 32 zero-padded bytes):
      [0:32)    message hash
      [32:64)   v (recovery id, must be 27 or 28)
      [64:96)   r
      [96:128)  s

    On success writes the 20-byte address, left-padded to 32 bytes, into
    ``evm.output``; any invalid input returns silently with no output.
    """
    data = evm.message.data
    # Gas is charged up front, before any signature validation.
    charge_gas(evm, GAS_ECRECOVER)
    message_hash_bytes = buffer_read(data, U256(0), U256(32))
    message_hash = Hash32(message_hash_bytes)
    v = U256.from_be_bytes(buffer_read(data, U256(32), U256(32)))
    r = U256.from_be_bytes(buffer_read(data, U256(64), U256(32)))
    s = U256.from_be_bytes(buffer_read(data, U256(96), U256(32)))
    if ((v != 27) and (v != 28)):
        return
    # r and s must lie strictly between 0 and the curve order.
    if ((0 >= r) or (r >= SECP256K1N)):
        return
    if ((0 >= s) or (s >= SECP256K1N)):
        return
    try:
        public_key = secp256k1_recover(r, s, (v - 27), message_hash)
    except ValueError:
        # Unrecoverable signature (e.g. point not on curve).
        return
    # Address is the last 20 bytes of the keccak256 of the public key.
    address = keccak256(public_key)[12:32]
    padded_address = left_pad_zero_bytes(address, 32)
    evm.output = padded_address
class CustomWatcher(DefaultWatcher):
    """File watcher for auto-reload with include/exclude pattern support.

    Effective include/exclude pattern lists are derived from the reload
    config in ``__init__``; per-path decisions are then memoized in
    ``watched_files`` / ``watched_dirs``.
    """
    def __init__(self, root_path: Path, config: Config):
        # '*.py' is included by default unless explicitly excluded.
        default_includes = ['*.py']
        self.includes = [default for default in default_includes if (default not in config.reload_excludes)]
        self.includes.extend(config.reload_includes)
        self.includes = list(set(self.includes))
        # Hidden files, bytecode and editor swap/backup files are excluded
        # by default unless explicitly included.
        default_excludes = ['.*', '.py[cod]', '.sw.*', '~*']
        self.excludes = [default for default in default_excludes if (default not in config.reload_includes)]
        self.excludes.extend(config.reload_excludes)
        self.excludes = list(set(self.excludes))
        # Memoized per-path watch decisions, keyed by DirEntry.path.
        self.watched_dirs: dict[(str, bool)] = {}
        self.watched_files: dict[(str, bool)] = {}
        self.dirs_includes = set(config.reload_dirs)
        self.dirs_excludes = set(config.reload_dirs_excludes)
        self.resolved_root = root_path
        super().__init__(str(root_path))
    def should_watch_file(self, entry: 'DirEntry') -> bool:
        """Return (and cache) whether changes to this file should trigger reloads."""
        cached_result = self.watched_files.get(entry.path)
        if (cached_result is not None):
            return cached_result
        entry_path = Path(entry)
        # Files directly in the CWD are ignored unless CWD is a reload dir.
        if ((entry_path.parent == Path.cwd()) and (Path.cwd() not in self.dirs_includes)):
            self.watched_files[entry.path] = False
            return False
        for include_pattern in self.includes:
            # Suffix match covers includes given as explicit file names.
            if str(entry_path).endswith(include_pattern):
                self.watched_files[entry.path] = True
                return True
            if entry_path.match(include_pattern):
                # Glob-included -- unless an exclude pattern also matches.
                for exclude_pattern in self.excludes:
                    if entry_path.match(exclude_pattern):
                        self.watched_files[entry.path] = False
                        return False
                self.watched_files[entry.path] = True
                return True
        self.watched_files[entry.path] = False
        return False
    def should_watch_dir(self, entry: 'DirEntry') -> bool:
        """Return (and cache) whether this directory should be traversed."""
        cached_result = self.watched_dirs.get(entry.path)
        if (cached_result is not None):
            return cached_result
        entry_path = Path(entry)
        if (entry_path in self.dirs_excludes):
            self.watched_dirs[entry.path] = False
            return False
        for exclude_pattern in self.excludes:
            if entry_path.match(exclude_pattern):
                # A newly-seen dir matching an exclude pattern inside a
                # watched tree is permanently added to the exclude set.
                is_watched = False
                if (entry_path in self.dirs_includes):
                    is_watched = True
                for directory in self.dirs_includes:
                    if (directory in entry_path.parents):
                        is_watched = True
                if is_watched:
                    logger.debug("WatchGodReload detected a new excluded dir '%s' in '%s'; Adding to exclude list.", entry_path.relative_to(self.resolved_root), str(self.resolved_root))
                self.watched_dirs[entry.path] = False
                self.dirs_excludes.add(entry_path)
                return False
        if (entry_path in self.dirs_includes):
            self.watched_dirs[entry.path] = True
            return True
        for directory in self.dirs_includes:
            if (directory in entry_path.parents):
                self.watched_dirs[entry.path] = True
                return True
        for include_pattern in self.includes:
            if entry_path.match(include_pattern):
                # Dir matching an include pattern becomes a new reload dir.
                logger.info("WatchGodReload detected a new reload dir '%s' in '%s'; Adding to watch list.", str(entry_path.relative_to(self.resolved_root)), str(self.resolved_root))
                self.dirs_includes.add(entry_path)
                self.watched_dirs[entry.path] = True
                return True
        self.watched_dirs[entry.path] = False
        return False
def test_traverse_overridden():
    """Traversal must include the overriding provider and both of its children."""
    bar = providers.Object('bar')
    baz = providers.Object('baz')
    overriding = providers.Dict(bar=bar, baz=baz)
    provider = providers.Dict(foo='foo')
    provider.override(overriding)
    traversed = list(provider.traverse())
    assert len(traversed) == 3
    for child in (bar, baz, overriding):
        assert child in traversed
# NOTE(review): this bare call looks like a stripped decorator -- presumably
# '@bfd.register_auth_type(BFD_AUTH_KEYED_MD5)' registering the class below;
# confirm against the original module.
_auth_type(BFD_AUTH_KEYED_MD5)
class KeyedMD5(BFDAuth):
    """BFD Keyed MD5 authentication section.

    Wire layout after the common auth header:
    Auth Key ID (1B) | Reserved (1B, must be 0) | Sequence Number (4B) |
    Auth Key / Digest (16B).
    """
    _PACK_STR = '!BBL16s'
    _PACK_STR_LEN = struct.calcsize(_PACK_STR)
    def __init__(self, auth_key_id, seq, auth_key=None, digest=None, auth_len=None):
        # auth_key is only needed for serialize/authenticate; digest is
        # filled by serialize() or supplied by parser().
        self.auth_key_id = auth_key_id
        self.seq = seq
        self.auth_key = auth_key
        self.digest = digest
        super(KeyedMD5, self).__init__(auth_len)
    def __len__(self):
        # Fixed section length: header + key id + reserved + seq + 16B digest.
        return 24
    def parser(cls, buf):
        """Parse a Keyed MD5 auth section from *buf*.

        NOTE(review): takes ``cls`` -- likely originally decorated with
        '@classmethod' (stripped); confirm upstream.
        """
        (auth_type, auth_len) = cls.parser_hdr(buf)
        assert (auth_type == cls.auth_type)
        assert (auth_len == 24)
        (auth_key_id, reserved, seq, digest) = struct.unpack_from(cls._PACK_STR, buf[cls._PACK_HDR_STR_LEN:])
        assert (reserved == 0)
        # The key itself is never on the wire; only its digest is.
        msg = cls(auth_key_id=auth_key_id, seq=seq, auth_key=None, digest=digest)
        return (msg, None, None)
    def serialize(self, payload, prev):
        """Serialize the auth section, computing the MD5 digest over the BFD
        packet plus this section with the shared key in the digest slot."""
        assert ((self.auth_key is not None) and (len(self.auth_key) <= 16))
        assert isinstance(prev, bfd)
        bfd_bin = prev.pack()
        auth_hdr_bin = self.serialize_hdr()
        # NOTE(review): '(len(self.auth_key) - 16)' is <= 0, so the padding
        # expression adds nothing -- struct's '16s' zero-pads the key anyway;
        # likely meant '(16 - len(self.auth_key))'. Net effect is the same.
        auth_data_bin = struct.pack(self._PACK_STR, self.auth_key_id, 0, self.seq, (self.auth_key + (b'\x00' * (len(self.auth_key) - 16))))
        h = hashlib.md5()
        h.update(((bfd_bin + auth_hdr_bin) + auth_data_bin))
        self.digest = h.digest()
        return (auth_hdr_bin + struct.pack(self._PACK_STR, self.auth_key_id, 0, self.seq, self.digest))
    def authenticate(self, prev, auth_keys=None):
        """Verify ``self.digest`` against a recomputed MD5 using the key
        registered under ``self.auth_key_id`` in *auth_keys*."""
        auth_keys = (auth_keys if auth_keys else {})
        assert isinstance(prev, bfd)
        if (self.digest is None):
            return False
        if (self.auth_key_id not in auth_keys):
            return False
        auth_key = auth_keys[self.auth_key_id]
        bfd_bin = prev.pack()
        auth_hdr_bin = self.serialize_hdr()
        # Same digest construction as serialize(): key occupies the 16-byte
        # digest slot during hashing.
        auth_data_bin = struct.pack(self._PACK_STR, self.auth_key_id, 0, self.seq, (auth_key + (b'\x00' * (len(auth_key) - 16))))
        h = hashlib.md5()
        h.update(((bfd_bin + auth_hdr_bin) + auth_data_bin))
        if (self.digest == h.digest()):
            return True
        else:
            return False
def key_press(win, ev, c):
    """Keyboard handler for the canvas viewer.

    +/-        zoom in / out
    p          export to PDF
    f, a       step cursor forward one point (F: forward 20)
    b          step cursor back one point (B: back 20)
    r, 0       reset cursor to the first point
    other keys in the ASCII range quit the main loop.
    """
    new_idx = None
    s = c.get_scale()
    key = chr((ev.keyval & 255))
    if (key == '+'):
        c.set_scale((s * 1.2))
    elif (key == '-'):
        c.set_scale((s * 0.8))
    elif (key == 'p'):
        print_pdf(c)
    elif (key == 'f'):
        new_idx = (c.cursor_idx + 1)
    elif (key == 'F'):
        new_idx = (c.cursor_idx + 20)
    elif (key == 'a'):
        # NOTE(review): 'a' duplicates 'f' (single forward step).
        new_idx = (c.cursor_idx + 1)
    elif (key == 'b'):
        new_idx = (c.cursor_idx - 1)
    elif (key == 'B'):
        new_idx = (c.cursor_idx - 20)
    elif (key == 'r'):
        new_idx = 0
    elif (key == '0'):
        new_idx = 0
    elif (ev.keyval <= 255):
        gtk.main_quit()
    if ((new_idx is not None) and ((new_idx < 1) or (new_idx >= len(c.points)))):
        # Out of range: clamp to the first point and clear the trail arrows.
        new_idx = 1
        for a in c.arrows:
            a.remove()
        # BUG FIX: was 'c.arrow = []', which created a stray attribute and
        # left the removed items inside c.arrows (they would be re-removed
        # on the next wrap-around); reset the real list instead.
        c.arrows = []
    else:
        print(c.get_scale())
    if (new_idx is not None):
        jumpto = True
        # 'f'/'b' honour the per-point jump flag; other keys always jump.
        if (key == 'f'):
            jumpto = c.points[new_idx][1]
        elif (key == 'b'):
            jumpto = c.points[c.cursor_idx][1]
        ox = c.points[c.cursor_idx][0].x
        oy = c.points[c.cursor_idx][0].y
        c.cursor_idx = new_idx
        cx = c.points[c.cursor_idx][0].x
        cy = c.points[c.cursor_idx][0].y
        if jumpto:
            c.cursor.set_simple_transform(cx, cy, 1, 0.0)
        else:
            if (key == 'f'):
                # Draw an arrow from the old position to the new one.
                p = Points([(ox, oy), (cx, cy)])
                c.arrows.append(Polyline(parent=c.get_root_item(), points=p, line_width=0.25, end_arrow=True, arrow_width=3, arrow_tip_length=2, arrow_length=2, stroke_color_rgba=51))
            c.cursor.animate(cx, cy, 1, (- 360.0), absolute=True, duration=150, step_time=30, type=0)
            print(new_idx, c.points[c.cursor_idx][0], c.points[c.cursor_idx][0].att())
    else:
        c.cursor.stop_animation()
def extract_added_entries(entries, url):
    """Yield Episode objects for tweets in a Twitter GraphQL timeline page.

    The user's pinned tweet (cached in ``pin_entry_cache``) is emitted in
    chronological position among the regular tweets, or flushed at the end
    when the timeline is exhausted. When a bottom cursor is present, the
    request for the next page is stored in ``next_page_cache``.
    """
    user_id = parse_graph_variable(url)['userId']
    # Pinned-tweet cache is keyed per user and per media/tweets timeline.
    pin_entry_key = (user_id, is_media(url))
    pinned_entry = pin_entry_cache.get(pin_entry_key)
    cursor = None
    has_timeline = False
    for entry in entries:
        if (entry['content']['entryType'] == 'TimelineTimelineItem'):
            has_timeline = True
            if ('result' not in entry['content']['itemContent']['tweet_results']):
                continue
            ep = tweet_result_to_episode(entry['content']['itemContent']['tweet_results']['result'])
            if (not ep):
                continue
            # Tweet ids increase over time: once we reach a tweet older than
            # the pinned one, emit the pinned entry first to keep order.
            if (pinned_entry and (url_to_id(pinned_entry.url) > url_to_id(ep.url))):
                (yield pinned_entry)
                del pin_entry_cache[pin_entry_key]
                pinned_entry = None
            (yield ep)
        if ((entry['content']['entryType'] == 'TimelineTimelineCursor') and (entry['content']['cursorType'] == 'Bottom')):
            cursor = entry['content']['value']
    if (cursor and has_timeline):
        # Queue the GraphQL request for the next page of this timeline.
        endpoint = (user_media_graph if is_media(url) else user_tweets_graph)
        next_page_cache[url] = endpoint(userId=user_id, cursor=cursor)
    if ((not has_timeline) and pinned_entry):
        # Timeline exhausted without emitting the pinned tweet: flush it last.
        (yield pinned_entry)
        del pin_entry_cache[pin_entry_key]
def test_tokenize_floats():
    """All three float spellings (plain, E-notation, bare exponent) tokenize to 100.0."""
    token = tokenize_json('[100.0, 1.0E+2, 1E+2]')
    expected = ListToken([ScalarToken(100.0, 1, 5), ScalarToken(100.0, 8, 13), ScalarToken(100.0, 16, 19)], 0, 20)
    assert token == expected
    assert token.value == [100.0, 100.0, 100.0]
    # Lookup of the first element keeps the source text and char positions.
    assert token.lookup([0]).value == 100.0
    assert token.lookup([0]).string == '100.0'
    assert token.lookup([0]).start.char_index == 1
    assert token.lookup([0]).end.char_index == 5
class Solution(object):
    """Ransom-note feasibility check."""

    def match_note_to_magazine(self, ransom_note, magazine):
        """Return True if *ransom_note* can be assembled from *magazine*.

        Each magazine character may be used at most once.
        Raises TypeError when either argument is None.
        O(m + n) time, O(k) space for k distinct magazine characters.
        """
        if (ransom_note is None) or (magazine is None):
            raise TypeError('ransom_note or magazine cannot be None')
        from collections import Counter
        available = Counter(magazine)
        for char in ransom_note:
            # Counter returns 0 for missing keys, so one check covers both
            # "character absent" and "character exhausted".
            if available[char] == 0:
                return False
            available[char] -= 1
        return True
class AlienInvasion():
    """Overall class to manage game assets and behavior."""
    def __init__(self):
        """Initialize the game, and create game resources."""
        pygame.init()
        self.clock = pygame.time.Clock()
        self.settings = Settings()
        self.screen = pygame.display.set_mode((self.settings.screen_width, self.settings.screen_height))
        pygame.display.set_caption('Alien Invasion')
        self.stats = GameStats(self)
        self.ship = Ship(self)
        self.bullets = pygame.sprite.Group()
        self.aliens = pygame.sprite.Group()
        self._create_fleet()
        # Start in an inactive state; the Play button starts a game.
        self.game_active = False
        self.play_button = Button(self, 'Play')
    def run_game(self):
        """Start the main loop for the game."""
        while True:
            self._check_events()
            if self.game_active:
                self.ship.update()
                self._update_bullets()
                self._update_aliens()
            self._update_screen()
            # Cap the frame rate at 60 FPS.
            self.clock.tick(60)
    def _check_events(self):
        """Respond to keypresses and mouse events."""
        for event in pygame.event.get():
            if (event.type == pygame.QUIT):
                sys.exit()
            elif (event.type == pygame.KEYDOWN):
                self._check_keydown_events(event)
            elif (event.type == pygame.KEYUP):
                self._check_keyup_events(event)
            elif (event.type == pygame.MOUSEBUTTONDOWN):
                mouse_pos = pygame.mouse.get_pos()
                self._check_play_button(mouse_pos)
    def _check_play_button(self, mouse_pos):
        """Start a new game when the player clicks Play."""
        button_clicked = self.play_button.rect.collidepoint(mouse_pos)
        if (button_clicked and (not self.game_active)):
            self._start_game()
    def _start_game(self):
        """Reset state, clear the board, and start a fresh game."""
        self.stats.reset_stats()
        self.game_active = True
        self.bullets.empty()
        self.aliens.empty()
        self._create_fleet()
        self.ship.center_ship()
        # Hide the mouse cursor during play.
        pygame.mouse.set_visible(False)
    def _check_keydown_events(self, event):
        """Respond to keypresses."""
        if (event.key == pygame.K_RIGHT):
            self.ship.moving_right = True
        elif (event.key == pygame.K_LEFT):
            self.ship.moving_left = True
        elif (event.key == pygame.K_q):
            sys.exit()
        elif (event.key == pygame.K_SPACE):
            self._fire_bullet()
        elif ((event.key == pygame.K_p) and (not self.game_active)):
            # 'p' starts a game from the inactive state, like the Play button.
            self._start_game()
    def _check_keyup_events(self, event):
        """Respond to key releases."""
        if (event.key == pygame.K_RIGHT):
            self.ship.moving_right = False
        elif (event.key == pygame.K_LEFT):
            self.ship.moving_left = False
    def _fire_bullet(self):
        """Create a new bullet and add it to the bullets group, if allowed."""
        if (len(self.bullets) < self.settings.bullets_allowed):
            new_bullet = Bullet(self)
            self.bullets.add(new_bullet)
    def _update_bullets(self):
        """Update bullet positions and get rid of bullets that left the screen."""
        self.bullets.update()
        # Iterate a copy because we remove from the group while looping.
        for bullet in self.bullets.copy():
            if (bullet.rect.bottom <= 0):
                self.bullets.remove(bullet)
        self._check_bullet_alien_collisions()
    def _check_bullet_alien_collisions(self):
        """Respond to bullet-alien collisions; repopulate an empty fleet."""
        # groupcollide removes the colliding sprites (True, True); the
        # returned dict is currently unused -- presumably kept for future
        # scoring; confirm.
        collisions = pygame.sprite.groupcollide(self.bullets, self.aliens, True, True)
        if (not self.aliens):
            # Destroy existing bullets and create a new fleet.
            self.bullets.empty()
            self._create_fleet()
    def _ship_hit(self):
        """Respond to the ship being hit by an alien."""
        if (self.stats.ships_left > 0):
            self.stats.ships_left -= 1
            self.bullets.empty()
            self.aliens.empty()
            self._create_fleet()
            self.ship.center_ship()
            # Brief pause so the player notices the hit.
            sleep(0.5)
        else:
            self.game_active = False
            pygame.mouse.set_visible(True)
    def _update_aliens(self):
        """Check if the fleet is at an edge, then update all alien positions."""
        self._check_fleet_edges()
        self.aliens.update()
        if pygame.sprite.spritecollideany(self.ship, self.aliens):
            self._ship_hit()
        self._check_aliens_bottom()
    def _check_aliens_bottom(self):
        """Check if any aliens have reached the bottom of the screen."""
        for alien in self.aliens.sprites():
            if (alien.rect.bottom >= self.settings.screen_height):
                # Treat this the same as the ship getting hit.
                self._ship_hit()
                break
    def _create_fleet(self):
        """Create the fleet of aliens, spaced one alien apart in both axes."""
        alien = Alien(self)
        (alien_width, alien_height) = alien.rect.size
        (current_x, current_y) = (alien_width, alien_height)
        # Leave three alien-heights free at the bottom for the ship.
        while (current_y < (self.settings.screen_height - (3 * alien_height))):
            while (current_x < (self.settings.screen_width - (2 * alien_width))):
                self._create_alien(current_x, current_y)
                current_x += (2 * alien_width)
            # Finished a row; reset x value, and increment y value.
            current_x = alien_width
            current_y += (2 * alien_height)
    def _create_alien(self, x_position, y_position):
        """Create an alien and place it at the given position in the fleet."""
        new_alien = Alien(self)
        new_alien.x = x_position
        new_alien.rect.x = x_position
        new_alien.rect.y = y_position
        self.aliens.add(new_alien)
    def _check_fleet_edges(self):
        """Respond appropriately if any aliens have reached an edge."""
        for alien in self.aliens.sprites():
            if alien.check_edges():
                self._change_fleet_direction()
                break
    def _change_fleet_direction(self):
        """Drop the entire fleet and change the fleet's direction."""
        for alien in self.aliens.sprites():
            alien.rect.y += self.settings.fleet_drop_speed
        self.settings.fleet_direction *= (- 1)
    def _update_screen(self):
        """Update images on the screen, and flip to the new screen."""
        self.screen.fill(self.settings.bg_color)
        for bullet in self.bullets.sprites():
            bullet.draw_bullet()
        self.ship.blitme()
        self.aliens.draw(self.screen)
        # Draw the Play button only while the game is inactive.
        if (not self.game_active):
            self.play_button.draw_button()
        pygame.display.flip()
def start_ha_master(host, port):
    """Build, run, and return a highly-available notification master bound to host:port."""
    server_uri = (host + ':') + str(port)
    event_storage = DbEventStorage()
    ha_service = HighAvailableNotificationService(
        event_storage,
        SimpleNotificationServerHaManager(),
        server_uri,
        DbHighAvailabilityStorage(),
    )
    master = NotificationServer(ha_service, port=port)
    master.run()
    return master
class OptionSeriesLollipopDataDatalabels(Options):
    """Auto-generated wrapper for the Highcharts series.lollipop.data.dataLabels
    options: each option is a getter (returning the configured value via
    ``_config_get(default)``) paired with a same-named setter writing through
    ``_config``; sub-option groups return typed ``_config_sub_data`` wrappers.

    NOTE(review): the getter/setter pairs share a name with no decorators --
    the '@property' / '@<name>.setter' decorators appear to have been
    stripped (as written, the later 'def' shadows the earlier); confirm
    against the generator output.
    """
    def align(self):
        return self._config_get('center')
    def align(self, text: str):
        self._config(text, js_type=False)
    def allowOverlap(self):
        return self._config_get(False)
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self) -> 'OptionSeriesLollipopDataDatalabelsAnimation':
        return self._config_sub_data('animation', OptionSeriesLollipopDataDatalabelsAnimation)
    def backgroundColor(self):
        return self._config_get(None)
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)
    def borderColor(self):
        return self._config_get(None)
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderRadius(self):
        return self._config_get(0)
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    def borderWidth(self):
        return self._config_get(0)
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def crop(self):
        return self._config_get(True)
    def crop(self, flag: bool):
        self._config(flag, js_type=False)
    def defer(self):
        return self._config_get(True)
    def defer(self, flag: bool):
        self._config(flag, js_type=False)
    def enabled(self):
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def filter(self) -> 'OptionSeriesLollipopDataDatalabelsFilter':
        return self._config_sub_data('filter', OptionSeriesLollipopDataDatalabelsFilter)
    def format(self):
        return self._config_get('point.value')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get(None)
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def inside(self):
        return self._config_get(None)
    def inside(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def overflow(self):
        return self._config_get('justify')
    def overflow(self, text: str):
        self._config(text, js_type=False)
    def padding(self):
        return self._config_get(5)
    def padding(self, num: float):
        self._config(num, js_type=False)
    def position(self):
        return self._config_get('center')
    def position(self, text: str):
        self._config(text, js_type=False)
    def rotation(self):
        return self._config_get(0)
    def rotation(self, num: float):
        self._config(num, js_type=False)
    def shadow(self):
        return self._config_get(False)
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    def shape(self):
        return self._config_get('square')
    def shape(self, text: str):
        self._config(text, js_type=False)
    def style(self):
        return self._config_get(None)
    def style(self, value: Any):
        self._config(value, js_type=False)
    def textPath(self) -> 'OptionSeriesLollipopDataDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionSeriesLollipopDataDatalabelsTextpath)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
    def verticalAlign(self):
        return self._config_get('bottom')
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)
    def x(self):
        return self._config_get(0)
    def x(self, num: float):
        self._config(num, js_type=False)
    def xHigh(self):
        return self._config_get(0)
    def xHigh(self, num: float):
        self._config(num, js_type=False)
    def xLow(self):
        return self._config_get(0)
    def xLow(self, num: float):
        self._config(num, js_type=False)
    def y(self):
        return self._config_get(0)
    def y(self, num: float):
        self._config(num, js_type=False)
    def yHigh(self):
        return self._config_get(0)
    def yHigh(self, num: float):
        self._config(num, js_type=False)
    def yLow(self):
        return self._config_get(0)
    def yLow(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(6)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def get_all_board_moderators_by_board(board: BoardModel) -> 'List[BoardModeratorModel]':
    """Return all moderator models associated with *board*."""
    with session() as s:
        orm_rows = s.query(BoardModeratorOrmModel).filter_by(board_id=board.id).all()
        moderators = [BoardModeratorModel.from_orm_model(row) for row in orm_rows]
        s.commit()
        return moderators
def get_registered_stattest_name(stattest_func: Optional[PossibleStatTestType], feature_type: ColumnType=None) -> str:
    """Resolve the registered name for *stattest_func*; raise if none is found."""
    name = get_registered_stattest(stattest_func, feature_type).name
    if not name:
        raise StatTestNotFoundError(f'No registered stattest for function {stattest_func}. Please register it')
    return name
class CreditCardPaymentEntityTest(QuickbooksTestCase):
    """Integration tests for CreditCardPayment against a live QuickBooks sandbox."""
    def setUp(self):
        # Pace each test -- presumably to stay under the QuickBooks API
        # rate limit; confirm.
        time.sleep(3)
        super(CreditCardPaymentEntityTest, self).setUp()
        # Timestamp-derived suffix keeps objects created per run uniquely named.
        self.account_number = datetime.now().strftime('%d%H%M')
        self.name = 'Test CreditCardPaymentEntityTest {0}'.format(self.account_number)
    def test_create(self):
        """Create a credit-card payment, verify it reads back, then post a
        reversing transfer so the sandbox accounts stay balanced."""
        credit_card_account = Account()
        credit_card_account.Name = 'Credit Card Account {0}'.format(self.account_number)
        credit_card_account.AccountType = 'Credit Card'
        credit_card_account.AccountSubType = 'CreditCard'
        credit_card_account.save(qb=self.qb_client)
        # Pay from any asset account other than A/R.
        accounts = Account.where("Classification = 'Asset' AND FullyQualifiedName != 'Accounts Receivable (A/R)'", max_results=1, qb=self.qb_client)
        from_account = accounts[0]
        to_account = credit_card_account
        credit_card_payment = CreditCardPayment()
        credit_card_payment.Amount = 100
        credit_card_payment.BankAccountRef = from_account.to_ref()
        credit_card_payment.CreditCardAccountRef = to_account.to_ref()
        credit_card_payment.save(qb=self.qb_client)
        query_credit_card_payment = CreditCardPayment.get(credit_card_payment.Id, qb=self.qb_client)
        self.assertEqual(query_credit_card_payment.Id, credit_card_payment.Id)
        self.assertEqual(query_credit_card_payment.Amount, 100)
        self.assertEqual(query_credit_card_payment.BankAccountRef.value, from_account.Id)
        self.assertEqual(query_credit_card_payment.CreditCardAccountRef.value, to_account.Id)
        # Reverse the money movement so repeated runs do not accumulate balance.
        transfer = Transfer()
        transfer.Amount = 100
        transfer.FromAccountRef = to_account.to_ref()
        transfer.ToAccountRef = from_account.to_ref()
        transfer.save(qb=self.qb_client)
    def test_update(self):
        """Bump an existing payment's amount and verify the change persists."""
        credit_card_payment = CreditCardPayment.all(max_results=1, qb=self.qb_client)[0]
        credit_card_payment.Amount += 1
        credit_card_payment.save(qb=self.qb_client)
        query_credit_card_payment = CreditCardPayment.get(credit_card_payment.Id, qb=self.qb_client)
        self.assertEqual(query_credit_card_payment.Amount, credit_card_payment.Amount)
class OptionPlotoptionsParetoSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Auto-generated wrapper for the Highcharts pareto sonification
    defaultInstrumentOptions.mapping.highpass.frequency options.

    NOTE(review): each option appears as a same-named getter/setter pair --
    the '@property' / '@<name>.setter' decorators appear stripped (as
    written, the later 'def' shadows the earlier); confirm against the
    generator output.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class GetLogObserver():
    """Factory producing a twisted FileLogObserver that renders events in
    classic log format, mapping levels to two-character markers and
    prepending an optional component prefix."""
    component_prefix = ''
    event_levels = {twisted_logger.LogLevel.debug: '??', twisted_logger.LogLevel.info: '..', twisted_logger.LogLevel.warn: 'WW', twisted_logger.LogLevel.error: 'EE', twisted_logger.LogLevel.critical: '!!'}

    def format_log_event(self, event):
        """Render *event* as classic log text, tagging the log_system field
        with either the event's prefix or its level marker."""
        prefix = event.get('prefix', '')
        if prefix:
            event['log_system'] = prefix
        else:
            level = event.get('log_level', twisted_logger.LogLevel.info)
            event['log_system'] = self.event_levels.get(level, '-')
        event['log_format'] = str(event.get('log_format', ''))
        formatted = twisted_logger.formatEventAsClassicLogText(
            event,
            formatTime=(lambda e: twisted_logger.formatTime(e, _TIME_FORMAT)),
        )
        return '{}{}'.format(self.component_prefix or '', formatted)

    def __call__(self, outfile):
        """Return a FileLogObserver writing formatted events to *outfile*."""
        return twisted_logger.FileLogObserver(outfile, self.format_log_event)
class ListenerSocket(threading.Thread):
    """Daemon thread accepting TCP clients on 127.0.0.1:<port> and keeping a
    list of connected Client objects."""
    MAX_CONNECTIONS = 10

    def __init__(self, port):
        threading.Thread.__init__(self)
        # BUG FIX: the original did 'threading.Thread.daemon = True', which
        # replaces the 'daemon' property on the Thread CLASS and so marks
        # every thread in the process as daemonic; only this instance should
        # be a daemon.
        self.daemon = True
        self.__port = port
        self.__sock = self.create()
        self.__clients = []

    def create(self) -> socket.socket:
        """Create, bind, and listen on the forward socket; exit on failure."""
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            sock.bind(('127.0.0.1', self.__port))
            sock.listen(self.MAX_CONNECTIONS)
        except socket.error as error:
            Style.neg_sys_msg(error)
            sys.exit(1)
        else:
            Style.pos_sys_msg('Created socket and bound to hidden service forward port')
            return sock

    def run(self):
        """Accept clients forever, appending each to the client list."""
        Style.pos_sys_msg('Listening for clients')
        while True:
            try:
                client_objects = self.__sock.accept()
                client = Client(client_objects)
                self.__clients.append(client)
                Style.client_connect_msg()
            except socket.error as error:
                Style.neg_sys_msg(error)
                sys.exit(1)

    def get_clients(self):
        """Return the list of connected clients."""
        return self.__clients

    def get_client(self, index):
        """Return the client at *index*, or report an out-of-range index."""
        try:
            return self.__clients[index]
        except IndexError:
            Style.neg_sys_msg('Client Index out of range.')

    def del_client(self, index):
        """Remove the client at *index*, or report an out-of-range index."""
        try:
            del self.__clients[index]
        except IndexError:
            Style.neg_sys_msg('Client Index out of range.')
# NOTE(review): the bare '_player' expression below looks like a stripped
# pytest decorator/fixture marker; confirm against the original test module.
_player
def test_mpris2_popup(mpris_manager):
    """The mpris2 widget's extended popup opens as one new internal window,
    shows the track metadata, and its stop control stops playback."""
    number = len(mpris_manager.c.internal_windows())
    widget = mpris_manager.c.widget['mpris2']
    widget.play_pause()
    wait_for_player(widget)
    widget.toggle_player()
    # Opening the popup should add exactly one internal window.
    assert_window_count(mpris_manager, (number + 1))
    control_test = [('title', 'Never Gonna Give You Up'), ('artist', 'Rick Astley'), ('album', 'Rick Rolled')]
    for (name, expected) in control_test:
        (_, value) = widget.eval(f"self.extended_popup._updateable_controls['{name}'].text")
        assert (value == expected)
    (_, position) = widget.eval("self.extended_popup._updateable_controls['progress'].value")
    assert (position == '0')
    assert_is_playing(widget)
    # Pressing the popup's stop control must stop playback.
    widget.eval("self.extended_popup._updateable_controls['stop'].button_press(0, 0, 1)")
    assert_is_playing(widget, False)
def upgrade():
    """Recreate foreign keys with explicit ON DELETE behaviour.

    For each affected table the old auto-named FK constraints are dropped and
    recreated with ondelete='CASCADE' (dependent rows are removed with the
    parent) or ondelete='SET NULL' (the reference is cleared but the row is
    kept -- used for orders so purchase history survives).  Statement order
    matters: each drop must precede its matching create.
    """
    # association -> ticket_tag / ticket: cascade.
    op.drop_constraint(u'association_ticket_tag_id_fkey', 'association', type_='foreignkey')
    op.drop_constraint(u'association_ticket_id_fkey', 'association', type_='foreignkey')
    op.create_foreign_key(None, 'association', 'ticket_tag', ['ticket_tag_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'association', 'ticket', ['ticket_id'], ['id'], ondelete='CASCADE')
    # booked_ticket -> user / ticket: cascade.
    op.drop_constraint(u'booked_ticket_ticket_id_fkey', 'booked_ticket', type_='foreignkey')
    op.drop_constraint(u'booked_ticket_user_id_fkey', 'booked_ticket', type_='foreignkey')
    op.create_foreign_key(None, 'booked_ticket', 'user', ['user_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'booked_ticket', 'ticket', ['ticket_id'], ['id'], ondelete='CASCADE')
    # email_notification -> events / user: cascade.
    op.drop_constraint(u'email_notification_user_id_fkey', 'email_notification', type_='foreignkey')
    op.drop_constraint(u'email_notification_event_id_fkey', 'email_notification', type_='foreignkey')
    op.create_foreign_key(None, 'email_notification', 'events', ['event_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'email_notification', 'user', ['user_id'], ['id'], ondelete='CASCADE')
    # events -> creating user: cascade.
    op.drop_constraint(u'events_creator_id_fkey', 'events', type_='foreignkey')
    op.create_foreign_key(None, 'events', 'user', ['creator_id'], ['id'], ondelete='CASCADE')
    # orders: keep the order row, clear the reference (SET NULL).
    op.drop_constraint(u'orders_user_id_fkey', 'orders', type_='foreignkey')
    op.drop_constraint(u'orders_event_id_fkey', 'orders', type_='foreignkey')
    op.drop_constraint(u'orders_marketer_id_fkey', 'orders', type_='foreignkey')
    op.create_foreign_key(None, 'orders', 'user', ['marketer_id'], ['id'], ondelete='SET NULL')
    op.create_foreign_key(None, 'orders', 'user', ['user_id'], ['id'], ondelete='SET NULL')
    op.create_foreign_key(None, 'orders', 'events', ['event_id'], ['id'], ondelete='SET NULL')
    # orders_tickets join table: cascade.
    op.drop_constraint(u'orders_tickets_ticket_id_fkey', 'orders_tickets', type_='foreignkey')
    op.drop_constraint(u'orders_tickets_order_id_fkey', 'orders_tickets', type_='foreignkey')
    op.create_foreign_key(None, 'orders_tickets', 'ticket', ['ticket_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'orders_tickets', 'orders', ['order_id'], ['id'], ondelete='CASCADE')
    # ticket_holders -> ticket: cascade.
    op.drop_constraint(u'ticket_holders_ticket_id_fkey', 'ticket_holders', type_='foreignkey')
    op.create_foreign_key(None, 'ticket_holders', 'ticket', ['ticket_id'], ['id'], ondelete='CASCADE')
    # ticket_tag -> events: cascade.
    op.drop_constraint(u'ticket_tag_event_id_fkey', 'ticket_tag', type_='foreignkey')
    op.create_foreign_key(None, 'ticket_tag', 'events', ['event_id'], ['id'], ondelete='CASCADE')
    # user_system_role -> custom_sys_role / user: cascade.
    op.drop_constraint(u'user_system_role_user_id_fkey', 'user_system_role', type_='foreignkey')
    op.drop_constraint(u'user_system_role_role_id_fkey', 'user_system_role', type_='foreignkey')
    op.create_foreign_key(None, 'user_system_role', 'custom_sys_role', ['role_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'user_system_role', 'user', ['user_id'], ['id'], ondelete='CASCADE')
    # users_events_roles -> role / user: cascade.
    op.drop_constraint(u'users_events_roles_role_id_fkey', 'users_events_roles', type_='foreignkey')
    op.drop_constraint(u'users_events_roles_user_id_fkey', 'users_events_roles', type_='foreignkey')
    op.create_foreign_key(None, 'users_events_roles', 'role', ['role_id'], ['id'], ondelete='CASCADE')
    op.create_foreign_key(None, 'users_events_roles', 'user', ['user_id'], ['id'], ondelete='CASCADE')
class ReferenceSegmenterSemanticExtractor(ModelSemanticExtractor):
    """Converts (tag, layout block) pairs from the reference-segmenter model
    into semantic raw references, headings and notes."""

    def iter_semantic_content_for_entity_blocks(self, entity_tokens: Iterable[Tuple[(str, LayoutBlock)]], **kwargs) -> Iterable[SemanticContentWrapper]:
        """Yield SemanticHeading / SemanticRawReference / SemanticNote items.

        Small state machine: a '<label>' starts (or extends) the current raw
        reference; a '<reference>' completes and yields it.  The first
        '<reference>' block that does not "look like" a reference is treated
        as the section heading instead.  Any other tag becomes a note.
        """
        entity_tokens = list(entity_tokens)
        LOGGER.debug('entity_tokens: %s', entity_tokens)
        # Generates reference content ids 'b0', 'b1', ... (project helper).
        ids_iterator = iter(iter_ids('b'))
        # The reference currently being assembled, if any.
        ref: Optional[SemanticRawReference] = None
        is_first_ref = True
        for (name, layout_block) in entity_tokens:
            if (name == '<label>'):
                if (not ref):
                    ref = SemanticRawReference(content_id=next(ids_iterator, '?'))
                ref.add_content(SemanticLabel(layout_block=layout_block))
                continue
            if (name == '<reference>'):
                # Leading '<reference>' block that doesn't look like an actual
                # reference is assumed to be the section heading.
                if ((not ref) and is_first_ref and (not is_looks_like_reference(layout_block))):
                    (yield SemanticHeading(layout_block=layout_block))
                    is_first_ref = False
                    continue
                if (not ref):
                    ref = SemanticRawReference(content_id=next(ids_iterator, '?'))
                ref.add_content(SemanticRawReferenceText(layout_block=layout_block))
                # Reference is complete; yield it and reset the state.
                (yield ref)
                ref = None
                is_first_ref = False
                continue
            # Any other tag becomes a standalone note.
            (yield SemanticNote(layout_block=layout_block, note_type=name))
        # Flush a trailing reference that only ever received label content.
        if ref:
            (yield ref)
def get_thermoanalysis_from_hess_h5(h5_fn, T=T_DEFAULT, p=p_DEFAULT, point_group='c1', return_geom=False):
    """Run a thermochemistry analysis on a Hessian HDF5 file.

    Reads masses, vibrational frequencies, coordinates and scalar attributes
    (energy, multiplicity, atoms) from *h5_fn*, builds a QCData object and
    evaluates the thermochemistry at temperature *T* and pressure *p*.
    Returns the thermoanalysis result, or a (thermo, Geometry) pair when
    *return_geom* is true.
    """
    with h5py.File(h5_fn, 'r') as h5:
        # Array datasets are copied out of the file before the handle closes.
        masses, vibfreqs, coords3d = (h5[key][:] for key in ('masses', 'vibfreqs', 'coords3d'))
        energy, mult, atoms = (h5.attrs[key] for key in ('energy', 'mult', 'atoms'))
    qcd = QCData(
        {
            'masses': masses,
            'wavenumbers': vibfreqs,
            'coords3d': coords3d,
            'scf_energy': energy,
            'mult': mult,
        },
        point_group=point_group,
    )
    thermo = thermochemistry(qcd, temperature=T, pressure=p)
    if not return_geom:
        return thermo
    return (thermo, Geometry(atoms=atoms, coords=coords3d))
class Migration(migrations.Migration):
    """Add the denominator-related columns to the `measure` model."""

    dependencies = [('frontend', '0056_auto__1301')]

    operations = [
        migrations.AddField(
            model_name='measure',
            name='denominator_bnf_codes',
            field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=15), default=[], size=None),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='measure',
            name='denominator_bnf_codes_query',
            field=models.CharField(max_length=10000, null=True),
        ),
        migrations.AddField(
            model_name='measure',
            name='denominator_is_list_of_bnf_codes',
            field=models.BooleanField(default=True),
        ),
    ]
class EmailNotificationList(ResourceList):
    """List endpoint for a user's email notifications (GET only).

    Access is restricted to the user themselves via the `is_user_itself`
    permission decorator.
    """
    def query(self, view_kwargs):
        """Base query; filtered to the user identified in the URL kwargs."""
        query_ = self.session.query(EmailNotification)
        if view_kwargs.get('user_id'):
            user = safe_query_kwargs(User, view_kwargs, 'user_id')
            query_ = query_.join(User).filter((User.id == user.id))
        return query_
    view_kwargs = True
    methods = ['GET']
    decorators = (api.has_permission('is_user_itself', fetch='user_id', model=EmailNotification),)
    schema = EmailNotificationSchema
    # `query` below refers to the method object defined above; the data layer
    # invokes it to build the base query for each request.
    data_layer = {'session': db.session, 'model': EmailNotification, 'methods': {'query': query}}
class OptionPlotoptionsVectorSonificationDefaultspeechoptions(Options):
    """Generated config wrapper for Highcharts
    `plotOptions.vector.sonification.defaultSpeechOptions`.

    Each scalar option appears as a getter (returning the documented default
    via `_config_get`) immediately followed by a same-named setter; nested
    option groups return dedicated sub-option objects via `_config_sub_data`.
    NOTE(review): in the generated upstream these getter/setter pairs carry
    `@property` / `@<name>.setter` decorators which appear to have been
    stripped here (the later `def` would otherwise shadow the earlier one) --
    confirm against the original file.
    """
    def activeWhen(self) -> 'OptionPlotoptionsVectorSonificationDefaultspeechoptionsActivewhen':
        return self._config_sub_data('activeWhen', OptionPlotoptionsVectorSonificationDefaultspeechoptionsActivewhen)
    def language(self):
        return self._config_get('en-US')
    def language(self, text: str):
        self._config(text, js_type=False)
    def mapping(self) -> 'OptionPlotoptionsVectorSonificationDefaultspeechoptionsMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsVectorSonificationDefaultspeechoptionsMapping)
    def pointGrouping(self) -> 'OptionPlotoptionsVectorSonificationDefaultspeechoptionsPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsVectorSonificationDefaultspeechoptionsPointgrouping)
    def preferredVoice(self):
        return self._config_get(None)
    def preferredVoice(self, text: str):
        self._config(text, js_type=False)
    def showPlayMarker(self):
        return self._config_get(True)
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)
    def type(self):
        return self._config_get('speech')
    def type(self, text: str):
        self._config(text, js_type=False)
class ProPhotoRGBLinear(sRGB):
    """Linear-light ProPhoto RGB colour space.

    Registered as 'prophoto-rgb-linear', uses the D50 (2 degree) white point
    and converts to/from the 'xyz-d50' base space.
    """
    BASE = 'xyz-d50'
    NAME = 'prophoto-rgb-linear'
    SERIALIZE = ('--prophoto-rgb-linear',)
    WHITE = WHITES['2deg']['D50']
    def to_base(self, coords: Vector) -> Vector:
        """Linear ProPhoto RGB -> XYZ (D50)."""
        return lin_prophoto_to_xyz(coords)
    def from_base(self, coords: Vector) -> Vector:
        """XYZ (D50) -> linear ProPhoto RGB."""
        return xyz_to_lin_prophoto(coords)
def run(bench_path):
    """Apply not-yet-executed patches to the bench at *bench_path*.

    Patch names are read from `patches.txt` next to this module (blank lines
    and '#' comments ignored); patches already listed in the bench's own
    `patches.txt` are skipped.  The executed list is always written back,
    even if a patch raises.
    """
    here = os.path.dirname(os.path.abspath(__file__))
    source_patch_file = os.path.join(here, 'patches.txt')
    target_patch_file = os.path.join(os.path.abspath(bench_path), 'patches.txt')

    with open(source_patch_file) as f:
        patches = []
        for raw_line in f.read().splitlines():
            entry = raw_line.strip()
            if entry and not entry.startswith('#'):
                patches.append(entry)

    executed_patches = []
    if os.path.exists(target_patch_file):
        with open(target_patch_file) as f:
            executed_patches = f.read().splitlines()

    try:
        for patch in patches:
            if patch in executed_patches:
                continue
            # The first token of the patch entry is the module to import.
            module = importlib.import_module(patch.split()[0])
            execute = getattr(module, 'execute')
            result = execute(bench_path)
            # A falsy result means success; record the patch as executed.
            if not result:
                executed_patches.append(patch)
    finally:
        # Persist progress even when a patch failed part-way through.
        with open(target_patch_file, 'w') as f:
            f.write('\n'.join(executed_patches))
            f.write('\n')
def test_delete_stream_user_accessible_error(db, client, user, jwt):
    """Deleting a stream of an event the user cannot access must return 403."""
    stream = get_stream(db)
    # Blank the user's email and create a completed attendee order carrying
    # the same (blank) email on the stream's event.
    user._email = ''
    AttendeeOrderSubFactory(event=stream.rooms[0].event, order__status='completed', email='')
    db.session.commit()
    url = f'/v1/video-streams/{stream.id}'
    response = client.delete(url, content_type='application/vnd.api+json', headers=jwt)
    assert response.status_code == 403
    errors = json.loads(response.data)['errors']
    assert errors[0]['detail'] == "You don't have access to the provided event"
def denumpy_all(obj):
    """Recursively convert numpy values inside *obj* back to plain Python.

    Lists/tuples are mapped elementwise (preserving type via fmap), dicts are
    rebuilt with both keys and values converted, numpy scalars/arrays go
    through denumpy, and anything else is returned unchanged.
    """
    # dict is checked first; a dict is never a list/tuple so the order of the
    # first two checks does not affect behaviour.
    if isinstance(obj, dict):
        return _coconut.dict(((denumpy_all(key), denumpy_all(val)) for (key, val) in obj.items()))
    if isinstance(obj, (list, tuple)):
        return fmap(denumpy_all, obj)
    if isnumpy(obj):
        return denumpy(obj)
    return obj
class OptionSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Generated config wrapper for Highcharts
    `sonification.defaultInstrumentOptions.mapping.highpass`; exposes the
    nested `frequency` and `resonance` option groups."""
    def frequency(self) -> 'OptionSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        """Sub-options for the highpass filter frequency mapping."""
        return self._config_sub_data('frequency', OptionSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
    def resonance(self) -> 'OptionSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        """Sub-options for the highpass filter resonance mapping."""
        return self._config_sub_data('resonance', OptionSonificationDefaultinstrumentoptionsMappingHighpassResonance)
def test():
    """Check the learner's solution iterates entities and prints text+label.

    The feedback strings are user-facing German text (umlauts apparently lost
    in transit -- e.g. "uber"/"Entitaten"); they are runtime strings and are
    left untouched here.
    """
    assert ('for ent in doc.ents' in __solution__), 'Iterierst du uber die Entitaten?'
    assert ('print(ent.text, ent.label_)' in __solution__), 'Druckst du den Text und das Label?'
    __msg__.good('Gute Arbeit! Bisher lag das Modell jedes Mal richtig. In der nachsten Ubung siehst du was passiert, wenn das Modell einmal daneben liegt, und wie du das anpassen kannst.')
def test_remote_list_signals(remote):
    """list_signals() should forward to the client and unwrap the response."""
    ctx = FlyteContextManager.current_context()
    exec_id = WorkflowExecutionIdentifier('p', 'd', 'execid')
    sig_id = SignalIdentifier(signal_id='sigid', execution_id=exec_id).to_flyte_idl()
    int_type = TypeEngine.to_literal_type(int)
    literal = TypeEngine.to_literal(ctx, 3, int, int_type)
    signal = Signal(id=sig_id, type=int_type.to_flyte_idl(), value=literal.to_flyte_idl())
    # Stub the admin client so no network call is made.
    stub = MagicMock()
    stub.list_signals.return_value = SignalList(signals=[signal], token='')
    remote._client = stub
    results = remote.list_signals('execid', 'p', 'd', limit=10)
    assert len(results) == 1
def _alter_columns(column_action, columns, table):
    """Apply *column_action* to each (old, new) column pair of *table*.

    Per-column OperationalErrors are logged at info level and skipped; any
    other per-column failure is logged with a traceback.
    """
    column_action = column_action.upper()
    # Resolve the handler once -- it is invariant across the loop.  An
    # unknown action previously surfaced per column as a TypeError
    # ("'NoneType' object is not callable") logged as an unexpected error;
    # fail fast with a clear message instead.
    action = COLUMN_ACTION_MAPPING.get(column_action)
    if action is None:
        LOGGER.error('Unknown column action %s, table=%s', column_action, table.name)
        return
    for (old_column, new_column) in columns.items():
        try:
            action(table, old_column, new_column)
        except OperationalError:
            LOGGER.info('Failed to update db schema, table=%s', table.name)
        except Exception:
            LOGGER.exception('Unexpected error happened when attempting to update database schema, table: %s', table.name)
class OptionSeriesFunnelSonificationContexttracksMappingLowpass(Options):
    """Generated config wrapper for Highcharts
    `series.funnel.sonification.contextTracks.mapping.lowpass`; exposes the
    nested `frequency` and `resonance` option groups."""
    def frequency(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingLowpassFrequency':
        """Sub-options for the lowpass filter frequency mapping."""
        return self._config_sub_data('frequency', OptionSeriesFunnelSonificationContexttracksMappingLowpassFrequency)
    def resonance(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingLowpassResonance':
        """Sub-options for the lowpass filter resonance mapping."""
        return self._config_sub_data('resonance', OptionSeriesFunnelSonificationContexttracksMappingLowpassResonance)
# NOTE(review): this bare call expression looks like a stripped decorator line
# (originally "@_ns.route(...)") that registered the view below -- confirm.
_ns.route('/custom-dir/<ownername>/<dirname>/<uuid>/<package_name>/', methods=['POST'])
def webhooks_coprdir_custom(ownername, dirname, uuid, package_name):
    """Custom-webhook endpoint building *package_name* into a copr directory.

    The *uuid* path component must match the project's webhook secret.
    Returns plain-text (body, status) tuples on failure, otherwise delegates
    to custom_build_submit().
    """
    try:
        copr = CoprsLogic.get_by_ownername_and_dirname(ownername, dirname)
    except ObjectNotFound:
        return ('PROJECT_NOT_FOUND\n', 404)
    try:
        package = ComplexLogic.get_package(copr, package_name)
    except ObjectNotFound:
        return ('PACKAGE_NOT_FOUND\n', 404)
    # Reject callers that don't know the project's webhook secret.
    if (copr.webhook_secret != uuid):
        return ('BAD_UUID\n', 403)
    try:
        copr_dir = CoprDirsLogic.get_or_create(copr, dirname)
    except BadRequest:
        return ('CANT_CREATE_DIRECTORY\n', 400)
    except MalformedArgumentException:
        return ('MALFORMED_COPR_DIRNAME\n', 400)
    return custom_build_submit(copr, package, copr_dir)
def fmap(func, obj, **kwargs):
    """Functorial map: apply *func* over *obj*, preserving its type.

    Resolution order (first applicable path wins):
      1. the object's own `__fmap__` protocol,
      2. jax / numpy arrays via vectorize,
      3. async iterables via `_coconut_amap`,
      4. generic rebuild of the same container type from the mapped items.

    The only accepted keyword argument is `starmap_over_mappings`, which
    star-applies *func* to mapping items instead of passing (key, value)
    pairs.
    """
    starmap_over_mappings = kwargs.pop('starmap_over_mappings', False)
    if kwargs:
        raise _coconut.TypeError(('fmap() got unexpected keyword arguments ' + _coconut.repr(kwargs)))
    # 1. Object-defined __fmap__ takes precedence; NotImplementedError or a
    #    NotImplemented return falls through to the generic paths.
    obj_fmap = _coconut.getattr(obj, '__fmap__', None)
    if (obj_fmap is not None):
        try:
            result = obj_fmap(func)
        except _coconut.NotImplementedError:
            pass
        else:
            if (result is not _coconut.NotImplemented):
                return result
    # 2. Array types: vectorize func instead of Python-level iteration.
    if (obj.__class__.__module__ in _coconut.jax_numpy_modules):
        import jax.numpy as jnp
        return jnp.vectorize(func)(obj)
    if (obj.__class__.__module__ in _coconut.numpy_modules):
        return _coconut.numpy.vectorize(func)(obj)
    # 3. Async iterables map lazily through _coconut_amap (when available).
    obj_aiter = _coconut.getattr(obj, '__aiter__', None)
    if ((obj_aiter is not None) and (_coconut_amap is not None)):
        try:
            aiter = obj_aiter()
        except _coconut.NotImplementedError:
            pass
        else:
            if (aiter is not _coconut.NotImplemented):
                return _coconut_amap(func, aiter)
    # 4. Generic: rebuild the same container type from the mapped items;
    #    mappings are mapped over .items().
    if starmap_over_mappings:
        return _coconut_base_makedata(obj.__class__, (_coconut_starmap(func, obj.items()) if _coconut.isinstance(obj, _coconut.abc.Mapping) else _coconut_map(func, obj)))
    else:
        return _coconut_base_makedata(obj.__class__, _coconut_map(func, (obj.items() if _coconut.isinstance(obj, _coconut.abc.Mapping) else obj)))
class MatchableResource(object):
    """Enum-like wrapper around the flyteidl matchable-resource constants,
    with helpers converting between enum values and their string names.

    NOTE(review): `enum_to_string` / `string_to_enum` take `cls` as the first
    parameter but carry no `@classmethod` decorator here -- the decorators
    appear to have been stripped during extraction; confirm upstream.
    """
    TASK_RESOURCE = _matchable_resource.TASK_RESOURCE
    CLUSTER_RESOURCE = _matchable_resource.CLUSTER_RESOURCE
    EXECUTION_QUEUE = _matchable_resource.EXECUTION_QUEUE
    EXECUTION_CLUSTER_LABEL = _matchable_resource.EXECUTION_CLUSTER_LABEL
    QUALITY_OF_SERVICE_SPECIFICATION = _matchable_resource.QUALITY_OF_SERVICE_SPECIFICATION
    PLUGIN_OVERRIDE = _matchable_resource.PLUGIN_OVERRIDE

    def enum_to_string(cls, val):
        """Return the string name for enum value *val*, or '<UNKNOWN>'."""
        if (val == cls.TASK_RESOURCE):
            return 'TASK_RESOURCE'
        elif (val == cls.CLUSTER_RESOURCE):
            return 'CLUSTER_RESOURCE'
        elif (val == cls.EXECUTION_QUEUE):
            return 'EXECUTION_QUEUE'
        elif (val == cls.EXECUTION_CLUSTER_LABEL):
            return 'EXECUTION_CLUSTER_LABEL'
        elif (val == cls.QUALITY_OF_SERVICE_SPECIFICATION):
            return 'QUALITY_OF_SERVICE_SPECIFICATION'
        else:
            return '<UNKNOWN>'

    def string_to_enum(cls, val):
        """Return the enum value for string name *val*, or '<UNKNOWN>'."""
        if (val == 'TASK_RESOURCE'):
            return cls.TASK_RESOURCE
        elif (val == 'CLUSTER_RESOURCE'):
            return cls.CLUSTER_RESOURCE
        elif (val == 'EXECUTION_QUEUE'):
            return cls.EXECUTION_QUEUE
        elif (val == 'EXECUTION_CLUSTER_LABEL'):
            return cls.EXECUTION_CLUSTER_LABEL
        # BUG FIX: this branch previously compared against the *enum value*
        # (cls.QUALITY_OF_SERVICE_SPECIFICATION) and returned the *string*,
        # inverting the mapping relative to every other branch.
        elif (val == 'QUALITY_OF_SERVICE_SPECIFICATION'):
            return cls.QUALITY_OF_SERVICE_SPECIFICATION
        else:
            return '<UNKNOWN>'
class TestEjectCommands(AEATestCaseMany):
    """End-to-end checks for the `aea eject` command."""

    def test_eject_commands_positive(self):
        """Ejecting vendor packages moves them out of `vendor/` into the agent."""
        agent_name = 'test_aea'
        self.create_agents(agent_name)
        self.set_agent_context(agent_name)
        cwd = os.path.join(self.t, agent_name)
        self.add_item('connection', str(GYM_CONNECTION_PUBLIC_ID))
        self.add_item('skill', str(GYM_SKILL_PUBLIC_ID))
        self.add_item('contract', str(ERC1155_PUBLIC_ID))

        def assert_ejected(item_dir, package_name):
            # After ejecting, the package must be gone from the vendor tree
            # and present at the top level of the agent.
            vendor_dir = os.path.join(cwd, 'vendor', 'fetchai', item_dir)
            assert package_name not in os.listdir(vendor_dir)
            assert package_name in os.listdir(os.path.join(cwd, item_dir))

        self.eject_item('skill', str(GYM_SKILL_PUBLIC_ID))
        assert_ejected('skills', 'gym')
        self.eject_item('connection', str(GYM_CONNECTION_PUBLIC_ID))
        assert_ejected('connections', 'gym')
        # Ejecting the connection drags the gym protocol along; eject it too.
        self.eject_item('protocol', str(GymMessage.protocol_id))
        assert_ejected('protocols', 'gym')
        self.eject_item('contract', str(ERC1155_PUBLIC_ID))
        assert_ejected('contracts', 'erc1155')
def pytest_runtest_setup(item):
    """Skip tests not matching --mode; inject poll_bot into integration tests."""
    module_name = item.module.__name__
    is_unit = module_name.startswith('tests.unit')
    is_integration = module_name.startswith('tests.integration')
    mode = item.config.getoption('--mode')
    # In 'unit' mode skip everything outside tests.unit; in 'integration'
    # mode skip everything outside tests.integration.
    if mode == 'unit' and not is_unit:
        pytest.skip('test is a unit test', allow_module_level=True)
    elif mode == 'integration' and not is_integration:
        pytest.skip('test is an integration test', allow_module_level=True)
    if is_integration:
        # Integration tests always get the poll_bot fixture.
        item.fixturenames.append('poll_bot')
def make_rst(inpath, outpath):
    """Convert a plain-text file into a minimal reST page.

    The output gets a title derived from the input file name (underscores
    become spaces, title-cased), an '=' underline, and the original content
    indented four spaces inside a literal (`::`) block.
    """
    base = os.path.basename(inpath).partition('.')[0]
    title = base.replace('_', ' ').title()
    header = f"{title}\n{'=' * len(title)}\n\n::\n\n"
    with open(inpath, 'r') as infile, open(outpath, 'w+') as outfile:
        outfile.write(header)
        for line in infile:
            outfile.write('    ' + line)
def test_defaults():
    """Render the metricbeat chart with an empty config and pin the defaults:
    daemonset + deployment resources, security context, configmaps, volume
    wiring and resource requests/limits."""
    config = '\n '
    r = helm_template(config)
    # Top-level resources exist: host daemonset, metrics deployment and
    # the bundled kube-state-metrics deployment.
    assert (name in r['daemonset'])
    assert ((name + '-metrics') in r['deployment'])
    assert (kube_state_metric_name in r['deployment'])
    assert (r['deployment'][(name + '-metrics')]['spec']['template']['spec']['containers'][0]['env'][1]['value'] == '$(RELEASE_NAME_KUBE_STATE_METRICS_SERVICE_HOST):$(RELEASE_NAME_KUBE_STATE_METRICS_SERVICE_PORT_HTTP)')
    # Daemonset container: name, image and probes.
    c = r['daemonset'][name]['spec']['template']['spec']['containers'][0]
    assert (c['name'] == project)
    assert c['image'].startswith((('docker.elastic.co/beats/' + project) + ':'))
    assert (c['env'][0]['name'] == 'POD_NAMESPACE')
    assert (c['env'][0]['valueFrom']['fieldRef']['fieldPath'] == 'metadata.namespace')
    assert ('curl --fail 127.0.0.1:5066' in c['livenessProbe']['exec']['command'][(- 1)])
    assert ('metricbeat test output' in c['readinessProbe']['exec']['command'][(- 1)])
    # Pod-spec defaults: no tolerations, hostNetwork or dnsPolicy set.
    assert (r['daemonset'][name]['spec']['template']['spec']['tolerations'] == [])
    assert ('hostNetwork' not in r['daemonset'][name]['spec']['template']['spec'])
    assert ('dnsPolicy' not in r['daemonset'][name]['spec']['template']['spec'])
    assert ('hostNetwork' not in r['deployment'][(name + '-metrics')]['spec']['template']['spec'])
    assert ('dnsPolicy' not in r['deployment'][(name + '-metrics')]['spec']['template']['spec'])
    assert (r['deployment'][(name + '-metrics')]['spec']['template']['spec']['tolerations'] == [])
    # Security context: runs as root but NOT privileged.
    assert (r['daemonset'][name]['spec']['template']['spec']['containers'][0]['securityContext']['runAsUser'] == 0)
    assert (r['daemonset'][name]['spec']['template']['spec']['containers'][0]['securityContext']['privileged'] == False)
    assert (r['deployment'][(name + '-metrics')]['spec']['template']['spec']['containers'][0]['securityContext']['runAsUser'] == 0)
    assert (r['deployment'][(name + '-metrics')]['spec']['template']['spec']['containers'][0]['securityContext']['privileged'] == False)
    assert ('imagePullSecrets' not in r['daemonset'][name]['spec']['template']['spec'])
    assert (r['daemonset'][name]['spec']['updateStrategy']['type'] == 'RollingUpdate')
    assert (r['daemonset'][name]['spec']['template']['spec']['serviceAccountName'] == name)
    # Config maps: per-workload configs exist; the legacy shared one does not.
    cfg = r['configmap']
    assert ((name + '-config') not in cfg)
    assert ((name + '-daemonset-config') in cfg)
    assert ((name + '-deployment-config') in cfg)
    assert ('metricbeat.yml' in cfg[(name + '-daemonset-config')]['data'])
    assert ('metricbeat.yml' in cfg[(name + '-deployment-config')]['data'])
    # system module only on the daemonset; state_pod only on the deployment.
    assert ('module: system' in cfg[(name + '-daemonset-config')]['data']['metricbeat.yml'])
    assert ('module: system' not in cfg[(name + '-deployment-config')]['data']['metricbeat.yml'])
    assert ('state_pod' not in cfg[(name + '-daemonset-config')]['data']['metricbeat.yml'])
    assert ('state_pod' in cfg[(name + '-deployment-config')]['data']['metricbeat.yml'])
    # Volume wiring: each workload mounts its own configmap plus host data dir.
    daemonset = r['daemonset'][name]['spec']['template']['spec']
    assert ({'configMap': {'name': (name + '-config'), 'defaultMode': 384}, 'name': (project + '-config')} not in daemonset['volumes'])
    assert ({'configMap': {'name': (name + '-daemonset-config'), 'defaultMode': 384}, 'name': (project + '-config')} in daemonset['volumes'])
    assert ({'name': 'data', 'hostPath': {'path': (('/var/lib/' + name) + '-default-data'), 'type': 'DirectoryOrCreate'}} in daemonset['volumes'])
    assert ({'mountPath': '/usr/share/metricbeat/metricbeat.yml', 'name': (project + '-config'), 'subPath': 'metricbeat.yml', 'readOnly': True} in daemonset['containers'][0]['volumeMounts'])
    deployment = r['deployment'][(name + '-metrics')]['spec']['template']['spec']
    assert ({'configMap': {'name': (name + '-config'), 'defaultMode': 384}, 'name': (project + '-config')} not in deployment['volumes'])
    assert ({'configMap': {'name': (name + '-deployment-config'), 'defaultMode': 384}, 'name': (project + '-config')} in deployment['volumes'])
    assert ({'mountPath': '/usr/share/metricbeat/metricbeat.yml', 'name': (project + '-config'), 'subPath': 'metricbeat.yml', 'readOnly': True} in deployment['containers'][0]['volumeMounts'])
    # Default resource requests/limits are identical for both workloads.
    assert (daemonset['containers'][0]['resources'] == {'requests': {'cpu': '100m', 'memory': '100Mi'}, 'limits': {'cpu': '1000m', 'memory': '200Mi'}})
    assert (deployment['containers'][0]['resources'] == {'requests': {'cpu': '100m', 'memory': '100Mi'}, 'limits': {'cpu': '1000m', 'memory': '200Mi'}})
    assert ('hostAliases' not in r['daemonset'][name]['spec']['template']['spec'])
    assert ('hostAliases' not in r['deployment'][(name + '-metrics')]['spec']['template']['spec'])
class FFTShift(Computation):
    """Reikna computation performing an fftshift over the given axes.

    Parameters: `output` ('o'), `input` ('i') and an int32 `inverse` flag
    (default 0).  When every shifted axis has even length the shift can be
    performed in place with half the traversal; otherwise an out-of-place
    kernel writes into a temporary which is then copied to the output.
    """
    def __init__(self, arr_t, axes=None):
        Computation.__init__(self, [Parameter('output', Annotation(arr_t, 'o')), Parameter('input', Annotation(arr_t, 'i')), Parameter('inverse', Annotation(numpy.int32), default=0)])
        # Default: shift over all axes of the array.
        if (axes is None):
            axes = tuple(range(len(arr_t.shape)))
        else:
            axes = tuple(axes)
        self._axes = axes
    def _build_trivial_plan(self, plan_factory, output, input_):
        # Every shifted axis has length 1 -- the shift is the identity, so a
        # plain copy of input to output suffices.
        plan = plan_factory()
        copy_trf = copy(input_, out_arr_t=output)
        copy_comp = PureParallel.from_trf(copy_trf, copy_trf.input)
        plan.computation_call(copy_comp, output, input_)
        return plan
    def _build_plan(self, plan_factory, device_params, output, input_, inverse):
        if (helpers.product([input_.shape[i] for i in self._axes]) == 1):
            return self._build_trivial_plan(plan_factory, output, input_)
        plan = plan_factory()
        axes = tuple(sorted(self._axes))
        shape = list(input_.shape)
        if all((((shape[axis] % 2) == 0) for axis in axes)):
            # All even-length axes: elements swap in pairs, so only half of
            # the first shifted axis needs to be traversed by the kernel.
            shape[axes[0]] //= 2
            plan.kernel_call(TEMPLATE.get_def('fftshift_inplace'), [output, input_], kernel_name='kernel_fftshift_inplace', global_size=shape, render_kwds=dict(axes=axes))
        else:
            # An odd-length axis is present: shift out of place into a
            # temporary array, then copy the temporary into the output.
            temp = plan.temp_array_like(output)
            plan.kernel_call(TEMPLATE.get_def('fftshift_outplace'), [temp, input_, inverse], kernel_name='kernel_fftshift_outplace', global_size=shape, render_kwds=dict(axes=axes))
            copy_trf = copy(input_, out_arr_t=output)
            copy_comp = PureParallel.from_trf(copy_trf, copy_trf.input)
            plan.computation_call(copy_comp, output, temp)
        return plan
class LittleSistersEssayTest(unittest.TestCase):
    """Exercism test suite for the "Little Sister's Essay" exercise.

    NOTE(review): the bare ".task(taskno=N)" lines below are syntactically
    invalid as written; they look like the remnants of stripped
    "@pytest.mark.task(taskno=N)" decorators -- confirm against the upstream
    exercise source.  They are kept byte-identical here.
    """
    .task(taskno=1)
    def test_capitalize_word(self):
        # Task 1: capitalize_title on a single word.
        actual_result = capitalize_title('canopy')
        expected = 'Canopy'
        error_message = f'Called capitalize_title("canopy"). The function returned "{actual_result}", but the tests expected "{expected}" for the title.'
        self.assertEqual(actual_result, expected, msg=error_message)
    .task(taskno=1)
    def test_capitalize_title(self):
        # Task 1: capitalize_title on a multi-word sentence.
        actual_result = capitalize_title('fish are cold blooded')
        expected = 'Fish Are Cold Blooded'
        error_message = f'Called capitalize_title("fish are cold blooded"). The function returned "{actual_result}", but the tests expected "{expected}" for the title.'
        self.assertEqual(actual_result, expected, msg=error_message)
    .task(taskno=2)
    def test_sentence_ending(self):
        # Task 2: check_sentence_ending is True for a trailing period.
        actual_result = check_sentence_ending('Snails can sleep for 3 years.')
        expected = True
        error_message = f'Called check_sentence_ending("Snails can sleep for 3 years."). The function returned {actual_result}, but the tests expected {expected} for a period ending.'
        self.assertEqual(actual_result, expected, msg=error_message)
    .task(taskno=2)
    def test_sentence_ending_without_period(self):
        # Task 2: check_sentence_ending is False without a trailing period.
        actual_result = check_sentence_ending('Fittonia are nice')
        expected = False
        error_message = f'Called check_sentence_ending("Fittonia are nice"). The function returned {actual_result}, but the tests expected {expected} for a period ending.'
        self.assertEqual(actual_result, expected, msg=error_message)
    .task(taskno=3)
    def test_remove_extra_spaces_only_start(self):
        # Task 3: clean_up_spacing strips leading whitespace.
        actual_result = clean_up_spacing(' A rolling stone gathers no moss')
        expected = 'A rolling stone gathers no moss'
        error_message = f'Called clean_up_spacing(" A rolling stone gathers no moss"). The function returned "{actual_result}", but the tests expected "{expected}" as a cleaned string.'
        self.assertEqual(actual_result, expected, msg=error_message)
    .task(taskno=3)
    def test_remove_extra_spaces(self):
        # Task 3: clean_up_spacing strips both leading and trailing whitespace.
        actual_result = clean_up_spacing(" Elephants can't jump. ")
        expected = "Elephants can't jump."
        error_message = f"""Called clean_up_spacing(" Elephants can't jump. ")The function returned "{actual_result}", but the tests expected "{expected}" as a cleaned string."""
        self.assertEqual(actual_result, expected, msg=error_message)
    .task(taskno=4)
    def test_replace_word_choice(self):
        # Task 4: replace_word_choice substitutes a present word.
        actual_result = replace_word_choice('Animals are cool.', 'cool', 'awesome')
        expected = 'Animals are awesome.'
        error_message = f'Called replace_word_choice("Animals are cool.", "cool", "awesome"). The function returned "{actual_result}", but the tests expected "{expected}" after the word replacement.'
        self.assertEqual(actual_result, expected, msg=error_message)
    .task(taskno=4)
    def test_replace_word_not_exist(self):
        # Task 4: replace_word_choice leaves the sentence unchanged when the
        # word to replace is absent.
        actual_result = replace_word_choice('Animals are cool.', 'small', 'tiny')
        expected = 'Animals are cool.'
        error_message = f'Called replace_word_choice("Animals are cool.", "small", "tiny"). The function returned "{actual_result}", but the tests expected "{expected}", because the word to be replaced is not in the sentence.'
        self.assertEqual(actual_result, expected, msg=error_message)
class Pause(AbstractCommand):
    """`pause` subcommand: shut down a cluster's VMs, keeping disks/config."""

    def setup(self, subparsers):
        """Register the `pause` subcommand and its arguments."""
        parser = subparsers.add_parser('pause', help='Pause a cluster by shutting down existing VMs, retaining disks and configuration.')
        parser.set_defaults(func=self)
        parser.add_argument('cluster', help='name of the cluster')
        parser.add_argument('--yes', action='store_true', default=False, help='Assume `yes` to all queries and do not prompt.')

    def execute(self):
        """Load the named cluster, confirm with the user, and pause it."""
        cluster_name = self.params.cluster
        creator = make_creator(self.params.config, storage_path=self.params.storage)
        try:
            cluster = creator.load_cluster(cluster_name)
        except (ClusterNotFound, ConfigurationError) as err:
            log.error('Cannot load cluster `%s`: %s', cluster_name, err)
            return os.EX_NOINPUT
        # Interactive confirmation unless --yes was given; aborts on "no".
        if not self.params.yes:
            confirm_or_abort('Do you want really want to pause cluster `{cluster_name}`?'.format(cluster_name=cluster_name), msg='Aborting upon user request.')
        print('Pausing cluster `%s` ...' % cluster_name)
        cluster.pause()
def test_regression():
    """Regression cases for the resolution prover.

    NOTE(review): some operator glyphs inside the expression strings appear
    to have been lost in extraction (e.g. the conjunction symbol between
    'Q(z)Q(x)') -- confirm against upstream.  The strings are runtime input
    to expr() and are left untouched.
    """
    assert proves(expr('FA x, ~P(x, x)'), expr('~FA u, FA v, P(g(f(v)), g(u))'))
    e1 = expr('z,x, ((Q(z)Q(x)))')
    e2 = expr('z,x, ((Q(x)Q(z)))')
    # Strict proof is direction-sensitive for these quantified forms.
    assert (not strict_proves(e1, e2))
    assert strict_proves(e2, e1)
    e3 = expr('y,z,x, (Q(x,z)->Q(y,x))')
    e4 = expr('y,x, ((R(x,z)R(x))->R(y))')
    assert strict_proves(e3, e4)
    assert (not strict_proves(e4, e3))
    # Unification maps variables ?x/?y to the matching constants.
    assert (expr('Q(?x) & P(?y)').find_unification(expr('Q(a) & P(b)')) == _coconut.dict(((Var('x'), Const('a')), (Var('y'), Const('b')))))
def test_skipkeys():
    """plistlib must silently drop non-string keys when skipkeys=True."""
    pl = {42: 'aNumber', 'snake': 'aWord'}
    # dumps(): the int key 42 is skipped rather than raising.
    data = plistlib.dumps(pl, skipkeys=True, sort_keys=False)
    pl2 = plistlib.loads(data)
    assert (pl2 == {'snake': 'aWord'})
    # dump() to a file object behaves the same way.
    fp = BytesIO()
    plistlib.dump(pl, fp, skipkeys=True, sort_keys=False)
    # FIX: use the captured buffer -- the original assigned `data` and then
    # called fp.getvalue() a second time, leaving the variable unused.
    data = fp.getvalue()
    pl2 = plistlib.loads(data)
    assert (pl2 == {'snake': 'aWord'})
class Strategy():
    """Sell-in-May seasonal backtest (pinkfish): buy on the first trading day
    of November, sell on the first trading day of May or at end of data."""

    def __init__(self, symbol, capital, start, end):
        self.symbol = symbol
        self.capital = capital
        self.start = start
        self.end = end
        self.ts = None      # price timeseries
        self.rlog = None    # raw trade log
        self.tlog = None    # trade log (TradeLog, later its DataFrame log)
        self.dbal = None    # daily balance
        self.stats = None   # summary statistics

    def _algo(self):
        """Run the trading loop over the prepared timeseries."""
        # BUG FIX: `capital` was referenced as a bare name (NameError);
        # it is stored on the instance.
        pf.TradeLog.cash = self.capital
        for (i, row) in enumerate(self.ts.itertuples()):
            date = row.Index.to_pydatetime()
            end_flag = pf.is_last_row(self.ts, i)
            if (self.tlog.shares == 0):
                # Flat: buy on the first trading day of November.
                if ((row.month == 11) and row.first_dotm):
                    self.tlog.buy(date, row.close)
            elif (((row.month == 5) and row.first_dotm) or end_flag):
                # In the market: sell on the first trading day of May, or
                # liquidate on the last row of data.
                self.tlog.sell(date, row.close)
            self.dbal.append(date, row.close)

    def run(self):
        """Fetch and prepare data, run the algorithm, collect logs and stats."""
        self.ts = pf.fetch_timeseries(self.symbol)
        self.ts = pf.select_tradeperiod(self.ts, self.start, self.end, use_adj=True)
        self.ts = pf.calendar(self.ts)
        (self.ts, self.start) = pf.finalize_timeseries(self.ts, self.start, dropna=True, drop_columns=['open', 'high', 'low'])
        # BUG FIX: `symbol` was referenced as a bare name (NameError).
        self.tlog = pf.TradeLog(self.symbol)
        self.dbal = pf.DailyBal()
        self._algo()
        self._get_logs()
        self._get_stats()

    def _get_logs(self):
        """Materialize the raw/trade/daily-balance logs (replaces self.tlog)."""
        self.rlog = self.tlog.get_log_raw()
        self.tlog = self.tlog.get_log()
        self.dbal = self.dbal.get_log(self.tlog)

    def _get_stats(self):
        """Compute summary statistics for the backtest."""
        # BUG FIX: `s.stats` referenced an undefined name `s`; the result
        # belongs on the instance.
        self.stats = pf.stats(self.ts, self.tlog, self.dbal, self.capital)
class OptionSeriesColumnpyramidDatalabels(Options):
    """Generated config wrapper for Highcharts `series.columnpyramid.dataLabels`.

    Each scalar option appears as a getter (returning the documented default
    via `_config_get`) immediately followed by a same-named setter calling
    `_config`; nested option groups (animation, filter, textPath) return
    dedicated sub-option objects via `_config_sub_data`.
    NOTE(review): in the generated upstream these getter/setter pairs carry
    `@property` / `@<name>.setter` decorators which appear to have been
    stripped here (otherwise the later `def` shadows the earlier one) --
    confirm against the original file.
    """
    def align(self):
        return self._config_get('undefined')
    def align(self, text: str):
        self._config(text, js_type=False)
    def allowOverlap(self):
        return self._config_get(False)
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self) -> 'OptionSeriesColumnpyramidDatalabelsAnimation':
        return self._config_sub_data('animation', OptionSeriesColumnpyramidDatalabelsAnimation)
    def backgroundColor(self):
        return self._config_get(None)
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)
    def borderColor(self):
        return self._config_get(None)
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderRadius(self):
        return self._config_get(0)
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    def borderWidth(self):
        return self._config_get(0)
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def crop(self):
        return self._config_get(True)
    def crop(self, flag: bool):
        self._config(flag, js_type=False)
    def defer(self):
        return self._config_get(True)
    def defer(self, flag: bool):
        self._config(flag, js_type=False)
    def enabled(self):
        return self._config_get(False)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def filter(self) -> 'OptionSeriesColumnpyramidDatalabelsFilter':
        return self._config_sub_data('filter', OptionSeriesColumnpyramidDatalabelsFilter)
    def format(self):
        return self._config_get('point.value')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get(None)
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def inside(self):
        return self._config_get(None)
    def inside(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def overflow(self):
        return self._config_get('justify')
    def overflow(self, text: str):
        self._config(text, js_type=False)
    def padding(self):
        return self._config_get(5)
    def padding(self, num: float):
        self._config(num, js_type=False)
    def position(self):
        return self._config_get('center')
    def position(self, text: str):
        self._config(text, js_type=False)
    def rotation(self):
        return self._config_get(0)
    def rotation(self, num: float):
        self._config(num, js_type=False)
    def shadow(self):
        return self._config_get(False)
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    def shape(self):
        return self._config_get('square')
    def shape(self, text: str):
        self._config(text, js_type=False)
    def style(self):
        return self._config_get(None)
    def style(self, value: Any):
        self._config(value, js_type=False)
    def textPath(self) -> 'OptionSeriesColumnpyramidDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionSeriesColumnpyramidDatalabelsTextpath)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
    def verticalAlign(self):
        return self._config_get('undefined')
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)
    def x(self):
        return self._config_get(0)
    def x(self, num: float):
        self._config(num, js_type=False)
    def y(self):
        return self._config_get('undefined')
    def y(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(6)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def parse_predef_search_cmd(cmd: str, editor: aqt.editor.Editor):
    """Run a predefined search command and render its results.

    `cmd` has the shape "<prefix> <stype> <limit> [deck [deck ...]]"; the
    prefix token is discarded, `stype` selects the search, `limit` caps the
    number of results, and the remaining tokens are deck names.
    """
    if not check_index():
        return
    index = get_index()
    # Drop the command prefix, then split once instead of re-splitting per field.
    cmd = ' '.join(cmd.split()[1:])
    parts = cmd.split(' ')
    stype = parts[0]
    limit = int(parts[1])
    decks = parts[2:]
    stamp = set_stamp()
    index.lastSearch = (None, decks, stype, limit)
    if stype == 'lowestPerf':
        res = findNotesWithLowestPerformance(decks, limit, index.pinned)
    elif stype == 'highestPerf':
        res = findNotesWithHighestPerformance(decks, limit, index.pinned)
    elif stype == 'lastAdded':
        res = get_notes_by_created_date(index, editor, decks, limit, 'desc')
    elif stype == 'firstAdded':
        res = get_notes_by_created_date(index, editor, decks, limit, 'asc')
    elif stype == 'lastModified':
        res = get_last_modified_notes(index, decks, limit)
    elif stype == 'lowestRet':
        res = findNotesWithLowestPerformance(decks, limit, index.pinned, retOnly=True)
    elif stype == 'highestRet':
        res = findNotesWithHighestPerformance(decks, limit, index.pinned, retOnly=True)
    elif stype == 'longestText':
        res = findNotesWithLongestText(decks, limit, index.pinned)
    elif stype == 'randomUntagged':
        res = getRandomUntagged(decks, limit)
    elif stype == 'lastUntagged':
        res = get_last_untagged(decks, limit)
    elif stype == 'highestInterval':
        res = getSortedByInterval(decks, limit, index.pinned, 'desc')
    elif stype == 'lowestInterval':
        res = getSortedByInterval(decks, limit, index.pinned, 'asc')
    elif stype == 'lastReviewed':
        res = getLastReviewed(decks, limit)
    elif stype == 'lastLapses':
        res = getLastLapses(decks, limit)
    elif stype == 'longestTime':
        res = getByTimeTaken(decks, limit, 'desc')
    elif stype == 'shortestTime':
        res = getByTimeTaken(decks, limit, 'asc')
    else:
        # BUG FIX: an unknown stype previously left `res` unbound and the call
        # below crashed with NameError; render an empty result set instead.
        res = []
    UI.print_search_results(['Anki', 'Predef. search', stype], res, stamp)
# NOTE(review): the bare string expression below is almost certainly the
# remnant of a stripped registry decorator (e.g. `@registry.reg("cuda....")`
# in AITemplate-style codegen) — confirm against the original source.
('cuda.perm021fc_ccr_bias_permute.func_decl')
def gen_function_decl(func_attrs):
    """Render the C++ function declaration for this op.

    `func_attrs` must provide 'name' plus two 'input_accessors' entries whose
    `original_shapes` give the input and weight ranks used by the template.
    """
    func_name = func_attrs['name']
    input_ndims = len(func_attrs['input_accessors'][0].original_shapes)
    weight_ndims = len(func_attrs['input_accessors'][1].original_shapes)
    return common_bias.FUNC_DECL_TEMPLATE.render(func_name=func_name, input_ndims=input_ndims, weight_ndims=weight_ndims)
def get_business_category_display_names(business_category_list):
    """Map business-category codes to display names, dropping unknown codes.

    Order of the input list is preserved; codes with no (truthy) lookup entry
    are silently skipped.
    """
    lookup = BUSINESS_CATEGORIES_LOOKUP_DICT.get
    return [display for display in map(lookup, business_category_list) if display]
class AlertPkt(object):
    """Snort alert packet message parsed from the wire format.

    Layout: 256-byte alert message, pcap packet header, five 32-bit offsets/
    values plus the raw packet bytes, then the Snort event record.
    """

    _ALERTMSG_PACK_STR = '!256s'
    _ALERTPKT_PART_PACK_STR = '!IIIII65535s'
    _ALERTPKT_SIZE = 65863

    def __init__(self, alertmsg, pkth, dlthdr, nethdr, transhdr, data, val, pkt, event):
        self.alertmsg = alertmsg    # raw 256-byte alert text
        self.pkth = pkth            # PcapPktHdr32 instance
        self.dlthdr = dlthdr
        self.nethdr = nethdr
        self.transhdr = transhdr
        self.data = data
        self.val = val
        self.pkt = pkt              # raw captured packet bytes
        self.event = event          # Event instance

    @classmethod
    def parser(cls, buf):
        """Parse `buf` into an AlertPkt instance.

        FIX: the first parameter is `cls` and the method is used without an
        instance, so the (apparently stripped) @classmethod decorator is
        restored. `calcsize` is also qualified as `struct.calcsize` for
        consistency with the `struct.unpack_from` calls.
        """
        alertmsg = struct.unpack_from(cls._ALERTMSG_PACK_STR, buf)
        offset = struct.calcsize(cls._ALERTMSG_PACK_STR)
        pkth = PcapPktHdr32.parser(buf, offset)
        offset += PcapPktHdr32._SIZE
        (dlthdr, nethdr, transhdr, data, val, pkt) = struct.unpack_from(cls._ALERTPKT_PART_PACK_STR, buf, offset)
        offset += struct.calcsize(cls._ALERTPKT_PART_PACK_STR)
        event = Event.parser(buf, offset)
        msg = cls(alertmsg, pkth, dlthdr, nethdr, transhdr, data, val, pkt, event)
        return msg
class LLMAgentBase():
    """Thin wrapper around a LangChain LLM, prompt template and chain."""

    def __init__(self, api_key, model_name):
        # Only stores configuration; the model/chain are built lazily by
        # _init_prompt() and init_llm().
        self.api_key = api_key
        self.model_name = model_name
        self.prompt_tpl = None
        self.llm = None
        self.llmchain = None

    def _init_prompt(self, prompt=None):
        """Build the single-variable ('content') prompt template."""
        prompt_tpl = PromptTemplate(input_variables=['content'], template=prompt)
        print(f'Initialized prompt: {prompt_tpl}')
        self.prompt_tpl = prompt_tpl

    def init_llm(self, provider=None, model_name=None, temperature=0, create_default_chain=True):
        """Instantiate the chat model for `provider` ('openai' or 'google').

        Raises:
            ValueError: if the provider is not supported.
        """
        provider = provider or os.getenv('LLM_PROVIDER', 'openai')
        llm = None
        if provider == 'openai':
            model_name = model_name or os.getenv('OPENAI_MODEL', 'gpt-3.5-turbo')
            llm = ChatOpenAI(model_name=model_name, temperature=temperature)
        elif provider == 'google':
            model_name = model_name or os.getenv('GOOGLE_MODEL', 'gemini-pro')
            llm = ChatGoogleGenerativeAI(model=model_name, temperature=temperature)
        else:
            print(f'[ERROR] Non-supported LLM provider: {provider}')
            # BUG FIX: a bare `raise` here had no active exception, so it
            # itself crashed with "RuntimeError: No active exception to
            # re-raise". Raise a meaningful error instead.
            raise ValueError(f'Non-supported LLM provider: {provider}')
        self.llm = llm
        if create_default_chain:
            self.llmchain = LLMChain(llm=self.llm, prompt=self.prompt_tpl)
        print(f'LLM chain initalized, provider: {provider}, model_name: {model_name}, temperature: {temperature}')

    def get_num_tokens(self, text):
        """Delegate token counting to the underlying LLM."""
        return self.llm.get_num_tokens(text)
def test_regression_cv_3_and_r2(load_diabetes_dataset):
    """SelectByShuffling with 3-fold CV and r2 scoring on the diabetes data
    should drop features 0, 6, 7 and 9 at a 0.05 threshold (random_state=1)."""
    (X, y) = load_diabetes_dataset
    sel = SelectByShuffling(estimator=LinearRegression(), scoring='r2', cv=3, threshold=0.05, random_state=1)
    sel.fit(X, y)
    # Expected surviving columns after the drop.
    Xtransformed = X[[1, 2, 3, 4, 5, 8]].copy()
    # Fitted parameters are stored unchanged.
    assert (sel.cv == 3)
    assert (sel.scoring == 'r2')
    assert (sel.threshold == 0.05)
    # Baseline model performance before shuffling features.
    assert (np.round(sel.initial_model_performance_, 3) == 0.489)
    assert (sel.features_to_drop_ == [0, 6, 7, 9])
    pd.testing.assert_frame_equal(sel.transform(X), Xtransformed)
def replace_name(cat_id: int, column: str, data: int, talent_names: list[list[str]], new_data: dict[(Any, Any)]) -> dict[(str, Any)]:
    """Write `data` into new_data[cat_id][column]; for text-ID columns,
    substitute the talent display name and truncate at the first '<br>'.

    Mutates and returns `new_data`.
    """
    row = new_data[cat_id]
    row[column] = data
    if ('textID' in column) or ('tFxtID_F' in column):
        # `data` is an index into the talent-name table; column 1 holds the name.
        value = talent_names[data][1]
        cut = value.find('<br>')
        if cut != -1:
            value = value[:cut]
        row[column] = value
    return new_data
class XmlFilter(filters.Filter):
    """Spelling filter for XML content.

    Walks a BeautifulSoup tree collecting text blocks, selected attribute
    values and (optionally) comments, each paired with a CSS-like selector
    describing where the text came from.
    """

    default_capture = ['*|*']
    break_tags = set()

    def __init__(self, options, default_encoding='utf-8'):
        """Initialize with user options and a fallback encoding."""
        self.user_break_tags = set()
        super().__init__(options, default_encoding)

    def get_default_config(self):
        """Return the default plugin configuration."""
        return {'comments': True, 'attributes': [], 'break_tags': [], 'ignores': [], 'captures': self.default_capture, 'namespaces': {}}

    def setup(self):
        """Compile user configuration: selectors, attributes and break tags."""
        self.user_break_tags = set(self.config['break_tags'])
        self.comments = self.config['comments']
        self.attributes = set(self.config['attributes'])
        self.parser = 'xml'
        self.type = 'xml'
        ignores = ','.join(self.config['ignores'])
        self.ignores = sv.compile(ignores, self.config['namespaces']) if ignores.strip() else None
        captures = ','.join(self.config['captures'])
        self.captures = sv.compile(captures, self.config['namespaces']) if captures.strip() else None

    def _has_xml_encode(self, content):
        """Return the encoding declared in the XML prolog/BOM, or None."""
        encode = None
        m = RE_XML_START.match(content)
        if m:
            if m.group(1):
                # ASCII-compatible prolog: read encoding= directly.
                m2 = RE_XML_ENCODE.match(m.group(1))
                if m2:
                    enc = m2.group(2).decode('ascii')
                    try:
                        codecs.getencoder(enc)
                        encode = enc
                    except LookupError:
                        pass
            else:
                # Wide encoding: infer width/endianness from which group
                # matched, then decode the prolog and read encoding= from it.
                if m.group(2):
                    enc = 'utf-32-be'
                    text = m.group(2)
                elif m.group(3):
                    enc = 'utf-32-le'
                    text = m.group(3)
                elif m.group(4):
                    enc = 'utf-16-be'
                    text = m.group(4)
                elif m.group(5):
                    enc = 'utf-16-le'
                    text = m.group(5)
                try:
                    m2 = RE_XML_ENCODE_U.match(text.decode(enc))
                except Exception:
                    m2 = None
                if m2:
                    enc = m2.group(2)
                    try:
                        codecs.getencoder(enc)
                        encode = enc
                    except Exception:
                        pass
        return encode

    def header_check(self, content):
        """Special header check honoring the XML declaration's encoding."""
        return self._has_xml_encode(content)

    def is_break_tag(self, el):
        """Return True if this tag should start a new text block."""
        name = el.name
        return (name in self.break_tags) or (name in self.user_break_tags)

    def format_blocks(self):
        """Join each block's text chunks and pair them with their selector."""
        block_text = []
        for el, text in self._block_text.items():
            content = ''.join(text)
            if content:
                block_text.append((content, self.construct_selector(el)))
        return block_text

    def construct_selector(self, el, attr=''):
        """Construct a selector string for the element (optionally attribute)."""
        selector = deque()
        ancestor = el
        while ancestor and ancestor.parent:
            if ancestor is not el:
                selector.appendleft(ancestor.name)
            else:
                tag = ancestor.name
                prefix = ancestor.prefix
                sel = ''
                if prefix:
                    sel += prefix + '|'
                # BUG FIX: was `sel = tag`, which overwrote (discarded) the
                # namespace prefix just appended above; append instead.
                sel += tag
                if attr:
                    sel += '[%s]' % attr
                selector.appendleft(sel)
            ancestor = ancestor.parent
        return '>'.join(selector)

    def extract_tag_metadata(self, el):
        """Hook for subclasses to record tag metadata (no-op here)."""

    def reset(self):
        """Hook for subclasses to reset per-file state (no-op here)."""

    def get_last_descendant(self, node):
        """Return the first element *after* node's subtree (skip sentinel)."""
        if node.next_sibling is not None:
            last_descendant = node.next_sibling
        else:
            last_child = node
            while isinstance(last_child, bs4.Tag) and last_child.contents:
                last_child = last_child.contents[-1]
            last_descendant = last_child.next_element
        return last_descendant

    def extract_attributes(self, node):
        """Collect configured attribute values from `node`."""
        for attr in self.attributes:
            value = node.attrs.get(attr, '').strip()
            if value:
                sel = self.construct_selector(node, attr=attr)
                self._attributes.append((value, sel))

    def extract_string(self, node, is_comments):
        """Record a text or comment node into the current block/comment list."""
        string = str(node).strip()
        if string:
            if is_comments:
                sel = self.construct_selector(node.parent) + '<!--comment-->'
                self._comments.append((string, sel))
            else:
                self._block_text[self._current_block].append(string)
                self._block_text[self._current_block].append(' ')

    def pop_block(self, node, force=False):
        """Close every open block whose end sentinel is `node`."""
        while self._block_stack and (node is self._block_stack[-1][1]):
            self._block_stack.pop(-1)
            self._current_block = self._block_stack[-1][0]

    def set_block(self, node, force=False):
        """Open a new text block at `node` if it is a break tag (or forced)."""
        self.pop_block(node, force)
        if force or self.is_break_tag(node):
            self._block_stack.append((node, self.get_last_descendant(node)))
            self._block_text[node] = []
            self._current_block = node

    def to_text(self, root):
        """Walk the tree and gather (blocks, attributes, comments)."""
        last_capture = None
        last_capture_value = False
        next_good = None
        self._attributes = []
        self._comments = []
        self._block_text = OrderedDict()
        self._block_stack = []
        self.set_block(root, force=True)
        self.extract_tag_metadata(root)
        if not (self.ignores.match(root) if self.ignores else None):
            capture = self.captures.match(root) if self.captures is not None else None
            last_capture = root
            last_capture_value = capture
            if capture:
                self.extract_attributes(root)
            for node in root.descendants:
                if next_good is not None:
                    # Skipping an ignored subtree; comments inside may still count.
                    if node is not next_good:
                        if self.comments and isinstance(node, bs4.Comment):
                            self.extract_string(node, True)
                        continue
                    next_good = None
                if isinstance(node, bs4.Tag):
                    self.extract_tag_metadata(node)
                    self.set_block(node)
                    if not (self.ignores.match(node) if self.ignores else None):
                        capture = self.captures.match(node) if self.captures is not None else None
                        last_capture = node
                        last_capture_value = capture
                        if capture:
                            self.extract_attributes(node)
                    else:
                        # Ignored tag: jump past its whole subtree.
                        next_good = self.get_last_descendant(node)
                        if next_good is None:
                            break
                else:
                    self.pop_block(node)
                    is_comments = isinstance(node, bs4.Comment)
                    if (self.comments and is_comments) or ((not is_comments) and (not isinstance(node, NON_CONTENT))):
                        parent = node.parent
                        if is_comments:
                            capture = True
                        elif parent is last_capture:
                            # Cached result for consecutive children of one parent.
                            capture = last_capture_value
                        else:
                            # BUG FIX: this branch was `elif not (match...)`, which
                            # left `capture` stale from the previous iteration
                            # whenever the parent DID match; always evaluate and
                            # cache the parent's match result.
                            capture = self.captures.match(parent) if self.captures is not None else None
                            last_capture = parent
                            last_capture_value = capture
                        if capture:
                            self.extract_string(node, is_comments)
        elif self.comments:
            # Root is ignored entirely: only comments can still be collected.
            for node in root.descendants:
                if isinstance(node, bs4.Comment):
                    self.extract_string(node, True)
        return (self.format_blocks(), self._attributes, self._comments)

    def _filter(self, text, context, encoding):
        """Convert parsed content into SourceText entries."""
        content = []
        blocks, attributes, comments = self.to_text(bs4.BeautifulSoup(text, self.parser))
        if self.comments:
            for c, desc in comments:
                content.append(filters.SourceText(c, context + ': ' + desc, encoding, self.type + 'comment'))
        if self.attributes:
            for a, desc in attributes:
                content.append(filters.SourceText(a, context + ': ' + desc, encoding, self.type + 'attribute'))
        for b, desc in blocks:
            content.append(filters.SourceText(b, context + ': ' + desc, encoding, self.type + 'content'))
        return content

    def filter(self, source_file, encoding):
        """Open and filter the file from disk."""
        with codecs.open(source_file, 'r', encoding=encoding) as f:
            text = f.read()
        return self._filter(text, source_file, encoding)

    def sfilter(self, source):
        """Filter an in-memory source object."""
        return self._filter(source.text, source.context, source.encoding)
# NOTE(review): `_os(*metadata.platforms)` looks like the remnant of a
# stripped OS-guard decorator (e.g. `@common.requires_os(...)`) — confirm
# against the original RTA source.
_os(*metadata.platforms)
def main():
    """RTA scenario: masquerade PowerShell as winword.exe, use the fake
    winword to copy and execute a payload, then clean up the artifacts."""
    powershell = 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe'
    temposh = 'C:\\Users\\Public\\posh.exe'
    binary = 'C:\\Users\\Public\\winword.exe'
    # Stage PowerShell under a fake Word binary name.
    common.copy_file(powershell, binary)
    common.log('Dropping executable using fake winword')
    common.execute([binary, '/c', f'Copy-Item {powershell} {temposh}'], timeout=10)
    common.log('Executing it using fake winword')
    common.execute([binary, '/c', temposh], kill=True)
    # Remove the staged binaries.
    common.remove_files(binary, temposh)
class TopicPollVoteView(PermissionRequiredMixin, UpdateView):
    """Allows users to cast (or re-cast) votes in topic polls."""

    form_class = TopicPollVoteForm
    # BUG FIX: the attribute name was missing (the line read `= ['post']`, a
    # syntax error); voting is a state change, so only POST is accepted.
    http_method_names = ['post']
    model = TopicPoll

    def get_form_kwargs(self):
        """Inject the poll into the form kwargs.

        Deliberately skips ModelFormMixin.get_form_kwargs so that no
        `instance` kwarg reaches the (non-model) vote form.
        """
        kwargs = super(ModelFormMixin, self).get_form_kwargs()
        kwargs['poll'] = self.object
        return kwargs

    def form_valid(self, form):
        """Record the selected options, replacing prior votes if allowed."""
        # Anonymous voters are tracked by forum_key instead of a user FK.
        user_kwargs = {'voter': self.request.user} if self.request.user.is_authenticated else {'anonymous_key': self.request.user.forum_key}
        if self.object.user_changes:
            # Poll permits changing one's vote: wipe previous votes first.
            TopicPollVote.objects.filter(poll_option__poll=self.object, **user_kwargs).delete()
        options = form.cleaned_data['options']
        for option in options:
            TopicPollVote.objects.create(poll_option=option, **user_kwargs)
        return HttpResponseRedirect(self.get_success_url())

    def form_invalid(self, form):
        """Surface form errors and bounce back to the topic page."""
        messages.error(self.request, form.errors[NON_FIELD_ERRORS])
        return redirect(reverse('forum_conversation:topic', kwargs={'forum_slug': self.object.topic.forum.slug, 'forum_pk': self.object.topic.forum.pk, 'slug': self.object.topic.slug, 'pk': self.object.topic.pk}))

    def get_success_url(self):
        """Flash a confirmation and return the topic URL."""
        messages.success(self.request, _('Your vote has been cast.'))
        return reverse('forum_conversation:topic', kwargs={'forum_slug': self.object.topic.forum.slug, 'forum_pk': self.object.topic.forum.pk, 'slug': self.object.topic.slug, 'pk': self.object.topic.pk})

    def get_controlled_object(self):
        """The permission check runs against the poll itself."""
        return self.get_object()

    def perform_permissions_check(self, user, obj, perms):
        """Delegate to the forum permission handler."""
        return self.request.forum_permission_handler.can_vote_in_poll(obj, user)
def moss():
    """Collect the 'moss' dataset files into one JSON-lines prompt/output file.

    Reads every file in ./moss/, converts each JSON line into
    {'prompt', 'output', 'source'} records, and reports the longest answer.
    """
    max_len = 0
    max_ans = ''
    # Function name doubles as the dataset name and directory.
    datasets_name = sys._getframe().f_code.co_name
    datasets_dir = './{}/'.format(datasets_name)
    # BUG FIX: the original left debug `print`/`exit()` calls in the loop body
    # (making the rest unreachable), referenced an undefined `line` variable
    # (the loop variable was `lines`), and never closed either file handle.
    with open('./collect_datasets/{}.txt'.format(datasets_name), 'w') as writer:
        for base_dir in os.listdir(datasets_dir):
            base_dir = datasets_dir + base_dir
            print(base_dir)
            with open(base_dir) as fin:
                for raw in tqdm(fin):
                    line = json.loads(raw)
                    prompt = line['instruction']
                    ans = (line['context'] + ' ') + line['response']
                    if ans == 'nan':
                        continue
                    if len(str(ans)) > max_len:
                        max_len = len(ans)
                        max_ans = ans
                    item = {'prompt': prompt, 'output': ans, 'source': ((datasets_name + ':') + line['category'])}
                    writer.write(json.dumps(item, ensure_ascii=False) + '\n')
    print('max_len:', max_len)
    print(max_ans)
def ensure_refresh_token(f):
    """Decorator: guarantee the wrapped mutation receives a refresh token.

    Falls back to the refresh-token cookie when the argument is absent, and
    raises JSONWebTokenError when neither source provides a token.
    """
    from functools import wraps  # local import keeps this fix self-contained

    # BUG FIX: the original had the bare expression `(f)` here — the remnant
    # of a stripped `@wraps(f)` decorator; restored so the wrapper preserves
    # f's name/docstring for introspection and error reporting.
    @wraps(f)
    def wrapper(cls, root, info, refresh_token=None, *args, **kwargs):
        if refresh_token is None:
            refresh_token = info.context.COOKIES.get(jwt_settings.JWT_REFRESH_TOKEN_COOKIE_NAME)
        if refresh_token is None:
            raise exceptions.JSONWebTokenError(_('Refresh token is required'))
        return f(cls, root, info, refresh_token, *args, **kwargs)
    return wrapper
@_ExtendedCommunity.register_type(_ExtendedCommunity.FLOWSPEC_TPID_ACTION)
class BGPFlowSpecTPIDActionCommunity(_ExtendedCommunity):
    """Flow Specification TPID action extended community.

    FIX: `register_type(...)` is a decorator factory; it previously appeared
    as a bare statement before the class, so the returned decorator was
    discarded and the class was never registered. Restored as a decorator.
    """

    _VALUE_PACK_STR = '!BHHH'
    _VALUE_FIELDS = ['subtype', 'actions', 'tpid_1', 'tpid_2']
    ACTION_NAME = 'tpid_action'
    # Action flag bits carried in `actions` — meanings per the Flow Spec TPID
    # action specification; confirm against the protocol draft.
    TI = 1 << 15
    TO = 1 << 14

    def __init__(self, **kwargs):
        super(BGPFlowSpecTPIDActionCommunity, self).__init__()
        kwargs['subtype'] = self.SUBTYPE_FLOWSPEC_TPID_ACTION
        self.do_init(BGPFlowSpecTPIDActionCommunity, self, kwargs)

    @classmethod
    def parse_value(cls, buf):
        """Unpack subtype/actions/tpids from wire bytes.

        FIX: restored the (apparently stripped) @classmethod decorator — the
        first parameter is `cls` and the method is called on the class.
        """
        (subtype, actions, tpid_1, tpid_2) = struct.unpack_from(cls._VALUE_PACK_STR, buf)
        return {'subtype': subtype, 'actions': actions, 'tpid_1': tpid_1, 'tpid_2': tpid_2}

    def serialize_value(self):
        """Pack the value fields back into wire format."""
        return struct.pack(self._VALUE_PACK_STR, self.subtype, self.actions, self.tpid_1, self.tpid_2)
def _collect_security_dependencies(dependant: Dependant, dependency_callable: Callable) -> Iterable[Tuple[(Dependant, int)]]:
    """Depth-first search of the dependency tree.

    Yields (parent_dependant, index) for every dependency whose `call` equals
    `dependency_callable`; non-matching dependencies are searched recursively.
    """
    for index, sub_dependant in enumerate(dependant.dependencies):
        if sub_dependant.call == dependency_callable:
            yield (dependant, index)
        else:
            yield from _collect_security_dependencies(sub_dependant, dependency_callable)
class RWLockFairD(RWLockableD):
    """A fair (FIFO-ordered), downgradable reader-writer lock.

    Fairness comes from `c_lock_read` acting as an entry gate that both
    readers and writers must pass through in arrival order.
    """

    def __init__(self, lock_factory: Callable[([], Lockable)]=threading.Lock, time_source: Callable[([], float)]=time.perf_counter) -> None:
        """Create the lock; `lock_factory` builds internal locks, `time_source` drives timeouts."""
        self.v_read_count: int = 0                      # number of active readers
        self.c_time_source = time_source
        self.c_lock_read_count = lock_factory()         # protects v_read_count
        self.c_lock_read = lock_factory()               # FIFO entry gate
        self.c_lock_write = lock_factory()              # held while anyone reads/writes

    class _aReader(Lockable):
        """Reader handle: first reader in takes the write lock; last one out releases it."""

        def __init__(self, p_RWLock: 'RWLockFairD') -> None:
            self.c_rw_lock = p_RWLock
            self.v_locked: bool = False

        def acquire(self, blocking: bool=True, timeout: float=(- 1)) -> bool:
            """Acquire for reading; returns False on timeout."""
            # Normalize (blocking, timeout) into an absolute deadline (None = wait forever).
            p_timeout = (None if (blocking and (timeout < 0)) else (timeout if blocking else 0))
            c_deadline = (None if (p_timeout is None) else (self.c_rw_lock.c_time_source() + p_timeout))
            # Pass the fairness gate first.
            if (not self.c_rw_lock.c_lock_read.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, (c_deadline - self.c_rw_lock.c_time_source()))))):
                return False
            if (not self.c_rw_lock.c_lock_read_count.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, (c_deadline - self.c_rw_lock.c_time_source()))))):
                self.c_rw_lock.c_lock_read.release()
                return False
            self.c_rw_lock.v_read_count += 1
            if (1 == self.c_rw_lock.v_read_count):
                # First reader blocks writers; roll everything back on timeout.
                if (not self.c_rw_lock.c_lock_write.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, (c_deadline - self.c_rw_lock.c_time_source()))))):
                    self.c_rw_lock.v_read_count -= 1
                    self.c_rw_lock.c_lock_read_count.release()
                    self.c_rw_lock.c_lock_read.release()
                    return False
            self.c_rw_lock.c_lock_read_count.release()
            self.c_rw_lock.c_lock_read.release()
            self.v_locked = True
            return True

        def release(self) -> None:
            """Release the read lock; the last reader re-enables writers."""
            if (not self.v_locked):
                raise RELEASE_ERR_CLS(RELEASE_ERR_MSG)
            self.v_locked = False
            self.c_rw_lock.c_lock_read_count.acquire()
            self.c_rw_lock.v_read_count -= 1
            if (0 == self.c_rw_lock.v_read_count):
                self.c_rw_lock.c_lock_write.release()
            self.c_rw_lock.c_lock_read_count.release()

        def locked(self) -> bool:
            """Whether this handle currently holds the lock."""
            return self.v_locked

    class _aWriter(LockableD):
        """Writer handle: holds both the entry gate and the write lock while active."""

        def __init__(self, p_RWLock: 'RWLockFairD') -> None:
            self.c_rw_lock = p_RWLock
            self.v_locked: bool = False

        def acquire(self, blocking: bool=True, timeout: float=(- 1)) -> bool:
            """Acquire for writing; returns False on timeout."""
            p_timeout = (None if (blocking and (timeout < 0)) else (timeout if blocking else 0))
            c_deadline = (None if (p_timeout is None) else (self.c_rw_lock.c_time_source() + p_timeout))
            if (not self.c_rw_lock.c_lock_read.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, (c_deadline - self.c_rw_lock.c_time_source()))))):
                return False
            if (not self.c_rw_lock.c_lock_write.acquire(blocking=True, timeout=((- 1) if (c_deadline is None) else max(0, (c_deadline - self.c_rw_lock.c_time_source()))))):
                self.c_rw_lock.c_lock_read.release()
                return False
            self.v_locked = True
            return True

        def downgrade(self) -> Lockable:
            """Atomically convert this held write lock into a read lock."""
            if (not self.v_locked):
                raise RELEASE_ERR_CLS(RELEASE_ERR_MSG)
            # Become the (sole) registered reader; the write lock stays held
            # on behalf of the reader side until the last reader releases.
            self.c_rw_lock.v_read_count += 1
            self.v_locked = False
            self.c_rw_lock.c_lock_read.release()
            result = self.c_rw_lock._aReader(p_RWLock=self.c_rw_lock)
            result.v_locked = True
            return result

        def release(self) -> None:
            """Release the write lock and reopen the fairness gate."""
            if (not self.v_locked):
                raise RELEASE_ERR_CLS(RELEASE_ERR_MSG)
            self.v_locked = False
            self.c_rw_lock.c_lock_write.release()
            self.c_rw_lock.c_lock_read.release()

        def locked(self) -> bool:
            """Whether this handle currently holds the lock."""
            return self.v_locked

    def gen_rlock(self) -> 'RWLockFairD._aReader':
        """Generate a reader-lock handle."""
        return RWLockFairD._aReader(self)

    def gen_wlock(self) -> 'RWLockFairD._aWriter':
        """Generate a (downgradable) writer-lock handle."""
        return RWLockFairD._aWriter(self)
@pytest.mark.django_db
def test_can_register_as_installer_with_user_not_installer_and_event_with_installers_should_return_true(event1, user1, mocker):
    """A non-installer user may register as installer when the event uses installers.

    FIX: the decorator line had been reduced to the bare fragment
    `.django_db` (a syntax error); restored to the pytest marker.
    """
    mock_is_installer = mocker.patch('manager.templatetags.filters.is_installer')
    mock_is_installer.return_value = False
    event1.use_installers = True
    event1.save()
    assert filters.can_register_as_installer(user1, event1)
    mock_is_installer.assert_called_once_with(user1, event1.event_slug)
def validate_transaction_gas_estimation_dict(transaction_dict: Dict[(str, Any)], vm: VirtualMachineAPI) -> None:
    """Validate that `transaction_dict` uses only fields the VM accepts.

    The allowed set is derived from the VM's transaction-builder signature,
    minus forbidden fields, plus derived ones, with spec renames applied.
    Raises ValueError listing any unexpected fields.
    """
    builder_signature = inspect.signature(vm.get_transaction_builder().new_transaction)
    permitted = set(builder_signature.parameters).difference(FORBIDDEN_KEYS).union(DERIVED_KEYS)
    spec_keys = {RENAMED_KEYS.get(field_name, field_name) for field_name in permitted}
    superfluous_keys = set(transaction_dict) - spec_keys
    if superfluous_keys:
        raise ValueError('The following invalid fields were given in a transaction: %r. Only %r are allowed' % (sorted(superfluous_keys), sorted(spec_keys)))
class AppLayoutExample(flx.Widget):
    """Demo widget contrasting box layouts (natural sizes) with fix layouts
    (equal division) under different flex factors."""

    def init(self):
        """Build the widget tree declaratively via nested context managers."""
        # NOTE(review): self.b1/b2/b3 are reassigned by every row below, so
        # only the last trio remains referenced — apparently intentional for
        # a demo, but confirm if the handles are used elsewhere.
        with flx.VBox():
            flx.Label(style='background:#cfc;', wrap=1, text='Here is some content at the top for which we want to use minimal size. Thus the use of a VBox. Below is a splitter, with a box layout on the left and a fix layout on the right.')
            with flx.HSplit(flex=1):
                # Left pane: HBox rows — children get their natural size.
                with flx.VBox(style='border:1px solid #777;'):
                    flx.Label(text='Flex 0 0 0')
                    with flx.HBox(flex=0):
                        self.b1 = flx.Button(text='Hi')
                        self.b2 = flx.Button(text='Helloooo world!')
                        self.b3 = flx.Button(text='Foo bar')
                    flx.Label(text='Flex 1 1 1')
                    with flx.HBox(flex=0):
                        self.b1 = flx.Button(flex=1, text='Hi')
                        self.b2 = flx.Button(flex=1, text='Helloooo world!')
                        self.b3 = flx.Button(flex=1, text='Foo bar')
                    flx.Label(text='Flex 1 0 3')
                    with flx.HBox(flex=0):
                        self.b1 = flx.Button(flex=1, text='Hi')
                        self.b2 = flx.Button(flex=0, text='Helloooo world!')
                        self.b3 = flx.Button(flex=3, text='Foo bar')
                # Right pane: HFix rows — available space divided by flex only.
                with flx.VFix(style='border:1px solid #777;'):
                    flx.Label(text='Flex 0 0 0 (space divided equally)', style='')
                    with flx.HFix():
                        self.b1 = flx.Button(text='Hi')
                        self.b2 = flx.Button(text='Helloooo world!')
                        self.b3 = flx.Button(text='Foo bar')
                    flx.Label(text='Flex 1 1 1', style='')
                    with flx.HFix():
                        self.b1 = flx.Button(flex=1, text='Hi')
                        self.b2 = flx.Button(flex=1, text='Helloooo world!')
                        self.b3 = flx.Button(flex=1, text='Foo bar')
                    flx.Label(text='Flex 1 0 3 (the widget with zero collapses')
                    with flx.HFix():
                        self.b1 = flx.Button(flex=1, text='Hi')
                        self.b2 = flx.Button(flex=0, text='Helloooo world!')
                        self.b3 = flx.Button(flex=3, text='Foo bar')
# NOTE(review): `_scope` appears to be the remnant of a stripped decorator
# (likely a parametrize/fixture-scope marker supplying `password_or_none`) —
# confirm against the original test module.
_scope
class TestGetMultiAddressCommandNegativeBadConnectionId(AEATestCaseEmpty):
    """`get-multiaddress` must fail clearly for an unknown connection id."""

    def test_run(self, password_or_none):
        """Run the CLI with a bogus connection id and expect a lookup error."""
        self.generate_private_key(FetchAICrypto.identifier, password=password_or_none)
        self.add_private_key(FetchAICrypto.identifier, connection=True, password=password_or_none)
        password_options = _get_password_option_args(password_or_none)
        # A syntactically valid public id that is not in the agent's registry.
        connection_id = 'some_author/some_connection:0.1.0'
        with pytest.raises(Exception, match=f'Cannot find connection with the public id {connection_id}'):
            self.run_cli_command('get-multiaddress', FetchAICrypto.identifier, '--connection', '--connection-id', connection_id, *password_options, cwd=self.current_agent_context)
def group_tags(lines, tag_groups, bit_groups):
    """Normalize DB lines so grouped tags carry explicit zero bits.

    For each tag belonging to a tag group, any bit coordinate of the paired
    bit group that the line does not mention is added as an explicit
    (False, coord) entry. Returns (number_of_changed_lines, new_line_set).
    """
    changes = 0
    new_lines = set()
    for raw in lines:
        raw = raw.strip()
        if not len(raw):
            continue
        tag, bits, mode, _ = util.parse_db_line(raw)
        # Normalize the bit list into a set of parsed (value, coord) tuples.
        bits = set(util.parse_tagbit(b) for b in bits) if bits else set()
        for tag_group, bit_group in zip(tag_groups, bit_groups):
            if tag not in tag_group:
                continue
            present_coords = {b[1] for b in bits}
            for zero_bit in bit_group:
                if zero_bit not in present_coords:
                    bits.add((False, zero_bit))
            new_lines.add(format_bits(tag, bits))
            changes += 1
            break
        else:
            # Tag belongs to no group: keep it, re-formatted, unchanged.
            new_lines.add(format_bits(tag, bits))
    return (changes, new_lines)
class CommonSegHasm(CommonSegAsm):
    """Segment for hand-written assembly: scanned like asm, but the output
    file is never overwritten once it exists (user edits are preserved)."""

    def scan(self, rom_bytes: bytes):
        """Disassemble the ROM range, flagged as hand-written asm."""
        if ((self.rom_start is not None) and (self.rom_end is not None) and (self.rom_start != self.rom_end)):
            self.scan_code(rom_bytes, is_hasm=True)

    def split(self, rom_bytes: bytes):
        """Write the disassembly only if the output file does not already exist."""
        if ((not (self.rom_start == self.rom_end)) and (self.spim_section is not None)):
            out_path = self.out_path()
            # Key difference from CommonSegAsm: skip existing files.
            if (out_path and (not out_path.exists())):
                out_path.parent.mkdir(parents=True, exist_ok=True)
                self.print_file_boundaries()
                with open(out_path, 'w', newline='\n') as f:
                    for line in self.get_file_header():
                        f.write((line + '\n'))
                    f.write(self.spim_section.disassemble())
class TestContainerInsight(unittest.TestCase):
    """Unit tests for ContainerInsight's string serialization."""

    def setUp(self) -> None:
        """Build one ContainerInsight with known field values."""
        self.test_time = .0
        self.test_cluster_name = 'pci-tests'
        self.test_instance_id = 'a4e3bc43995f473697f7ede38699fcbe'
        self.test_status = 'COMPLETED'
        self.test_exit_code = 1
        self.class_name = 'ContainerInsight'
        self.test_container_insight = ContainerInsight(time=self.test_time, cluster_name=self.test_cluster_name, instance_id=self.test_instance_id, status=self.test_status, exit_code=self.test_exit_code)

    def test_convert_to_str_with_class_name(self) -> None:
        """Serialization must be JSON of all fields plus a class_name marker."""
        expected_str = json.dumps({'time': self.test_time, 'cluster_name': self.test_cluster_name, 'instance_id': self.test_instance_id, 'status': self.test_status, 'exit_code': self.test_exit_code, 'class_name': self.class_name})
        actual_str = self.test_container_insight.convert_to_str_with_class_name()
        self.assertEqual(actual_str, expected_str)
def extractRintranslatesCom(item):
    """Build a release message for a rintranslates.com feed item.

    Returns None for previews or items with no volume/chapter information,
    False when no known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_release = chp or vol
    if not has_release or 'preview' in item['title'].lower():
        return None
    # (feed tag, series name, translation type)
    for feed_tag, series_name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if feed_tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@pytest.mark.usefixtures('use_tmpdir')
def test_that_include_non_existing_file_errors_with_location(tmpdir):
    """INCLUDE of a missing file must report the directive's exact location.

    FIX: the decorator line had been reduced to the bare fragment
    `.usefixtures('use_tmpdir')` (a syntax error); restored to the pytest marker.
    """
    assert_that_config_leads_to_error(config_file_contents=dedent('\n JOBNAME my_name%d\n NUM_REALIZATIONS 1\n INCLUDE does_not_exists\n '), expected_error=ExpectedErrorInfo(line=4, column=9, end_column=24, match='INCLUDE file:.*does_not_exists not found'))
class Env():
    """Execution environment for the t8n (transition) tool, loaded from the
    `env` JSON input, with fork-dependent derived fields (base fee,
    difficulty, randao, withdrawals, block hashes, ommers)."""

    coinbase: Any
    block_gas_limit: Uint
    block_number: Uint
    block_timestamp: U256
    withdrawals: Any
    block_difficulty: Optional[Uint]
    prev_randao: Optional[Bytes32]
    parent_difficulty: Optional[Uint]
    parent_timestamp: Optional[U256]
    base_fee_per_gas: Optional[Uint]
    parent_gas_used: Optional[Uint]
    parent_gas_limit: Optional[Uint]
    parent_base_fee_per_gas: Optional[Uint]
    block_hashes: Optional[List[Any]]
    parent_ommers_hash: Optional[Hash32]
    ommers: Any

    def __init__(self, t8n: Any, stdin: Optional[Dict]=None):
        """Load env data from stdin or the configured file, then derive fields."""
        if (t8n.options.input_env == 'stdin'):
            assert (stdin is not None)
            data = stdin['env']
        else:
            with open(t8n.options.input_env, 'r') as f:
                data = json.load(f)
        self.coinbase = t8n.hex_to_address(data['currentCoinbase'])
        self.block_gas_limit = parse_hex_or_int(data['currentGasLimit'], Uint)
        self.block_number = parse_hex_or_int(data['currentNumber'], Uint)
        self.block_timestamp = parse_hex_or_int(data['currentTimestamp'], U256)
        # Order matters: difficulty/base-fee readers consume the fields above.
        self.read_block_difficulty(data, t8n)
        self.read_base_fee_per_gas(data, t8n)
        self.read_randao(data, t8n)
        self.read_block_hashes(data)
        self.read_ommers(data, t8n)
        self.read_withdrawals(data, t8n)

    def read_base_fee_per_gas(self, data: Any, t8n: Any) -> None:
        """Read or compute the base fee (London and later forks only)."""
        self.parent_gas_used = None
        self.parent_gas_limit = None
        self.parent_base_fee_per_gas = None
        self.base_fee_per_gas = None
        if t8n.is_after_fork('ethereum.london'):
            if ('currentBaseFee' in data):
                self.base_fee_per_gas = parse_hex_or_int(data['currentBaseFee'], Uint)
            else:
                # Derive from parent-block data via the fork's formula.
                self.parent_gas_used = parse_hex_or_int(data['parentGasUsed'], Uint)
                self.parent_gas_limit = parse_hex_or_int(data['parentGasLimit'], Uint)
                self.parent_base_fee_per_gas = parse_hex_or_int(data['parentBaseFee'], Uint)
                parameters = [self.block_gas_limit, self.parent_gas_limit, self.parent_gas_used, self.parent_base_fee_per_gas]
                if (t8n.fork_module == 'london'):
                    # London's formula also needs the "is fork block" flag.
                    parameters.append((t8n.fork_block == self.block_number))
                self.base_fee_per_gas = t8n.fork.calculate_base_fee_per_gas(*parameters)

    def read_randao(self, data: Any, t8n: Any) -> None:
        """Read prevRandao (Paris/Merge and later forks only)."""
        self.prev_randao = None
        if t8n.is_after_fork('ethereum.paris'):
            current_random = data['currentRandom']
            if current_random.startswith('0x'):
                current_random = current_random[2:]
            # Pad odd-length hex so it decodes to whole bytes.
            if ((len(current_random) % 2) == 1):
                current_random = ('0' + current_random)
            self.prev_randao = Bytes32(left_pad_zero_bytes(hex_to_bytes(current_random), 32))

    def read_withdrawals(self, data: Any, t8n: Any) -> None:
        """Read withdrawals (Shanghai and later forks only)."""
        self.withdrawals = None
        if t8n.is_after_fork('ethereum.shanghai'):
            self.withdrawals = tuple((t8n.json_to_withdrawals(wd) for wd in data['withdrawals']))

    def read_block_difficulty(self, data: Any, t8n: Any) -> None:
        """Read or compute block difficulty (pre-Merge forks only)."""
        self.block_difficulty = None
        self.parent_timestamp = None
        self.parent_difficulty = None
        self.parent_ommers_hash = None
        if t8n.is_after_fork('ethereum.paris'):
            # Post-merge: difficulty is replaced by prevRandao.
            return
        elif ('currentDifficulty' in data):
            self.block_difficulty = parse_hex_or_int(data['currentDifficulty'], Uint)
        else:
            # Derive from parent data via the fork's difficulty formula.
            self.parent_timestamp = parse_hex_or_int(data['parentTimestamp'], U256)
            self.parent_difficulty = parse_hex_or_int(data['parentDifficulty'], Uint)
            args = [self.block_number, self.block_timestamp, self.parent_timestamp, self.parent_difficulty]
            if t8n.is_after_fork('ethereum.byzantium'):
                # Byzantium+ also needs whether the parent had ommers.
                if ('parentUncleHash' in data):
                    EMPTY_OMMER_HASH = keccak256(rlp.encode([]))
                    self.parent_ommers_hash = Hash32(hex_to_bytes(data['parentUncleHash']))
                    parent_has_ommers = (self.parent_ommers_hash != EMPTY_OMMER_HASH)
                    args.append(parent_has_ommers)
                else:
                    args.append(False)
            self.block_difficulty = t8n.fork.calculate_block_difficulty(*args)

    def read_block_hashes(self, data: Any) -> None:
        """Build the list of up to 256 most recent block hashes (None if missing)."""
        block_hashes: List[Any] = []
        max_blockhash_count = min(256, self.block_number)
        for number in range((self.block_number - max_blockhash_count), self.block_number):
            if (('blockHashes' in data) and (str(number) in data['blockHashes'])):
                block_hashes.append(Hash32(hex_to_bytes(data['blockHashes'][str(number)])))
            else:
                block_hashes.append(None)
        self.block_hashes = block_hashes

    def read_ommers(self, data: Any, t8n: Any) -> None:
        """Read ommers (uncles) as (delta, address) records."""
        ommers = []
        if ('ommers' in data):
            for ommer in data['ommers']:
                ommers.append(Ommer(ommer['delta'], t8n.hex_to_address(ommer['address'])))
        self.ommers = ommers
class Solution():
    """Read N characters given read4 — callable multiple times.

    Maintains leftover characters between calls because read4 always pulls
    4 at a time even when the caller asked for fewer.
    """

    def __init__(self):
        self.end = False    # True once read4 returned fewer than 4 (EOF seen)
        self.chars = []     # chars fetched from read4 but not yet consumed

    def read(self, buf, n):
        """Read up to n characters into buf; return the count actually read."""
        if (self.end and (len(self.chars) == 0)):
            return 0
        if ((len(self.chars) >= n) or self.end):
            # Serve the request entirely from the leftover buffer.
            clen = min(len(self.chars), n)
            for i in range(clen):
                buf[i] = self.chars[i]
            self.chars = self.chars[clen:]
            return clen
        # Drain the leftovers first, then pull 4 at a time from read4.
        l = len(self.chars)
        for i in range(len(self.chars)):
            buf[i] = self.chars[i]
        self.chars = []
        curr = ([''] * 4)
        while ((n > l) and (not self.end)):
            cnt = read4(curr)
            if (cnt < 4):
                self.end = True
            clen = min(cnt, (n - l))
            for i in range(clen):
                buf[(l + i)] = curr[i]
            l += clen
            # Stash any chars beyond what the caller asked for.
            for i in range((cnt - clen)):
                self.chars.append(curr[(i + clen)])
        return l
class ComponentRegistry(Registry[(Tuple[(PublicId, str)], SkillComponentType)], Generic[SkillComponentType]):
__slots__ = ('_items', '_dynamically_added')
def __init__(self, **kwargs: Any) -> None:
    """Initialize a nested registry: PublicId -> {component name -> component}."""
    super().__init__(**kwargs)
    self._items: PublicIdRegistry[Dict[(str, SkillComponentType)]] = PublicIdRegistry()
    # Tracks which names were added at runtime, per skill id.
    self._dynamically_added: Dict[(PublicId, Set[str])] = {}
def register(self, item_id: Tuple[(PublicId, str)], item: SkillComponentType, is_dynamically_added: bool=False) -> None:
    """Register `item` under (skill_id, name).

    Raises ValueError when the name is already registered for that skill.
    """
    skill_id = item_id[0]
    item_name = item_id[1]
    skill_items = self._items.fetch(skill_id)
    if ((skill_items is not None) and (item_name in skill_items.keys())):
        raise ValueError(f"Item already registered with skill id '{skill_id}' and name '{item_name}'")
    # The inner registry holds one mapping per skill id, so swap the whole
    # dict out, mutate it, and register it back.
    if (skill_items is not None):
        self._items.unregister(skill_id)
    else:
        skill_items = {}
    skill_items[item_name] = item
    self._items.register(skill_id, skill_items)
    if is_dynamically_added:
        self._dynamically_added.setdefault(skill_id, set()).add(item_name)
def unregister(self, item_id: Tuple[(PublicId, str)]) -> Optional[SkillComponentType]:
return self._unregister_from_main_index(item_id)
def _unregister_from_main_index(self, item_id: Tuple[(PublicId, str)]) -> SkillComponentType:
skill_id = item_id[0]
item_name = item_id[1]
name_to_item = self._items.fetch(skill_id)
if ((name_to_item is None) or (item_name not in name_to_item)):
raise ValueError("No item registered with component id '{}'".format(item_id))
self.logger.debug('Unregistering item with id {}'.format(item_id))
item = name_to_item.pop(item_name)
if (len(name_to_item) == 0):
self._items.unregister(skill_id)
else:
self._items.unregister(skill_id)
self._items.register(skill_id, name_to_item)
items = self._dynamically_added.get(skill_id, None)
if (items is not None):
items.remove(item_name)
if (len(items) == 0):
self._dynamically_added.pop(skill_id, None)
return item
def fetch(self, item_id: Tuple[(PublicId, str)]) -> Optional[SkillComponentType]:
skill_id = item_id[0]
item_name = item_id[1]
name_to_item = self._items.fetch(skill_id)
if (name_to_item is None):
return None
return name_to_item.get(item_name, None)
def fetch_by_skill(self, skill_id: PublicId) -> List[SkillComponentType]:
temp: Optional[Dict[(str, SkillComponentType)]] = self._items.fetch(skill_id)
name_to_item: Dict[(str, SkillComponentType)] = ({} if (temp is None) else temp)
return list(name_to_item.values())
def fetch_all(self) -> List[SkillComponentType]:
return [item for items in self._items.fetch_all() for item in items.values()]
def unregister_by_skill(self, skill_id: PublicId) -> None:
if (skill_id not in self._items.ids()):
raise ValueError('No component of skill {} present in the registry.'.format(skill_id))
self._items.unregister(skill_id)
self._dynamically_added.pop(skill_id, None)
def ids(self) -> Set[Tuple[(PublicId, str)]]:
result: Set[Tuple[(PublicId, str)]] = set()
for skill_id in self._items.ids():
name_to_item = cast(Dict[(str, SkillComponentType)], self._items.fetch(skill_id))
for (name, _) in name_to_item.items():
result.add((skill_id, name))
return result
def setup(self) -> None:
for item in self.fetch_all():
if item.context.is_active:
self.logger.debug('Calling setup() of component {} of skill {}'.format(item.name, item.skill_id))
try:
item.setup()
except Exception as e:
e_str = parse_exception(e)
e_str = f'''An error occurred while setting up item {item.skill_id}/{type(item).__name__}:
{e_str}'''
raise AEASetupError(e_str)
else:
self.logger.debug('Ignoring setup() of component {} of skill {}, because the skill is not active.'.format(item.name, item.skill_id))
def teardown(self) -> None:
for name_to_items in self._items.fetch_all():
for (_, item) in name_to_items.items():
self.logger.debug('Calling teardown() of component {} of skill {}'.format(item.name, item.skill_id))
try:
item.teardown()
except Exception as e:
e_str = parse_exception(e)
e_str = f'''An error occurred while tearing down item {item.skill_id}/{type(item).__name__}:
{str(e_str)}'''
e = AEATeardownError(e_str)
self.logger.error(str(e))
_dynamically_added = copy.deepcopy(self._dynamically_added)
for (skill_id, items_names) in _dynamically_added.items():
for item_name in items_names:
self.unregister((skill_id, item_name)) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.