code stringlengths 281 23.7M |
|---|
class Migration(migrations.Migration):
    """Add the TariffPrice model and enforce one price per (date, vmpp) pair."""

    dependencies = [
        ('dmd', '0002_auto__1443'),
        ('frontend', '0045_auto__2136'),
    ]

    operations = [
        migrations.CreateModel(
            name='TariffPrice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField(db_index=True)),
                ('price_pence', models.IntegerField()),
                ('tariff_category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dmd.DtPaymentCategory')),
                ('vmpp', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dmd.VMPP')),
            ],
        ),
        migrations.AlterUniqueTogether(name='tariffprice', unique_together={('date', 'vmpp')}),
    ]
class bsn_vrf_counter_stats_reply(bsn_stats_reply):
    """Big Switch Networks experimenter stats reply carrying VRF counter entries.

    Generated LOXI-style OpenFlow message class. Wire identity constants are
    asserted on unpack: version 6, type 19 (stats reply), stats_type 0xffff
    (experimenter), experimenter 6035143 (0x5c16c7, BSN), subtype 15.

    NOTE(review): pack() joins str padding with struct output and returns a
    str — this generated code targets Python 2 byte-string semantics.
    """
    version = 6
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 15

    def __init__(self, xid=None, flags=None, entries=None):
        # Defaults: xid stays None until assigned; flags default to 0;
        # entries defaults to a fresh list (built per instance, so there is
        # no shared mutable default).
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize this message to its wire representation.

        The 16-bit length field at offset 2 is first written as 0, then
        patched once the total size of all packed pieces is known.
        """
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        # Four NUL pad bytes between flags and the experimenter header.
        packed.append(('\x00' * 4))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        # Back-patch the message length (element 2 is the placeholder above).
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Factory: parse one message from `reader` and return a populated instance.

        Takes the reader directly (no self); in LOXI generator output this is
        a @staticmethod — presumably the decorator was lost in this copy
        (TODO confirm). Call it via the class, not an instance.
        """
        obj = bsn_vrf_counter_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's declared length
        # (4 = bytes already consumed by version/type/length).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 15)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.bsn_vrf_counter_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        # Field-wise equality over the mutable payload (xid, flags, entries);
        # class identity must match exactly.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering of this message to pretty-printer `q`."""
        q.text('bsn_vrf_counter_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
class Cadastro(Form):
    """Signup ("cadastro") form: personal details plus account credentials.

    Field labels are Portuguese user-facing strings; do not translate them.
    """
    name = TextField('Nome')            # first name
    lastName = TextField('Sobrenome')   # last name
    username = TextField('Usuario')     # login name
    passwd = PasswordField('Senha')     # password
    email = EmailField('Email')
    gender = SelectField('Sexo', choices=[('masculino', 'Masculino'), ('feminino', 'Feminino')])
    btnSend = SubmitField('Enviar')     # submit button
    # Birth date, expected as day/month/year.
    age = DateField('Nascimento', format='%d/%m/%Y')
def _replace_component_id(config: PackageConfiguration, types_to_update: Set[ComponentType], replacements: Dict[ComponentType, Dict[PublicId, PublicId]]) -> None:
    """Rewrite, in place, the public ids stored on `config` for each requested
    component type, mapping each id through `replacements` (ids without a
    replacement are kept unchanged)."""
    for ctype in types_to_update:
        id_set: Set[PublicId] = getattr(config, ctype.to_plural(), set())
        mapping = replacements.get(ctype, {})
        # Snapshot the set before mutating it during iteration.
        for current_id in tuple(id_set):
            id_set.discard(current_id)
            id_set.add(mapping.get(current_id, current_id))
def launch(main_func: Callable[(..., _RT)], num_processes_per_machine: int, num_machines: int=1, machine_rank: int=0, dist_url: Optional[str]=None, backend: str='NCCL', always_spawn: bool=False, launch_method: str='multiprocessing', shared_context: Optional[D2GoSharedContext]=None, timeout: timedelta=DEFAULT_TIMEOUT, args: Tuple[(Any, ...)]=(), kwargs: Dict[(str, Any)]=None) -> Dict[(int, _RT)]:
    """Run `main_func` on every process of a multi-machine/multi-process job
    and return the per-rank results.

    Thin wrapper: first lets `_maybe_convert_to_cpu_run` rewrite (args,
    backend) for CPU-only environments, then delegates everything to `_launch`
    with the module-level `distributed_worker` entry point.
    `kwargs=None` is passed through as-is; _launch presumably treats it as an
    empty mapping — TODO confirm.
    """
    (args, backend) = _maybe_convert_to_cpu_run(args, backend)
    return _launch(main_func=main_func, num_processes_per_machine=num_processes_per_machine, num_machines=num_machines, machine_rank=machine_rank, dist_url=dist_url, backend=backend, always_spawn=always_spawn, launch_method=launch_method, shared_context=shared_context, timeout=timeout, args=args, kwargs=kwargs, _distributed_worker=distributed_worker)
def _synchronize_user_on_group_related_monitoring_servers(task_id, user, affected_groups):
    """Create/update `user` on the monitoring (Zabbix) server of every DC whose
    roles intersect `affected_groups`.

    DCs with monitoring disabled are skipped. A failed sync does not abort the
    loop: the error is logged and a separate per-DC retry task is dispatched
    via `mon_user_changed`; successful syncs are logged via `_log_mon_user_action`.
    """
    logger.info('Going to create/update user %s in zabbixes related to groups %s.', user.username, affected_groups)
    for dc in Dc.objects.filter(roles__in=affected_groups):
        mon = get_monitoring(dc)
        if (not mon.enabled):
            logger.info('Monitoring is disabled in DC %s', dc)
            continue
        try:
            res = mon.user_sync(user=user)
        except MonitoringError as exc:
            # Best-effort: log and hand this DC off to its own retry task
            # rather than failing the whole synchronization.
            logger.exception(exc)
            logger.error('Creating a separate task for dc %s and user %s because it crashed.', dc.name, user.username)
            mon_user_changed.call(task_id, user_name=user.username, dc_name=dc.name)
        else:
            _log_mon_user_action(res, mon, task_id, user.username, dc.name)
def test_staticfiles_with_missing_dir_returns_404(tmpdir, test_client_factory):
    """A request for a path under a nonexistent subdirectory yields 404."""
    file_path = os.path.join(tmpdir, 'example.txt')
    with open(file_path, 'w') as handle:
        handle.write('<file content>')
    static_app = StaticFiles(directory=tmpdir)
    app = Starlette(routes=[Mount('/', app=static_app, name='static')])
    response = test_client_factory(app).get('/foo/example.txt')
    assert response.status_code == 404
    assert response.text == 'Not Found'
def test_01_08_02_predictions(nlp):
    """The pipeline should tag both 'Fortune' and 'Apple' as ORG entities."""
    text = 'De acuerdo con la revista global de negocios Fortune, Apple fue la empresa mas admirada en el mundo entre 2008 y 2012.'
    entities = [(entity.text, entity.label_) for entity in nlp(text).ents]
    assert len(entities) == 2
    assert entities[0] == ('Fortune', 'ORG')
    assert entities[1] == ('Apple', 'ORG')
class ContractEvents(_ContractEvents):
    """Event helper bound to one deployed contract: subscribe to, fetch, or
    await its events via the shared `event_watcher` and `web3` instances."""

    def __init__(self, contract: _DeployedContractBase):
        self.linked_contract = contract
        _ContractEvents.__init__(self, contract.abi, web3, contract.address)

    def subscribe(self, event_name: str, callback: Callable[([AttributeDict], None)], delay: float=2.0) -> None:
        """Invoke `callback(event_data)` each time `event_name` fires; the
        watcher polls roughly every `delay` seconds."""
        target_event: ContractEvent = self.__getitem__(event_name)
        event_watcher.add_event_callback(event=target_event, callback=callback, delay=delay)

    def get_sequence(self, from_block: int, to_block: int=None, event_type: Union[(ContractEvent, str)]=None) -> Union[(List[AttributeDict], AttributeDict)]:
        """Fetch logged events between `from_block` and `to_block`.

        `to_block` is clamped to the current chain head. With `event_type`
        (a ContractEvent, or an event name resolved to one) a flat list of
        that event's logs is returned; without it, an AttributeDict mapping
        every event name to its logs.
        """
        if ((to_block is None) or (to_block > web3.eth.block_number)):
            to_block = web3.eth.block_number
        if (event_type is not None):
            if isinstance(event_type, str):
                # Rebind the name to the actual ContractEvent object.
                event_type: ContractEvent = self.__getitem__(event_type)
            return self._retrieve_contract_events(event_type, from_block, to_block)
        events_logbook = dict()
        for event in ContractEvents.__iter__(self):
            events_logbook[event.event_name] = self._retrieve_contract_events(event, from_block, to_block)
        return AttributeDict(events_logbook)

    def listen(self, event_name: str, timeout: float=0) -> Coroutine:
        """Return a coroutine that resolves when `event_name` next fires (or on
        timeout). The result is AttributeDict(event_data=..., timed_out=bool).
        A `timeout` of 0 means wait indefinitely.
        """
        _triggered: bool = False
        _received_data: Union[(AttributeDict, None)] = None

        def _event_callback(event_data: AttributeDict) -> None:
            # One-shot callback from the watcher thread: stash the data and
            # flip the flag the polling loop below is waiting on.
            nonlocal _triggered, _received_data
            _received_data = event_data
            _triggered = True

        _listener_end_time = (time.time() + timeout)

        async def _listening_task(is_timeout: bool, end_time: float) -> AttributeDict:
            # Poll the flag every 50 ms until the callback fires or we time out.
            nonlocal _triggered, _received_data
            timed_out: bool = False
            while (not _triggered):
                if (is_timeout and (end_time <= time.time())):
                    timed_out = True
                    break
                (await asyncio.sleep(0.05))
            return AttributeDict({'event_data': _received_data, 'timed_out': timed_out})

        target_event: ContractEvent = self.__getitem__(event_name)
        event_watcher.add_event_callback(event=target_event, callback=_event_callback, delay=0.2, repeat=False)
        return _listening_task(bool((timeout > 0)), _listener_end_time)

    def _retrieve_contract_events(self, event_type: ContractEvent, from_block: int=None, to_block: int=None) -> List[LogReceipt]:
        """Pull all log entries for one event; defaults to the last 10 blocks
        when `from_block` is omitted."""
        if (to_block is None):
            to_block = web3.eth.block_number
        if ((from_block is None) and isinstance(to_block, int)):
            from_block = (to_block - 10)
        event_filter: filters.LogFilter = event_type.createFilter(fromBlock=from_block, toBlock=to_block)
        return event_filter.get_all_entries()
def to_int(s: str):
    """Parse a human-formatted numeric string (e.g. '1,234', '-$5.75') into an int.

    Non-string input is returned unchanged. All characters except digits,
    dots and hyphens are stripped; a leading '-' marks the value negative.
    Returns None when nothing numeric remains ('', '-', '.') or the string
    has more than one decimal point. Fractions are truncated toward zero.
    """
    # isinstance (rather than `type(s) is not str`) also accepts str subclasses.
    if not isinstance(s, str):
        return s
    s = re.sub(r'[^0-9.\-]', '', s)
    is_negative = s.startswith('-')
    s = s.replace('-', '')
    if not s or not s.replace('.', ''):
        # Nothing left, or only dots.
        return None
    if s.count('.') > 1:
        return None
    magnitude = int(float(s))
    return -magnitude if is_negative else magnitude
class ActivityTask():
    """Mutable snapshot of an activity task handed out by the task queue.

    All fields default to None at class level and are populated by
    `from_poll_for_activity_task_response`.
    """

    # FIX: the method takes `cls` and calls `cls()`, but was missing the
    # @classmethod decorator, so `ActivityTask.from_poll_for_activity_task_response(task)`
    # bound `task` as `cls` and failed.
    @classmethod
    def from_poll_for_activity_task_response(cls, task: PollActivityTaskQueueResponse) -> 'ActivityTask':
        """Copy the relevant fields of a poll response into a new ActivityTask."""
        activity_task: 'ActivityTask' = cls()
        activity_task.task_token = task.task_token
        activity_task.workflow_execution = task.workflow_execution
        activity_task.activity_id = task.activity_id
        activity_task.activity_type = task.activity_type
        activity_task.scheduled_time = task.scheduled_time
        activity_task.schedule_to_close_timeout = task.schedule_to_close_timeout
        activity_task.start_to_close_timeout = task.start_to_close_timeout
        activity_task.heartbeat_timeout = task.heartbeat_timeout
        activity_task.attempt = task.attempt
        activity_task.heartbeat_details = task.heartbeat_details
        activity_task.workflow_namespace = task.workflow_namespace
        return activity_task

    # Class-level defaults; None means "not yet populated".
    task_token: bytes = None
    workflow_execution: WorkflowExecution = None
    activity_id: str = None
    activity_type: ActivityType = None
    scheduled_time: datetime = None
    schedule_to_close_timeout: timedelta = None
    start_to_close_timeout: timedelta = None
    heartbeat_timeout: timedelta = None
    attempt: int = None
    heartbeat_details: Payloads = None
    workflow_namespace: str = None
def test_generate_fiscal_date_range():
    """Pin generate_fiscal_date_range behavior across year, quarter, and
    single-day ranges for each frequency argument."""
    # Two calendar days spanning a fiscal-year boundary (FY2020 Q4 -> FY2021 Q1).
    start, end = date(2020, 9, 30), date(2020, 10, 1)
    boundary = [
        {'fiscal_year': 2020, 'fiscal_quarter': 4, 'fiscal_month': 12},
        {'fiscal_year': 2021, 'fiscal_quarter': 1, 'fiscal_month': 1},
    ]
    for frequency in ('fiscal_year', 'quarter', 'anything'):
        assert fyh.generate_fiscal_date_range(start, end, frequency) == boundary
    # A full fiscal year: one entry per year, four entries per quarter.
    start, end = date(2019, 10, 2), date(2020, 9, 30)
    q1 = {'fiscal_year': 2020, 'fiscal_quarter': 1, 'fiscal_month': 1}
    assert fyh.generate_fiscal_date_range(start, end, 'fiscal_year') == [q1]
    quarters = [
        q1,
        {'fiscal_year': 2020, 'fiscal_quarter': 2, 'fiscal_month': 4},
        {'fiscal_year': 2020, 'fiscal_quarter': 3, 'fiscal_month': 7},
        {'fiscal_year': 2020, 'fiscal_quarter': 4, 'fiscal_month': 10},
    ]
    assert fyh.generate_fiscal_date_range(start, end, 'quarter') == quarters
    # Degenerate single-day range.
    single_day = date(2021, 6, 23)
    one_entry = [{'fiscal_year': 2021, 'fiscal_quarter': 3, 'fiscal_month': 9}]
    for frequency in ('fiscal_year', 'quarter', 'anything'):
        assert fyh.generate_fiscal_date_range(single_day, single_day, frequency) == one_entry
# NOTE(review): the decorator was truncated to `.django_db` in this copy;
# restored as pytest's django_db mark — confirm against the original file.
@pytest.mark.django_db
def test_faba_excludes_non_selected_defc(client, monkeypatch, helpers, defc_codes, basic_ref_data, early_gtas, faba_with_non_covid_values):
    """Only award spending for the requested DEFC codes (M, N) is aggregated."""
    helpers.patch_datetime_now(monkeypatch, EARLY_YEAR, LATE_MONTH, 25)
    helpers.reset_dabs_cache()
    resp = client.get(OVERVIEW_URL + '?def_codes=M,N')
    assert resp.data['spending']['award_obligations'] == Decimal('1.6')
    assert resp.data['spending']['award_outlays'] == Decimal('0.8')
# NOTE(review): the decorator line was truncated to `_os(*metadata.platforms)`
# in this copy; restored as the usual RTA gate — confirm against the original.
@common.requires_os(*metadata.platforms)
def main():
    """Emulate launching a browser with remote debugging enabled.

    On macOS/Linux a bundled binary is copied to /tmp/google-chrome and
    started with the debug flags; on other OSes (Windows branch) a copied
    chrome.exe merely echoes the flags, then the copy is removed.
    """
    param1 = '--remote-debugging-port=9222'
    param2 = '--user-data-dir=remote-profile'
    if platform.system() == 'Darwin':
        # Pick the binary matching the CPU architecture.
        if platform.processor() == 'arm':
            name = 'com.apple.ditto_and_spawn_arm'
        else:
            name = 'com.apple.ditto_and_spawn_intel'
        source = common.get_path('bin', name)
        chrome = '/tmp/google-chrome'
        common.copy_file(source, chrome)
        common.log('Starting browser on debug mode')
        common.execute([chrome, param1, param2], timeout=10, kill=True)
    elif common.CURRENT_OS == 'linux':
        name = 'linux.ditto_and_spawn'
        source = common.get_path('bin', name)
        chrome = '/tmp/google-chrome'
        common.copy_file(source, chrome)
        common.log('Starting browser on debug mode')
        common.execute([chrome, param1, param2], timeout=10, kill=True)
    else:
        chrome = 'C:\\Users\\Public\\chrome.exe'
        common.copy_file(EXE_FILE, chrome)
        common.log('Mimicking the start of a browser on debug mode')
        common.execute([chrome, '/c', 'echo', param1, param2], timeout=10)
        common.remove_file(chrome)
class FirewallHelper():
    """Registry of connection-tracking helper objects, keyed by helper name."""

    def __init__(self, fw):
        self._fw = fw
        self._helpers = {}

    def __repr__(self):
        return '%s(%r)' % (self.__class__, self._helpers)

    def cleanup(self):
        """Forget every registered helper."""
        self._helpers.clear()

    def check_helper(self, name):
        """Raise INVALID_HELPER unless `name` is registered."""
        if name not in self.get_helpers():
            raise FirewallError(errors.INVALID_HELPER, name)

    def query_helper(self, name):
        """Return True when `name` is a registered helper."""
        return name in self.get_helpers()

    def get_helpers(self):
        """Return all registered helper names, sorted."""
        return sorted(self._helpers)

    def has_helpers(self):
        """Return True when at least one helper is registered."""
        return bool(self._helpers)

    def get_helper(self, name):
        """Return the helper object for `name`; raises when unknown."""
        self.check_helper(name)
        return self._helpers[name]

    def add_helper(self, obj):
        """Register `obj` under obj.name, replacing any previous entry."""
        self._helpers[obj.name] = obj

    def remove_helper(self, name):
        """Unregister `name`; raise INVALID_HELPER when it is absent."""
        if name not in self._helpers:
            raise FirewallError(errors.INVALID_HELPER, name)
        del self._helpers[name]
class OptionPlotoptionsLollipopSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
    """Generated accessor wrapper for the Highcharts option group
    plotOptions.lollipop.sonification.defaultInstrumentOptions.mapping.highpass.resonance.

    NOTE(review): each option appears as a same-named getter/setter pair; in
    the generator's normal output these carry @property / @<name>.setter
    decorators. As written here the later def simply overrides the earlier
    one — confirm against the generator output before relying on the getters.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TFCONSTANS(object):
    """Python mirror of the Java-side TFConstants configuration keys.

    NOTE: get_gateway() runs at class-definition (import) time, so importing
    this module requires a live JVM gateway. The class name's spelling
    ("CONSTANS") is kept as-is for backward compatibility with callers.
    """
    # Handle to the Java constants class; each key below is its string value.
    J_CONSTANTS = get_gateway().jvm.org.flinkextended.flink.ml.tensorflow.util.TFConstants
    TF_PORT = str(J_CONSTANTS.TF_PORT)
    TF_INFERENCE_EXPORT_PATH = str(J_CONSTANTS.TF_INFERENCE_EXPORT_PATH)
    TF_INFERENCE_INPUT_TENSOR_NAMES = str(J_CONSTANTS.TF_INFERENCE_INPUT_TENSOR_NAMES)
    TF_INFERENCE_OUTPUT_TENSOR_NAMES = str(J_CONSTANTS.TF_INFERENCE_OUTPUT_TENSOR_NAMES)
    TF_INFERENCE_OUTPUT_ROW_FIELDS = str(J_CONSTANTS.TF_INFERENCE_OUTPUT_ROW_FIELDS)
    TF_INFERENCE_BATCH_SIZE = str(J_CONSTANTS.TF_INFERENCE_BATCH_SIZE)
    TF_IS_CHIEF_ALONE = str(J_CONSTANTS.TF_IS_CHIEF_ALONE)
    TF_IS_CHIEF_ROLE = str(J_CONSTANTS.TF_IS_CHIEF_ROLE)
    TENSORBOARD_PORT = str(J_CONSTANTS.TENSORBOARD_PORT)
    INPUT_TF_EXAMPLE_CONFIG = str(J_CONSTANTS.INPUT_TF_EXAMPLE_CONFIG)
    OUTPUT_TF_EXAMPLE_CONFIG = str(J_CONSTANTS.OUTPUT_TF_EXAMPLE_CONFIG)
def test_insert_before_existing_case(task):
    """Restructure a CFG whose switch (indirect branch plus guarding branches)
    must absorb extra cases (400, 500) inserted before existing ones, and
    check the resulting AST: seq(code, switch with 8 children incl. default,
    code), with the expected constants, break flags and case bodies.

    NOTE(review): the scanf_call(...) in block 0 below has an empty argument
    slot (`, ,`) — a literal (most likely the format-string constant) was
    lost from this copy, so the line does not parse as-is. Restore it from
    the original test before running.
    """
    var_0_0 = Variable('var_0', Integer(32, True), None, True, Variable('var_10', Integer(32, True), 0, True, None))
    var_0_2 = Variable('var_0', Integer(32, True), None, True, Variable('var_10', Integer(32, True), 2, True, None))
    task.graph.add_nodes_from((vertices := [BasicBlock(0, [Assignment(ListOperation([]), print_call('Enter week number(1-7): ', 1)), Assignment(ListOperation([]), scanf_call(UnaryOperation(OperationType.address, [var_0_0], Pointer(Integer(32, True), 32), None, False), , 2)), Branch(Condition(OperationType.equal, [var_0_2, Constant(500, Integer(32, True))], CustomType('bool', 1)))]), BasicBlock(2, [Branch(Condition(OperationType.greater, [var_0_2, Constant(500, Integer(32, True))], CustomType('bool', 1)))]), BasicBlock(3, [Assignment(ListOperation([]), print_call('Friday', 3))]), BasicBlock(5, [Branch(Condition(OperationType.greater, [var_0_2, Constant(34, Integer(32, True))], CustomType('bool', 1)))]), BasicBlock(6, [Return(ListOperation([Constant(0, Integer(32, True))]))]), BasicBlock(7, [Assignment(ListOperation([]), print_call('Invalid input! Please enter week number between 1-7.', 11))]), BasicBlock(8, [Branch(Condition(OperationType.equal, [var_0_2, Constant(400, Integer(32, True))], CustomType('bool', 1)))]), BasicBlock(9, [Branch(Condition(OperationType.less, [var_0_2, Constant(0, Integer(32, True))], CustomType('bool', 1)))]), BasicBlock(10, [Assignment(ListOperation([]), print_call('Thursday', 7))]), BasicBlock(13, [Branch(Condition(OperationType.greater_us, [var_0_2, Constant(34, Integer(32, True))], CustomType('bool', 1)))]), BasicBlock(15, [IndirectBranch(var_0_2)]), BasicBlock(16, [Assignment(ListOperation([]), print_call('Monday', 4))]), BasicBlock(17, [Assignment(ListOperation([]), print_call('Tuesday', 5))]), BasicBlock(18, [Assignment(ListOperation([]), print_call('Wednesday', 6))]), BasicBlock(19, [Assignment(ListOperation([]), print_call('Saturday', 8))]), BasicBlock(20, [Assignment(ListOperation([]), print_call('Sunday', 9))])]))
    task.graph.add_edges_from([TrueCase(vertices[0], vertices[2]), FalseCase(vertices[0], vertices[1]), TrueCase(vertices[1], vertices[5]), FalseCase(vertices[1], vertices[3]), UnconditionalEdge(vertices[2], vertices[4]), TrueCase(vertices[3], vertices[6]), FalseCase(vertices[3], vertices[7]), UnconditionalEdge(vertices[5], vertices[4]), TrueCase(vertices[6], vertices[8]), FalseCase(vertices[6], vertices[5]), TrueCase(vertices[7], vertices[5]), FalseCase(vertices[7], vertices[9]), UnconditionalEdge(vertices[8], vertices[2]), TrueCase(vertices[9], vertices[5]), FalseCase(vertices[9], vertices[10]), SwitchCase(vertices[10], vertices[5], [Constant(i) for i in range(1, 34) if (i not in {6, 9, 12})]), SwitchCase(vertices[10], vertices[11], [Constant(0, Integer(32, True))]), SwitchCase(vertices[10], vertices[12], [Constant(12, Integer(32, True))]), SwitchCase(vertices[10], vertices[13], [Constant(34, Integer(32, True))]), SwitchCase(vertices[10], vertices[14], [Constant(6, Integer(32, True))]), SwitchCase(vertices[10], vertices[15], [Constant(9, Integer(32, True))]), UnconditionalEdge(vertices[11], vertices[4]), UnconditionalEdge(vertices[12], vertices[4]), UnconditionalEdge(vertices[13], vertices[4]), UnconditionalEdge(vertices[14], vertices[4]), UnconditionalEdge(vertices[15], vertices[4])])
    PatternIndependentRestructuring().run(task)
    # Top level: leading code, the switch, trailing code.
    assert (isinstance((seq_node := task._ast.root), SeqNode) and (len(seq_node.children) == 3))
    assert (isinstance(seq_node.children[0], CodeNode) and (seq_node.children[0].instructions == vertices[0].instructions[:(- 1)]))
    assert isinstance((switch := seq_node.children[1]), SwitchNode)
    assert (isinstance(seq_node.children[2], CodeNode) and (seq_node.children[2].instructions == vertices[4].instructions))
    # Case constants in ascending order; 400 falls through (break_case False).
    assert ((switch.expression == var_0_2) and (len(switch.children) == 8))
    assert (isinstance((case1 := switch.cases[0]), CaseNode) and (case1.constant == Constant(0, Integer(32, True))) and (case1.break_case is True))
    assert (isinstance((case2 := switch.cases[1]), CaseNode) and (case2.constant == Constant(6, Integer(32, True))) and (case2.break_case is True))
    assert (isinstance((case3 := switch.cases[2]), CaseNode) and (case3.constant == Constant(9, Integer(32, True))) and (case3.break_case is True))
    assert (isinstance((case4 := switch.cases[3]), CaseNode) and (case4.constant == Constant(12, Integer(32, True))) and (case4.break_case is True))
    assert (isinstance((case5 := switch.cases[4]), CaseNode) and (case5.constant == Constant(34, Integer(32, True))) and (case5.break_case is True))
    assert (isinstance((case6 := switch.cases[5]), CaseNode) and (case6.constant == Constant(400, Integer(32, True))) and (case6.break_case is False))
    assert (isinstance((case7 := switch.cases[6]), CaseNode) and (case7.constant == Constant(500, Integer(32, True))) and (case7.break_case is True))
    assert (isinstance((default := switch.default), CaseNode) and (default.constant == 'default') and (default.break_case is False))
    # Each case body must come from the expected original basic block.
    assert (isinstance(case1.child, CodeNode) and (case1.child.instructions == vertices[11].instructions))
    assert (isinstance(case2.child, CodeNode) and (case2.child.instructions == vertices[14].instructions))
    assert (isinstance(case3.child, CodeNode) and (case3.child.instructions == vertices[15].instructions))
    assert (isinstance(case4.child, CodeNode) and (case4.child.instructions == vertices[12].instructions))
    assert (isinstance(case5.child, CodeNode) and (case5.child.instructions == vertices[13].instructions))
    assert (isinstance(case6.child, CodeNode) and (case6.child.instructions == vertices[8].instructions))
    assert (isinstance(case7.child, CodeNode) and (case7.child.instructions == vertices[2].instructions))
    assert (isinstance(default.child, CodeNode) and (default.child.instructions == vertices[5].instructions))
class AttrSaver(BaseSaver):
    """Saver for attrs-backed Spock configs: flattens a Spockspace payload
    into a clean, serializable {class_name: {field: value}} dict."""

    def __init__(self, s3_config: Optional[_T]=None):
        super().__init__(s3_config=s3_config)

    def __call__(self, *args, **kwargs):
        # Factory hook: return a fresh saver instance.
        return AttrSaver(*args, **kwargs)

    def _clean_up_values(self, payload: Spockspace, remove_crypto: bool=True) -> Dict:
        """Flatten `payload`, drop empty class dicts and _-prefixed fields,
        and (optionally) strip the crypto salt/key entries."""
        out_dict = {}
        all_spock_cls = set(vars(payload).keys())
        out_dict = self._recursively_handle_clean(payload, out_dict, all_cls=all_spock_cls)
        clean_dict = self._clean_output(out_dict)
        # Drop classes whose cleaned dicts ended up empty.
        clean_dict = {k: v for (k, v) in clean_dict.items() if (len(v) > 0)}
        if remove_crypto:
            if ('__salt__' in clean_dict):
                _ = clean_dict.pop('__salt__')
            if ('__key__' in clean_dict):
                _ = clean_dict.pop('__key__')
        # Remove private (underscore-prefixed) fields from every class dict.
        out_dict = {}
        for (k, v) in clean_dict.items():
            cls_dict = {}
            for (ik, iv) in v.items():
                if (not ik.startswith('_')):
                    cls_dict.update({ik: iv})
            out_dict.update({k: cls_dict})
        return out_dict

    def _clean_tuner_values(self, payload: Spockspace) -> Dict:
        """Expand a two-level tuner payload via vars() and run standard cleanup."""
        out_dict = {k: {ik: vars(iv) for (ik, iv) in vars(v).items()} for (k, v) in vars(payload).items()}
        clean_dict = self._clean_output(out_dict)
        return clean_dict

    # FIX: declared without `self` yet invoked as
    # `self._check_list_of_spock_classes(...)` — without @staticmethod every
    # call bound `self` to `val` and raised TypeError (arity mismatch).
    @staticmethod
    def _check_list_of_spock_classes(val: List, key: str, all_cls: Set) -> List:
        """Map a list value: spock-class elements matching `key` become attr
        dicts, other spock-class elements collapse to their class name,
        everything else passes through."""
        clean_val = []
        repeat_flag = False
        for v in val:
            cls_name = type(v).__name__
            if ((cls_name in all_cls) and (cls_name == key)):
                clean_val.append(attr.asdict(v))
            elif (cls_name in all_cls):
                repeat_flag = True
                clean_val.append(cls_name)
            else:
                clean_val.append(v)
        if repeat_flag:
            # NOTE(review): [-1] yields a single element, not a deduplicated
            # list — looks suspicious but preserved as-is; confirm upstream.
            clean_val = list(set(clean_val))[(- 1)]
        return clean_val

    def _recursively_handle_clean(self, payload: Spockspace, out_dict: Dict, parent_name: Optional[str]=None, all_cls: Optional[Set]=None) -> Dict:
        """Walk `payload` attributes: containers get callables stringified,
        callables become strings, nested spock classes recurse (or collapse to
        their name when already nested under a parent), scalars pass through."""
        for (key, val) in vars(payload).items():
            val_name = type(val).__name__
            if isinstance(val, (dict, Dict, list, List, tuple, Tuple)):
                mod_val = (self._check_list_of_spock_classes(val, key, all_cls) if isinstance(val, (list, List)) else val)
                clean_val = _recurse_callables(mod_val, _callable_2_str, check_type=Callable)
                out_dict.update({key: clean_val})
            elif callable(val):
                out_dict.update({key: _callable_2_str(val)})
            elif ((val_name in all_cls) and (parent_name is not None)):
                out_dict.update({key: val_name})
            elif (val_name in all_cls):
                new_dict = self._recursively_handle_clean(val, {}, parent_name=key, all_cls=all_cls)
                out_dict.update({key: new_dict})
            else:
                out_dict.update({key: val})
        return out_dict
def run_python(path, env=None, args=None, timeout=None, pythonpath_extend=None, expect_pass=False):
    """Run a Python script in a subprocess and return its combined stdout/stderr bytes.

    path: script to run (may be falsy to rely solely on `args`, e.g. ['-c', ...]).
    env: extra environment overrides merged over a copy of os.environ.
    args: extra argv entries appended after the script path.
    timeout: seconds before the child is killed (default 10).
    pythonpath_extend: additional PYTHONPATH entries (relative ones are
        resolved against this package's parent directory).
    expect_pass: when True, enforce the "single line 'pass'" protocol —
        raises SkipTest on 'skip...' output and asserts otherwise.
    """
    new_argv = [sys.executable]
    new_env = os.environ.copy()
    new_env.setdefault('eventlet_test_in_progress', 'yes')
    src_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    if path:
        path = os.path.abspath(path)
        new_argv.append(path)
    new_env['PYTHONPATH'] = os.pathsep.join(sys.path + [src_dir])
    if env:
        new_env.update(env)
    if pythonpath_extend:
        new_path = [p for p in new_env.get('PYTHONPATH', '').split(os.pathsep) if p]
        new_path.extend(p if os.path.isabs(p) else os.path.join(src_dir, p) for p in pythonpath_extend)
        new_env['PYTHONPATH'] = os.pathsep.join(new_path)
    if args:
        new_argv.extend(args)
    p = subprocess.Popen(new_argv, env=new_env, stderr=subprocess.STDOUT, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    if timeout is None:
        timeout = 10
    try:
        output, _ = p.communicate(timeout=timeout)
    except subprocess.TimeoutExpired:
        p.kill()
        output, _ = p.communicate(timeout=timeout)
        if expect_pass:
            sys.stderr.write('Program {} output:\n---\n{}\n---\n'.format(path, output.decode()))
            assert False, 'timed out'
        # FIX: '{}'.format(output) on bytes rendered the b'...' repr into the
        # result; append the failure marker as bytes instead.
        return output + b'\nFAIL - timed out'
    if expect_pass:
        if output.startswith(b'skip'):
            # Child may report "skip" or "skip:<reason>".
            parts = output.rstrip().split(b':', 1)
            skip_args = []
            if len(parts) > 1:
                skip_args.append(parts[1])
            raise SkipTest(*skip_args)
        lines = output.splitlines()
        ok = lines[-1].rstrip() == b'pass'
        if not ok or len(lines) > 1:
            sys.stderr.write('Program {} output:\n---\n{}\n---\n'.format(path, output.decode()))
        assert ok, 'Expected single line "pass" in stdout'
    return output
class mem_eff_attention(Operator):
    """Memory-efficient attention operator (no profiler; dropout must be 0).

    __call__(q, k, v[, lengths_kv, lengths_q]) -> output tensor of shape
    [batch, q_shape[2], q_shape[1], v_last_dim]; also computes and records
    the required workspace size on the op's attributes.
    """

    def __init__(self, causal, dropout=0, variable_seq_length_kv=False, variable_seq_length_q=False, use_grouped_fmha=False) -> None:
        super().__init__()
        # Only dropout == 0 is supported by this op.
        assert (dropout == 0)
        self._attrs['op'] = 'mem_eff_attention'
        self._attrs['has_profiler'] = False
        self._attrs['dropout'] = dropout
        self._attrs['causal'] = causal
        self._attrs['variable_seq_length_kv'] = variable_seq_length_kv
        self._attrs['variable_seq_length_q'] = variable_seq_length_q
        # head_size is filled in at call time from v's last dimension.
        self._attrs['head_size'] = (- 1)
        self._attrs['workspace'] = 0
        self._attrs['use_grouped_fmha'] = use_grouped_fmha
        self.exec_key_template = EXEC_KEY_TEMPLATE
        self.shape_eval_template = SHAPE_FUNC_TEMPLATE

    def _infer_shape(self, x: List[int], w: List[int]):
        """Evaluate the rendered shape-function template (via exec) for one
        concrete input shape; returns [B, M, num_heads, Kv]."""
        eval_func = self.shape_eval_template.render(indent='', dtype='', div='//', x_dim0=x[0], x_dim1=x[1], x_dim2=x[2], x_dim3=w[3])
        output = {}
        # exec on a template we rendered ourselves (not untrusted input).
        exec(eval_func, output)
        return [int(output['B']), int(output['M']), int(output['num_heads']), int(output['Kv'])]

    def _infer_shapes(self, x: Tensor, w: Tensor):
        """Build the symbolic output shape from x and w.

        NOTE(review): the per-combination y_shapes computed here (and the
        nested `unique` helper) are never used — the returned shape is built
        directly from the IntVars below. Presumably leftover scaffolding.
        """
        x_shape_values = [var._attrs['values'] for var in x._attrs['shape']]
        x_shapes = itertools.product(*x_shape_values)
        w_shape = [var._attrs['values'][0] for var in w._attrs['shape']]
        y_shapes = []
        for x_shape in x_shapes:
            y_shape = self._infer_shape(x_shape, w_shape)
            y_shapes.append(y_shape)

        def unique(vector):
            return sorted(set(vector))
        batch_info = x._attrs['shape'][0]
        # Output: [batch, x_dim2, x_dim1, last dim of w].
        output_shape = [batch_info, x._attrs['shape'][2], x._attrs['shape'][1], w._attrs['shape'][(- 1)]]
        return output_shape

    def __call__(self, q: Tensor, k: Tensor, v: Tensor, lengths_kv: Optional[Tensor]=None, lengths_q: Optional[Tensor]=None) -> Tensor:
        """Wire q/k/v (plus optional per-sequence lengths) as inputs, size the
        workspace, and return the output tensor."""
        head_size_v = v._attrs['shape'][3]._attrs['values'][0]
        self._attrs['head_size'] = head_size_v
        self._attrs['inputs'] = [q, k, v]
        # Length tensors are mandatory when the matching variable-length
        # flag was set at construction time.
        if self._attrs['variable_seq_length_kv']:
            assert (lengths_kv is not None)
            self._attrs['inputs'].append(lengths_kv)
        if self._attrs['variable_seq_length_q']:
            assert (lengths_q is not None)
            self._attrs['inputs'].append(lengths_q)
        self._set_depth()
        self._extract_exec_path(q)
        output_shape = self._infer_shapes(q, v)
        required_workspace_size = self._compute_required_workspace(output_shape, q._attrs['shape'], k._attrs['shape'])
        self._attrs['workspace'] = required_workspace_size
        _LOGGER.debug(f'Required workspace size: {required_workspace_size}')
        output = Tensor(output_shape, src_ops={self}, dtype=self._attrs['inputs'][0]._attrs['dtype'])
        self._attrs['outputs'] = [output]
        return output

    def _compute_required_workspace(self, output_shape: Tuple[(IntVar, IntVar, IntVar, IntVar)], q_shape: Tuple[(IntVar, IntVar, IntVar, IntVar)], k_shape: Tuple[(IntVar, IntVar, IntVar, IntVar)]) -> int:
        """Return the workspace byte count: an fp32 accumulation buffer when
        needed, plus grouped-FMHA bookkeeping arrays when that mode is on."""
        is_float32 = (self._attrs['inputs'][0]._attrs['dtype'] not in ['float16', 'bfloat16'])
        o_shape = [var._attrs['values'][(- 1)] for var in output_shape]
        # Accumulate in a separate fp32 buffer for wide heads or half-precision inputs.
        needs_output_accum_buffer = ((o_shape[(- 1)] > 128) or (not is_float32))
        if needs_output_accum_buffer:
            size_of_accum_element = 4
            accu_size = (size_of_accum_element * np.prod(o_shape))
        else:
            accu_size = 0
        if (not self._attrs['use_grouped_fmha']):
            return accu_size
        # Grouped mode: per-problem size/ld/pointer arrays, sized by upper bounds.
        problem_count = (q_shape[0].upper_bound() * q_shape[1].upper_bound())
        size_of_int = 4
        size_of_int64 = 8
        size_of_gemm_coord = (3 * size_of_int)
        problem_sizes_size = ((2 * size_of_gemm_coord) * problem_count)
        ld_sizes = ((4 * size_of_int64) * problem_count)
        size_of_ptr = 8
        ptrs_sizes = ((5 * size_of_ptr) * problem_count)
        total_size = (((problem_sizes_size + accu_size) + ld_sizes) + ptrs_sizes)
        return total_size

    def _get_op_attributes(self):
        """Return the attributes needed to reconstruct this op (causal only)."""
        target_attrs = ['causal']
        attr = {}
        for target_attr in target_attrs:
            if (target_attr in self._attrs):
                attr[target_attr] = self._attrs[target_attr]
        return attr

    def _gen_exec_key(self, shape):
        """Render the exec-path key for one concrete 4-dim input shape."""
        return self.exec_key_template.render(x_dim0=shape[0], x_dim1=shape[1], x_dim2=shape[2], x_dim3=shape[3]).replace('\n', '')

    def _extract_exec_path(self, x: Tensor):
        """Populate exec_path with one (empty) entry per combination of x's
        possible dimension values."""
        x_shape_values = [var._attrs['values'] for var in x._attrs['shape']]
        x_shapes = itertools.product(*x_shape_values)
        self._attrs['exec_path'] = OrderedDict()
        for x_shape in x_shapes:
            key = self._gen_exec_key(x_shape)
            self._attrs['exec_path'][key] = ''

    def gen_function(self) -> str:
        """Dispatch C++/CUDA source generation to the current backend target."""
        target = backend.target.Target.current()
        self._attrs['arch'] = target._arch
        func_key = '{target}.{op}.gen_function'.format(target=target.name(), op=self._attrs['op'])
        func = registry.get(func_key)
        return func(self._attrs)
def run_live(func):
    """Decorator-style wrapper: run `func` (possibly a coroutine) against a
    live server with stdout/stderr captured, then compare the captured
    Python- and JS-side output against the reference embedded in `func`'s
    docstring (Python part and JS part separated by a '----------' line).
    """
    def runner():
        loop.reset()
        asyncio_loop = asyncio.new_event_loop()
        app.create_server(port=0, loop=asyncio_loop)
        print('running', func.__name__, '...', end='')
        # Capture everything the test prints so it can be diffed later.
        orig_stdout = sys.stdout
        orig_stderr = sys.stderr
        fake_stdout = FakeStream()
        sys.stdout = sys.stderr = fake_stdout
        t0 = time.time()
        try:
            cr = func()
            if asyncio.iscoroutine(cr):
                asyncio_loop.run_until_complete(cr)
            gc.collect()
        finally:
            # Always restore the real streams, even when func raised.
            sys.stdout = orig_stdout
            sys.stderr = orig_stderr
        print(('done in %f seconds' % (time.time() - t0)))
        # Tear down every non-default app session opened during the run.
        for appname in app.manager.get_app_names():
            if ('default' not in appname):
                sessions = app.manager.get_connections(appname)
                for session in sessions:
                    if (session.app is not None):
                        session.app.dispose()
                    session.close()
        loop.reset()
        (pyresult, jsresult) = filter_stdout(fake_stdout.getvalue())
        # The docstring is the reference output: strip the 4-space indent,
        # then split the Python part from the JS part on the dashed divider.
        reference = '\n'.join((line[4:] for line in func.__doc__.splitlines()))
        parts = reference.split(('-' * 10))
        pyref = parts[0].strip(' \n')
        jsref = parts[(- 1)].strip(' \n-')
        smart_compare(func, ('Python', pyresult, pyref), ('JavaScript', jsresult, jsref))
    return runner
def test_prepare_dict():
    """A _prepare_dict override should drop None-valued fields from asdict()."""

    class Quote(Record):
        ask_price: Optional[float] = None
        bid_price: Optional[float] = None

        def _prepare_dict(self, payload):
            # Keep only the fields that actually carry a value.
            return {key: value for key, value in payload.items() if value is not None}

    assert Quote().asdict() == {}
    assert Quote(1.0, 2.0).asdict() == {'ask_price': 1.0, 'bid_price': 2.0}
    assert Quote(None, 2.0).asdict() == {'bid_price': 2.0}
class EvAdventureRunestone(EvAdventureWeapon, EvAdventureConsumable):
    """Two-handed magic weapon with limited uses: each use decrements `uses`,
    and refresh() restores a single charge."""
    obj_type = (ObjType.WEAPON, ObjType.MAGIC)
    inventory_use_slot = WieldLocation.TWO_HANDS
    # Attribute-backed stats: INT to attack, DEX to defend, 1d8 damage.
    quality = AttributeProperty(3)
    attack_type = AttributeProperty(Ability.INT)
    defense_type = AttributeProperty(Ability.DEX)
    damage_roll = AttributeProperty('1d8')

    def at_post_use(self, user, *args, **kwargs):
        # Consume one charge after a successful use.
        self.uses -= 1

    def refresh(self):
        # Restore the rune to exactly one available use.
        self.uses = 1
class OptionPlotoptionsPyramid3dSonificationTracksMappingHighpassFrequency(Options):
    """Generated accessor wrapper for the Highcharts option group
    plotOptions.pyramid3d.sonification.tracks.mapping.highpass.frequency.

    NOTE(review): each option appears as a same-named getter/setter pair; in
    the generator's normal output these carry @property / @<name>.setter
    decorators. As written here the later def simply overrides the earlier
    one — confirm against the generator output before relying on the getters.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class IncorrectVersionError(OneCommandRunnerBaseException, ValueError):
    """Raised when a computation was attempted with a binary tier different
    from the instance's expected tier."""

    # FIX: both methods take `cls` and call other `cls` members, but were
    # missing the @classmethod decorator, so calling them via the class
    # bound the first positional argument as `cls`.
    @classmethod
    def make_error(cls, instance_id: str, expected_tier: PCSTier, actual_tier: PCSTier) -> 'IncorrectVersionError':
        """Build the fully-worded error (message, cause, remediation, exit code)."""
        return cls(msg=f'Expected version for instance {instance_id} is {expected_tier.value} but the computation was attempted with {actual_tier.value}.', cause='The binary_version parameter in your config.yml is incorrect', remediation=f'If using run_fbpcs.sh, the script will auto retry. If you see this message but the computation continued anyway, you can ignore it. If the computation did not auto retry, you can manually pass -- --version={expected_tier.value} to end of the command. If you are not using run_fbpcs.sh, you should update the binary_version field in your config.yml to be binary_version: {expected_tier.value}', exit_code=cls._determine_exit_code(expected_tier))

    @classmethod
    def _determine_exit_code(cls, expected_tier: PCSTier) -> OneCommandRunnerExitCode:
        """Map the expected tier to its dedicated exit code (generic fallback)."""
        if (expected_tier is PCSTier.RC):
            return OneCommandRunnerExitCode.INCORRECT_TIER_EXPECTED_RC
        elif (expected_tier is PCSTier.CANARY):
            return OneCommandRunnerExitCode.INCORRECT_TIER_EXPECTED_CANARY
        elif (expected_tier is PCSTier.PROD):
            return OneCommandRunnerExitCode.INCORRECT_TIER_EXPECTED_LATEST
        else:
            return OneCommandRunnerExitCode.INCORRECT_TIER
class OptionSeriesPolygonSonificationTracksMappingPitch(Options):
    """Highcharts `series.polygon.sonification.tracks.mapping.pitch` options.

    Fix: each getter/setter pair was defined as two plain methods sharing a
    name, so the setter shadowed the getter. Restored the
    @property / @<name>.setter pairing.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Defaults to mapping pitch from the 'y' value.
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class RijndaelDecryptor():
    """Decrypts single 16-byte blocks with a pre-expanded Rijndael (AES) key.

    NOTE(review): relies on module-level helpers (KEYLENGTH, RKLENGTH,
    NROUNDS, rijndaelSetupDecrypt, rijndaelDecrypt) defined elsewhere in
    this file.
    """

    def __init__(self, key, keybits=256):
        # The raw key must match the requested key size (e.g. 32 bytes for 256 bits).
        assert (len(key) == KEYLENGTH(keybits))
        # Expand the key into the decryption round-key schedule.
        (self.rk, self.nrounds) = rijndaelSetupDecrypt(key, keybits)
        # Sanity-check the schedule length and round count for this key size.
        assert (len(self.rk) == RKLENGTH(keybits))
        assert (self.nrounds == NROUNDS(keybits))
        return

    def decrypt(self, ciphertext):
        """Decrypt exactly one 16-byte block and return the plaintext block."""
        assert (len(ciphertext) == 16)
        return rijndaelDecrypt(self.rk, self.nrounds, ciphertext)
def substitute_args(param: Union[(str, Dict)], context: Optional[Dict]=None, only: Optional[List[str]]=None):
    """Recursively substitute argument placeholders in strings, dicts and lists.

    A string argument is resolved and the resolved value returned. Dicts and
    lists are mutated in place: string members are resolved, nested dicts and
    lists are visited recursively. The (possibly mutated) container is returned.
    """
    if isinstance(param, str):
        return resolve_args(param, context, only=only)
    if isinstance(param, dict):
        for key, value in param.items():
            if isinstance(value, (dict, list)):
                substitute_args(value, context, only=only)
            elif isinstance(value, str):
                param[key] = resolve_args(value, context, only=only)
    elif isinstance(param, list):
        for idx, item in enumerate(param):
            if isinstance(item, (dict, list)):
                substitute_args(item, context, only=only)
            elif isinstance(item, str):
                param[idx] = resolve_args(item, context, only=only)
    return param
class RxErrors(base_tests.SimpleDataPlane):
    """Verify that a port-stats reply carries an rx_errors counter."""

    def runTest(self):
        logging.info('Running Rx_Errors test')
        # Fix: dict.keys() returns a view with no .sort() on Python 3;
        # build a sorted list instead.
        of_ports = sorted(config['port_map'].keys())
        self.assertTrue((len(of_ports) > 1), 'Not enough ports for test')
        # Start from a clean flow table before requesting stats.
        delete_all_flows(self.controller)
        logging.info('Send Port_Stats Request')
        logging.info('Verify reply has rx_errors count ')
        counter = get_portstats(self, of_ports[0])
        # Index 6 of the stats tuple is the rx_errors counter.
        rx_err = counter[6]
        logging.info(('Recieve Errors count is :' + str(rx_err)))
def extractFlorilegeHomeBlog(item):
    """Parse a release from a 'Florilege Home' blog feed item.

    Returns a release message for known tags, None for previews or items
    without chapter/volume info, and False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ("i'll be the warrior's mother", "i'll be the warrior's mother", 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesColumnrangeMarkerStatesSelect(Options):
    """Highcharts `series.columnrange.marker.states.select` options.

    Fix: each getter/setter pair was defined as two plain methods sharing a
    name, so the setter shadowed the getter. Restored the
    @property / @<name>.setter pairing.
    """

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def fillColor(self):
        return self._config_get('#cccccc')

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        return self._config_get('#000000')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(2)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        return self._config_get(None)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)
class OptionSeriesFunnelDataAccessibility(Options):
    """Highcharts `series.funnel.data.accessibility` options.

    Fix: getter/setter pairs shared one method name, so the setters shadowed
    the getters. Restored the @property / @<name>.setter pairing.
    """

    @property
    def description(self):
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class TestDefaultHandler(ConfirmationAW3TestCase):
    """Tests for the confirmation_aw3 skill's DefaultHandler."""

    path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'confirmation_aw3')

    @classmethod
    def setup(cls):
        """Bind handler, strategy and dialogues from the loaded skill context.

        Fix: the method takes `cls` and extends the base-class classmethod
        `setup`, but the @classmethod decorator was missing.
        """
        super().setup()
        cls.default_handler = cast(DefaultHandler, cls._skill.skill_context.handlers.default_handler)
        cls.logger = cls._skill.skill_context.logger
        cls.strategy = cast(Strategy, cls._skill.skill_context.strategy)
        cls.default_dialogues = cast(DefaultDialogues, cls._skill.skill_context.default_dialogues)
        cls.list_of_default_messages = (DialogueMessage(DefaultMessage.Performative.BYTES, {'content': b'some_content'}),)
        cls.confirmed_aea = b'ConfirmedAEA'
        cls.developer_handle = b'DeveloperHandle'

    def test_setup(self):
        """setup() is a no-op and sends nothing."""
        assert (self.default_handler.setup() is None)
        self.assert_quantity_in_outbox(0)

    def test_handle_unidentified_dialogue(self):
        """An invalid dialogue reference is logged as an unidentified dialogue."""
        incorrect_dialogue_reference = ('', '')
        incoming_message = self.build_incoming_message(message_type=DefaultMessage, dialogue_reference=incorrect_dialogue_reference, performative=DefaultMessage.Performative.BYTES, content=b'some_content')
        with patch.object(self.logger, 'log') as mock_logger:
            self.default_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received invalid default message={incoming_message}, unidentified dialogue.')

    def test_handle_bytes_i(self):
        """A valid 'aea_developer' payload from the AW1 AEA is registered in the db."""
        incoming_message = cast(DefaultMessage, self.build_incoming_message(message_type=DefaultMessage, performative=DefaultMessage.Performative.BYTES, content=((self.confirmed_aea + b'_') + self.developer_handle), sender=self.aw1_aea))
        with patch.object(LedgerApis, 'is_valid_address', return_value=True):
            with patch.object(self.logger, 'log') as mock_logger:
                with patch.object(self.strategy, 'register_counterparty') as mock_register:
                    self.default_handler.handle(incoming_message)
        mock_register.called_once()
        mock_logger.assert_any_call(logging.INFO, f"adding confirmed_aea={self.confirmed_aea.decode('utf-8')} with developer_handle={self.developer_handle.decode('utf-8')} to db.")

    def test_handle_bytes_ii(self):
        """A payload with no developer handle is rejected with a warning."""
        incorrect_content = 'some_incorrect_content'
        incoming_message = cast(DefaultMessage, self.build_incoming_message(message_type=DefaultMessage, performative=DefaultMessage.Performative.BYTES, content=incorrect_content, sender=self.aw1_aea))
        with patch.object(LedgerApis, 'is_valid_address', return_value=True):
            with patch.object(self.logger, 'log') as mock_logger:
                with patch.object(self.strategy, 'register_counterparty') as mock_register:
                    self.default_handler.handle(incoming_message)
        mock_register.called_once()
        mock_logger.assert_any_call(logging.WARNING, 'received invalid developer_handle=.')

    def test_handle_bytes_iii(self):
        """A payload whose AEA address fails validation is rejected with a warning."""
        incoming_message = cast(DefaultMessage, self.build_incoming_message(message_type=DefaultMessage, performative=DefaultMessage.Performative.BYTES, content=((self.confirmed_aea + b'_') + self.developer_handle), sender=self.aw1_aea))
        with patch.object(LedgerApis, 'is_valid_address', return_value=False):
            with patch.object(self.logger, 'log') as mock_logger:
                with patch.object(self.strategy, 'register_counterparty') as mock_register:
                    self.default_handler.handle(incoming_message)
        mock_register.called_once()
        default_dialogue = cast(DefaultDialogue, self.default_dialogues.get_dialogue(incoming_message))
        mock_logger.assert_any_call(logging.WARNING, f"received invalid address={self.confirmed_aea.decode('utf-8')} in dialogue={default_dialogue}.")

    def test_handle_bytes_iv(self):
        """BYTES messages from any sender other than the AW1 AEA are rejected."""
        incoming_message = cast(DefaultMessage, self.build_incoming_message(message_type=DefaultMessage, performative=DefaultMessage.Performative.BYTES, content=((self.confirmed_aea + b'_') + self.developer_handle), sender='some_other_aea'))
        with patch.object(self.logger, 'log') as mock_logger:
            with patch.object(self.strategy, 'register_counterparty') as mock_register:
                self.default_handler.handle(incoming_message)
        mock_register.called_once()
        default_dialogue = cast(DefaultDialogue, self.default_dialogues.get_dialogue(incoming_message))
        mock_logger.assert_any_call(logging.WARNING, f'cannot handle default message of performative={incoming_message.performative} in dialogue={default_dialogue}. Invalid sender={incoming_message.sender}')

    def test_handle_invalid(self):
        """Performatives other than BYTES are logged as unhandled."""
        default_dialogue = cast(DefaultDialogue, self.prepare_skill_dialogue(dialogues=self.default_dialogues, messages=self.list_of_default_messages[:1]))
        incoming_message = cast(DefaultMessage, self.build_incoming_message_for_skill_dialogue(dialogue=default_dialogue, performative=DefaultMessage.Performative.ERROR, error_code=DefaultMessage.ErrorCode.DECODING_ERROR, error_msg='some_error_message', error_data={'some_key': b'some_value'}))
        with patch.object(self.logger, 'log') as mock_logger:
            self.default_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.WARNING, f'cannot handle default message of performative={incoming_message.performative} in dialogue={default_dialogue}.')

    def test_teardown(self):
        """teardown() is a no-op and sends nothing."""
        assert (self.default_handler.teardown() is None)
        self.assert_quantity_in_outbox(0)
def test_get_tracking_db_errors_bad_schema_version(db_path):
    """Corrupting the stored schema version makes get_tracking_database raise BadDatabaseError."""
    # A fresh database has a valid schema version.
    session_a = get_tracking_database(db_path)
    assert (_check_schema_version(session_a) is True)
    # Overwrite the stored version with an unrecognized value.
    schema_version = session_a.query(SchemaVersion).one()
    schema_version.version = 'unknown'
    session_a.add(schema_version)
    session_a.commit()
    # Drop the session so the next open re-reads the version from disk.
    del session_a
    session_b = _get_session(db_path)
    assert (_check_schema_version(session_b) is False)
    del session_b
    # The public entry point must refuse to open the corrupted database.
    with pytest.raises(BadDatabaseError):
        get_tracking_database(db_path)
class OptionSeriesFunnelSonificationContexttracksMapping(Options):
    """Highcharts `series.funnel.sonification.contextTracks.mapping` options.

    Fix: `text` had a plain getter and setter sharing one name, so the setter
    shadowed the getter. Restored the @property / @text.setter pairing and
    exposed the sub-option accessors as read-only properties for consistency.
    """

    @property
    def frequency(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingFrequency':
        return self._config_sub_data('frequency', OptionSeriesFunnelSonificationContexttracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionSeriesFunnelSonificationContexttracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingHighpass':
        return self._config_sub_data('highpass', OptionSeriesFunnelSonificationContexttracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionSeriesFunnelSonificationContexttracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionSeriesFunnelSonificationContexttracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingPan':
        return self._config_sub_data('pan', OptionSeriesFunnelSonificationContexttracksMappingPan)

    @property
    def pitch(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingPitch':
        return self._config_sub_data('pitch', OptionSeriesFunnelSonificationContexttracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesFunnelSonificationContexttracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingRate':
        return self._config_sub_data('rate', OptionSeriesFunnelSonificationContexttracksMappingRate)

    @property
    def text(self):
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingTime':
        return self._config_sub_data('time', OptionSeriesFunnelSonificationContexttracksMappingTime)

    @property
    def tremolo(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionSeriesFunnelSonificationContexttracksMappingTremolo)

    @property
    def volume(self) -> 'OptionSeriesFunnelSonificationContexttracksMappingVolume':
        return self._config_sub_data('volume', OptionSeriesFunnelSonificationContexttracksMappingVolume)
def parse_eml(internal_msg, wsl):
    """Analyze an attached email message and extract indicators from it.

    Decodes the Subject, collects observables from selected headers and from
    text/html bodies, extracts non-whitelisted attachments and their SHA-256
    hashes, and re-serializes the whole message as an in-memory .eml file.

    Returns (subject_field, observables_header, observables_body,
    attachments, hashes_attachments, eml_file_tuple).
    """
    # Decode the (possibly RFC 2047 MIME-encoded) Subject into a plain string.
    decode_subj = email.header.decode_header(internal_msg['Subject'])
    decoded_elements_subj = []
    for decode_elem in decode_subj:
        if (decode_elem[1] is not None):
            if (str(decode_elem[1]) == 'unknown-8bit'):
                # Charset unknown: fall back to the default (UTF-8) decode.
                decoded_elements_subj.append(decode_elem[0].decode())
            else:
                decoded_elements_subj.append(decode_elem[0].decode(decode_elem[1]))
        elif isinstance(decode_elem[0], str):
            # Already a str (unencoded header fragment).
            decoded_elements_subj.append(str(decode_elem[0]))
        else:
            decoded_elements_subj.append(decode_elem[0].decode())
    subject_field = ''.join(decoded_elements_subj)
    log.info('Analyzing attached message with subject: {}'.format(subject_field))
    wsl.emit_info('Analyzing attached message with subject: {}'.format(subject_field))
    attachments = []
    hashes_attachments = []
    observables_body = []
    observables_header = {}
    # Header fields that may carry routing/origin observables worth scanning.
    header_fields_list = ['To', 'From', 'Sender', 'Cc', 'Delivered-To', 'Return-Path', 'Reply-To', 'Bounces-to', 'Received', 'X-Received', 'X-OriginatorOrg', 'X-Sender-IP', 'X-Originating-IP', 'X-SenderIP', 'X-Originating-Email']
    parser = email.parser.HeaderParser()
    header_fields = parser.parsestr(internal_msg.as_string())
    # Walk headers by index so repeated fields (e.g. multiple Received) are
    # all visited; keys()/values() stay index-aligned.
    i = 0
    while (i < len(header_fields.keys())):
        if (header_fields.keys()[i] in header_fields_list):
            if (not observables_header.get(header_fields.keys()[i])):
                observables_header[header_fields.keys()[i]] = []
            observables_header[header_fields.keys()[i]].extend(search_observables(header_fields.values()[i], wsl))
        i += 1
    # Walk every MIME part: scan inline text/html bodies, collect attachments.
    for part in internal_msg.walk():
        mimetype = part.get_content_type()
        content_disposition = part.get_content_disposition()
        if (content_disposition != 'attachment'):
            if (mimetype == 'text/plain'):
                try:
                    body = part.get_payload(decode=True).decode()
                except UnicodeDecodeError:
                    # Not valid UTF-8: retry with Latin-1, which never fails.
                    body = part.get_payload(decode=True).decode('ISO-8859-1')
                observables_body.extend(search_observables(body, wsl))
            elif (mimetype == 'text/html'):
                try:
                    html = part.get_payload(decode=True).decode()
                except UnicodeDecodeError:
                    html = part.get_payload(decode=True).decode('ISO-8859-1')
                # Unescape &amp; and URL-decode so obfuscated URLs are found.
                html_urldecoded = urllib.parse.unquote(html.replace('&amp;', '&'))
                observables_body.extend(search_observables(html_urldecoded, wsl))
        else:
            filename = part.get_filename()
            if (filename and mimetype):
                if (is_whitelisted('filename', filename) or is_whitelisted('filetype', mimetype)):
                    log.info('Skipped whitelisted observable file: {0}'.format(filename))
                    wsl.emit_info('Skipped whitelisted observable file: {0}'.format(filename))
                else:
                    # Keep the decoded attachment bytes in memory for later analysis.
                    inmem_file = io.BytesIO(part.get_payload(decode=1))
                    attachments.append((inmem_file, filename))
                    log.info('Found observable file: {0}'.format(filename))
                    wsl.emit_info('Found observable file: {0}'.format(filename))
                    # Hash the attachment; the hash itself may be whitelisted.
                    sha256 = hashlib.sha256()
                    sha256.update(part.get_payload(decode=1))
                    hash_attachment = {}
                    hash_attachment['hashValue'] = sha256.hexdigest()
                    hash_attachment['hashedAttachment'] = filename
                    if is_whitelisted('hash', hash_attachment['hashValue']):
                        log.info('Skipped whitelisted observable hash: {0}'.format(hash_attachment['hashValue']))
                        wsl.emit_info('Skipped whitelisted observable hash: {0}'.format(hash_attachment['hashValue']))
                    else:
                        hashes_attachments.append(hash_attachment)
                        log.info('Found observable hash {0} calculated from file: {1}'.format(hash_attachment['hashValue'], filename))
                        wsl.emit_info('Found observable hash {0} calculated from file: {1}'.format(hash_attachment['hashValue'], filename))
    # Re-serialize the whole message as an in-memory .eml named after the subject.
    filename = (subject_field + '.eml')
    inmem_file = io.BytesIO()
    gen = email.generator.BytesGenerator(inmem_file)
    gen.flatten(internal_msg)
    eml_file_tuple = (inmem_file, filename)
    # Strip trailing markup delimiters from extracted URLs.
    for observable_body in observables_body:
        if (observable_body['type'] == 'url'):
            observable_body['value'] = observable_body['value'].replace('>', '<').split('<')[0]
    return (subject_field, observables_header, observables_body, attachments, hashes_attachments, eml_file_tuple)
class TestDialogues():
    """Tests for agent/server Prometheus dialogue creation."""

    def setup_class(cls):
        # pytest xunit-style class fixture: runs once before the tests below.
        cls.agent_addr = 'agent address'
        cls.server_addr = 'server address'
        cls.agent_dialogues = AgentDialogues(cls.agent_addr)
        cls.server_dialogues = ServerDialogues(cls.server_addr)

    def test_create_self_initiated(self):
        """A self-initiated dialogue gets the AGENT role."""
        result = self.agent_dialogues._create_self_initiated(dialogue_opponent_addr=self.server_addr, dialogue_reference=(str(0), ''), role=PrometheusDialogue.Role.AGENT)
        assert isinstance(result, PrometheusDialogue)
        assert (result.role == PrometheusDialogue.Role.AGENT), 'The role must be Agent.'

    def test_create_opponent_initiated(self):
        """An opponent-initiated dialogue also gets the AGENT role when requested."""
        result = self.agent_dialogues._create_opponent_initiated(dialogue_opponent_addr=self.server_addr, dialogue_reference=(str(0), ''), role=PrometheusDialogue.Role.AGENT)
        assert isinstance(result, PrometheusDialogue)
        assert (result.role == PrometheusDialogue.Role.AGENT), 'The role must be Agent.'
# Fix: these were bare `_app.route(...)` calls, which create the route rules
# but never attach the view — they must be decorators on index_page.
@_app.route('/')
@_app.route('/checkversion/<version>/')
def index_page(version=None):
    """Render the game index, optionally warning about an outdated client version."""
    show_old_version_notice = False
    if config.CONFIG.check_version and version is not None:
        show_old_version_notice = utils.is_old_version(version)
    games = datamodel.get_available_games_dict().values()
    return render_template('game_index.html', games=games, show_old_version_notice=show_old_version_notice, version=utils.VERSION, update_version_id=version)
class KrxHistoricalDailyPriceData(DataBase):
    """Backtrader data feed streaming KRX daily price rows from a loader cursor.

    Adds `amount`, `marketcap` and `shares` lines on top of the standard
    OHLCV feed. Only daily (TimeFrame.Days, compression 1) data is supported.
    """

    params = (('loader', None), ('symbol', None), ('name', None), ('fromdate', None), ('todate', None), ('compression', 1), ('timeframe', TimeFrame.Days), ('calendar', None), ('lazy', False))
    lines = ('amount', 'marketcap', 'shares')

    def __init__(self):
        super().__init__()
        assert self.p.loader
        assert self.p.symbol
        assert (self.p.timeframe == TimeFrame.Days)
        assert (self.p.compression == 1)
        self.p.name = (self.p.name or self.p.symbol or '')
        self._cursor = None
        self._started_already = False
        # Eagerly open the cursor unless lazy loading was requested.
        if (not self.p.lazy):
            self.start()

    def _close_cursor(self):
        # Idempotent: safe to call when no cursor is open.
        if (self._cursor is not None):
            self._cursor.close()
            self._cursor = None

    def _initialize_cursor(self):
        self._close_cursor()
        self._cursor = self.p.loader.load_as_cursor(self.p.symbol, start_time=self.p.fromdate, end_time=self.p.todate)

    def start(self):
        if (not self._started_already):
            self._initialize_cursor()
            self._started_already = True

    def stop(self):
        self._close_cursor()
        self._started_already = False

    def _load(self):
        """Pull one row from the cursor into the feed lines; False when exhausted."""
        if (self._cursor is None):
            return False
        try:
            (date, open_, high, low, close, volume, amount, marcap, shares) = next(self._cursor)
        except StopIteration:
            return False
        else:
            dt = pd.Timestamp(date)
            self.lines.datetime[0] = date2num(dt)
            self.lines.open[0] = open_
            self.lines.high[0] = high
            self.lines.low[0] = low
            self.lines.close[0] = close
            self.lines.volume[0] = volume
            self.lines.openinterest[0] = 0.0
            self.lines.amount[0] = amount
            self.lines.marketcap[0] = marcap
            self.lines.shares[0] = shares
            return True

    @classmethod
    def adddata_fromfile(cls, cerebro, filename, symbols=None, fromdate=None, todate=None, progress_bar=True):
        """Add one feed per symbol from *filename* to *cerebro*.

        Fix: this method uses `cls` as its first parameter (and instantiates
        `cls(...)`) but was missing the @classmethod decorator, so calling it
        on the class would have bound `cerebro` to `cls`.
        """
        loader = KrxHistoricalDailyPriceDataForBacktestLoader(filename)
        if (symbols is None):
            symbols = loader.get_symbols()
        progress = tqdm(symbols, disable=(not progress_bar))
        for symbol in progress:
            progress.set_description(('Adding Symbol [%s]' % symbol))
            data = cls(loader=loader, symbol=symbol, fromdate=fromdate, todate=todate, name=symbol)
            cerebro.adddata(data, name=data.p.name)
def connect_with_timeout(resolver, port: int, timeout: float):
    """Retry connect() every 0.5s until it succeeds or *timeout* seconds have elapsed.

    Re-raises the last NoConnectException once the accumulated wait reaches
    the timeout.
    """
    retry_interval = 0.5
    waited = 0.0
    while True:
        try:
            result = connect(resolver, port)
        except NoConnectException as exc:
            if waited >= timeout:
                LOGGER.debug('connection failed, timeout exceeded (%.1f >= %s)', waited, timeout)
                raise exc
            LOGGER.debug('connection failed, try again in %.1f (%.1f)', retry_interval, waited)
            sleep(retry_interval)
            waited += retry_interval
        else:
            LOGGER.debug('connected to port %s', port)
            return result
class Query(commons.BaseRequest):
    """Salesforce SOQL query request: URL-encodes the query into the service path."""

    def __init__(self, session_id, instance_url, query_string, **kwargs):
        super(Query, self).__init__(session_id, instance_url, **kwargs)
        # Encode as q=<query>; the query text is UTF-8 encoded before urlencoding.
        qry = urlencode({'q': query_string.encode('utf-8')})
        # QUERY_SERVICE is a format template taking (api_version, querystring).
        self.service = (QUERY_SERVICE % (self.api_version, qry))
class OptionSeriesSolidgaugeSonificationTracksMappingVolume(Options):
    """Highcharts `series.solidgauge.sonification.tracks.mapping.volume` options.

    Fix: getter/setter pairs shared one method name, so the setters shadowed
    the getters. Restored the @property / @<name>.setter pairing.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class SetContext(ContextModule):
    """Identity module that stores (or merges via callback) its input into a named context."""

    def __init__(self, context: str, key: str, callback: (Callable[([Any, Any], Any)] | None)=None) -> None:
        super().__init__()
        self.context = context
        self.key = key
        self.callback = callback

    def __call__(self, x: Tensor) -> Tensor:
        # Passes x through unchanged; the context is only a side channel.
        ctx = self.use_context(self.context)
        if ctx:
            if self.callback:
                # Merge x into the existing entry via the user callback.
                self.callback(ctx[self.key], x)
            else:
                # Plain store/overwrite.
                ctx.update({self.key: x})
        return x

    def __repr__(self):
        return f'{self.__class__.__name__}(context={repr(self.context)}, key={repr(self.key)})'
class TaskMetaData():
    """Collects metadata about a task and its outputs for serialization."""

    def __init__(self, task_name, task_id):
        self.task_name = task_name
        self.task_id = task_id
        self.outputs_list = []
        self.number_of_outputs = 0

    def add_output_meta_data(self, output_meta_data):
        """Register one output's metadata object (must expose to_dict())."""
        self.outputs_list.append(output_meta_data)
        self.number_of_outputs += 1

    def to_dict(self):
        """Return a plain-dict view; 'outputs' is present only when non-empty."""
        meta_data = {'task_name': self.task_name, 'task_id': self.task_id}
        outputs_dict = [output.to_dict() for output in self.outputs_list]
        if len(outputs_dict) > 0:
            meta_data['outputs'] = outputs_dict
        return meta_data

    def to_json(self):
        """Serialize the metadata to a JSON object string.

        Fix: the previous implementation called json.dumps(str(self.to_dict())),
        which produced a JSON *string* containing the Python repr of the dict
        (single quotes and all) instead of a JSON object.
        """
        return json.dumps(self.to_dict())
class OptionPlotoptionsColumnrangeSonificationTracksMapping(Options):
    """Highcharts `plotOptions.columnrange.sonification.tracks.mapping` options.

    Fix: `text` had a plain getter and setter sharing one name, so the setter
    shadowed the getter. Restored the @property / @text.setter pairing and
    exposed the sub-option accessors as read-only properties for consistency.
    """

    @property
    def frequency(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsColumnrangeSonificationTracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsColumnrangeSonificationTracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsColumnrangeSonificationTracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsColumnrangeSonificationTracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsColumnrangeSonificationTracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsColumnrangeSonificationTracksMappingPan)

    @property
    def pitch(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsColumnrangeSonificationTracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsColumnrangeSonificationTracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsColumnrangeSonificationTracksMappingRate)

    @property
    def text(self):
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsColumnrangeSonificationTracksMappingTime)

    @property
    def tremolo(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsColumnrangeSonificationTracksMappingTremolo)

    @property
    def volume(self) -> 'OptionPlotoptionsColumnrangeSonificationTracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsColumnrangeSonificationTracksMappingVolume)
def generate_input_json(contract_sources: Dict[(str, str)], optimize: bool=True, runs: int=200, evm_version: Optional[str]=None, language: str='Solidity', interface_sources: Optional[Dict[(str, str)]]=None, remappings: Optional[list]=None, optimizer: Optional[Dict]=None, viaIR: Optional[bool]=None) -> Dict:
    """Build a compiler standard-JSON input dict for Solidity or Vyper sources.

    Raises UnsupportedLanguage for any other language. When evm_version is
    not given, the newest version supported by the installed compiler is used.
    """
    if language not in ('Solidity', 'Vyper'):
        raise UnsupportedLanguage(f'{language}')
    if optimizer is None:
        optimizer = {'enabled': optimize, 'runs': (runs if optimize else 0)}
    if evm_version is None:
        compiler_module = solidity if language == 'Solidity' else vyper
        evm_version = next(
            name
            for (name, minimum) in compiler_module.EVM_VERSION_MAPPING
            if compiler_module.get_version() >= minimum
        )
    input_json: Dict = deepcopy(STANDARD_JSON)
    input_json['language'] = language
    input_json['settings']['evmVersion'] = evm_version
    if language == 'Solidity':
        input_json['settings']['optimizer'] = optimizer
        input_json['settings']['remappings'] = _get_solc_remappings(remappings)
        # NOTE(review): viaIR is assumed to be a Solidity-only setting — confirm nesting.
        if viaIR is not None:
            input_json['settings']['viaIR'] = viaIR
    input_json['sources'] = _sources_dict(contract_sources, language)
    if interface_sources:
        if language == 'Solidity':
            # Solidity compiles interfaces alongside the main sources.
            input_json['sources'].update(_sources_dict(interface_sources, language))
        else:
            input_json['interfaces'] = _sources_dict(interface_sources, language)
    return input_json
def test_access():
    """Config item access is case-insensitive; attribute access uses declared casing."""
    c = Config('testconfig', foo=(1, int, ''), BAR=(1, int, ''))
    assert (len(c) == 2)
    # Attribute access works with the exact declared names.
    c.foo = 3
    c.BAR = 4
    assert (c['foo'] == 3)
    assert (c['BAR'] == 4)
    # Item access ignores case for both writes and reads.
    c['foO'] = 30
    c['BAr'] = 40
    assert (c['FOO'] == 30)
    assert (c['bar'] == 40)
    # Attribute access with the wrong casing must fail.
    with raises(AttributeError):
        c.FOO
    with raises(AttributeError):
        c.bar
    # Non-string keys raise TypeError; unknown options raise IndexError.
    with raises(TypeError):
        c[3]
    with raises(IndexError):
        c['optiondoesnotexist']
    with raises(TypeError):
        c[3] = ''
    with raises(IndexError):
        c['optiondoesnotexist'] = ''
class Command(BaseCommand):
    """Management command that prints the generated CCG-statistics BigQuery SQL."""

    def handle(self, *args, **kwargs):
        # STAR-PU metric keys extracted from the practice_statistics JSON blob.
        keys = ['analgesics_cost', 'antidepressants_adq', 'antidepressants_cost', 'antiepileptic_drugs_cost', 'antiplatelet_drugs_cost', 'benzodiazepine_caps_and_tabs_cost', 'bisphosphonates_and_other_drugs_cost', 'bronchodilators_cost', 'calcium-channel_blockers_cost', 'cox-2_inhibitors_cost', 'drugs_acting_on_benzodiazepine_receptors_cost', 'drugs_affecting_the_renin_angiotensin_system_cost', 'drugs_for_dementia_cost', 'drugs_used_in_parkinsonism_and_related_disorders_cost', 'hypnotics_adq', 'inhaled_corticosteroids_cost', 'laxatives_cost', 'lipid-regulating_drugs_cost', 'omega-3_fatty_acid_compounds_adq', 'oral_antibacterials_cost', 'oral_antibacterials_item', 'oral_nsaids_cost', 'proton_pump_inhibitors_cost', 'statins_cost', 'ulcer_healing_drugs_cost']
        # Django-template SQL: rendered below with the key lists, then printed.
        sql = '\n-- This SQL is generated by `generate_ccg_statistics_sql.py`\n\nCREATE TEMPORARY FUNCTION\n  jsonify_starpu({% for safe_key in safe_keys %}\n    {{ safe_key }} FLOAT64{% if not forloop.last %},{% endif %}{% endfor %}\n  )\n  RETURNS STRING\n  LANGUAGE js AS \'\'\'\n  var obj = {};{% for key, safe_key in zipped_keys %}\n  obj[\'{{ key }}\'] = {{ safe_key }};{% endfor %}\n  return JSON.stringify(obj);\n  \'\'\';\n\nSELECT\n  month AS date,\n  practices.ccg_id AS pct_id,\n  ccgs.name AS name,\n  SUM(total_list_size) AS total_list_size,\n  SUM(astro_pu_items) AS astro_pu_items,\n  SUM(astro_pu_cost) AS astro_pu_cost,\n  jsonify_starpu({% for key in keys %}\n    SUM(CAST(JSON_EXTRACT_SCALAR(star_pu, \'$.{{ key }}\') AS FLOAT64)){% if not forloop.last %},{% endif %}{% endfor %}\n  ) AS star_pu\nFROM {hscic}.practice_statistics\nINNER JOIN {hscic}.practices\n  ON practice_statistics.practice = practices.code\nINNER JOIN {hscic}.ccgs ccgs\n  ON practices.ccg_id = ccgs.code AND ccgs.org_type = \'CCG\'\nWHERE month > TIMESTAMP(DATE_SUB(DATE "{this_month}", INTERVAL 5 YEAR))\nGROUP BY\n  month,\n  practices.ccg_id,\n  name\n'.strip()
        template = Template(sql)
        # Hyphens are not valid in JS identifiers, so sanitize for the UDF params.
        safe_keys = [key.replace('-', '_') for key in keys]
        zipped_keys = list(zip(keys, safe_keys))
        # autoescape off: this is SQL, not HTML.
        ctx = Context({'keys': keys, 'safe_keys': safe_keys, 'zipped_keys': zipped_keys}, autoescape=False)
        print(template.render(ctx))
class RayJob(_common.FlyteIdlEntity):
    """IDL entity describing a Ray job: a cluster spec plus an optional runtime env.

    Fix: `ray_cluster`/`runtime_env` were plain methods although
    `to_flyte_idl` reads them without call parentheses (proving they must be
    properties), and `from_flyte_idl` takes `cls` without @classmethod.
    """

    def __init__(self, ray_cluster: RayCluster, runtime_env: typing.Optional[str]):
        self._ray_cluster = ray_cluster
        self._runtime_env = runtime_env

    @property
    def ray_cluster(self) -> RayCluster:
        return self._ray_cluster

    @property
    def runtime_env(self) -> typing.Optional[str]:
        return self._runtime_env

    def to_flyte_idl(self) -> _ray_pb2.RayJob:
        """Convert to the protobuf representation."""
        return _ray_pb2.RayJob(ray_cluster=self.ray_cluster.to_flyte_idl(), runtime_env=self.runtime_env)

    @classmethod
    def from_flyte_idl(cls, proto: _ray_pb2.RayJob):
        """Build a RayJob from its protobuf representation."""
        return cls(ray_cluster=(RayCluster.from_flyte_idl(proto.ray_cluster) if proto.ray_cluster else None), runtime_env=proto.runtime_env)
class GEMMRcrFastGeluTestCase(unittest.TestCase):
    """Tests for the fused gemm_rcr_fast_gelu op against a PyTorch reference."""

    @classmethod
    def setUpClass(cls) -> None:
        # Fix: unittest invokes cls.setUpClass() with no arguments, so this
        # must be a @classmethod; without the decorator it raises TypeError.
        torch.manual_seed(10)

    def _test_gemm_rcr_fast_gelu(self, Ms, test_name, K=1024, N=64, dtype='float16'):
        """Compile Y = fast_gelu(X @ W.T) for dynamic M and check against PyTorch."""
        MDim = shape_utils.gen_int_var_min_max(Ms, name='m')
        X = Tensor(shape=[MDim, IntImm(K)], dtype=dtype, name='input_0', is_input=True)
        W = Tensor(shape=[IntImm(N), IntImm(K)], dtype=dtype, name='input_1', is_input=True)
        OP = ops.gemm_rcr_fast_gelu()
        Y = OP(X, W)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, detect_target(), './tmp', f'gemm_rcr_fast_gelu_{test_name}')
        for M in Ms:
            logging.info(f'Testing M={M!r}')
            X_pt = get_random_torch_tensor([M, K], dtype)
            W_pt = get_random_torch_tensor([N, K], dtype)
            # PyTorch reference: linear (no bias) followed by the GELU variant.
            Y_pt = NewGELUActivation()(torch.nn.functional.linear(X_pt, W_pt))
            y = get_torch_empty_tensor([M, N], dtype)
            module.run_with_tensors({'input_0': X_pt, 'input_1': W_pt}, [y])
            torch.testing.assert_close(Y_pt, y, **_TOLERANCE_LIMITS[dtype])

    def test_gemm_rcr_fast_gelu_fp16(self):
        self._test_gemm_rcr_fast_gelu(Ms=[128], test_name='static_fp16', dtype='float16')
        self._test_gemm_rcr_fast_gelu(Ms=[1, 7, 64, 127], test_name='dynamic_m_fp16', dtype='float16')

    def test_gemm_rcr_fast_gelu_fp16_rocm(self):
        self._test_gemm_rcr_fast_gelu(Ms=[128], test_name='static_fp16_rocm', dtype='float16')

    def test_gemm_rcr_fast_gelu_fp32_sm80(self):
        self._test_gemm_rcr_fast_gelu(Ms=[128], test_name='static_fp32', dtype='float32')
        self._test_gemm_rcr_fast_gelu(Ms=[1, 7, 64, 127], test_name='dynamic_m_fp32', dtype='float32')

    def test_gemm_rcr_fast_gelu_bf16(self):
        self._test_gemm_rcr_fast_gelu(Ms=[128], test_name='static_bf16', dtype='bfloat16')
        self._test_gemm_rcr_fast_gelu(Ms=[1, 7, 64, 127], test_name='dynamic_m_bf16', dtype='bfloat16')

    def test_gemm_rcr_fast_gelu_sm90(self):
        """Forcing SM90 kernels rejects misaligned shapes and accepts aligned ones."""
        with env_variables(AIT_FORCE_CUTLASS_SM90_KERNELS='1', INSIDE_RE_WORKER='1'):
            # K not divisible by the required input alignment must fail.
            with self.assertRaisesRegex(expected_exception=RuntimeError, expected_regex='No GEMM op instances are left after filtering'):
                self._test_gemm_rcr_fast_gelu(Ms=[1, 7, 64, 127], K=1020, test_name='wrong_input_alignment_sm90', dtype='float16')
            # N not divisible by the required output alignment must fail.
            with self.assertRaisesRegex(expected_exception=RuntimeError, expected_regex='No GEMM op instances are left after filtering'):
                self._test_gemm_rcr_fast_gelu(Ms=[1, 7, 64, 127], N=63, test_name='wrong_output_alignment_sm90', dtype='float16')
            self._test_gemm_rcr_fast_gelu(Ms=[1, 7, 64, 127], test_name='dynamic_m_fp16_force_sm90', dtype='float16')
            self._test_gemm_rcr_fast_gelu(Ms=[1, 7, 64, 127], test_name='dynamic_m_bf16_force_sm90', dtype='bfloat16')
def test_sync_checkpoint_save_filepath(tmpdir):
    """SyncCheckpoint.save copies a single source file into checkpoint_dest."""
    source_dir = Path(tmpdir) / 'src'
    source_dir.mkdir(parents=True, exist_ok=True)
    dest_dir = Path(tmpdir) / 'dest'
    dest_dir.mkdir()
    checkpointer = SyncCheckpoint(checkpoint_dest=str(dest_dir))
    expected = dest_dir / 'test'
    assert not expected.exists()
    source_file = source_dir / 'test'
    with source_file.open('wb') as handle:
        handle.write(b'blah')
    checkpointer.save(source_file)
    assert expected.exists()
class LayoutPlot(TreeLayout):
    """Tree layout that draws per-node plots sized/colored by node properties.

    `size_prop` / `color_prop` name node properties used to scale the plot
    width and pick a color.  Color comes either from a discrete map
    (`colors`) or a continuous gradient (`color_gradient`).
    """

    def __init__(self, name=None, width=200, size_prop=None, color_prop=None, position='aligned', column=0, color_gradient=None, color='red', colors=None, padding_x=10, scale=True, legend=True, active=True):
        super().__init__(name, aligned_faces=(position == 'aligned'), legend=legend, active=active)
        self.width = width
        self.position = position
        self.column = column
        self.scale = scale
        self.padding_x = padding_x
        self.size_prop = size_prop
        self.color_prop = color_prop
        self.size_range = None
        self.color_range = None
        self.color = color
        self.colors = colors
        self.color_gradient = color_gradient
        # Default gradient for continuous coloring: white -> base color.
        if self.color_prop and not self.color_gradient:
            self.color_gradient = ('#FFF', self.color)

    def set_tree_style(self, tree, tree_style):
        """Scan the tree once to compute size/color ranges and build legends."""
        super().set_tree_style(tree, tree_style)

        def update_vals(metric, node):
            # Track min/max and the set of unique values for one metric.
            (p, minval, maxval, uniqvals) = vals[metric]
            prop = node.props.get(p)
            try:
                prop = float(prop)
            except (TypeError, ValueError):
                # Property missing or not numeric on this node: skip it.
                # (Was a bare `except:` which also swallowed unrelated errors.)
                return
            vals[metric][1] = min(minval, prop)
            vals[metric][2] = max(maxval, prop)
            uniqvals.add(prop)

        vals = {'size': [self.size_prop, 0, 0, set()], 'color': [self.color_prop, 0, 0, set()]}
        for node in tree.traverse():
            if self.size_prop:
                update_vals('size', node)
            if self.color_prop:
                update_vals('color', node)
        if self.size_prop:
            self.size_range = vals['size'][1:3]
        if self.color_prop:
            unique = vals['color'][3]
            if len(unique):
                # Discrete coloring: one color per distinct observed value.
                colors = self.colors or random_color(num=len(unique))
                # isinstance (not type ==) so dict subclasses are handled too.
                if isinstance(colors, dict):
                    self.colors = colors.copy()
                else:
                    colors = list(colors)
                    self.colors = {}
                    for idx, value in enumerate(unique):
                        self.colors[value] = colors[idx % len(colors)]
                if self.legend:
                    tree_style.add_legend(title=self.name, variable='discrete', colormap=self.colors)
            else:
                # Continuous coloring over the observed [min, max] range.
                self.color_range = vals['color'][1:3]
                if self.legend:
                    tree_style.add_legend(title=self.name, variable='continuous', value_range=self.color_range, color_range=self.color_gradient)

    def get_size(self, node):
        """Plot width for `node`, scaled by size_prop (full width if unset)."""
        if not self.size_prop:
            return self.width
        minval, maxval = self.size_range
        if not maxval:
            # No node had a positive value: avoid ZeroDivisionError.
            return 0
        return (float(node.props.get(self.size_prop, 0)) / float(maxval)) * self.width

    def get_color(self, node):
        """Color for `node`: gradient interpolation or discrete lookup."""
        if not self.color_prop:
            return self.color
        prop = node.props.get(self.color_prop)
        if prop is None:
            return None
        if self.color_range:
            minval, maxval = self.color_range
            mix = (prop - minval) / (maxval - minval)
            return interpolate_colors(*self.color_gradient, mix)
        return self.colors.get(prop)

    def get_legend(self):
        return self.legend
def __get_size_linux__():
    """Return the terminal size as (columns, lines) on Linux.

    Tries, in order: TIOCGWINSZ on stdin/stdout/stderr, the controlling
    terminal, the LINES/COLUMNS environment variables, and finally
    DEFAULT_TERMINAL_SIZE.
    """
    def ioctl_get_window_size(file_descriptor):
        # Return (lines, columns) or None on failure so callers can fall
        # through to the next strategy.  (Previously this returned
        # DEFAULT_TERMINAL_SIZE on failure, which is truthy and therefore
        # short-circuited every fallback below — and got swapped by the
        # final return.)
        import struct
        try:
            import fcntl, termios
            return struct.unpack('hh', fcntl.ioctl(file_descriptor, termios.TIOCGWINSZ, '1234'))
        except (ImportError, OSError, struct.error):
            return None
    size = (ioctl_get_window_size(0) or ioctl_get_window_size(1) or ioctl_get_window_size(2))
    if not size:
        # Fall back to the controlling terminal, closing the fd even when
        # the ioctl fails.
        try:
            file_descriptor = os.open(os.ctermid(), os.O_RDONLY)
            try:
                size = ioctl_get_window_size(file_descriptor)
            finally:
                os.close(file_descriptor)
        except OSError:
            pass
    if not size:
        try:
            size = (os.environ['LINES'], os.environ['COLUMNS'])
        except KeyError:
            return DEFAULT_TERMINAL_SIZE
    # Every successful strategy yields (lines, columns); swap to (cols, lines).
    return (int(size[1]), int(size[0]))
def firewalld_is_active():
    """Return True if the process named in the firewalld pidfile is a running firewalld."""
    if not os.path.exists(FIREWALLD_PIDFILE):
        return False
    try:
        with open(FIREWALLD_PIDFILE, 'r') as fd:
            # strip() is required: readline() keeps the trailing newline,
            # which made the '/proc/<pid>' paths below never match.
            pid = fd.readline().strip()
    except Exception:
        return False
    if not os.path.exists('/proc/%s' % pid):
        return False
    try:
        with open('/proc/%s/cmdline' % pid, 'r') as fd:
            cmdline = fd.readline()
    except Exception:
        return False
    return 'firewalld' in cmdline
def test_pick():
    """pick() keeps only the requested keys of a mapping (duplicates ignored)."""
    cases = [
        (['a', 'b', 'b', 'b'], {'a': 1, 'b': 2}),
        (['a', 'b'], {'a': 1, 'b': 2}),
        (['a'], {'a': 1}),
        (['b'], {'b': 2}),
        ([], {}),
    ]
    for keys, expected in cases:
        assert pick({'a': 1, 'b': 2}, keys) == expected
def read_messages(input_bag: str):
    """Yield (topic, deserialized message, timestamp) tuples from an MCAP rosbag2 file.

    :param input_bag: path/URI of the bag; opened with the 'mcap' storage
        plugin and CDR serialization on both input and output.
    """
    reader = rosbag2_py.SequentialReader()
    reader.open(rosbag2_py.StorageOptions(uri=input_bag, storage_id='mcap'), rosbag2_py.ConverterOptions(input_serialization_format='cdr', output_serialization_format='cdr'))
    topic_types = reader.get_all_topics_and_types()

    def typename(topic_name):
        # Linear scan of the bag's topic metadata; raises for unknown topics.
        for topic_type in topic_types:
            if (topic_type.name == topic_name):
                return topic_type.type
        raise ValueError(f'topic {topic_name} not in bag')
    while reader.has_next():
        (topic, data, timestamp) = reader.read_next()
        msg_type = get_message(typename(topic))
        msg = deserialize_message(data, msg_type)
        (yield (topic, msg, timestamp))
    # Drop the reader explicitly so the underlying storage is released once
    # the generator is exhausted.
    del reader
def analyze_packets(signal_received, frame):
    """Signal handler: summarize collected packet data and terminate.

    Reads the module-level `processor`, `config` and `missed` objects.
    With --no-analysis, prints the unique raw entries; otherwise renders a
    horizontal bar chart of value counts.  Always exits the process.
    """
    print()
    collected_data = processor.data
    if config.no_analysis:
        # Plain loop instead of a side-effect list comprehension.
        # NOTE: set() order is arbitrary, so this listing is unordered.
        for entry in set(collected_data):
            print(entry)
        exit(0)
    if config.verbose or config.verbose_extra:
        print('Analyzing packets')
    if not collected_data:
        print('No data to show :-)')
    else:
        df = pd.DataFrame(data=list(collected_data), columns=['data'])
        table = df['data'].value_counts()
        labels = list(table.index)
        counts = [int(c) for c in table.to_numpy().tolist()]
        fig = tpl.figure()
        fig.barh(counts, labels, force_ascii=True)
        fig.show()
    if config.show_missed and len(missed) > 0:
        print()
        print('Packets not analyzed: ')
        for miss in missed:
            print(miss)
    elif len(missed) > 0:
        miss_count = len(missed)
        print(f'Not showing {miss_count} unknown packets. Run with -m')
    exit(0)
def plot_tradeoff(label, score, title):
    """Plot a precision-recall trade-off curve for the given labels and scores."""
    precision, recall, _ = precision_recall_curve(label, score)
    plt.figure(figsize=(5, 5))
    plt.grid()
    # Step plot plus a light fill under the curve.
    plt.step(recall, precision, color='b', label='Precision-Recall Trade-off')
    plt.fill_between(recall, precision, alpha=0.1, color='b')
    plt.xlabel('Recall')
    plt.ylabel('Precision')
    _ = plt.title(title)
def dispatch_to_response_pure(*, deserializer: Callable[([str], Deserialized)], validator: Callable[([Deserialized], Deserialized)], methods: Methods, context: Any, post_process: Callable[([Response], Iterable[Any])], request: str) -> Union[(Response, List[Response], None)]:
    """Deserialize, validate and dispatch a JSON-RPC request string.

    Returns whatever `post_process` yields: an error response when parsing
    or validation failed (`Left`), the dispatch result otherwise, or a
    server-error response when anything raises unexpectedly.
    """
    try:
        result = deserialize_request(deserializer, request).bind(partial(validate_request, validator))
        return (post_process(result) if isinstance(result, Left) else dispatch_deserialized(methods, context, post_process, result._value))
    except Exception as exc:
        # Last-resort guard: never propagate; always answer with a server error.
        logger.exception(exc)
        return post_process(Left(ServerErrorResponse(str(exc), None)))
def generateActF4(iterationsMap, iteration, t):
    """Build an EiffelActivityFinishedEvent for ActF4, linked to its ActT4 trigger."""
    msg = generateGenericMessage('EiffelActivityFinishedEvent', t, '1.0.0', 'ActF4', iteration)
    link(msg, iterationsMap[iteration]['ActT4'], 'ACTIVITY_EXECUTION')
    # The activity succeeded only when the ArtC2 artifact exists this iteration.
    conclusion = 'SUCCESSFUL' if 'ArtC2' in iterationsMap[iteration] else 'UNSUCCESSFUL'
    msg['data']['outcome'] = {'conclusion': conclusion}
    return msg
class Sector():
    """Circular sector parsed from text: either "A-B" (two boundary directions)
    or a single direction plus an opening angle.
    """

    def __init__(self, text: str, angle: Optional[float]=None) -> None:
        """Parse `text` into start/end boundary vectors.

        :param text: "<from>-<to>" for explicit boundaries, otherwise a single
            direction understood by `parse_vector`.
        :param angle: opening angle in degrees for the single-direction form;
            DEFAULT_ANGLE is used when omitted.
        """
        self.start: Optional[np.ndarray] = None
        self.end: Optional[np.ndarray] = None
        self.main_direction: Optional[np.ndarray] = None
        # "A-B" form — but not a leading minus sign on a single direction.
        if (('-' in text) and (not text.startswith('-'))):
            parts: list[str] = text.split('-')
            self.start = parse_vector(parts[0])
            self.end = parse_vector(parts[1])
            self.main_direction = ((self.start + self.end) / 2.0)
        else:
            result_angle: float
            if (angle is None):
                result_angle = DEFAULT_ANGLE
            else:
                # Half of the opening angle, in radians, clamped away from zero.
                result_angle = max(SMALLEST_ANGLE, (np.radians(angle) / 2.0))
            vector: Optional[np.ndarray] = parse_vector(text)
            self.main_direction = vector
            if (vector is not None):
                # Rotate the main direction by ±half-angle to get the boundaries.
                self.start = np.dot(rotation_matrix(result_angle), vector)
                self.end = np.dot(rotation_matrix((- result_angle)), vector)

    def draw(self, center: np.ndarray, radius: float) -> Optional[PathCommands]:
        """SVG-style path commands for the sector arc at `radius` around `center`."""
        if ((self.start is None) or (self.end is None)):
            return None
        start: np.ndarray = (center + (radius * self.end))
        end: np.ndarray = (center + (radius * self.start))
        return ['L', start, 'A', radius, radius, 0, '0', 0, end]

    def is_right(self) -> Optional[bool]:
        """True/False if the main direction points right/left of vertical;
        None when unknown or exactly vertical."""
        if (self.main_direction is not None):
            if np.allclose(self.main_direction[0], 0.0):
                return None
            if (self.main_direction[0] > 0.0):
                return True
            return False
        return None

    def __str__(self) -> str:
        return f'{self.start}-{self.end}'
# NOTE(review): the two bare expressions below look like stripped decorators
# (a route registration such as @app.route('/urls/', methods=['GET']) and a
# login-required wrapper) — confirm against version control.
('/urls/', methods=['GET'])
_required
def url_view():
    """List tracked netlocs, sorted so WordPress-like and known hosts come first."""
    WebMirror.rules.load_rules(override=True)
    scope = request.args.get('scope', 'missing')
    ignored = request.args.get('ignore', 'exclude')
    nnt.filter_get_have_urls()
    query = g.session.query(db.NewNetlocTracker)
    if (scope == 'missing'):
        query = query.filter((db.NewNetlocTracker.have == False))
    if (ignored == 'exclude'):
        query = query.filter((db.NewNetlocTracker.ignore == False))
    items = query.all()
    g.session.commit()
    # Maximum dot-count over all netlocs; used to weight shorter domains below.
    # NOTE(review): max() raises ValueError when `items` is empty — confirm
    # whether the empty case can occur here.
    segs = max([item.netloc.count('.') for item in items])
    print('Max segments:', segs)

    def keyf(item):
        # Sort key: (is-wp rank, known-host rank, inverted depth, reversed
        # labels, example URL).  Items without extra metadata sort first.
        if (not item.extra):
            return (False,)
        is_wp = (1 if item.extra.get('is-wp', False) else 2)
        nlr = item.netloc.split('.')
        if (nlr[0] == 'www'):
            nlr = nlr[1:]
        nlr.reverse()
        # Rank well-known hosting platforms ahead of everything else (9001 = unknown).
        flag = 9001
        if ('wordpress' in nlr):
            flag = 1
        if ('blogspot' in nlr):
            flag = 2
        if ('livejournal' in nlr):
            flag = 3
        if ('dreamwidth' in nlr):
            flag = 4
        if ('syosetu' in nlr):
            flag = 5
        if ('wixsite' in nlr):
            flag = 6
        if ('fandom' in nlr):
            flag = 7
        if ('deviantart' in nlr):
            flag = 8
        ret = (is_wp, flag, ((10 + segs) - len(nlr)), nlr, item.example_url)
        # NOTE(review): debug print left in — fires once per item per request.
        print(ret)
        return ret
    items.sort(key=keyf)
    return render_template('url_listings.html', netloc_items=items)
# NOTE(review): the bare `_required` below looks like a stripped
# `@login_required` decorator — confirm against version control.
_required
def invoice(invoice_id):
    """Render a Stripe invoice page; only the account owner may view it.

    :param invoice_id: invoice id without the 'in_' prefix.
    """
    invoice = stripe.Invoice.retrieve(('in_' + invoice_id))
    if (invoice.customer != current_user.stripe_id):
        return (render_template('error.html', title='Unauthorized Invoice', text='Only the account owner can open this invoice'), 403)
    if invoice.charge:
        charge = stripe.Charge.retrieve(invoice.charge)
        card_mappings = {'Visa': 'cc-visa', 'American Express': 'cc-amex', 'MasterCard': 'cc-mastercard', 'Discover': 'cc-discover', 'JCB': 'cc-jcb', 'Diners Club': 'cc-diners-club', 'Unknown': 'credit-card'}
        # .get with a generic fallback: a brand Stripe adds later must not
        # raise KeyError and break the invoice page.
        charge.source.css_name = card_mappings.get(charge.source.brand, 'credit-card')
        return render_template('users/invoice.html', invoice=invoice, charge=charge)
    return render_template('users/invoice.html', invoice=invoice)
class UnsignedLongLongType(Type):
    """DWARF base type describing `unsigned long long` (8 bytes, unsigned encoding)."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.name = UNSIGNED_LONG_LONG
        self.byte_size = 8

    def debug_info(self):
        """Encode this type as a .debug_info entry: abbrev code, size, DW_ATE
        encoding, the NUL-terminated type name."""
        entry = bytearray()
        entry.append(ENUM_ABBREV_CODE['BASE_TYPE_WITH_ENCODING'])
        entry.append(self.byte_size)
        entry.append(ENUM_DW_ATE['DW_ATE_unsigned'])
        entry.extend(map(ord, self.name))
        entry.append(0)  # NUL terminator for the name string
        return entry
class DeviceCodeResponse():
    """OAuth2 device-authorization response payload.

    NOTE(review): the annotated fields plus the keyword construction via
    `cls(...)` strongly suggest this class carried `@dataclasses.dataclass`
    and `from_json_response` carried `@classmethod`, and that both decorators
    were lost — as written, neither construction nor the class-level call
    works.  Confirm against version control.
    """
    # Fields as returned by the device-authorization endpoint.
    device_code: str
    user_code: str
    verification_uri: str
    expires_in: int
    interval: int

    def from_json_response(cls, j: typing.Dict) -> 'DeviceCodeResponse':
        """Build a DeviceCodeResponse from the decoded JSON dict `j`."""
        return cls(device_code=j['device_code'], user_code=j['user_code'], verification_uri=j['verification_uri'], expires_in=j['expires_in'], interval=j['interval'])
def check_clang_cfi(file_path, mitigations, summary, checksec_result):
    """Record Clang CFI status from a checksec result into the report dicts.

    Mutates `summary` (per-file listing) and `mitigations` (status map);
    values other than 'yes'/'no' are ignored.
    """
    status = checksec_result['clangcfi']
    if status not in ('yes', 'no'):
        return
    state = 'enabled' if status == 'yes' else 'disabled'
    summary.update({f'CLANGCFI {state}': file_path})
    mitigations.update({'CLANGCFI': state})
class OptionSeriesScatterStates(Options):
    """Option group `series.scatter.states`.

    Each accessor lazily builds and returns the corresponding sub-options
    object via `_config_sub_data`.
    """

    def hover(self) -> 'OptionSeriesScatterStatesHover':
        """Options applied while the series is hovered."""
        return self._config_sub_data('hover', OptionSeriesScatterStatesHover)

    def inactive(self) -> 'OptionSeriesScatterStatesInactive':
        """Options applied while another series is hovered."""
        return self._config_sub_data('inactive', OptionSeriesScatterStatesInactive)

    def normal(self) -> 'OptionSeriesScatterStatesNormal':
        """Options for the default (normal) state."""
        return self._config_sub_data('normal', OptionSeriesScatterStatesNormal)

    def select(self) -> 'OptionSeriesScatterStatesSelect':
        """Options applied to selected points."""
        return self._config_sub_data('select', OptionSeriesScatterStatesSelect)
class RegisterDialogues(Model, BaseRegisterDialogues):
    """Skill model tracking register dialogues for this agent."""

    def __init__(self, **kwargs: Any) -> None:
        """Initialize dialogues; this agent always plays the AGENT role."""
        Model.__init__(self, **kwargs)

        def role_from_first_message(message: Message, receiver_address: Address) -> BaseDialogue.Role:
            # The role is fixed regardless of the first message's content.
            return BaseRegisterDialogue.Role.AGENT
        BaseRegisterDialogues.__init__(self, self_address=self.context.agent_address, role_from_first_message=role_from_first_message)
class IncludedWithWafFirewall(ModelSimple):
    """Auto-generated OpenAPI model wrapping a list of SchemasWafFirewallVersion.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines
    look like stripped decorators from the OpenAPI generator template
    (`@cached_property` / `@convert_js_args_to_python_args`) — confirm
    against version control.
    """
    allowed_values = {}
    validations = {}
    additional_properties_type = None
    _nullable = False

    _property
    def openapi_types():
        # Lazy import breaks circular references between generated models.
        lazy_import()
        return {'value': ([SchemasWafFirewallVersion],)}

    _property
    def discriminator():
        return None

    attribute_map = {}
    read_only_vars = set()
    _composed_schemas = None
    # Internal attribute names excluded from the OpenAPI data store.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Accept the wrapped value positionally or as `value=`; reject any extras."""
        _path_to_item = kwargs.pop('_path_to_item', ())
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError("value is required, but not passed in args or kwargs and doesn't have default", path_to_item=_path_to_item, valid_classes=(self.__class__,))
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing API payloads."""
        _path_to_item = kwargs.pop('_path_to_item', ())
        # Bypass __init__; instance state is populated field by field below.
        self = super(OpenApiModel, cls).__new__(cls)
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError("value is required, but not passed in args or kwargs and doesn't have default", path_to_item=_path_to_item, valid_classes=(self.__class__,))
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        return self
def extractCloudyskytranslationsWordpressCom(item):
    """Parse a release item from cloudyskytranslations.wordpress.com.

    Returns a release message for a recognized tag, None for non-chapter or
    preview posts, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def load(lib, IM_VERSION, IM_QUANTUM_DEPTH, IM_HDRI):
    """Assign ctypes argtypes/restypes for the MagickWand PixelWand C API.

    :param lib: a ctypes.CDLL handle to the ImageMagick library.
    :param IM_VERSION: MagickCore version number (>= 0x700 / 1792 means IM7).
    :param IM_QUANTUM_DEPTH: one of 8, 16, 32 or 64.
    :param IM_HDRI: whether the library was built with HDRI (float quanta).
    :raises AttributeError: on invalid arguments (kept for backward compat).
    """
    if not isinstance(lib, CDLL):
        raise AttributeError(repr(lib) + ' is not an instance of ctypes.CDLL')
    if not isinstance(IM_VERSION, numbers.Integral):
        raise AttributeError('Expecting MagickCore version number')
    # BUG FIX: the list previously read [8, 16, 32, 65], which rejected the
    # valid depth 64 (making the c_longdouble branch below unreachable) and
    # accepted 65 (which left QuantumType undefined).
    if IM_QUANTUM_DEPTH not in [8, 16, 32, 64]:
        raise AttributeError('QUANTUM_DEPTH must be one of 8, 16, 32, or 64')
    is_im_6 = (IM_VERSION < 1792)
    is_im_7 = (IM_VERSION >= 1792)
    # Some architectures use double-precision floats for HDRI quanta.
    if platform.machine() in ['s390', 's390x', 'i686']:
        FloatType = c_double
    else:
        FloatType = c_float
    if IM_QUANTUM_DEPTH == 8:
        QuantumType = FloatType if IM_HDRI else c_ubyte
    elif IM_QUANTUM_DEPTH == 16:
        QuantumType = FloatType if IM_HDRI else c_ushort
    elif IM_QUANTUM_DEPTH == 32:
        QuantumType = c_double if IM_HDRI else c_uint
    elif IM_QUANTUM_DEPTH == 64:
        QuantumType = c_longdouble
    lib.ClearPixelWand.argtypes = [c_void_p]
    lib.ClonePixelWand.argtypes = [c_void_p]
    lib.ClonePixelWand.restype = c_void_p
    lib.DestroyPixelWand.argtypes = [c_void_p]
    lib.DestroyPixelWand.restype = c_void_p
    lib.DestroyPixelWands.argtypes = [POINTER(c_void_p), c_size_t]
    lib.DestroyPixelWands.restype = POINTER(c_void_p)
    lib.IsPixelWand.argtypes = [c_void_p]
    lib.IsPixelWandSimilar.argtypes = [c_void_p, c_void_p, c_double]
    lib.NewPixelWand.argtypes = []
    lib.NewPixelWand.restype = c_void_p
    lib.PixelClearException.argtypes = [c_void_p]
    lib.PixelClearException.restype = c_int
    lib.PixelGetAlpha.argtypes = [c_void_p]
    lib.PixelGetAlpha.restype = c_double
    lib.PixelGetAlphaQuantum.argtypes = [c_void_p]
    lib.PixelGetAlphaQuantum.restype = QuantumType
    lib.PixelGetBlack.argtypes = [c_void_p]
    lib.PixelGetBlack.restype = c_double
    lib.PixelGetBlackQuantum.argtypes = [c_void_p]
    lib.PixelGetBlackQuantum.restype = QuantumType
    lib.PixelGetBlue.argtypes = [c_void_p]
    lib.PixelGetBlue.restype = c_double
    lib.PixelGetBlueQuantum.argtypes = [c_void_p]
    lib.PixelGetBlueQuantum.restype = QuantumType
    lib.PixelGetColorAsNormalizedString.argtypes = [c_void_p]
    lib.PixelGetColorAsNormalizedString.restype = c_void_p
    lib.PixelGetColorAsString.argtypes = [c_void_p]
    lib.PixelGetColorAsString.restype = c_void_p
    lib.PixelGetColorCount.argtypes = [c_void_p]
    lib.PixelGetColorCount.restype = c_size_t
    lib.PixelGetCyan.argtypes = [c_void_p]
    lib.PixelGetCyan.restype = c_double
    lib.PixelGetCyanQuantum.argtypes = [c_void_p]
    lib.PixelGetCyanQuantum.restype = QuantumType
    lib.PixelGetException.argtypes = [c_void_p, POINTER(c_int)]
    lib.PixelGetException.restype = c_void_p
    lib.PixelGetExceptionType.argtypes = [c_void_p]
    lib.PixelGetExceptionType.restype = c_int
    lib.PixelGetFuzz.argtypes = [c_void_p]
    lib.PixelGetFuzz.restype = c_double
    lib.PixelGetGreen.argtypes = [c_void_p]
    lib.PixelGetGreen.restype = c_double
    lib.PixelGetGreenQuantum.argtypes = [c_void_p]
    lib.PixelGetGreenQuantum.restype = QuantumType
    lib.PixelGetHSL.argtypes = [c_void_p, POINTER(c_double), POINTER(c_double), POINTER(c_double)]
    lib.PixelGetIndex.argtypes = [c_void_p]
    lib.PixelGetIndex.restype = QuantumType
    lib.PixelGetMagenta.argtypes = [c_void_p]
    lib.PixelGetMagenta.restype = c_double
    lib.PixelGetMagentaQuantum.argtypes = [c_void_p]
    lib.PixelGetMagentaQuantum.restype = QuantumType
    lib.PixelGetMagickColor.argtypes = [c_void_p, c_void_p]
    if is_im_7:
        # PixelGetPixel only exists in ImageMagick 7.
        lib.PixelGetPixel.argtypes = [c_void_p]
        lib.PixelGetPixel.restype = c_void_p
    lib.PixelGetRed.argtypes = [c_void_p]
    lib.PixelGetRed.restype = c_double
    lib.PixelGetRedQuantum.argtypes = [c_void_p]
    lib.PixelGetRedQuantum.restype = QuantumType
    lib.PixelGetYellow.argtypes = [c_void_p]
    lib.PixelGetYellow.restype = c_double
    lib.PixelGetYellowQuantum.argtypes = [c_void_p]
    lib.PixelGetYellowQuantum.restype = QuantumType
    lib.PixelSetAlpha.argtypes = [c_void_p, c_double]
    lib.PixelSetAlphaQuantum.argtypes = [c_void_p, QuantumType]
    lib.PixelSetBlack.argtypes = [c_void_p, c_double]
    lib.PixelSetBlackQuantum.argtypes = [c_void_p, QuantumType]
    lib.PixelSetBlue.argtypes = [c_void_p, c_double]
    lib.PixelSetBlueQuantum.argtypes = [c_void_p, QuantumType]
    lib.PixelSetColor.argtypes = [c_void_p, c_char_p]
    lib.PixelSetColor.restype = c_int
    lib.PixelSetColorCount.argtypes = [c_void_p, c_size_t]
    lib.PixelSetCyan.argtypes = [c_void_p, c_double]
    lib.PixelSetCyanQuantum.argtypes = [c_void_p, QuantumType]
    lib.PixelSetFuzz.argtypes = [c_void_p, c_double]
    lib.PixelSetGreen.argtypes = [c_void_p, c_double]
    lib.PixelSetGreenQuantum.argtypes = [c_void_p, QuantumType]
    lib.PixelSetHSL.argtypes = [c_void_p, c_double, c_double, c_double]
    lib.PixelSetIndex.argtypes = [c_void_p, QuantumType]
    lib.PixelSetMagenta.argtypes = [c_void_p, c_double]
    lib.PixelSetMagentaQuantum.argtypes = [c_void_p, QuantumType]
    # PixelSetMagickColor is IM6-only; PixelSetPixelColor is its IM7 replacement.
    # (A second, redundant copy of these assignments at the end of the original
    # function has been removed — it re-applied exactly the same state.)
    if is_im_6:
        lib.PixelSetMagickColor.argtypes = [c_void_p, c_void_p]
    else:
        lib.PixelSetMagickColor = None
    if is_im_7:
        lib.PixelSetPixelColor.argtypes = [c_void_p, c_void_p]
    else:
        lib.PixelSetPixelColor = None
    lib.PixelSetRed.argtypes = [c_void_p, c_double]
    lib.PixelSetRedQuantum.argtypes = [c_void_p, QuantumType]
    lib.PixelSetYellow.argtypes = [c_void_p, c_double]
    lib.PixelSetYellowQuantum.argtypes = [c_void_p, QuantumType]
class TestObserverChangeNotifierRemove(unittest.TestCase):
    """Removal behavior of ObserverChangeNotifier.remove_from."""

    def test_remove_notifier(self):
        target = DummyClass()
        notifier = create_notifier()
        notifier.add_to(target)
        notifier.remove_from(target)
        self.assertEqual(target.notifiers, [])

    def test_remove_from_error_if_not_found(self):
        target = DummyClass()
        with self.assertRaises(NotifierNotFound):
            create_notifier().remove_from(target)

    def test_remove_from_recognize_equivalent_notifier(self):
        target_obj = DummyClass()
        shared_kwargs = dict(
            handler=mock.Mock(),
            observer_handler=mock.Mock(),
            graph=mock.Mock(),
            target=mock.Mock(),
        )
        first = create_notifier(**shared_kwargs)
        second = create_notifier(**shared_kwargs)
        first.add_to(target_obj)
        # An equivalent (not identical) notifier must be able to remove it.
        second.remove_from(target_obj)
        self.assertEqual(target_obj.notifiers, [])
class bsn_lua_upload(bsn_header):
    """Big Switch (BSN) experimenter message uploading a chunk of a Lua file.

    NOTE(review): generated loxi protocol code targeting Python 2 — the
    str-based byte handling ('' defaults, rstrip of NUL padding, str() on the
    payload) is intentional there; do not modernize by hand.
    """
    version = 4           # OpenFlow wire version
    type = 4              # OFPT_EXPERIMENTER
    experimenter = 6035143
    subtype = 64

    def __init__(self, xid=None, flags=None, filename=None, data=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (filename != None):
            self.filename = filename
        else:
            self.filename = ''
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize to wire format; the length field is back-patched afterwards."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for total length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(struct.pack('!H', self.flags))
        packed.append(struct.pack('!64s', self.filename))  # NUL-padded to 64 bytes
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)  # backfill the placeholder
        return ''.join(packed)

    def unpack(reader):
        """Parse a bsn_lua_upload from `reader` (loxi staticmethod style: no self)."""
        obj = bsn_lua_upload()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Bound all further reads to this message's length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 64)
        obj.flags = reader.read('!H')[0]
        obj.filename = reader.read('!64s')[0].rstrip('\x00')  # strip NUL padding
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.filename != other.filename):
            return False
        if (self.data != other.data):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump through the pretty-printer `q`."""
        q.text('bsn_lua_upload {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFP_BSN_LUA_UPLOAD_MORE', 2: 'OFP_BSN_LUA_UPLOAD_FORCE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('filename = ')
                q.pp(self.filename)
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
class OptionSeriesPolygonSonificationContexttracksMappingLowpassResonance(Options):
    """Option group `...sonification.contextTracks.mapping.lowpass.resonance`.

    NOTE(review): every method name appears twice (getter then setter).
    These were almost certainly `@property` / `@<name>.setter` pairs whose
    decorators were stripped — as written, each second definition silently
    replaces the first.  Confirm against version control.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class FetchSearchInputThread(QThread):
    """Polls the Emacs minibuffer and emits its contents whenever they change.

    Emits `search_changed(callback_tag, text)` on every edit and
    `search_finish(callback_tag)` once the user leaves the minibuffer.
    """
    search_changed = pyqtSignal(str, str)
    search_finish = pyqtSignal(str)

    def __init__(self, callback_tag):
        QThread.__init__(self)
        self.search_string = ''    # last seen minibuffer content
        self.callback_tag = callback_tag
        self.running_flag = True   # cleared by stop()

    def run(self):
        # Poll every 100 ms while the user is still in the minibuffer.
        while self.running_flag:
            in_minibuffer = get_emacs_func_result('minibufferp', [])
            if in_minibuffer:
                minibuffer_input = get_emacs_func_result('minibuffer-contents-no-properties', [])
                if (minibuffer_input != self.search_string):
                    self.search_changed.emit(self.callback_tag, minibuffer_input)
                    self.search_string = minibuffer_input
            else:
                # Minibuffer exited: signal completion and stop the loop.
                self.stop()
            time.sleep(0.1)

    def stop(self):
        """Emit the finish signal and end the polling loop."""
        self.search_finish.emit(self.callback_tag)
        self.running_flag = False
def login_required(role=RoleEnum('user')):
    """Decorator requiring a logged-in user, optionally with the admin role.

    Usable both bare (`@login_required`) and parameterized
    (`@login_required(role=RoleEnum('admin'))`); the trailing
    `callable(role)` check distinguishes the two forms.
    """
    def view_wrapper(f):
        # NOTE(review): the bare `(f)` below looks like a stripped
        # `@functools.wraps(f)` decorator — confirm against version control.
        (f)
        def decorated_function(*args, **kwargs):
            if (flask.g.user is None):
                # Not logged in: bounce through OpenID login and come back.
                return flask.redirect(flask.url_for('misc.oid_login', next=flask.request.url))
            if ((role == RoleEnum('admin')) and (not flask.g.user.admin)):
                flask.flash('You are not allowed to access admin section.')
                return flask.redirect(flask.url_for('coprs_ns.coprs_show'))
            return f(*args, **kwargs)
        return decorated_function
    if callable(role):
        # Bare usage: `role` is actually the decorated view function.
        return view_wrapper(role)
    else:
        return view_wrapper
def test_callable_method(df_test, random_uniform_method):
    """SmartCorrelatedSelection accepts a callable as the correlation method."""
    X, _ = df_test
    selector = SmartCorrelatedSelection(method=random_uniform_method)
    transformed = selector.fit_transform(X)
    assert not transformed.empty
    # All fitted attributes must be populated.
    for attr in ('correlated_feature_sets_', 'features_to_drop_', 'variables_'):
        assert len(getattr(selector, attr)) > 0
    assert selector.n_features_in_ == len(X.columns)
def parse_unittest_output(s):
    """Parse trailing unittest output into (num_tests, errors, failures, timeouts).

    Only the text after the last `unittest_delim` marker is considered.
    Exactly one of the 'OK' / 'FAILED (...)' summary lines must be present.
    """
    s = s[(s.rindex(unittest_delim) + len(unittest_delim)):]
    num = int(re.search('^Ran (\\d+) test.*?$', s, re.M).group(1))
    (error, fail, timeout) = (0, 0, 0)
    failed_match = re.search('^FAILED \\((?:failures=(?P<f>\\d+))?,? ?(?:errors=(?P<e>\\d+))?\\)$', s, re.M)
    ok_match = re.search('^OK$', s, re.M)
    # (The original also bound an unused duplicate `ok = re.search('^OK$', ...)`;
    # removed.)
    if failed_match:
        assert (not ok_match), (ok_match, s)
        # Either group may be absent in the FAILED summary.
        fail = int(failed_match.group('f') or '0')
        error = int(failed_match.group('e') or '0')
    else:
        assert ok_match, repr(s)
    timeout_match = re.search('^===disabled because of timeout: (\\d+)$', s, re.M)
    if timeout_match:
        timeout = int(timeout_match.group(1))
    return (num, error, fail, timeout)
def remainder_saturations(saturations):
    """Infer saturations missing from `sat_keys` so the phases sum to 1.

    Returns {} when every key is already present or nothing can be inferred;
    raises ValueError for keys outside `sat_keys`.
    """
    given = set(saturations)
    known = set(sat_keys)
    if known <= given:
        return {}
    if given - known:
        raise ValueError(f'Unknown saturation keys: {list(saturations.keys())}')
    total = sum(saturations.values())
    # With two of three phases given (or a total already at 1.0) the rest is
    # determined.
    if len(saturations) == 2 or np.allclose(total, 1.0):
        return {missing: (1.0 - total) for missing in known - given}
    return {}
def test_category_get_all(forum, user):
    """Category.get_all returns the same nested structure for members and guests."""
    category = forum.category

    def assert_structure(result):
        assert isinstance(result, list)
        assert isinstance(result[0][1], list)
        assert result == [(category, [(forum, None)])]

    with current_app.test_request_context():
        # Logged-in view.
        login_user(user)
        assert current_user.is_authenticated
        assert_structure(Category.get_all(current_user))
        # Guest view after logging out.
        logout_user()
        assert not current_user.is_authenticated
        assert_structure(Category.get_all(current_user))
class SettingController(Document):
    """Abstract base for integration settings documents.

    Subclasses must implement warehouse discovery and the two direction
    mappings between ERPNext and the external integration.
    """

    def is_enabled(self) -> bool:
        """Whether this integration is switched on."""
        raise NotImplementedError()

    def get_erpnext_warehouses(self) -> List[ERPNextWarehouse]:
        """All ERPNext warehouses relevant to the integration."""
        raise NotImplementedError()

    def get_erpnext_to_integration_wh_mapping(self) -> Dict[(ERPNextWarehouse, IntegrationWarehouse)]:
        """Map ERPNext warehouse -> integration warehouse."""
        raise NotImplementedError()

    def get_integration_to_erpnext_wh_mapping(self) -> Dict[(IntegrationWarehouse, ERPNextWarehouse)]:
        """Map integration warehouse -> ERPNext warehouse."""
        raise NotImplementedError()
class op(bpy.types.Operator):
    """Blender operator: save the current texture to a user-chosen file."""
    bl_idname = 'uv.textools_texture_save'
    bl_label = 'Save Texture'
    bl_description = 'Save the texture'
    # Operator properties driving the file-browser dialog.
    name: bpy.props.StringProperty(name='image name', default='')
    filepath: bpy.props.StringProperty(name='myName.png', description='Texture filepath', maxlen=1024, default='bla bla.png')
    filter_folder: BoolProperty(name='Filter folders', description='', default=True, options={'HIDDEN'})
    filter_glob: StringProperty(default='*.png;*.tga;*.jpg;*.tif;*.exr', options={'HIDDEN'})

    def invoke(self, context, event):
        # Open the OS file-select dialog; execute() runs after confirmation.
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def draw(self, context):
        layout = self.layout
        layout.label(text='Choose your Unity Asset directory')

    # NOTE(review): Blender expects `@classmethod def poll(cls, context)`;
    # the decorator looks stripped here — confirm against version control.
    def poll(cls, context):
        return True

    def execute(self, context):
        save_texture(self.filepath)
        return {'FINISHED'}
class OptionSeriesLineSonificationDefaultspeechoptions(Options):
    """Option group `series.line.sonification.defaultSpeechOptions`.

    NOTE(review): the duplicated method names (e.g. `language` twice) were
    almost certainly `@property` / `@<name>.setter` pairs whose decorators
    were stripped — as written, each setter definition replaces its getter.
    Confirm against version control.
    """

    def activeWhen(self) -> 'OptionSeriesLineSonificationDefaultspeechoptionsActivewhen':
        """Sub-options controlling when the speech track is active."""
        return self._config_sub_data('activeWhen', OptionSeriesLineSonificationDefaultspeechoptionsActivewhen)

    def language(self):
        return self._config_get('en-US')

    def language(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionSeriesLineSonificationDefaultspeechoptionsMapping':
        """Sub-options mapping data values to speech parameters."""
        return self._config_sub_data('mapping', OptionSeriesLineSonificationDefaultspeechoptionsMapping)

    def pointGrouping(self) -> 'OptionSeriesLineSonificationDefaultspeechoptionsPointgrouping':
        """Sub-options for grouping nearby points into one utterance."""
        return self._config_sub_data('pointGrouping', OptionSeriesLineSonificationDefaultspeechoptionsPointgrouping)

    def preferredVoice(self):
        return self._config_get(None)

    def preferredVoice(self, text: str):
        self._config(text, js_type=False)

    def showPlayMarker(self):
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def type(self):
        return self._config_get('speech')

    def type(self, text: str):
        self._config(text, js_type=False)
def _build_search_filter(cls, field_name):
    """Build the search-filter spec for `field_name` on model `cls`."""
    # BNF codes get a bespoke prefix-matching filter.
    if field_name == 'bnf_code':
        return _build_search_filter_bnf_code_prefox()
    field = cls._meta.get_field(field_name)
    builders = {
        ForeignKey: _build_search_filter_fk,
        ManyToOneRel: _build_search_filter_rev_fk,
        OneToOneRel: _build_search_filter_rev_fk,
        fields.CharField: _build_search_filter_char,
        fields.DateField: _build_search_filter_date,
        fields.BooleanField: _build_search_filter_boolean,
        fields.DecimalField: _build_search_filter_decimal,
    }
    # Dispatch on the exact field class; unsupported types raise KeyError.
    search_filter = builders[type(field)](field)
    search_filter['id'] = field_name
    return search_filter
class TestStringMethods(TestCase):
    """SafeRequest must raise RequestRetryError when a POST fails."""

    def setUp(self):
        # BUG FIX: the original line here was an unterminated string literal
        # (`self.url = '`) — a syntax error.  Restored with a placeholder;
        # recover the real URL from version control.
        self.url = 'http://example.com/'
        self.data = {'foo': 'bar', 'bar': [1, 3, 5]}
        self.log = logging.getLogger('testlog')

    # NOTE(review): the bare string expressions before each test look like
    # stripped `@mock.patch('copr_common.request.post')` decorators —
    # confirm against version control.
    ('copr_common.request.post')
    def test_send_request_not_200(self, post_req):
        # Non-2xx status codes must trigger a retryable error.
        post_req.return_value.status_code = 501
        with self.assertRaises(RequestRetryError):
            request = SafeRequest(log=self.log)
            request._send_request(self.url, 'post', self.data)
        self.assertTrue(post_req.called)

    ('copr_common.request.post')
    def test_send_request_post_error(self, post_req):
        # Transport-level failures are wrapped in RequestRetryError too.
        post_req.side_effect = RequestException()
        with self.assertRaises(RequestRetryError):
            request = SafeRequest(log=self.log)
            request._send_request(self.url, 'post', self.data)
        self.assertTrue(post_req.called)
# NOTE(review): the two leading `.skipif(...)` / `.parametrize(...)` lines
# look like stripped `@pytest.mark.` decorators — confirm against version
# control.
.skipif((not has_hf_transformers), reason='requires huggingface transformers')
.parametrize('torch_device', TORCH_DEVICES)
def test_causal_lm_hf_serializtion_roundtrip(torch_device):
    """MPT causal LM round-trips through HF serialization on each device.

    The listed HF config keys are optional in the test model's config and are
    therefore excluded from the round-trip comparison.
    """
    assert_model_hf_serialization_roundtrip(MPTCausalLM, 'explosion-testing/mpt-test', torch_device, optional_hf_config_keys={HFConfigKeys.LAYER_NORM_EPSILON.name, CommonHFKeys.ATTENTION_PROBS_DROPOUT_PROB.name, CommonHFKeys.HIDDEN_DROPOUT_PROB.name})
def test_relations_scopes(db):
    """Exercise scoped many-to-many relations end to end.

    Covers: role-filtered scopes (admins/admins2/admins3) staying in sync with
    the underlying `users` relation, the reverse `cover_orgs` scope filtered on
    the organization flag, and add/remove directly through each scope.
    """
    # Phase 1: add members with roles through the plain `users` relation and
    # check that all three admin-scope variants agree (only gus is an admin).
    gus = db.User.insert(name='Gus Fring')
    org = db.Organization.insert(name='Los pollos hermanos')
    org.users.add(gus, role='admin')
    frank = db.User.insert(name='Frank')
    org.users.add(frank, role='manager')
    assert (org.admins.count() == 1)
    assert (org.admins2.count() == 1)
    assert (org.admins3.count() == 1)
    org.users.remove(gus)
    org.users.remove(frank)
    assert (org.admins.count() == 0)
    assert (org.admins2.count() == 0)
    assert (org.admins3.count() == 0)
    # Phase 2: the reverse scope `cover_orgs` only sees orgs with is_cover=True.
    org2 = db.Organization.insert(name='Laundry', is_cover=True)
    org2.users.add(gus, role='admin')
    assert (gus.cover_orgs.count() == 1)
    # Inserted rows compare equal to their id here, so `.id == org2` holds.
    assert (gus.cover_orgs().first().id == org2)
    org2.users.remove(gus)
    assert (gus.cover_orgs.count() == 0)
    org.delete_record()
    org2.delete_record()
    # Phase 3: adding/removing directly through each admin scope must set the
    # role implicitly; repeat for each scope variant on a fresh organization.
    org = db.Organization.insert(name='Los pollos hermanos')
    org.admins.add(gus)
    assert (org.admins.count() == 1)
    org.admins.remove(gus)
    assert (org.admins.count() == 0)
    org.delete_record()
    org = db.Organization.insert(name='Los pollos hermanos')
    org.admins2.add(gus)
    assert (org.admins2.count() == 1)
    org.admins2.remove(gus)
    assert (org.admins2.count() == 0)
    org.delete_record()
    org = db.Organization.insert(name='Los pollos hermanos')
    org.admins3.add(gus)
    assert (org.admins3.count() == 1)
    org.admins3.remove(gus)
    assert (org.admins3.count() == 0)
    org.delete_record()
    # Phase 4: adding through the reverse scope from the user side.
    # NOTE(review): bare `User` (not `db.User`) — presumably the model class
    # is imported at module level; confirm, as the rest uses `db.User`.
    gus = User.get(name='Gus Fring')
    org2 = db.Organization.insert(name='Laundry', is_cover=True)
    gus.cover_orgs.add(org2)
    assert (gus.cover_orgs.count() == 1)
    assert (gus.cover_orgs().first().id == org2)
    gus.cover_orgs.remove(org2)
    assert (gus.cover_orgs.count() == 0)
def main(page):
    """Flet page: shorten a user-supplied URL with TinyURL and display it."""
    txt_name = ft.TextField(label='Enter the url')

    def btn_click(e):
        # Require a non-empty URL before calling the shortener.
        if not txt_name.value:
            txt_name.error_text = 'Please enter the url'
            page.update()
        else:
            name = txt_name.value
            shortener = pyshorteners.Shortener()
            page.add(ft.Text(f'Short link - {shortener.tinyurl.short(name)}'))

    page.add(txt_name, ft.ElevatedButton('Create URL!', on_click=btn_click))
class Backend(object):
    """Thin GraphQL client posting queries to the core's configured backend."""

    def __init__(self, core: Core):
        self.core = core
        # One session per Backend so HTTP connections are reused across queries.
        self._client = requests.Session()

    def query_raw(self, ql: str, **variables: Any) -> dict:
        """POST query *ql* with *variables* and return the decoded JSON body.

        Renamed `**vars` -> `**variables`: the old name shadowed the builtin
        `vars`; the keyword-collection name is not part of the call interface.
        (The old `Dict[str, Any]` per-kwarg annotation was also misleading —
        each individual kwarg is an arbitrary value, not a dict.)
        """
        resp = self._client.post(
            self.core.options.backend,
            json={'query': ql, 'variables': variables},
        )
        return resp.json()

    def query(self, ql: str, **variables: Any) -> Dict[(str, Any)]:
        """Run a query and return its 'data' payload.

        Raises BackendError when the GraphQL response carries an 'errors' key.
        """
        rst = self.query_raw(ql, **variables)
        if 'errors' in rst:
            raise BackendError(rst['errors'])
        return rst['data']
def test_build_transaction_with_contract_fallback_function(w3, fallback_function_contract):
    """build_transaction() on the fallback function yields an empty-data txn."""
    txn = fallback_function_contract.fallback.build_transaction()
    # `gas` is estimated and `maxFeePerGas` depends on the pending block's base
    # fee, so both are excluded; chainId is read from the connected client.
    # NOTE(review): the original expected values for maxFeePerGas and chainId
    # were truncated in the source (`'maxFeePerGas': ,` / `'chainId': `); this
    # deterministic form preserves the test's intent — confirm against the
    # upstream fixture values.
    assert dissoc(txn, 'gas', 'maxFeePerGas') == {
        'to': fallback_function_contract.address,
        'data': '0x',
        'value': 0,
        'maxPriorityFeePerGas': 10 ** 9,
        'chainId': w3.eth.chain_id,
    }
class Solution():
    """LeetCode 681 'Next Closest Time'.

    Given "HH:MM", return the next valid time that reuses only the digits
    already present, wrapping past midnight; a time may reuse a digit any
    number of times.
    """

    def nextClosestTime(self, time: str) -> str:
        digits = {ch for ch in time if ch.isnumeric()}

        def candidate_times():
            # Enumerate every 4-digit combination of the available digits,
            # keeping only combinations that form a valid HH:MM time.
            for h1 in digits:
                for h2 in digits:
                    for m1 in digits:
                        for m2 in digits:
                            hours, minutes = h1 + h2, m1 + m2
                            if int(hours) < 24 and int(minutes) < 60:
                                yield hours + ':' + minutes

        def minutes_until(now, candidate):
            # Forward distance in minutes from `now` to `candidate`, wrapping
            # past midnight; an equal time counts as a full day ahead.
            h1, m1 = now.split(':')
            h2, m2 = candidate.split(':')
            start = int(h1) * 60 + int(m1)
            target = int(h2) * 60 + int(m2)
            if target <= start:
                target += 24 * 60
            return target - start

        best, best_diff = None, None
        for candidate in candidate_times():
            diff = minutes_until(time, candidate)
            if best_diff is None or diff < best_diff:
                best_diff, best = diff, candidate
        return best
class OptionPlotoptionsColumnSonificationDefaultspeechoptionsMappingTime(Options):
    """Config accessors for the `time` mapping of column-sonification speech options.

    NOTE(review): every getter/setter pair below shares one name, so the later
    `def` overrides the earlier one at class-creation time — presumably
    @property / @<name>.setter decorators were lost in extraction; confirm
    against the generator that produced this file.
    """
    def mapFunction(self):
        # Getter: no configured default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store the value as a plain Python value (not a JS literal).
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: no configured default.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter: no configured default.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter: no configured default.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter: no configured default.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def forward(model: Model[(InT, OutT)], X: InT, is_train: bool) -> Tuple[(OutT, Callable)]:
    """Run the wrapped TensorFlow shim and return (Y, backprop).

    Inputs/outputs are converted between the framework-native and TensorFlow
    representations by the converters stored on the model's attrs.
    """
    convert_inputs = model.attrs['convert_inputs']
    convert_outputs = model.attrs['convert_outputs']
    tensorflow_model = model.shims[0]
    (X_tensorflow, get_dX) = convert_inputs(model, X, is_train)
    if is_train:
        # Training call returns the TF backprop callback alongside the output.
        (Y_tensorflow, tensorflow_backprop) = tensorflow_model(X_tensorflow, is_train)
    else:
        Y_tensorflow = tensorflow_model(X_tensorflow, is_train)
    (Y, get_dY_tensorflow) = convert_outputs(model, Y_tensorflow, is_train)
    def backprop(dY: OutT) -> InT:
        # Chain: native dY -> TF dY -> TF dX -> native dX.
        # NOTE(review): `tensorflow_backprop` is only bound in the is_train
        # branch, so calling backprop after an inference pass raises
        # NameError — presumably intentional (no gradients at inference),
        # but confirm against the upstream wrapper.
        dY_tensorflow = get_dY_tensorflow(dY)
        dX_tensorflow = tensorflow_backprop(dY_tensorflow)
        return get_dX(dX_tensorflow)
    return (Y, backprop)
def test_code_is_parent_of():
    """code_is_parent_of: strict-prefix semantics, identical for both code sets."""
    cases = (
        ([], [], False),            # empty is not a parent of empty
        ([1, 2], [1, 2, 3], True),  # proper prefix -> parent
        ([1, 2, 3], [1, 2], False), # longer code is never a parent
        (['A'], ['A', 'B'], True),  # works for string segments too
    )
    for code_cls in (ESPSCCodes, PGPSCCodes):
        for parent, child, expected in cases:
            assert code_cls.code_is_parent_of(parent, child) is expected
# NOTE(review): the bare `()` above this def in the original was almost
# certainly a mangled @nox.session(...) decorator; restored without arguments —
# confirm the original decorator parameters.
@nox.session()
def check_container_startup(session: nox.Session) -> None:
    """Start the compose service, always dump health/startup logs, then fail
    the session if `docker compose up --wait` did not succeed."""
    throw_error = False
    start_command = ('docker', 'compose', 'up', '--wait', IMAGE_NAME)
    healthcheck_logs_command = ('docker', 'inspect', '--format', '"{{json .State.Health }}"', IMAGE_NAME)
    startup_logs_command = ('docker', 'logs', '--tail', '50', IMAGE_NAME)
    try:
        session.run(*start_command, external=True)
    except CommandFailed:
        # Defer the failure so the diagnostic logs below are always printed.
        throw_error = True
    log_dashes = ('*' * 20)
    session.log(f'{log_dashes} Healthcheck Logs {log_dashes}')
    session.run(*healthcheck_logs_command, external=True)
    session.log(f'{log_dashes} Startup Logs {log_dashes}')
    session.run(*startup_logs_command, external=True)
    if throw_error:
        session.error('Container startup failed')
def forgot_password_token(user_manager: UserManagerMock[UserModel]):
    """Factory producing signed reset-password JWTs for tests."""

    def _forgot_password_token(user_id=None, current_password_hash=None, lifetime=user_manager.reset_password_token_lifetime_seconds):
        # The audience claim is always present; subject and password
        # fingerprint are added only when the caller supplies them.
        claims = {'aud': user_manager.reset_password_token_audience}
        if user_id is not None:
            claims['sub'] = str(user_id)
        if current_password_hash is not None:
            claims['password_fgpt'] = user_manager.password_helper.hash(current_password_hash)
        return generate_jwt(claims, user_manager.reset_password_token_secret, lifetime)

    return _forgot_password_token
def test_get_avg(stats_update_db, backend_db):
    """get_avg over firmware sizes: mean of 33 and 67 rounds to 50."""
    for uid, size in (('fw1', 33), ('fw2', 67)):
        firmware = create_test_firmware()
        firmware.uid = uid
        firmware.size = size
        backend_db.add_object(firmware)
    average = stats_update_db.get_avg(FileObjectEntry.size, firmware=True)
    assert round(average) == 50
class bsn_pdu_tx_request(bsn_header):
    """Big Switch (BSN) experimenter message requesting PDU transmission.

    Auto-generated loxigen-style OpenFlow serialization code (wire version 6),
    written for Python 2 (byte payloads handled as `str`). Field tokens are
    byte-exact wire format — do not reorder or reformat pack/unpack.
    """
    version = 6  # OpenFlow wire version
    type = 4  # OFPT_EXPERIMENTER
    experimenter = 6035143  # Big Switch Networks experimenter ID (0x5c16c7)
    subtype = 31  # bsn_pdu_tx_request message subtype
    def __init__(self, xid=None, tx_interval_ms=None, port_no=None, slot_num=None, data=None):
        # Every field falls back to a neutral default; xid stays None so the
        # transaction id can be assigned later by the sender.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (tx_interval_ms != None):
            self.tx_interval_ms = tx_interval_ms
        else:
            self.tx_interval_ms = 0
        if (port_no != None):
            self.port_no = port_no
        else:
            self.port_no = 0
        if (slot_num != None):
            self.slot_num = slot_num
        else:
            self.slot_num = 0
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return
    def pack(self):
        """Serialize the message to its on-wire byte string."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder length; patched below once the total size is known.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(struct.pack('!L', self.tx_interval_ms))
        packed.append(util.pack_port_no(self.port_no))
        packed.append(struct.pack('!B', self.slot_num))
        # 3 bytes of padding to align the variable-length payload.
        packed.append(('\x00' * 3))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a bsn_pdu_tx_request from *reader*; asserts fixed header fields.

        NOTE(review): loxigen normally emits this as a @staticmethod — the
        decorator appears to have been lost in extraction; confirm.
        """
        obj = bsn_pdu_tx_request()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict reading to this message's declared length (4 bytes already
        # consumed by version/type/length).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 31)
        obj.tx_interval_ms = reader.read('!L')[0]
        obj.port_no = util.unpack_port_no(reader)
        obj.slot_num = reader.read('!B')[0]
        reader.skip(3)  # skip the 3 padding bytes written by pack()
        obj.data = str(reader.read_all())
        return obj
    def __eq__(self, other):
        # Field-wise equality over all message-specific fields (class check
        # covers version/type/experimenter/subtype, which are class attributes).
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.tx_interval_ms != other.tx_interval_ms):
            return False
        if (self.port_no != other.port_no):
            return False
        if (self.slot_num != other.slot_num):
            return False
        if (self.data != other.data):
            return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump of the message via pretty-printer *q*."""
        q.text('bsn_pdu_tx_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('tx_interval_ms = ')
                q.text(('%#x' % self.tx_interval_ms))
                q.text(',')
                q.breakable()
                q.text('port_no = ')
                q.text(util.pretty_port(self.port_no))
                q.text(',')
                q.breakable()
                q.text('slot_num = ')
                q.text(('%#x' % self.slot_num))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
                q.breakable()
        q.text('}')
def extractAstumbledupontranslationsWordpressCom(item):
    """Map a feed item from this WordPress site to a release message.

    Returns None for previews / untitled chapters, a release message for a
    recognized tag, or False when no tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip items carrying neither chapter nor volume info, and preview posts.
    if not (chp or vol) or 'preview' in title.lower():
        return None
    known_tags = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tagname, name, tl_type in known_tags:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.