code stringlengths 281 23.7M |
|---|
class AbstractMibInstrumController(object):
    """Null MIB instrumentation controller: every SNMP operation fails.

    Serves as the abstract base for MIB instrumentation; subclasses
    override these hooks to serve actual managed objects.  Each method
    raises the protocol-appropriate "nothing here" error for its
    operation class.
    """

    def readMibObjects(self, *varBinds, **context):
        # GET-style read: report that no instance exists at the OID.
        raise error.NoSuchInstanceError(idx=0)

    def readNextMibObjects(self, *varBinds, **context):
        # GETNEXT/GETBULK-style read: report an empty MIB view.
        raise error.EndOfMibViewError(idx=0)

    def writeMibObjects(self, *varBinds, **context):
        # SET-style write: report that no such object exists.
        raise error.NoSuchObjectError(idx=0)
class OptionSeriesAreaDatalabelsFilter(Options):
    """Proxy for the Highcharts ``series.area.dataLabels.filter`` options.

    Bug fix: the original defined each getter/setter pair as two plain
    methods with the same name, so the second ``def`` silently shadowed
    the first and the getters became unreachable.  Restored the intended
    ``@property`` / ``@<name>.setter`` pairs.
    """

    @property
    def operator(self):
        """Comparison operator used by the filter (None when unset)."""
        return self._config_get(None)

    @operator.setter
    def operator(self, value: Any):
        self._config(value, js_type=False)

    @property
    def property(self):
        """Name of the point property the filter compares (None when unset)."""
        return self._config_get(None)

    @property.setter
    def property(self, text: str):
        self._config(text, js_type=False)
def debug_cfg(cunit, mh):
    """Debug helper: build the CFG of every function in a compilation unit.

    Walks the AST of *cunit* and calls build_cfg on each function
    definition or script file encountered.

    :param cunit: the Compilation_Unit to traverse
    :param mh: a Message_Handler (validated but not otherwise used here)
    """
    assert isinstance(cunit, Compilation_Unit)
    assert isinstance(mh, Message_Handler)

    class Function_Visitor(AST_Visitor):
        def visit(self, node, n_parent, relation):
            if isinstance(node, (Function_Definition, Script_File)):
                # NOTE(review): the built CFG is discarded — presumably
                # build_cfg has side effects (or a dump/draw step was
                # removed); confirm before relying on this helper.
                cfg = build_cfg(node)

    cunit.visit(None, Function_Visitor(), 'Root')
class MultiLineEdit(Widget):
    """A multi-line text input widget (flexx), backed by an HTML textarea.

    NOTE(review): in flexx, methods like ``_create_dom`` run transpiled
    in the browser, and ``user_text``/``user_done``/``__text_changed``
    normally carry ``@event.emitter``/``@event.reaction`` decorators —
    they appear to have been stripped here; confirm against upstream.
    """

    # Minimum widget size in pixels (width, height).
    DEFAULT_MIN_SIZE = (100, 50)

    # Styling for the underlying textarea element.
    CSS = '\n    .flx-MultiLineEdit {\n        resize: none;\n        overflow-y: scroll;\n        color: #333;\n        padding: 0.2em 0.4em;\n        border-radius: 3px;\n        border: 1px solid #aaa;\n        margin: 2px;\n    }\n    .flx-MultiLineEdit:focus {\n        outline: none;\n        box-shadow: 0px 0px 3px 1px rgba(0, 100, 200, 0.7);\n    }\n    '

    # Settable string property holding the widget's current text.
    text = event.StringProp(settable=True, doc='\n        The current text of the multi-line edit. Settable. If this is an empty\n        string, the placeholder_text is displayed instead.\n        ')

    def _create_dom(self):
        # Create the textarea and wire DOM events to the emitters below.
        node = window.document.createElement('textarea')
        f1 = (lambda : self.user_text(self.node.value))
        self._addEventListener(node, 'input', f1, False)
        self._addEventListener(node, 'blur', self.user_done, False)
        return node

    def __text_changed(self):
        # Push the (programmatically changed) text property into the DOM.
        self.node.value = self.text

    def user_text(self, text):
        # Emitter payload: old/new value pair; also applies the new text.
        d = {'old_value': self.text, 'new_value': text}
        self.set_text(text)
        return d

    def user_done(self):
        # Emitter fired on blur; old and new value are identical by design.
        d = {'old_value': self.text, 'new_value': self.text}
        return d
class ComputeErrorSolutionHintTemplate(Enum):
    """Hint messages suggesting how to fix compute-cluster validation errors.

    The first member is an f-string resolved at class-creation time from
    module-level CONTAINER_* constants; the others are ``str.format``
    templates filled in by the caller (note the {placeholders}).
    """
    CLUSTER_DEFINITION_WRONG_VALUES = f"Please set container values (cpu, memory, image) as ({CONTAINER_CPU},{CONTAINER_MEMORY},'{CONTAINER_IMAGE}')"
    ROLE_WRONG_POLICY = 'Set the policy of {role_name} to {role_policy}.'
    ROLE_POLICIES_NOT_FOUND = 'Make sure there are policies attached to {role_names} in the pce {pce_id}.'
# Register the OXM-carrying table-feature property types with _type.
# NOTE(review): in the upstream OpenFlow parser these `_type(...)` calls are
# class decorators applied to OFPTableFeaturePropOxm — the `@` context
# appears to have been lost in extraction; confirm against upstream.
_type(ofproto.OFPTFPT_MATCH)
_type(ofproto.OFPTFPT_WILDCARDS)
_type(ofproto.OFPTFPT_WRITE_SETFIELD)
_type(ofproto.OFPTFPT_WRITE_SETFIELD_MISS)
_type(ofproto.OFPTFPT_APPLY_SETFIELD)
_type(ofproto.OFPTFPT_APPLY_SETFIELD_MISS)
_type(ofproto.OFPTFPT_WRITE_COPYFIELD)
_type(ofproto.OFPTFPT_WRITE_COPYFIELD_MISS)
_type(ofproto.OFPTFPT_APPLY_COPYFIELD)
_type(ofproto.OFPTFPT_APPLY_COPYFIELD_MISS)
class OFPTableFeaturePropOxm(OFPTableFeatureProp):
    """Table-features property carrying a list of OXM ids."""

    def __init__(self, type_=None, length=None, oxm_ids=None):
        # Avoid a shared mutable default by normalizing None/falsy to a
        # fresh list per instance.
        oxm_ids = oxm_ids if oxm_ids else []
        super(OFPTableFeaturePropOxm, self).__init__(type_, length)
        self.oxm_ids = oxm_ids

    @classmethod
    def parser(cls, buf):
        """Parse the property payload in *buf* into an instance.

        Bug fix: the body uses ``cls`` (``cls.get_rest``, ``cls(...)``)
        but the ``@classmethod`` decorator was missing, so calling
        ``OFPTableFeaturePropOxm.parser(buf)`` would have bound ``cls``
        to the buffer.
        """
        rest = cls.get_rest(buf)
        ids = []
        # Consume OFPOxmId entries until the payload is exhausted.
        while rest:
            (i, rest) = OFPOxmId.parse(rest)
            ids.append(i)
        return cls(oxm_ids=ids)

    def serialize_body(self):
        """Serialize the OXM id list back to its wire representation."""
        bin_ids = bytearray()
        for oxm_id in self.oxm_ids:
            bin_ids += oxm_id.serialize()
        return bin_ids
class EfuseWafer(EfuseField):
    """Virtual efuse field exposing the chip wafer (silicon) revision."""

    def get(self, from_read=True):
        """Derive the wafer revision from two efuse bits plus one APB date bit.

        The three bits are combined LSB-first and mapped through the
        known revision encoding; unknown combinations fall back to 0.
        """
        parent = self.parent
        bit0 = parent['CHIP_VER_REV1'].get(from_read)
        assert parent['CHIP_VER_REV1'].bit_len == 1
        bit1 = parent['CHIP_VER_REV2'].get(from_read)
        assert parent['CHIP_VER_REV2'].bit_len == 1
        # The third revision bit lives in the APB_CTL_DATE register.
        date_word = parent.read_reg(parent.REGS.APB_CTL_DATE_ADDR)
        bit2 = (date_word >> parent.REGS.APB_CTL_DATE_S) & parent.REGS.APB_CTL_DATE_V
        raw = (bit2 << 2) | (bit1 << 1) | bit0
        # Only 0b000/0b001/0b011/0b111 are defined encodings.
        return {0: 0, 1: 1, 3: 2, 7: 3}.get(raw, 0)

    def save(self, new_value):
        """Wafer revision is read-only; burning it is always an error."""
        raise esptool.FatalError('Burning %s is not supported' % self.name)
def _get_wiki_dump_dl_url(t, **kwargs):
    """Resolve a wiki dump artifact of type *t* to a local path or a URL.

    Keyword overrides are layered on top of the module-level config.
    If the configured dumps directory already contains the file, the
    local path is returned; otherwise a download URL is built.
    """
    opts = {**config, **kwargs}
    wiki = opts['wiki']
    version = opts['version']
    filename = _WIKI_DL_NAME.format(w=wiki, t=t, v=version)
    dumps_path = opts['dumps_path']
    # Prefer an already-downloaded copy when the dumps directory exists.
    if dumps_path is not None and dumps_path.exists():
        candidate = dumps_path.joinpath(filename)
        if candidate.exists():
            return candidate
    return URL(_WIKI_DL_PATH.format(w=wiki, n=filename, v=version))
def random_sdp_bram(luts, name, modules, lines):
    """Emit a randomly chosen simple-dual-port BRAM configuration.

    Builds the candidate set of (width, (min_addr_bits, max_addr_bits))
    shapes, picks one uniformly, draws an address width from its range,
    and delegates to emit_sdp_bram.

    Fix: the (nbram * 32)-wide choice was added twice per loop
    iteration; since sdp_choices is a set this was dead work, and the
    duplicate add has been removed (behavior unchanged).
    """
    sdp_choices = set()
    # Single-BRAM native port widths.
    for width in (1, 2, 4, 8, 16, 18, 32, 36):
        sdp_choices.add((width, (1, max_address_bits(width))))
    # Multi-BRAM cascades.
    for nbram in range(2, MAX_BRAM + 1):
        sdp_choices.add((nbram * 32, (1, max_address_bits(nbram * 32))))
        sdp_choices.add((nbram * 36, (1, max_address_bits(nbram * 36))))
        sdp_choices.add((nbram * 16, (1, max_address_bits(nbram * 16))))
        # Shallow memories with a fixed (identical min/max) address width.
        for address_bits in range(1, 4):
            sdp_choices.add((nbram * 16, (address_bits, address_bits)))
    # Sort for a deterministic ordering before the random draw.
    sdp_choices = sorted(sdp_choices)
    (width, address_bits_range) = random.choice(sdp_choices)
    address_bits = random.randint(*address_bits_range)
    return emit_sdp_bram(luts, name, modules, lines, width, address_bits)
def text2sep_kata(text: str) -> (list, list):
    """Split Japanese *text* into words and their katakana readings.

    Runs the pyopenjtalk frontend, then normalizes punctuation-only and
    unreadable tokens.  Returns (sep, kata): the word segments and the
    hiragana-to-katakana converted readings, index-aligned.

    NOTE(review): several string literals below look mojibake-damaged —
    ``.replace('', '')`` is a no-op and ``('', '')`` can never match a
    non-empty word; they presumably contained specific characters
    originally.  Left byte-identical; confirm against upstream.
    """
    parsed = pyopenjtalk.run_frontend(text)
    res = []
    sep = []
    for parts in parsed:
        (word, yomi) = (replace_punctuation(parts['orig']), parts['pron'].replace('', ''))
        if yomi:
            if re.match(_MARKS, yomi):
                # Multi-character punctuation runs: keep each char as its
                # own "reading".
                if (len(word) > 1):
                    word = [replace_punctuation(i) for i in list(word)]
                    yomi = word
                    res += yomi
                    sep += word
                    continue
                # Unknown punctuation not covered by rep_map collapses to a comma.
                elif ((word not in rep_map.keys()) and (word not in rep_map.values())):
                    word = ','
                yomi = word
            res.append(yomi)
        elif (word in _SYMBOL_TOKENS):
            res.append(word)
        elif (word in ('', '')):
            res.append('')
        elif (word in _NO_YOMI_TOKENS):
            # Tokens with no reading are dropped entirely.
            pass
        else:
            res.append(word)
        sep.append(word)
    return (sep, [hira2kata(i) for i in res])
class Pylint(SimpleTool):
    """Tool wrapper that exposes pylint as an agent-invokable command."""

    name = 'Pylint'
    description = 'runs pylint to check for python errors. By default it runs on the entire project. You can specify a relative path to run on a specific file or module.'

    def __init__(self, wd: str='.'):
        # Directory pylint is invoked from; defaults to the current one.
        self.workdir = wd

    def func(self, args: str) -> str:
        """Run pylint with *args* inside the configured working directory."""
        return run_pylint_on_args(args, self.workdir)
class WebsocketRoutingRule(RoutingRule):
    """Routing rule binding websocket paths to a pipeline-wrapped handler.

    Used as a decorator: configuring happens in __init__, while
    __call__ receives the handler, wires the pipeline flows, and
    registers one WebsocketRoute per path.
    """
    __slots__ = ['f', 'hostname', 'name', 'paths', 'pipeline_flow_close', 'pipeline_flow_open', 'pipeline_flow_receive', 'pipeline_flow_send', 'pipeline', 'prefix', 'schemes']

    def __init__(self, router, paths=None, name=None, pipeline=None, schemes=None, hostname=None, prefix=None):
        super().__init__(router)
        self.name = name
        # Normalize paths to a list/tuple; an empty list means "derive
        # the path from the handler name later in __call__".
        self.paths = paths
        if (self.paths is None):
            self.paths = []
        if (not isinstance(self.paths, (list, tuple))):
            self.paths = (self.paths,)
        # Default to both plain and TLS websocket schemes.
        self.schemes = (schemes or ('ws', 'wss'))
        if (not isinstance(self.schemes, (list, tuple))):
            self.schemes = (self.schemes,)
        self.hostname = (hostname or self.app.config.hostname_default)
        # Ensure any prefix is absolute.
        if prefix:
            if (not prefix.startswith('/')):
                prefix = ('/' + prefix)
        self.prefix = prefix
        # Rule pipeline = router-level pipes followed by rule-local ones.
        self.pipeline = (self.router.pipeline + (pipeline or []))
        if any(((not isinstance(pipe, Pipe)) for pipe in self.pipeline)):
            raise RuntimeError('Invalid pipeline')

    def __call__(self, f: Callable[(..., Any)]) -> Callable[(..., Any)]:
        # Fall back to "/<handler name>" when no explicit path was given.
        if (not self.paths):
            self.paths.append(('/' + f.__name__))
        if (not self.name):
            self.name = self.build_name(f)
        # A trailing dot means the module-derived name needs the
        # function name appended (e.g. handler defined in __init__).
        if self.name.endswith('.'):
            self.name = (self.name + f.__name__)
        # Wrap the handler and capture each lifecycle flow separately.
        pipeline_obj = WebsocketPipeline(self.pipeline)
        wrapped_f = pipeline_obj(f)
        self.pipeline_flow_open = pipeline_obj._flow_open()
        self.pipeline_flow_close = pipeline_obj._flow_close()
        self.pipeline_flow_receive = pipeline_obj._flow_receive()
        self.pipeline_flow_send = pipeline_obj._flow_send()
        self.f = wrapped_f
        for (idx, path) in enumerate(self.paths):
            self.router.add_route(WebsocketRoute(self, path, idx))
        # Return the original (unwrapped) handler so it stays usable.
        return f
class TasksListView(QtWidgets.QListView):
    """List view of tasks supporting extended multi-selection."""

    def __init__(self, parent=None, *args, **kwargs):
        # Bug fix: the original called super().__init__(*args, *kwargs, ...),
        # which unpacks the kwargs dict as positional KEY strings instead of
        # forwarding keyword arguments; it must be **kwargs.
        super(TasksListView, self).__init__(*args, **kwargs, parent=parent)
        self.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)

    def get_selected_items(self):
        """Return the currently selected model items that are TaskItem instances."""
        selection_model = self.selectionModel()
        indexes = selection_model.selectedIndexes()
        task_items = []
        if indexes:
            item_model = self.model()
            for index in indexes:
                current_item = item_model.itemFromIndex(index)
                # Skip non-task rows (e.g. group/header items).
                if current_item and isinstance(current_item, TaskItem):
                    task_items.append(current_item)
        return task_items

    def get_selected_tasks(self):
        """Return the .task payload of each selected TaskItem."""
        return [task_item.task for task_item in self.get_selected_items()]
class TestSkillBehaviour(BaseSkillTestCase):
    """Tests for the generic_seller skill's service-registration behaviour.

    Exercises setup/act/teardown and the individual register_* hooks,
    asserting the exact messages placed in the outbox and the log lines
    emitted.
    """
    path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'generic_seller')
    # These tests exchange messages with framework components, not other agents.
    is_agent_to_agent_messages = False

    # NOTE(review): parameter is named `cls` without a visible @classmethod —
    # presumably the test framework (or a stripped decorator) provides class
    # binding; confirm against the BaseSkillTestCase convention.
    def setup(cls):
        super().setup()
        cls.service_registration = cast(GenericServiceRegistrationBehaviour, cls._skill.skill_context.behaviours.service_registration)
        cls.strategy = cast(GenericStrategy, cls._skill.skill_context.strategy)
        cls.logger = cls._skill.skill_context.logger
        # Canned registration message used by the retry (act) tests.
        cls.registration_message = OefSearchMessage(dialogue_reference=('', ''), performative=OefSearchMessage.Performative.REGISTER_SERVICE, service_description='some_service_description')
        cls.registration_message.sender = str(cls._skill.skill_context.skill_id)
        cls.registration_message.to = cls._skill.skill_context.search_service_address
        cls.mocked_description = Description({'foo1': 1, 'bar1': 2})

    def test_setup_is_ledger_tx(self):
        # With ledger transactions enabled, setup must request the balance
        # AND register the agent (two outbox messages).
        self.strategy._is_ledger_tx = True
        mocked_description_1 = 'some_description_1'
        with patch.object(self.strategy, 'get_location_description', return_value=mocked_description_1):
            with patch.object(self.service_registration.context.logger, 'log') as mock_logger:
                self.service_registration.setup()
        self.assert_quantity_in_outbox(2)
        (has_attributes, error_str) = self.message_has_attributes(actual_message=self.get_message_from_outbox(), message_type=LedgerApiMessage, performative=LedgerApiMessage.Performative.GET_BALANCE, to=LEDGER_API_ADDRESS, sender=str(self.skill.skill_context.skill_id), ledger_id=self.strategy.ledger_id, address=self.skill.skill_context.agent_address)
        assert has_attributes, error_str
        (has_attributes, error_str) = self.message_has_attributes(actual_message=self.get_message_from_outbox(), message_type=OefSearchMessage, performative=OefSearchMessage.Performative.REGISTER_SERVICE, to=self.skill.skill_context.search_service_address, sender=str(self.skill.skill_context.skill_id), service_description=mocked_description_1)
        assert has_attributes, error_str
        mock_logger.assert_any_call(logging.INFO, 'registering agent on SOEF.')

    def test_setup_not_is_ledger_tx(self):
        # Without ledger transactions, setup only registers the agent.
        self.strategy._is_ledger_tx = False
        mocked_description_1 = 'some_description_1'
        with patch.object(self.strategy, 'get_location_description', return_value=mocked_description_1):
            with patch.object(self.service_registration.context.logger, 'log') as mock_logger:
                self.service_registration.setup()
        self.assert_quantity_in_outbox(1)
        (has_attributes, error_str) = self.message_has_attributes(actual_message=self.get_message_from_outbox(), message_type=OefSearchMessage, performative=OefSearchMessage.Performative.REGISTER_SERVICE, to=self.skill.skill_context.search_service_address, sender=str(self.skill.skill_context.skill_id), service_description=mocked_description_1)
        assert has_attributes, error_str
        mock_logger.assert_any_call(logging.INFO, 'registering agent on SOEF.')

    def test_act_i(self):
        # No failed registration pending: act() is a no-op.
        self.service_registration.failed_registration_msg = None
        assert (self.service_registration.act() is None)
        self.assert_quantity_in_outbox(0)

    def test_act_ii(self):
        # A failed registration pending: act() resends it and clears the flag.
        self.service_registration.failed_registration_msg = self.registration_message
        with patch.object(self.service_registration.context.logger, 'log') as mock_logger:
            self.service_registration.act()
        self.assert_quantity_in_outbox(1)
        (has_attributes, error_str) = self.message_has_attributes(actual_message=self.get_message_from_outbox(), message_type=type(self.registration_message), performative=self.registration_message.performative, to=self.registration_message.to, sender=str(self.skill.skill_context.skill_id), service_description=self.registration_message.service_description)
        assert has_attributes, error_str
        mock_logger.assert_any_call(logging.INFO, f'Retrying registration on SOEF. Retry {self.service_registration._nb_retries} out of {self.service_registration._max_soef_registration_retries}.')
        assert (self.service_registration.failed_registration_msg is None)

    def test_act_iii(self):
        # Retry budget exhausted: act() deactivates the skill instead of resending.
        self.service_registration.failed_registration_msg = self.registration_message
        self.service_registration._max_soef_registration_retries = 2
        self.service_registration._nb_retries = 2
        self.service_registration.act()
        self.assert_quantity_in_outbox(0)
        assert (self.skill.skill_context.is_active is False)

    def test_register_service(self):
        with patch.object(self.strategy, 'get_register_service_description', return_value=self.mocked_description):
            with patch.object(self.logger, 'log') as mock_logger:
                self.service_registration.register_service()
        self.assert_quantity_in_outbox(1)
        message = self.get_message_from_outbox()
        (has_attributes, error_str) = self.message_has_attributes(actual_message=message, message_type=OefSearchMessage, performative=OefSearchMessage.Performative.REGISTER_SERVICE, to=self.skill.skill_context.search_service_address, sender=str(self.skill.skill_context.skill_id), service_description=self.mocked_description)
        assert has_attributes, error_str
        mock_logger.assert_any_call(logging.INFO, "registering agent's service on the SOEF.")

    def test_register_genus(self):
        with patch.object(self.strategy, 'get_register_personality_description', return_value=self.mocked_description):
            with patch.object(self.logger, 'log') as mock_logger:
                self.service_registration.register_genus()
        self.assert_quantity_in_outbox(1)
        message = self.get_message_from_outbox()
        (has_attributes, error_str) = self.message_has_attributes(actual_message=message, message_type=OefSearchMessage, performative=OefSearchMessage.Performative.REGISTER_SERVICE, to=self.skill.skill_context.search_service_address, sender=str(self.skill.skill_context.skill_id), service_description=self.mocked_description)
        assert has_attributes, error_str
        mock_logger.assert_any_call(logging.INFO, "registering agent's personality genus on the SOEF.")

    def test_register_classification(self):
        with patch.object(self.strategy, 'get_register_classification_description', return_value=self.mocked_description):
            with patch.object(self.logger, 'log') as mock_logger:
                self.service_registration.register_classification()
        self.assert_quantity_in_outbox(1)
        message = self.get_message_from_outbox()
        (has_attributes, error_str) = self.message_has_attributes(actual_message=message, message_type=OefSearchMessage, performative=OefSearchMessage.Performative.REGISTER_SERVICE, to=self.skill.skill_context.search_service_address, sender=str(self.skill.skill_context.skill_id), service_description=self.mocked_description)
        assert has_attributes, error_str
        mock_logger.assert_any_call(logging.INFO, "registering agent's personality classification on the SOEF.")

    def test_teardown(self):
        # Teardown must unregister both the service and the agent (two messages).
        mocked_description_1 = 'some_description_1'
        mocked_description_2 = 'some_description_2'
        with patch.object(self.strategy, 'get_unregister_service_description', return_value=mocked_description_1):
            with patch.object(self.strategy, 'get_location_description', return_value=mocked_description_2):
                with patch.object(self.service_registration.context.logger, 'log') as mock_logger:
                    self.service_registration.teardown()
        self.assert_quantity_in_outbox(2)
        (has_attributes, error_str) = self.message_has_attributes(actual_message=self.get_message_from_outbox(), message_type=OefSearchMessage, performative=OefSearchMessage.Performative.UNREGISTER_SERVICE, to=self.skill.skill_context.search_service_address, sender=str(self.skill.skill_context.skill_id), service_description=mocked_description_1)
        assert has_attributes, error_str
        mock_logger.assert_any_call(logging.INFO, 'unregistering service from SOEF.')
        (has_attributes, error_str) = self.message_has_attributes(actual_message=self.get_message_from_outbox(), message_type=OefSearchMessage, performative=OefSearchMessage.Performative.UNREGISTER_SERVICE, to=self.skill.skill_context.search_service_address, sender=str(self.skill.skill_context.skill_id), service_description=mocked_description_2)
        assert has_attributes, error_str
        mock_logger.assert_any_call(logging.INFO, 'unregistering agent from SOEF.')
class FirewallClientIPSetSettings():
    """Client-side view of firewalld ipset settings.

    Settings are stored positionally as
    [version, short, description, type, options(dict), entries(list)].

    NOTE(review): the bare `_exceptions` expression before each method looks
    like a mangled `@handle_exceptions`-style decorator (the `@` and prefix
    lost in extraction); code left byte-identical — confirm against upstream
    firewalld.
    """
    _exceptions

    def __init__(self, settings=None):
        if settings:
            self.settings = settings
        else:
            # Fresh, empty settings tuple-shape (see class docstring).
            self.settings = ['', '', '', '', {}, []]
    _exceptions

    def __repr__(self):
        return ('%s(%r)' % (self.__class__, self.settings))
    _exceptions

    def getVersion(self):
        return self.settings[0]
    _exceptions

    def setVersion(self, version):
        self.settings[0] = version
    _exceptions

    def getShort(self):
        return self.settings[1]
    _exceptions

    def setShort(self, short):
        self.settings[1] = short
    _exceptions

    def getDescription(self):
        return self.settings[2]
    _exceptions

    def setDescription(self, description):
        self.settings[2] = description
    _exceptions

    def getType(self):
        return self.settings[3]
    _exceptions

    def setType(self, ipset_type):
        self.settings[3] = ipset_type
    _exceptions

    def getOptions(self):
        return self.settings[4]
    _exceptions

    def setOptions(self, options):
        self.settings[4] = options
    _exceptions

    def addOption(self, key, value):
        # Only add when the key is absent or mapped to a different value;
        # re-adding the identical option is an error.
        if ((key not in self.settings[4]) or (self.settings[4][key] != value)):
            self.settings[4][key] = value
        else:
            raise FirewallError(errors.ALREADY_ENABLED, (("'%s=%s'" % (key, value)) if value else key))
    _exceptions

    def removeOption(self, key):
        if (key in self.settings[4]):
            del self.settings[4][key]
        else:
            raise FirewallError(errors.NOT_ENABLED, key)
    _exceptions

    def queryOption(self, key, value):
        return ((key in self.settings[4]) and (self.settings[4][key] == value))
    _exceptions

    def getEntries(self):
        return self.settings[5]
    _exceptions

    def setEntries(self, entries):
        # Entries cannot be managed on ipsets with a non-zero timeout —
        # the kernel expires them independently.
        if (('timeout' in self.settings[4]) and (self.settings[4]['timeout'] != '0')):
            raise FirewallError(errors.IPSET_WITH_TIMEOUT)
        check_for_overlapping_entries(entries)
        self.settings[5] = entries
    _exceptions

    def addEntry(self, entry):
        if (('timeout' in self.settings[4]) and (self.settings[4]['timeout'] != '0')):
            raise FirewallError(errors.IPSET_WITH_TIMEOUT)
        entry = normalize_ipset_entry(entry)
        if (entry not in self.settings[5]):
            # Reject entries overlapping an existing range/network.
            check_entry_overlaps_existing(entry, self.settings[5])
            self.settings[5].append(entry)
        else:
            raise FirewallError(errors.ALREADY_ENABLED, entry)
    _exceptions

    def removeEntry(self, entry):
        if (('timeout' in self.settings[4]) and (self.settings[4]['timeout'] != '0')):
            raise FirewallError(errors.IPSET_WITH_TIMEOUT)
        entry = normalize_ipset_entry(entry)
        if (entry in self.settings[5]):
            self.settings[5].remove(entry)
        else:
            raise FirewallError(errors.NOT_ENABLED, entry)
    _exceptions

    def queryEntry(self, entry):
        if (('timeout' in self.settings[4]) and (self.settings[4]['timeout'] != '0')):
            raise FirewallError(errors.IPSET_WITH_TIMEOUT)
        entry = normalize_ipset_entry(entry)
        return (entry in self.settings[5])
def test_get_raw_transaction_serialization():
    """Round-trip a GET_RAW_TRANSACTION message through envelope encode/decode."""
    kwargs_arg = ContractApiMessage.Kwargs({'key_1': 1, 'key_2': 2})
    msg = ContractApiMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=ContractApiMessage.Performative.GET_RAW_TRANSACTION, ledger_id='some_ledger_id', contract_id='some_contract_id', contract_address='some_contract_address', callable='some_callable', kwargs=kwargs_arg)
    msg.to = 'receiver'
    envelope = Envelope(to=msg.to, sender='sender', message=msg)
    envelope_bytes = envelope.encode()
    actual_envelope = Envelope.decode(envelope_bytes)
    expected_envelope = envelope
    assert (expected_envelope.to == actual_envelope.to)
    assert (expected_envelope.sender == actual_envelope.sender)
    assert (expected_envelope.protocol_specification_id == actual_envelope.protocol_specification_id)
    # Intentionally `!=`: after decode the payload is still in serialized
    # form, so it differs from the original message object — presumably;
    # confirm against the Envelope API.
    assert (expected_envelope.message != actual_envelope.message)
    # Decode the payload explicitly and restore the routing fields that
    # are carried by the envelope, then compare full messages.
    actual_msg = ContractApiMessage.serializer.decode(actual_envelope.message)
    actual_msg.to = actual_envelope.to
    actual_msg.sender = actual_envelope.sender
    expected_msg = msg
    assert (expected_msg == actual_msg)
class TestHNSW(unittest.TestCase):
    """Tests for the HNSW approximate-nearest-neighbor index.

    Covers insertion (both insert() and __setitem__), querying, update,
    merge, pickling, copying, soft/hard removal, popitem and clear.
    """

    def _create_random_points(self, n=100, dim=10):
        # Uniform random vectors in [0, 1)^dim.
        return np.random.rand(n, dim)

    def _create_index(self, vecs, keys=None):
        hnsw = HNSW(distance_func=l2_distance, m=16, ef_construction=100)
        self._insert_points(hnsw, vecs, keys)
        return hnsw

    def _search_index(self, index, queries, k=10):
        return self._search_index_dist(index, queries, l2_distance, k)

    def _insert_points(self, index, points, keys=None):
        """Insert points (alternating insert()/__setitem__) and verify mapping invariants."""
        original_length = len(index)
        if (keys is None):
            keys = list(range(len(points)))
        for (i, (key, point)) in enumerate(zip(keys, points)):
            # Exercise both insertion APIs.
            if ((i % 2) == 0):
                index.insert(key, point)
            else:
                index[key] = point
            self.assertTrue((index._entry_point is not None))
            self.assertIn(key, index)
            if (original_length == 0):
                self.assertNotIn((key + 1), index)
            self.assertTrue(np.array_equal(index.get(key), point))
            self.assertTrue(np.array_equal(index[key], point))
        # Iteration order must follow insertion order (only checkable when
        # the index started empty).
        if (original_length == 0):
            self.assertEqual(len(index), len(points))
            for (key_indexed, key) in zip(index, keys):
                self.assertEqual(key_indexed, key)
            for (key_indexed, key) in zip(index.keys(), keys):
                self.assertEqual(key_indexed, key)
            for (vec_indexed, vec) in zip(index.values(), points):
                self.assertTrue(np.array_equal(vec_indexed, vec))
            for ((key_indexed, vec_indexed), key, vec) in zip(index.items(), keys, points):
                self.assertEqual(key_indexed, key)
                self.assertTrue(np.array_equal(vec_indexed, vec))

    def _search_index_dist(self, index, queries, distance_func, k=10):
        # Results for each query must come back sorted by distance.
        for i in range(len(queries)):
            results = index.query(queries[i], 10)
            self.assertEqual(len(results), 10)
            for j in range((len(results) - 1)):
                self.assertLessEqual(distance_func(index[results[j][0]], queries[i]), distance_func(index[results[(j + 1)][0]], queries[i]))

    def test_search(self):
        data = self._create_random_points()
        hnsw = self._create_index(data)
        self._search_index(hnsw, data)

    def test_upsert(self):
        data = self._create_random_points()
        hnsw = self._create_index(data)
        new_data = self._create_random_points(n=10, dim=10)
        # Re-inserting keys 0..9 overwrites their vectors.
        self._insert_points(hnsw, new_data)
        self._search_index(hnsw, new_data)

    def test_update(self):
        data = self._create_random_points()
        hnsw = self._create_index(data)
        new_data = self._create_random_points(n=10, dim=10)
        hnsw.update({i: new_data[i] for i in range(len(new_data))})
        self._search_index(hnsw, new_data)

    def test_merge(self):
        # Two indexes with disjoint key ranges merge into one containing both.
        data1 = self._create_random_points()
        data2 = self._create_random_points()
        hnsw1 = self._create_index(data1, keys=list(range(len(data1))))
        hnsw2 = self._create_index(data2, keys=list(range(len(data1), (len(data1) + len(data2)))))
        new_index = hnsw1.merge(hnsw2)
        self._search_index(new_index, data1)
        self._search_index(new_index, data2)
        for i in range(len(data1)):
            self.assertIn(i, new_index)
            self.assertTrue(np.array_equal(new_index[i], data1[i]))
        for i in range(len(data2)):
            self.assertIn((i + len(data1)), new_index)
            self.assertTrue(np.array_equal(new_index[(i + len(data1))], data2[i]))

    def test_pickle(self):
        data = self._create_random_points()
        hnsw = self._create_index(data)
        import pickle
        hnsw2 = pickle.loads(pickle.dumps(hnsw))
        self.assertEqual(hnsw, hnsw2)

    def test_copy(self):
        data = self._create_random_points()
        hnsw = self._create_index(data)
        hnsw2 = hnsw.copy()
        self.assertEqual(hnsw, hnsw2)
        # The copy must be independent of the original.
        hnsw.remove(0)
        self.assertTrue((0 not in hnsw))
        self.assertTrue((0 in hnsw2))

    def test_soft_remove_and_pop_and_clean(self):
        data = self._create_random_points()
        hnsw = self._create_index(data)
        for i in range((len(data) - 1)):
            # Alternate between remove() and pop() to cover both paths.
            if ((i % 2) == 0):
                hnsw.remove(i)
            else:
                point = hnsw.pop(i)
                self.assertTrue(np.array_equal(point, data[i]))
            self.assertNotIn(i, hnsw)
            self.assertEqual(len(hnsw), ((len(data) - i) - 1))
            self.assertRaises(KeyError, hnsw.pop, i)
            # Soft remove is idempotent: repeated removes do not raise.
            hnsw.remove(i)
            hnsw.remove(i)
            hnsw.remove(i)
            results = hnsw.query(data[i], 10)
            expected_result_size = min(10, ((len(data) - i) - 1))
            # Soft-removed nodes can occasionally disconnect the graph, so
            # a short result set is only warned about, not failed.
            if (len(results) != expected_result_size):
                warnings.warn(f'Issue encountered at i={i} during soft remove unit test: expected {expected_result_size} results, got {len(results)} results. Potential graph connectivity issue.')
                hnsw.clean()
                results = hnsw.query(data[i], 10)
                self.assertEqual(len(results), min(10, ((len(data) - i) - 1)))
        # Removing the last point empties the index entirely.
        hnsw.remove((len(data) - 1))
        self.assertNotIn((len(data) - 1), hnsw)
        self.assertEqual(len(hnsw), 0)
        self.assertRaises(KeyError, hnsw.pop, (len(data) - 1))
        self.assertRaises(KeyError, hnsw.remove, (len(data) - 1))
        self.assertRaises(ValueError, hnsw.query, data[0])
        hnsw.clean()
        self.assertEqual(len(hnsw), 0)
        self.assertRaises(KeyError, hnsw.remove, 0)
        self.assertRaises(ValueError, hnsw.query, data[0])

    def test_hard_remove_and_pop_and_clean(self):
        data = self._create_random_points()
        hnsw = self._create_index(data)
        for i in range((len(data) - 1)):
            if ((i % 2) == 0):
                hnsw.remove(i, hard=True)
            else:
                point = hnsw.pop(i, hard=True)
                self.assertTrue(np.array_equal(point, data[i]))
            self.assertNotIn(i, hnsw)
            self.assertEqual(len(hnsw), ((len(data) - i) - 1))
            self.assertRaises(KeyError, hnsw.pop, i)
            # Unlike soft remove, hard-removing again raises.
            self.assertRaises(KeyError, hnsw.remove, i)
            results = hnsw.query(data[i], 10)
            self.assertEqual(len(results), min(10, ((len(data) - i) - 1)))
        hnsw.remove((len(data) - 1), hard=True)
        self.assertNotIn((len(data) - 1), hnsw)
        self.assertEqual(len(hnsw), 0)
        self.assertRaises(KeyError, hnsw.pop, (len(data) - 1))
        self.assertRaises(KeyError, hnsw.remove, (len(data) - 1))
        self.assertRaises(ValueError, hnsw.query, data[0])
        hnsw.clean()
        self.assertEqual(len(hnsw), 0)
        self.assertRaises(KeyError, hnsw.remove, 0)
        self.assertRaises(ValueError, hnsw.query, data[0])

    def test_popitem_last(self):
        # popitem() defaults to LIFO order, for both soft and hard removal.
        data = self._create_random_points()
        for hard in [True, False]:
            hnsw = self._create_index(data)
            for i in range(len(data)):
                (key, point) = hnsw.popitem(hard=hard)
                self.assertTrue(np.array_equal(point, data[key]))
                self.assertEqual(key, ((len(data) - i) - 1))
                self.assertTrue(np.array_equal(point, data[((len(data) - i) - 1)]))
                self.assertNotIn(key, hnsw)
                self.assertEqual(len(hnsw), ((len(data) - i) - 1))
            self.assertRaises(KeyError, hnsw.popitem)

    def test_popitem_first(self):
        # popitem(last=False) pops in FIFO order.
        data = self._create_random_points()
        for hard in [True, False]:
            hnsw = self._create_index(data)
            for i in range(len(data)):
                (key, point) = hnsw.popitem(last=False, hard=hard)
                self.assertTrue(np.array_equal(point, data[key]))
                self.assertEqual(key, i)
                self.assertTrue(np.array_equal(point, data[i]))
                self.assertNotIn(key, hnsw)
                self.assertEqual(len(hnsw), ((len(data) - i) - 1))
            self.assertRaises(KeyError, hnsw.popitem)

    def test_clear(self):
        data = self._create_random_points()
        hnsw = self._create_index(data)
        hnsw.clear()
        self.assertEqual(len(hnsw), 0)
        self.assertRaises(StopIteration, next, iter(hnsw))
        self.assertRaises(StopIteration, next, iter(hnsw.keys()))
        self.assertRaises(StopIteration, next, iter(hnsw.values()))
        self.assertRaises(KeyError, hnsw.pop, 0)
        self.assertRaises(KeyError, hnsw.__getitem__, 0)
        self.assertRaises(KeyError, hnsw.popitem)
        self.assertRaises(ValueError, hnsw.query, data[0])
def _graphs_dangerous_dereference_in_the_same_block_as_target_and_definition_32bit() -> Tuple[(ControlFlowGraph, ControlFlowGraph)]:
    """Build an (input, expected-output) CFG pair for an expression-propagation test.

    The single basic block aliases y through ptr and then writes through
    the pointer.  In the expected output, x#1 is rewritten to the
    propagated expression (x#0 + 11) — legal because the dangerous
    dereference via ptr happens after that use; everything else is
    unchanged.
    """
    in_cfg = ControlFlowGraph()
    # SSA-style variable families: x (unaliased), y (aliased via ptr), ptr.
    x = vars('x', 2, aliased=False)
    y = vars('y', 2, aliased=True)
    ptr = vars('ptr', 1, aliased=False)
    c = const(11)
    in_node = BasicBlock(0, [_call('rand', [x[0]], []), _assign(y[0], _add(x[0], c[5])), _assign(ptr[0], _addr(y[0])), _assign(x[1], y[0]), _assign(_deref(ptr[0]), c[10]), _assign(y[1], y[0]), _ret(y[0])])
    in_cfg.add_node(in_node)
    out_cfg = ControlFlowGraph()
    # Identical block except x#1 := y#0 is replaced by x#1 := x#0 + 11.
    out_node = BasicBlock(0, [_call('rand', [x[0]], []), _assign(y[0], _add(x[0], c[5])), _assign(ptr[0], _addr(y[0])), _assign(x[1], _add(x[0], c[5])), _assign(_deref(ptr[0]), c[10]), _assign(y[1], y[0]), _ret(y[0])])
    out_cfg.add_node(out_node)
    return (in_cfg, out_cfg)
def convert_list_arguments(*indices):
    """Decorator factory: convert selected arguments via list_to_string.

    *indices* may be positional parameter indexes (int) or parameter
    names (str); each selected argument is passed through
    list_to_string before the wrapped function is called.

    Fix: the original contained a bare ``(f)`` expression statement
    where a ``@wraps(f)`` decorator evidently belonged, so the wrapper
    lost the wrapped function's name/docstring; restored via
    functools.wraps.
    """
    from functools import wraps

    def decorator(f):
        sig = signature(f)
        names = tuple(sig.parameters.keys())

        @wraps(f)
        def wrapper(*args, **kwargs):
            ba = sig.bind(*args, **kwargs)
            ba.apply_defaults()
            for i in indices:
                # Normalize positional indexes to parameter names first.
                if isinstance(i, int):
                    i = names[i]
                if isinstance(i, str):
                    ba.arguments[i] = list_to_string(ba.arguments[i])
            args = ba.args
            kwargs = ba.kwargs
            return f(*args, **kwargs)
        return wrapper
    return decorator
class WafRuleAttributes(ModelNormal):
    """Generated OpenAPI model for WAF rule attributes.

    NOTE(review): the bare `_property` / `_js_args_to_python_args`
    expressions look like mangled decorators (presumably
    `@cached_property` and `@convert_js_args_to_python_args`), and
    `_from_openapi_data` uses `cls` as if it were a classmethod — code
    left byte-identical; confirm against the generator's template.
    """
    # Enum constraints for the 'publisher' and 'type' attributes.
    allowed_values = {('publisher',): {'FASTLY': 'fastly', 'TRUSTWAVE': 'trustwave', 'OWASP': 'owasp'}, ('type',): {'STRICT': 'strict', 'SCORE': 'score', 'THRESHOLD': 'threshold'}}
    validations = {}
    _property

    def additional_properties_type():
        # Types accepted for properties not declared in openapi_types.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property

    def openapi_types():
        # Declared attribute name -> allowed type tuple.
        return {'modsec_rule_id': (int,), 'publisher': (str,), 'type': (str,)}
    _property

    def discriminator():
        return None
    attribute_map = {'modsec_rule_id': 'modsec_rule_id', 'publisher': 'publisher', 'type': 'type'}
    read_only_vars = {'modsec_rule_id', 'publisher', 'type'}
    _composed_schemas = {}
    _js_args_to_python_args

    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from server data, allowing read-only attributes to be set."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass OpenApiModel.__new__ discriminator logic.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys unknown to the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args

    def __init__(self, *args, **kwargs):
        """Instantiate from user data; read-only attributes are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, user construction may not set
            # read-only attributes.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def screenshot(figure=None, mode='rgb', antialiased=False):
    """Grab the current scene's pixels as a numpy image array.

    :param figure: scene figure to capture; defaults to the current one.
    :param mode: 'rgb' (uint8, shape (y, x, 3)) or 'rgba'
        (float, shape (y, x, 4)).
    :param antialiased: temporarily raise the scene's anti-aliasing
        frame count while rendering the capture.
    :returns: the image as a numpy array, flipped to top-left origin.
    :raises ValueError: for an unknown *mode*.

    Cleanup: the VTK-version-dependent ``pg_args`` tuple was computed
    identically in both mode branches; it is now built once after the
    mode-specific setup (behavior unchanged).
    """
    if figure is None:
        figure = gcf()
    (x, y) = tuple(figure.scene.render_window.size)
    # Ensure the render window is realized/raised before reading pixels.
    figure.scene._lift()
    if mode == 'rgb':
        out = tvtk.UnsignedCharArray()
        shape = (y, x, 3)
        pixel_getter = figure.scene.render_window.get_pixel_data
    elif mode == 'rgba':
        out = tvtk.FloatArray()
        shape = (y, x, 4)
        pixel_getter = figure.scene.render_window.get_rgba_pixel_data
    else:
        raise ValueError('mode type not understood')
    # VTK 8+ grew an extra trailing argument on both pixel getters.
    if vtk_major_version > 7:
        pg_args = (0, 0, (x - 1), (y - 1), 1, out, 0)
    else:
        pg_args = (0, 0, (x - 1), (y - 1), 1, out)
    if antialiased:
        render_window = figure.scene.render_window
        # Older VTK exposes aa_frames; newer uses multi_samples.
        if hasattr(render_window, 'aa_frames'):
            old_aa = render_window.aa_frames
            render_window.aa_frames = figure.scene.anti_aliasing_frames
        else:
            old_aa = render_window.multi_samples
            render_window.multi_samples = figure.scene.anti_aliasing_frames
        figure.scene.render()
        pixel_getter(*pg_args)
        # Restore the previous anti-aliasing setting and re-render.
        if hasattr(render_window, 'aa_frames'):
            render_window.aa_frames = old_aa
        else:
            render_window.multi_samples = old_aa
        figure.scene.render()
    else:
        pixel_getter(*pg_args)
    out = out.to_array()
    out.shape = shape
    # VTK reads bottom-up; flip to conventional top-left origin.
    out = np.flipud(out)
    return out
class OptionPlotoptionsScatter3dSonificationTracksMappingFrequency(Options):
    """Generated Highcharts option wrapper for
    plotOptions.scatter3d.sonification.tracks.mapping.frequency.

    NOTE(review): each accessor below is defined twice (getter form, then
    setter form).  Without @property / @<name>.setter decorators the second
    definition shadows the first, so only the setter remains bound —
    presumably the decorators were stripped; confirm against the generator.
    """
    def mapFunction(self):
        # Getter form: no configured default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter form: store the mapping function value as-is.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_set_parameter_frequency(la: LogicAnalyzer, master: SPIMaster, slave: SPISlave):
    """A single 8-bit transfer must clock at the rate implied by the prescalers."""
    ppre = 0
    spre = 2
    master.set_parameters(primary_prescaler=ppre, secondary_prescaler=spre)
    # Record the clock line on one channel while the slave shifts out a byte.
    la.capture(1, block=False)
    slave.write8(0)
    la.stop()
    (clock_edges,) = la.fetch_data()
    # Eight clock pulses give seven full periods between the first edge and
    # the second-to-last one.
    periods = 7
    clock_period = (clock_edges[-2] - clock_edges[0]) / periods
    measured_frequency = (clock_period * MICROSECONDS) ** -1
    assert measured_frequency == pytest.approx(master._frequency, rel=RELTOL)
def test_cursor_pretty_print_gaps(proc_bar, golden):
    """Pretty-printed gap cursors around statements in proc_bar match golden."""
    # (statement pattern, which gap relative to the statement)
    targets = [
        ('x: f32', 'before'),
        ('for i in _: _', 'before'),
        ('for j in _: _', 'before'),
        ('x = 0.0', 'before'),
        ('x = 2.0', 'before'),
        ('x = 5.0', 'after'),
    ]
    rendered = []
    for pattern, side in targets:
        stmt = _find_stmt(proc_bar, pattern)
        gap = stmt.before() if side == 'before' else stmt.after()
        rendered.append(_print_cursor(gap))
    assert '\n\n'.join(rendered) == golden
class Regression(object):
    """Base class for gradient-descent regression models.

    Subclasses are expected to supply ``self.regularization``: a callable on
    the weight vector with a ``.grad`` attribute returning its gradient.
    """

    def __init__(self, n_iterations, learning_rate):
        self.n_iterations = n_iterations
        self.learning_rate = learning_rate

    def initialize_weights(self, n_features):
        """Draw initial weights uniformly from [-1/sqrt(n), 1/sqrt(n)]."""
        bound = 1 / math.sqrt(n_features)
        self.w = np.random.uniform(-bound, bound, (n_features,))

    def fit(self, X, y):
        """Run batch gradient descent, recording training error per step."""
        X = np.insert(X, 0, 1, axis=1)  # prepend a bias column of ones
        self.training_errors = []
        self.initialize_weights(n_features=X.shape[1])
        for _ in range(self.n_iterations):
            predictions = X.dot(self.w)
            residual = y - predictions
            # Regularized mean squared error for this iteration.
            error = np.mean(0.5 * residual ** 2 + self.regularization(self.w))
            self.training_errors.append(error)
            gradient = -residual.dot(X) + self.regularization.grad(self.w)
            self.w -= self.learning_rate * gradient

    def predict(self, X):
        """Predict targets for X (a bias column is added automatically)."""
        X = np.insert(X, 0, 1, axis=1)
        return X.dot(self.w)
class TestValidationErrorConvertsTuplesToLists(TestCase):
    """A tuple ``detail`` given to ValidationError must be coerced to a list."""

    def test_validation_error_details(self):
        messages = ('message1', 'message2')
        error = ValidationError(detail=messages)
        detail = error.detail
        assert isinstance(detail, list)
        assert len(detail) == 2
        # Entries are wrapped (e.g. as ErrorDetail) but stringify unchanged.
        assert [str(item) for item in detail] == ['message1', 'message2']
class OptionSeriesBarDataDragdropDraghandle(Options):
    """Generated Highcharts option wrapper for
    series.bar.data.dragDrop.dragHandle.

    NOTE(review): each accessor below is defined twice (getter form, then
    setter form).  Without @property / @<name>.setter decorators the second
    definition shadows the first, so only the setter remains bound —
    presumably the decorators were stripped; confirm against the generator.
    """
    def className(self):
        # Getter form: default CSS class of the drag handle.
        return self._config_get('highcharts-drag-handle')
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        # Default fill colour.
        return self._config_get('#fff')
    def color(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        # Default stroke colour.
        return self._config_get('rgba(0, 0, 0, 0.6)')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        # Default stroke width in pixels.
        return self._config_get(1)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        # Default stacking order of the handle.
        return self._config_get(901)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class TestSAExtractionError(unittest.TestCase):
    """Regression tests for SA-tag parsing on malformed chimeric reads."""

    def setUp(self):
        bam_path = os.path.join(os.path.dirname(__file__), 'chimeric_read_errors.bam')
        self.samfile = pysam.AlignmentFile(bam_path, 'rb')
        self.alignments = list(self.samfile.fetch(until_eof=True))

    def test_satag_too_many_fields(self):
        # Read 0: SA tag with a surplus field should still yield both entries.
        supplementary = retrieve_other_alignments(self.alignments[0], self.samfile)
        self.assertEqual(len(supplementary), 2)

    def test_satag_negative_mapq(self):
        # Read 1: a negative MAPQ in the SA tag is expected to come back as 0.
        supplementary = retrieve_other_alignments(self.alignments[1], self.samfile)
        self.assertEqual(len(supplementary), 1)
        self.assertEqual(supplementary[0].mapping_quality, 0)
class FaucetTaggedTargetedResolutionIPv4RouteTest(FaucetTaggedIPv4RouteTest):
    # Same tagged IPv4 routing scenario as the parent class, but with
    # targeted_gw_resolution enabled on the VLAN (see the YAML below).
    CONFIG_GLOBAL = '\nvlans:\n    100:\n        description: "tagged"\n        faucet_vips: ["10.0.0.254/24"]\n        targeted_gw_resolution: True\n        routes:\n            - route:\n                ip_dst: "10.0.1.0/24"\n                ip_gw: "10.0.0.1"\n            - route:\n                ip_dst: "10.0.2.0/24"\n                ip_gw: "10.0.0.2"\n            - route:\n                ip_dst: "10.0.3.0/24"\n                ip_gw: "10.0.0.2"\n'
class Registered(list):
    """Ordered collection of message-handler configs registered on a bot.

    Later registrations take precedence when matching an incoming message.
    """

    def __init__(self, bot):
        super(Registered, self).__init__()
        # Weak proxy so the registry does not keep the bot alive.
        self.bot = weakref.proxy(bot)

    def get_config(self, msg):
        """Return the most recently registered config matching `msg`, or None."""
        for conf in reversed(self):
            skip_self = conf.except_self and (msg.sender == self.bot.self)
            if (not conf.enabled) or skip_self:
                continue
            if conf.msg_types:
                if msg.type not in conf.msg_types:
                    continue
            elif (conf.msg_types is None) and (msg.type == SYSTEM):
                # No explicit type filter: system messages are skipped.
                continue
            if conf.chats is None:
                return conf
            for chat in conf.chats:
                matches_class = isinstance(chat, type) and isinstance(msg.chat, chat)
                if matches_class or (chat == msg.chat):
                    return conf

    def get_config_by_func(self, func):
        """Return the config whose handler function is `func`, or None."""
        for conf in self:
            if conf.func == func:
                return conf

    def _change_status(self, func, enabled):
        # With a function, toggle just its config; otherwise toggle all.
        if func:
            self.get_config_by_func(func).enabled = enabled
        else:
            for conf in self:
                conf.enabled = enabled

    def enable(self, func=None):
        """Enable one handler (by function) or all handlers."""
        self._change_status(func, True)

    def disable(self, func=None):
        """Disable one handler (by function) or all handlers."""
        self._change_status(func, False)

    def _check_status(self, enabled):
        return [conf for conf in self if conf.enabled == enabled]

    def enabled(self):
        """List of currently enabled configs."""
        return self._check_status(True)

    def disabled(self):
        """List of currently disabled configs."""
        return self._check_status(False)
class PatientMulti(Model):
    # Patient model keyed by the composite (foo, bar) primary key.
    primary_keys = ['foo', 'bar']
    # Relations: appointments, symptoms (through SymptomMulti.patient) and
    # doctors reached via appointments.doctor_multi.
    has_many({'appointments': 'AppointmentMulti'}, {'symptoms': 'SymptomMulti.patient'}, {'doctors': {'via': 'appointments.doctor_multi'}})
    # Both key parts default to random hex UUIDs.
    foo = Field.string(default=(lambda : uuid4().hex))
    bar = Field.string(default=(lambda : uuid4().hex))
    name = Field.string()
class MultiLineMagic(BaseMagic):
    """Heuristic favouring OCR results that form one multi-line text block."""

    def score(self, ocr_result: OcrResult) -> float:
        """Score 50 for a single block/paragraph spanning several lines, else 0."""
        single_multiline_block = (
            ocr_result.num_lines > 1
            and ocr_result.num_blocks == 1
            and ocr_result.num_pars == 1
        )
        return 50.0 if single_multiline_block else 0

    def transform(self, ocr_result: OcrResult) -> str:
        """Return the recognised text with line breaks re-inserted."""
        return ocr_result.add_linebreaks()
def verify_statistics_map(fledge_url, skip_verify_north_interface):
    """Check the Fledge statistics endpoint reports ingest (and egress) counts."""
    jdoc = utils.get_request(fledge_url, '/fledge/statistics')
    stats = utils.serialize_stats_map(jdoc)
    # At least one reading must have been ingested for the south asset.
    assert stats[south_asset_name.upper()] >= 1
    assert stats['READINGS'] >= 1
    if not skip_verify_north_interface:
        # ...and at least one reading must have been sent north.
        assert stats['Readings Sent'] >= 1
        assert stats[north_service_name] >= 1
def hpluv_to_lch(hpluv: Vector) -> Vector:
    """Convert an HPLuv triplet (h, s, l) to LCh."""
    (hue, saturation, lightness) = hpluv
    chroma = 0.0
    if lightness > (100 - 1e-07):
        # Essentially white: pin lightness; chroma stays zero.
        lightness = 100
    elif lightness < 1e-08:
        # Essentially black.
        lightness = 0
    elif not alg.is_nan(hue):
        # Scale saturation by the largest chroma that stays safe at this
        # lightness.
        chroma = (max_safe_chroma_for_l(lightness) / 100) * saturation
    if chroma < ACHROMATIC_THRESHOLD:
        # Too little chroma for the hue to be meaningful.
        hue = alg.NaN
    return [lightness, chroma, util.constrain_hue(hue)]
class CmdlineOpt(object):
    """One parsed command-line element: a short/long option, an option with a
    value, or a positional argument, built incrementally via add_arg() and
    completed via finalize().
    """
    # Class-level counter used to name anonymous positional arguments;
    # shared across all instances.
    positional_count = 0
    # add_arg() result codes.
    SUCCESS = 0
    SUCCESS_AND_FULL = 1
    FAILURE = 2
    # Characters other than alphanumerics, '-' and '_' are normalized to '-'.
    nonalpha_rgx = re.compile('[^0-9a-zA-Z\\-\\_]')
    def __init__(self):
        self.value = None      # literal value / positional text, if any
        self.opt = None        # short option, e.g. '-v'
        self.longopt = None    # long option, e.g. '--verbose'
        self.type = None       # inferred ArgType, set by finalize()
        self.var_name = None   # python identifier derived from opt/value
        self.desc = None       # human-readable description
    def finalize(self):
        """Derive var_name/desc and infer the value's ArgType.

        NOTE(review): desc is only assigned on the non-positional path, so a
        positional argument keeps desc=None — confirm whether intended.
        """
        if self.is_positional():
            if self.value.isidentifier():
                self.var_name = self.value
            else:
                # Value is not a valid identifier: generate a numbered name.
                varname = f'positional_arg{CmdlineOpt.positional_count}'
                self.var_name = varname
                CmdlineOpt.positional_count += 1
        else:
            # Prefer the long option name for the variable name.
            if (self.longopt is not None):
                varname = self.longopt
            elif (self.opt is not None):
                varname = self.opt
            else:
                raise RuntimeError(f'Invalid attribute values for {self.__class__.__name__}')
            self.var_name = varname.lstrip('-').replace('-', '_')
            self.desc = self.var_name
        if (self.value is None):
            return
        # Type inference order matters: int, then float, then file/string.
        if _is_int(self.value):
            self.type = ArgType.INT
        if (self.type is None):
            try:
                fltval = float(self.value)
            except ValueError:
                pass
            else:
                self.type = ArgType.FLOAT
        if (self.type is None):
            # The literal 'FILE' or an existing path is treated as a file.
            if (('FILE' == self.value) or os.path.isfile(self.value)):
                self.type = ArgType.FILE
            else:
                self.type = ArgType.STRING
    def add_arg(self, arg):
        """Feed one raw token into this option.

        Returns SUCCESS, SUCCESS_AND_FULL (token consumed and the option can
        take nothing more), or FAILURE (token belongs to the next option).
        Raises ValueError for malformed option tokens.
        """
        cleaned_arg = self.nonalpha_rgx.sub('-', arg)
        is_int = _is_int(arg)
        if arg.startswith('--'):
            # A long option is only accepted after a short option was seen.
            if (self.opt is None):
                raise ValueError(f'long option ({arg}) is not allowed without short option')
            if (self.longopt is None):
                self.longopt = cleaned_arg
            else:
                return self.FAILURE
        elif (arg.startswith('-') and (not is_int)):
            # Short option; negative integers are treated as values, not opts.
            if (len(arg) > 2):
                raise ValueError(f'short option ({arg}) must have exactly one character after the dash (-)')
            if (self.opt is None):
                self.opt = cleaned_arg
            else:
                return self.FAILURE
        else:
            if (self.value is None):
                if (self.opt or self.longopt or is_int):
                    self.value = arg
                else:
                    # Bare positional text: normalize dashes to underscores.
                    self.value = arg.replace('-', '_')
                return self.SUCCESS_AND_FULL
        return self.SUCCESS
    def opttext(self):
        """Quoted, comma-separated rendering of the option spellings."""
        ret = []
        if (self.opt is not None):
            ret.append(f"'{self.opt}'")
        if (self.longopt is not None):
            ret.append(f"'{self.longopt}'")
        return ', '.join(ret)
    def is_empty(self):
        # Nothing has been fed in yet.
        return ((self.value is None) and (self.opt is None) and (self.longopt is None))
    def is_flag(self):
        # Option with no value, e.g. '-v' / '--verbose'.
        return (((self.opt is not None) or (self.longopt is not None)) and (self.value is None))
    def is_option(self):
        # Option that takes a value.
        return (((self.opt is not None) or (self.longopt is not None)) and (self.value is not None))
    def is_positional(self):
        # Bare value with no option spelling.
        return ((self.opt is None) and (self.longopt is None) and (self.value is not None))
    def __str__(self):
        return f'{self.__class__.__name__}({self.opt}, {self.longopt}, {self.value})'
    def __repr__(self):
        return self.__str__()
class Reindex():
    """Curator action: reindex one or more indices, locally or from a remote
    cluster, optionally as a prefix/suffix 'MIGRATION' rename.
    """
    def __init__(self, ilo, request_body, refresh=True, requests_per_second=(- 1), slices=1, timeout=60, wait_for_active_shards=1, wait_for_completion=True, max_wait=(- 1), wait_interval=9, remote_certificate=None, remote_client_cert=None, remote_client_key=None, remote_filters=None, migration_prefix='', migration_suffix=''):
        """Validate the reindex request body; for remote reindexes, resolve
        the remote host/port and optionally filter the remote index list.

        ilo: IndexList of local source indices.
        request_body: dict forwarded (augmented) to the reindex API.
        """
        # Avoid a shared mutable default for the remote filter set.
        if (remote_filters is None):
            remote_filters = {}
        self.loggit = logging.getLogger('curator.actions.reindex')
        verify_index_list(ilo)
        if (not isinstance(request_body, dict)):
            raise CuratorConfigError('"request_body" is not of type dictionary')
        self.body = request_body
        self.loggit.debug('REQUEST_BODY = %s', request_body)
        self.index_list = ilo
        self.client = ilo.client
        self.refresh = refresh
        self.requests_per_second = requests_per_second
        self.slices = slices
        # The API expects a duration string, e.g. '60s'.
        self.timeout = f'{timeout}s'
        self.wait_for_active_shards = wait_for_active_shards
        self.wfc = wait_for_completion
        self.wait_interval = wait_interval
        self.max_wait = max_wait
        self.mpfx = migration_prefix
        self.msfx = migration_suffix
        # A 'remote' key under 'source' marks a cross-cluster reindex.
        self.remote = False
        if ('remote' in self.body['source']):
            self.remote = True
        # dest index 'MIGRATION' means: rename each source via prefix/suffix.
        self.migration = False
        if (self.body['dest']['index'] == 'MIGRATION'):
            self.migration = True
        if self.migration:
            if ((not self.remote) and (not self.mpfx) and (not self.msfx)):
                raise CuratorConfigError('MIGRATION can only be used locally with one or both of migration_prefix or migration_suffix.')
        # 'REINDEX_SELECTION' locally: substitute the filtered local indices.
        if ((self.body['source']['index'] == 'REINDEX_SELECTION') and (not self.remote)):
            self.body['source']['index'] = self.index_list.indices
        elif self.remote:
            rclient_args = ClientArgs()
            rother_args = OtherArgs()
            self.loggit.debug('Remote reindex request detected')
            if ('host' not in self.body['source']['remote']):
                raise CuratorConfigError('Missing remote "host"')
            try:
                rclient_args.hosts = verify_url_schema(self.body['source']['remote']['host'])
            except ConfigurationError as exc:
                raise CuratorConfigError(exc) from exc
            # Pull host and port back out of 'scheme://host:port':
            # [-2] is 'scheme://host' minus the port, then '/' split index 2
            # isolates the bare hostname; [-1] is the port.
            self.remote_host = rclient_args.hosts.split(':')[(- 2)]
            self.remote_host = self.remote_host.split('/')[2]
            self.remote_port = rclient_args.hosts.split(':')[(- 1)]
            if ('username' in self.body['source']['remote']):
                rother_args.username = self.body['source']['remote']['username']
            if ('password' in self.body['source']['remote']):
                rother_args.password = self.body['source']['remote']['password']
            if remote_certificate:
                rclient_args.ca_certs = remote_certificate
            if remote_client_cert:
                rclient_args.client_cert = remote_client_cert
            if remote_client_key:
                rclient_args.client_key = remote_client_key
            rclient_args.remote_timeout = 180
            if (self.body['source']['index'] == 'REINDEX_SELECTION'):
                # Build a temporary client against the remote cluster purely
                # to resolve which remote indices match the filters.
                self.loggit.debug('Filtering indices from remote')
                msg = f'Remote client args: hosts={rclient_args.hosts} username=REDACTED password=REDACTED certificate={remote_certificate} client_cert={remote_client_cert} client_key={remote_client_key} request_timeout={rclient_args.remote_timeout} skip_version_test=True'
                self.loggit.debug(msg)
                remote_config = {'elasticsearch': {'client': rclient_args.asdict(), 'other_settings': rother_args.asdict()}}
                try:
                    builder = Builder(configdict=remote_config, version_min=(1, 0, 0))
                    builder.connect()
                    rclient = builder.client
                except Exception as err:
                    self.loggit.error('Unable to establish connection to remote Elasticsearch with provided credentials/certificates/settings.')
                    report_failure(err)
                try:
                    rio = IndexList(rclient)
                    rio.iterate_filters({'filters': remote_filters})
                    try:
                        rio.empty_list_check()
                    except NoIndices as exc:
                        raise FailedExecution('No actionable remote indices selected after applying filters.') from exc
                    self.body['source']['index'] = rio.indices
                except Exception as err:
                    self.loggit.error('Unable to get/filter list of remote indices.')
                    report_failure(err)
        self.loggit.debug('Reindexing indices: %s', self.body['source']['index'])
    def _get_request_body(self, source, dest):
        """Return a copy of the request body targeting (source, dest)."""
        body = deepcopy(self.body)
        body['source']['index'] = source
        body['dest']['index'] = dest
        return body
    def _get_reindex_args(self, source, dest):
        """Build the kwargs for client.reindex() for one (source, dest) pair.

        wait_for_completion is forced False here; waiting is handled by the
        caller via the task API.
        """
        reindex_args = {'refresh': self.refresh, 'requests_per_second': self.requests_per_second, 'slices': self.slices, 'timeout': self.timeout, 'wait_for_active_shards': self.wait_for_active_shards, 'wait_for_completion': False}
        # Copy recognized top-level body keys through to the API call.
        for keyname in ['dest', 'source', 'conflicts', 'max_docs', 'size', '_source', 'script']:
            if (keyname in self.body):
                reindex_args[keyname] = self.body[keyname]
        reindex_args['dest']['index'] = dest
        reindex_args['source']['index'] = source
        return reindex_args
    def get_processed_items(self, task_id):
        """Return the 'total' count from a finished reindex task, or -1 if
        the task response does not carry one.
        """
        try:
            task_data = self.client.tasks.get(task_id=task_id)
        except Exception as exc:
            raise CuratorException(f'Unable to obtain task information for task_id "{task_id}". Exception {exc}') from exc
        total_processed_items = (- 1)
        task = task_data['task']
        if (task['action'] == 'indices:data/write/reindex'):
            self.loggit.debug("It's a REINDEX TASK'")
            self.loggit.debug('TASK_DATA: %s', task_data)
            self.loggit.debug('TASK_DATA keys: %s', list(task_data.keys()))
            if ('response' in task_data):
                response = task_data['response']
                total_processed_items = response['total']
                self.loggit.debug('total_processed_items = %s', total_processed_items)
        return total_processed_items
    def _post_run_quick_check(self, index_name, task_id):
        """After a waited-for reindex, verify the target index (or alias)
        exists when any documents were processed; raise FailedExecution
        otherwise.
        """
        processed_items = self.get_processed_items(task_id)
        if (processed_items == 0):
            # Nothing was copied, so an absent target index is expected.
            msg = f'No items were processed. Will not check if target index "{index_name}" exists'
            self.loggit.info(msg)
        else:
            index_exists = self.client.indices.exists(index=index_name)
            alias_instead = self.client.indices.exists_alias(name=index_name)
            if ((not index_exists) and (not alias_instead)):
                self.loggit.error(f'The index described as "{index_name}" was not found after the reindex operation. Check Elasticsearch logs for more information.')
                if self.remote:
                    # The usual remote-reindex failure: whitelist not set.
                    self.loggit.error(f'Did you forget to add "reindex.remote.whitelist: {self.remote_host}:{self.remote_port}" to the elasticsearch.yml file on the "dest" node?')
                raise FailedExecution(f'Reindex failed. The index or alias identified by "{index_name}" was not found.')
    def sources(self):
        """Yield (source, dest) pairs; one per source index when migrating,
        otherwise a single pair covering the full source spec.
        """
        dest = self.body['dest']['index']
        source_list = ensure_list(self.body['source']['index'])
        self.loggit.debug('source_list: %s', source_list)
        if ((not source_list) or (source_list == ['REINDEX_SELECTED'])):
            raise NoIndices
        if (not self.migration):
            (yield (self.body['source']['index'], dest))
        else:
            for source in source_list:
                if self.migration:
                    # Migration renames each index via prefix/suffix.
                    dest = ((self.mpfx + source) + self.msfx)
                (yield (source, dest))
    def show_run_args(self, source, dest):
        """Render the effective request body and arguments for logging."""
        return f'request body: {self._get_request_body(source, dest)} with arguments: refresh={self.refresh} requests_per_second={self.requests_per_second} slices={self.slices} timeout={self.timeout} wait_for_active_shards={self.wait_for_active_shards} wait_for_completion={self.wfc}'
    def do_dry_run(self):
        """Log what would be reindexed without making any changes."""
        self.loggit.info('DRY-RUN MODE. No changes will be made.')
        for (source, dest) in self.sources():
            self.loggit.info('DRY-RUN: REINDEX: %s', self.show_run_args(source, dest))
    def do_action(self):
        """Execute the reindex for every (source, dest) pair, optionally
        waiting on each task and sanity-checking the result.
        """
        try:
            for (source, dest) in self.sources():
                self.loggit.info('Commencing reindex operation')
                self.loggit.debug('REINDEX: %s', self.show_run_args(source, dest))
                response = self.client.reindex(**self._get_reindex_args(source, dest))
                self.loggit.debug('TASK ID = %s', response['task'])
                if self.wfc:
                    wait_for_it(self.client, 'reindex', task_id=response['task'], wait_interval=self.wait_interval, max_wait=self.max_wait)
                    self._post_run_quick_check(dest, response['task'])
                else:
                    msg = f""""wait_for_completion" set to {self.wfc}. Remember to check task_id "{response['task']}" for successful completion manually."""
                    self.loggit.warning(msg)
        except NoIndices as exc:
            raise NoIndices('Source index must be list of actual indices. It must not be an empty list.') from exc
        except Exception as exc:
            report_failure(exc)
def get_all_boards_with_last_threads(offset_limit) -> List[Tuple[(BoardModel, ThreadModel, PostModel)]]:
    """Fetch boards joined with the thread whose refno matches the board's
    refno counter (presumably the latest thread) and that thread's opening
    post, ordered by thread activity, newest first.

    offset_limit: optional (offset, limit) tuple for pagination.
    """
    with session() as s:
        query = s.query(BoardOrmModel, ThreadOrmModel, PostOrmModel).filter(
            (BoardOrmModel.id == ThreadOrmModel.board_id),
            (BoardOrmModel.refno_counter == ThreadOrmModel.refno),
            (ThreadOrmModel.id == PostOrmModel.thread_id),
            (PostOrmModel.refno == 1),
        ).order_by(ThreadOrmModel.last_modified.desc())
        if offset_limit:
            query = query.offset(offset_limit[0]).limit(offset_limit[1])
        rows = query.all()
        result = [
            (BoardModel.from_orm_model(board),
             ThreadModel.from_orm_model(thread),
             PostModel.from_orm_model(post))
            for (board, thread, post) in rows
        ]
        s.commit()
        return result
class meter_config(loxi.OFObject):
    """Generated OpenFlow meter_config structure (loxigen-style wire codec).

    NOTE(review): this looks like Python-2-era generated code — pack() joins
    struct.pack() results with '' (a str), which would fail on Python 3 where
    struct.pack returns bytes; and unpack(reader) takes no self, suggesting a
    stripped @staticmethod decorator.  Confirm against the generator output.
    """
    def __init__(self, flags=None, meter_id=None, entries=None):
        # All fields default to zero / empty when not supplied.
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (meter_id != None):
            self.meter_id = meter_id
        else:
            self.meter_id = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return
    def pack(self):
        """Serialize to wire format; the length field is backpatched last."""
        packed = []
        packed.append(struct.pack('!H', 0))  # placeholder for total length
        packed.append(struct.pack('!H', self.flags))
        packed.append(struct.pack('!L', self.meter_id))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[0] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a meter_config from `reader` (see the @staticmethod note in
        the class docstring)."""
        obj = meter_config()
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this record's own length.
        reader = orig_reader.slice(_length, 2)
        obj.flags = reader.read('!H')[0]
        obj.meter_id = reader.read('!L')[0]
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.meter_band.meter_band.unpack)
        return obj
    def __eq__(self, other):
        # Field-by-field equality; requires the exact same class.
        if (type(self) != type(other)):
            return False
        if (self.flags != other.flags):
            return False
        if (self.meter_id != other.meter_id):
            return False
        if (self.entries != other.entries):
            return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump through the pretty-printer `q`."""
        q.text('meter_config {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('flags = ')
                # Decode the flags bitmask into OFPMF_* names.
                value_name_map = {1: 'OFPMF_KBPS', 2: 'OFPMF_PKTPS', 4: 'OFPMF_BURST', 8: 'OFPMF_STATS'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('meter_id = ')
                q.text(('%#x' % self.meter_id))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
                q.breakable()
            q.text('}')
def deserialize_fn_generator(msg_context, spec, is_numpy=False):
    """Yield the source lines of a generated message deserialize() body.

    msg_context: registry of known message types.
    spec: message spec whose fields drive the generated code.
    is_numpy: emit numpy-based deserialization when True.
    """
    # Python 3 needs the rosmsg codec error handler primed with the type name.
    yield 'if python3:'
    yield INDENT + 'codecs.lookup_error("rosmsg").msg_type = self._type'
    yield 'try:'
    package = spec.package
    # Lazily construct embedded-message fields that were left at None.
    for field_type, field_name in spec.fields():
        if msg_context.is_registered(field_type):
            yield '      if self.%s is None:' % field_name
            yield '        self.%s = %s' % (field_name, compute_constructor(msg_context, package, field_type))
    yield '      end = 0'
    # Generate the field-by-field read code with 'self.'-prefixed targets.
    push_context('self.')
    flattened = make_python_safe(flatten(msg_context, spec))
    for generated_line in serializer_generator(msg_context, flattened, False, is_numpy):
        yield '  ' + generated_line
    pop_context()
    # Per-field fixups that must run after raw deserialization.
    for field_type, field_name in spec.fields():
        code = compute_post_deserialize(field_type, 'self.%s' % field_name)
        if code:
            yield '      %s' % code
    yield '      return self'
    yield 'except struct.error as e:'
    yield '  raise genpy.DeserializationError(e) # most likely buffer underfill'
class OptionSeriesColumnSonificationContexttracksMappingNoteduration(Options):
    """Generated Highcharts option wrapper for
    series.column.sonification.contextTracks.mapping.noteDuration.

    NOTE(review): each accessor below is defined twice (getter form, then
    setter form).  Without @property / @<name>.setter decorators the second
    definition shadows the first, so only the setter remains bound —
    presumably the decorators were stripped; confirm against the generator.
    """
    def mapFunction(self):
        # Getter form: no configured default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
_os(*metadata.platforms)
# NOTE(review): the line above reads like a decorator whose '@' prefix was
# lost in extraction (it immediately precedes the function definition);
# confirm against the original source.
def main():
    """Emulate adding and then removing a Windows Defender exclusion for
    powershell.exe (for detection-rule testing)."""
    powershell = 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe'
    # Add powershell's own path as a Defender scan exclusion...
    common.execute([powershell, '/c', 'Set-MpPreference', '-ExclusionPath', f'{powershell}'], timeout=10)
    # ...then clean up by removing that exclusion again.
    common.execute([powershell, '/c', f'Remove-MpPreference -ExclusionPath {powershell}'], timeout=10)
.usefixtures('request_1', 'request_2')
# NOTE(review): the line above is presumably '@pytest.mark.usefixtures(...)'
# with its prefix lost in extraction; as written it is not valid syntax.
def test_endpoint_hits(dashboard_user, endpoint, session):
    """The endpoints_hits API must report totals matching the Request table
    and exactly 2 hits for the fixture endpoint (one per request fixture)."""
    response = dashboard_user.get('dashboard/api/endpoints_hits')
    assert (response.status_code == 200)
    # Sum over all endpoints must equal the number of stored requests.
    total_hits = sum((row['hits'] for row in response.json))
    assert (total_hits == session.query(Request).count())
    # Exactly one row for this endpoint, with both fixture requests counted.
    [data] = [row for row in response.json if (row['name'] == endpoint.name)]
    assert (data['hits'] == 2)
class ObjectDB(TypedObject):
    """Database model for all in-game objects (rooms, exits, characters...).

    Moving an object via the `location` property keeps the per-location
    contents cache in sync; direct writes to db_location bypass that and are
    handled (expensively) by at_db_location_postsave().
    """
    # Account puppeting/owning this object, if any.
    db_account = models.ForeignKey('accounts.AccountDB', null=True, verbose_name='account', on_delete=models.SET_NULL, help_text='an Account connected to this object, if any.')
    db_sessid = models.CharField(null=True, max_length=32, validators=[validate_comma_separated_integer_list], verbose_name='session id', help_text='csv list of session ids of connected Account, if any.')
    # Where this object currently is.
    db_location = models.ForeignKey('self', related_name='locations_set', db_index=True, on_delete=models.SET_NULL, blank=True, null=True, verbose_name='game location')
    # Fallback location the object returns to.
    db_home = models.ForeignKey('self', related_name='homes_set', on_delete=models.SET_NULL, blank=True, null=True, verbose_name='home location')
    db_destination = models.ForeignKey('self', related_name='destinations_set', db_index=True, on_delete=models.SET_NULL, blank=True, null=True, verbose_name='destination', help_text='a destination, used only by exit objects.')
    # Stored as a comma-separated list of python paths (see cmdset_storage).
    db_cmdset_storage = models.CharField('cmdset', max_length=255, null=True, blank=True, help_text='optional python path to a cmdset class.')
    objects = ObjectDBManager()
    __settingsclasspath__ = settings.BASE_OBJECT_TYPECLASS
    __defaultclasspath__ = 'evennia.objects.objects.DefaultObject'
    __applabel__ = 'objects'
    _property
    # NOTE(review): the bare '_property' line above looks like a decorator
    # whose '@' prefix was lost in extraction (a cached-property wrapper for
    # contents_cache); confirm against the original source.
    def contents_cache(self):
        # Handler caching which objects are located inside this one.
        return ContentsHandler(self)
    def __cmdset_storage_get(self):
        # Deserialize the CSV column into a list of python paths.
        storage = self.db_cmdset_storage
        return ([path.strip() for path in storage.split(',')] if storage else [])
    def __cmdset_storage_set(self, value):
        # Serialize any iterable (or single value) back into the CSV column.
        self.db_cmdset_storage = ','.join((str(val).strip() for val in make_iter(value)))
        self.save(update_fields=['db_cmdset_storage'])
    def __cmdset_storage_del(self):
        self.db_cmdset_storage = None
        self.save(update_fields=['db_cmdset_storage'])
    # List-like access to the stored cmdset paths.
    cmdset_storage = property(__cmdset_storage_get, __cmdset_storage_set, __cmdset_storage_del)
    def __location_get(self):
        return self.db_location
    def __location_set(self, location):
        # Accept a dbref string/int and resolve it to an object if possible.
        if isinstance(location, (str, int)):
            dbid = dbref(location, reqhash=False)
            if dbid:
                try:
                    location = ObjectDB.objects.get(id=dbid)
                except ObjectDoesNotExist:
                    pass
        try:
            # Walk up the location chain to detect self-containment loops.
            # RuntimeError -> loop found; RuntimeWarning -> hit a None end
            # (fine); depth cap of 10 stops runaway chains.
            def is_loc_loop(loc, depth=0):
                if (depth > 10):
                    return None
                elif (loc == self):
                    raise RuntimeError
                elif (loc is None):
                    raise RuntimeWarning
                return is_loc_loop(loc.db_location, (depth + 1))
            try:
                is_loc_loop(location)
            except RuntimeWarning:
                pass
            old_location = self.db_location
            # Flag tells at_db_location_postsave() this save is cache-safe.
            self._safe_contents_update = True
            self.db_location = location
            self.save(update_fields=['db_location'])
            del self._safe_contents_update
            # Keep the contents caches of both locations in sync.
            if old_location:
                old_location.contents_cache.remove(self)
            if self.db_location:
                self.db_location.contents_cache.add(self)
        except RuntimeError:
            errmsg = ('Error: %s.location = %s creates a location loop.' % (self.key, location))
            raise RuntimeError(errmsg)
        except Exception:
            raise
        return
    def __location_del(self):
        self.db_location = None
        self.save(update_fields=['db_location'])
    location = property(__location_get, __location_set, __location_del)
    def at_db_location_postsave(self, new):
        """React to a direct db_location save (one not done via the location
        property, which sets _safe_contents_update around its save)."""
        if (not hasattr(self, '_safe_contents_update')):
            if new:
                # Newly created object: just register with its location.
                if self.db_location:
                    self.db_location.contents_cache.add(self)
            else:
                # Unknown previous location: rebuild every contents cache.
                logger.log_warn('db_location direct save triggered contents_cache.init() for all objects!')
                [o.contents_cache.init() for o in self.__dbclass__.get_all_cached_instances()]
    class Meta():
        verbose_name = 'Object'
        verbose_name_plural = 'Objects'
def create_tables(lambdas_data, args):
    """Build (summary, full) table rows describing each Lambda function.

    Returns (min_table_data, all_table_data); when args.should_print_all is
    set the summary table is simply the full table.
    """
    all_table_data = [ALL_TABLE_HEADERS]
    for entry in lambdas_data:
        function_data = entry['function-data']
        # -1 marks "never invoked" in the gathered data.
        if entry['last-invocation'] == -1:
            last_invocation = 'N/A (no invocations?)'
        else:
            last_invocation = get_days_ago(datetime.fromtimestamp(entry['last-invocation'] / 1000))
        all_table_data.append([
            entry['region'],
            str(function_data['FunctionName']),
            str(function_data['MemorySize']),
            '%.2f' % (function_data['CodeSize'] / BYTE_TO_MB),
            str(function_data['Timeout']),
            str(function_data['Runtime']) if 'Runtime' in function_data else '',
            get_days_ago(entry['last-modified']),
            last_invocation,
            '"' + function_data['Description'] + '"',
        ])
    if args.should_print_all:
        min_table_data = all_table_data
    else:
        # Condensed view: region, name, runtime, last invocation, description.
        min_table_data = [[row[0], row[1], row[5], row[-2], row[-1]] for row in all_table_data]
    return (min_table_data, all_table_data)
def convert_mxnet_default_outputs(model: Model, X_Ymxnet: Any, is_train: bool):
    """Convert MXNet outputs to backend arrays and provide a backward hook."""
    X, Ymxnet = X_Ymxnet
    # Forward: MXNet arrays -> backend arrays, recursing through containers.
    Y = convert_recursive(is_mxnet_array, mxnet2xp, Ymxnet)

    def reverse_conversion(dY: Any) -> ArgsKwargs:
        # Backward: gradients are converted back and fed in as head_grads.
        grads = convert_recursive(is_xp_array, xp2mxnet, dY)
        return ArgsKwargs(args=((Ymxnet,),), kwargs={'head_grads': grads})

    return (Y, reverse_conversion)
class MarkReportRead(MethodView):
decorators = [allows.requires(IsAtleastModerator, on_fail=FlashAndRedirect(message=_('You are not allowed to view reports.'), level='danger', endpoint='management.overview'))]
def post(self, report_id=None):
json = request.get_json(silent=True)
if (json is not None):
ids = json.get('ids')
if (not ids):
return jsonify(message='No ids provided.', category='error', status=404)
data = []
for report in Report.query.filter(Report.id.in_(ids)).all():
report.zapped_by = current_user.id
report.zapped = time_utcnow()
report.save()
data.append({'id': report.id, 'type': 'read', 'reverse': False, 'reverse_name': None, 'reverse_url': None})
return jsonify(message='{} reports marked as read.'.format(len(data)), category='success', data=data, status=200)
if report_id:
report = Report.query.filter_by(id=report_id).first_or_404()
if report.zapped:
flash(_('Report %(id)s is already marked as read.', id=report.id), 'success')
return redirect_or_next(url_for('management.reports'))
report.zapped_by = current_user.id
report.zapped = time_utcnow()
report.save()
flash(_('Report %(id)s marked as read.', id=report.id), 'success')
return redirect_or_next(url_for('management.reports'))
reports = Report.query.filter((Report.zapped == None)).all()
report_list = []
for report in reports:
report.zapped_by = current_user.id
report.zapped = time_utcnow()
report_list.append(report)
db.session.add_all(report_list)
db.session.commit()
flash(_('All reports were marked as read.'), 'success')
return redirect_or_next(url_for('management.reports')) |
class OptionSeriesDependencywheelSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    """Generated Highcharts option wrapper for
    series.dependencywheel.sonification.defaultInstrumentOptions.mapping.playDelay.

    NOTE(review): each accessor below is defined twice (getter form, then
    setter form).  Without @property / @<name>.setter decorators the second
    definition shadows the first, so only the setter remains bound —
    presumably the decorators were stripped; confirm against the generator.
    """
    def mapFunction(self):
        # Getter form: no configured default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesVectorDatalabelsFilter(Options):
    """Generated Highcharts option wrapper for
    series.vector.dataLabels.filter.

    NOTE(review): each accessor below is defined twice (getter form, then
    setter form).  Without @property / @<name>.setter decorators the second
    definition shadows the first, so only the setter remains bound —
    presumably the decorators were stripped; confirm against the generator.
    """
    def operator(self):
        # Getter form: no configured default.
        return self._config_get(None)
    def operator(self, value: Any):
        self._config(value, js_type=False)
    def property(self):
        return self._config_get(None)
    def property(self, text: str):
        self._config(text, js_type=False)
class permute(Operator):
    """Generic tensor-permute operator that dispatches to specialized kernels
    for known dimension patterns and falls back to a generic permute op.
    """
    def __init__(self):
        super().__init__()
        self._attrs['op'] = 'permute'
    def _infer_shapes(self, x: Tensor) -> List[IntVar]:
        """Output shape is the input shape reordered by self._attrs['dims']."""
        output_shapes = []
        input_shapes = x.shape()
        for dim in self._attrs['dims']:
            output_shapes.append(input_shapes[dim])
        return output_shapes
    def __call__(self, x: Tensor, dims: Sequence[int]) -> Tensor:
        # Normalize negative axes and validate that dims is a permutation
        # of range(rank).
        dims = list(dims)
        for (i, dim) in enumerate(dims):
            dims[i] = wrap_dim(dim, x._rank())
        sorted_dims = list(range(x._rank()))
        assert (sorted(dims) == sorted_dims), f'expected a permutation of {sorted_dims}, but got {dims}'
        self._attrs['dims'] = dims
        # Dispatch to dedicated kernels for recognized patterns.
        if (dims == [0, 2, 1]):
            return permute021()(x)
        if (dims == [1, 0, 2]):
            return permute102()(x)
        if (dims == [2, 1, 0]):
            return permute210()(x)
        if (dims == [0, 2, 1, 3]):
            # Use the 0213 kernel only when dim 1 is known (or bounded) to be
            # at least 24 — presumably the kernel's profitability threshold;
            # confirm against the backend kernel docs.
            second_dim = x.shape()[1]
            if ((isinstance(second_dim, IntImm) and (second_dim.value() >= 24)) or (isinstance(second_dim, IntVar) and (second_dim.lower_bound() >= 24))):
                return permute0213()(x)
        # Any rank > 3 permutation that only swaps the last two axes can also
        # use the 021 kernel, provided the last dim is at least 8.
        last_dim = x.shape()[(- 1)]
        if ((len(dims) > 3) and ((dims[:(- 2)] + [dims[(- 1)], dims[(- 2)]]) == sorted_dims) and ((isinstance(last_dim, IntImm) and (last_dim.value() >= 8)) or (isinstance(last_dim, IntVar) and (last_dim.lower_bound() >= 8)))):
            return permute021()(x)
        # Generic fallback: record graph edges and infer the output tensor.
        self._attrs['inputs'] = [x]
        self._set_depth()
        output_shapes = self._infer_shapes(x)
        output = Tensor(output_shapes, src_ops={self})
        self._attrs['outputs'] = [output]
        output._attrs['dtype'] = x.dtype()
        return output
    def gen_function(self) -> str:
        """Look up and invoke the backend codegen entry for this op."""
        target = backend.target.Target.current()
        func_key = '{target}.{op}.gen_function'.format(target=target.name(), op=self._attrs['op'])
        func = registry.get(func_key)
        return func(self._attrs)
def update_latest_block(db_session, block_number) -> None:
    """Upsert the single latest_block_update row to `block_number`.

    The UPDATE covers the existing-row case; the INSERT only fires when the
    table is still empty, so the table always holds exactly one row.
    """
    upsert_sql = '\n    UPDATE latest_block_update\n    SET block_number = :block_number, updated_at = current_timestamp;\n    INSERT INTO latest_block_update\n    (block_number, updated_at)\n    SELECT :block_number, current_timestamp\n    WHERE NOT EXISTS (SELECT 1 FROM latest_block_update);\n    '
    db_session.execute(upsert_sql, params={'block_number': block_number})
class TupleCommandHandler(MethodCommandHandler):
    """Serve fields of a namedtuple value by name, or the whole tuple at once."""

    def handle(self, params: str) -> Payload:
        """Resolve `params` against the namedtuple from get_value().

        '*'  -> the tuple as a dict;
        '*;' -> that dict rendered as a string;
        a field name -> that field's value.
        Anything else raises.
        """
        value = self.get_value()
        assert isinstance(value, tuple)
        if params == '*':
            return value._asdict()
        if params == '*;':
            return string_from_dict(value._asdict())
        if params in value._fields:
            return getattr(value, params)
        if params == '':
            raise Exception(f"Parameter in '{self.name}' should be selected")
        raise Exception(f"Parameter '{params}' in '{self.name}' is not supported")
class OptionPlotoptionsPolygonSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Generated Highcharts option wrapper for
    plotOptions.polygon.sonification.defaultInstrumentOptions.mapping.lowpass.frequency.

    NOTE(review): each accessor below is defined twice (getter form, then
    setter form).  Without @property / @<name>.setter decorators the second
    definition shadows the first, so only the setter remains bound —
    presumably the decorators were stripped; confirm against the generator.
    """
    def mapFunction(self):
        # Getter form: no configured default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
.django_db
def test_correct_response_multiple_defc(client, monkeypatch, helpers, elasticsearch_award_index, awards_and_transactions):
    # NOTE(review): the bare ".django_db" above looks like a stripped
    # "@pytest.mark.django_db" decorator — this extract is not valid Python
    # as-is; confirm against the original file.
    """Spending endpoint with DEF codes L+M returns recipients sorted by obligation."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    resp = helpers.post_for_spending_endpoint(client, url, def_codes=['L', 'M'], sort='obligation')
    # Expected rows are ordered by descending obligation (200, 20, 2).
    expected_results = [{'code': '', 'award_count': 1, 'description': 'RECIPIENT, 3', 'face_value_of_loan': 300.0, 'id': ['bf05f751-6841-efd6-8f1b-0144163eceae-C', 'bf05f751-6841-efd6-8f1b-0144163eceae-R'], 'obligation': 200.0, 'outlay': 100.0}, {'code': '', 'award_count': 1, 'description': 'RECIPIENT 2', 'face_value_of_loan': 30.0, 'id': ['3c92491a-f2cd-ec7d-294b-7daf-R'], 'obligation': 20.0, 'outlay': 10.0}, {'code': 'DUNS Number not provided', 'award_count': 1, 'description': 'RECIPIENT 1', 'face_value_of_loan': 3.0, 'id': ['5f572ec9-8b49-e5eb-22c7-f6ef316f7689-R'], 'obligation': 2.0, 'outlay': 1.0}]
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json()['results'] == expected_results)
class TestHasTraitsHelpersHasNamedTrait(unittest.TestCase):
def test_object_has_named_trait_false_for_trait_list(self):
foo = Foo()
self.assertFalse(helpers.object_has_named_trait(foo.list_of_int, 'bar'), 'Expected object_has_named_trait to return false for {!r}'.format(type(foo.list_of_int)))
def test_object_has_named_trait_true_basic(self):
foo = Foo()
self.assertTrue(helpers.object_has_named_trait(foo, 'list_of_int'), 'The named trait should exist.')
def test_object_has_named_trait_false(self):
foo = Foo()
self.assertFalse(helpers.object_has_named_trait(foo, 'not_existing'), 'Expected object_has_named_trait to return False for anonexisting trait name.')
def test_object_has_named_trait_does_not_trigger_property(self):
foo = Foo()
helpers.object_has_named_trait(foo, 'property_value')
self.assertEqual(foo.property_n_calculations, 0) |
class MoveSteering(MoveTank):
    """Drive two motors from a single steering value in [-100, 100].

    steering == 0 drives straight; positive values slow the right motor,
    negative values slow the left motor.
    """

    def on_for_rotations(self, steering, speed, rotations, brake=True, block=True):
        left, right = self.get_speed_steering(steering, speed)
        MoveTank.on_for_rotations(self, SpeedNativeUnits(left), SpeedNativeUnits(right), rotations, brake, block)

    def on_for_degrees(self, steering, speed, degrees, brake=True, block=True):
        left, right = self.get_speed_steering(steering, speed)
        MoveTank.on_for_degrees(self, SpeedNativeUnits(left), SpeedNativeUnits(right), degrees, brake, block)

    def on_for_seconds(self, steering, speed, seconds, brake=True, block=True):
        left, right = self.get_speed_steering(steering, speed)
        MoveTank.on_for_seconds(self, SpeedNativeUnits(left), SpeedNativeUnits(right), seconds, brake, block)

    def on(self, steering, speed):
        left, right = self.get_speed_steering(steering, speed)
        MoveTank.on(self, SpeedNativeUnits(left), SpeedNativeUnits(right))

    def get_speed_steering(self, steering, speed):
        """Return (left_speed, right_speed) in native units for this steering."""
        assert ((steering >= (- 100)) and (steering <= 100)), '{} is an invalid steering, must be between -100 and 100 (inclusive)'.format(steering)
        native = self.left_motor._speed_native_units(speed)
        # Linear reduction: |steering| == 50 halves the slowed side,
        # |steering| == 100 reverses it.
        scale = (50 - abs(float(steering))) / 50
        if steering >= 0:
            return (native, native * scale)
        return (native * scale, native)
def get_gpuid(gpu_ids: Dict[(str, str)], gpus: List[GpuType]):
    """Map the set of GPU vendors present to a configured GPU id.

    Builds a key by concatenating the distinct vendor names in first-seen
    order, lowercased, and looks it up in ``gpu_ids``.

    :param gpu_ids: mapping from lowercased concatenated vendor string to id.
    :param gpus: detected GPUs; only their ``vendor`` attribute is read.
    :return: the matching id, or ``'unknown'`` (with a message printed).
    """
    # dict.fromkeys de-duplicates while preserving first-seen order,
    # replacing the manual range(len(...)) membership loop.
    vendors = list(dict.fromkeys(gpu.vendor for gpu in gpus))
    gpuvendor = ''.join(vendors).lower()
    if gpuvendor in gpu_ids:
        return gpu_ids[gpuvendor]
    print('Unknown GPU, contact us on github to resolve this.')
    return 'unknown'
class TrivializeTopic(MethodView):
    """Flask view that clears a topic's 'important' flag."""
    # Access control: login plus at-least-moderator in the forum; on failure
    # flash a danger message and redirect back to the current topic's URL.
    decorators = [login_required, allows.requires(IsAtleastModeratorInForum(), on_fail=FlashAndRedirect(message=_('You are not allowed to trivialize this topic'), level='danger', endpoint=(lambda *a, **k: current_topic.url)))]
    def post(self, topic_id=None, slug=None):
        # `slug` is accepted for URL compatibility but not used in the lookup;
        # unknown ids produce a 404.
        topic = Topic.query.filter_by(id=topic_id).first_or_404()
        topic.important = False
        topic.save()
        return redirect(topic.url)
class QuotedEntryField(Gtk.Entry):
    """A Gtk.Entry whose externally-visible state is percent-encoded."""

    def __init__(self):
        Gtk.Entry.__init__(self)

    def get_state(self):
        # Expose the current text URL-quoted.
        return urllib.parse.quote(self.get_text())

    def set_state(self, state):
        # Accept either a bare string or a one-element list/tuple.
        if isinstance(state, (list, tuple)):
            state = state[0]
        self.set_text(urllib.parse.unquote(state))
def test():
    """Exercise checker: validates the learner's 'has_number' Doc extension."""
    assert Doc.has_extension('has_number'), 'doc?'
    ext = Doc.get_extension('has_number')
    # Extension tuple is (default, method, getter, setter); index 2 is the getter.
    assert (ext[2] is not None), 'getter?'
    # __solution__ holds the learner's submitted source as a string.
    assert ('getter=get_has_number' in __solution__), 'get_has_numbergetter?'
    assert ('doc._.has_number' in __solution__), '?'
    assert doc._.has_number, 'getter'
    __msg__.good('!')
class BinaryProjector(Filter):
    """Projects continuous values toward the binary extremes vmin/vmax using a
    tanh-based projection (density-filter style binarization)."""
    vmin: float = pd.Field(..., title='Min Value', description='Minimum value to project to.')
    vmax: float = pd.Field(..., title='Max Value', description='Maximum value to project to.')
    beta: float = pd.Field(1.0, title='Beta', description='Steepness of the binarization, higher means more sharp transition at the expense of gradient accuracy and ease of optimization. Can be useful to ramp up in a scheduled way during optimization.')
    eta: float = pd.Field(0.5, title='Eta', description='Halfway point in projection function.')
    # Fix: description previously said "If False" in both sentences; snapping
    # happens when the flag is True (see evaluate()).
    strict_binarize: bool = pd.Field(False, title='Binarize strictly', description='If False, the binarization is still continuous between min and max. If True, the values are snapped to the min and max values after projection.')
    def evaluate(self, spatial_data: jnp.array) -> jnp.array:
        """Apply the tanh projection and map the result into [vmin, vmax]."""
        num = (jnp.tanh((self.beta * self.eta)) + jnp.tanh((self.beta * (spatial_data - self.eta))))
        den = (jnp.tanh((self.beta * self.eta)) + jnp.tanh((self.beta * (1.0 - self.eta))))
        rho_bar = (num / den)
        vals = (self.vmin + ((self.vmax - self.vmin) * rho_bar))
        if self.strict_binarize:
            # Snap each value to the nearer extreme around the midpoint.
            vals = jnp.where((vals < ((self.vmin + self.vmax) / 2)), self.vmin, self.vmax)
        else:
            # Clip to the [vmin, vmax] range but stay continuous inside it.
            vals = jnp.where((vals < self.vmin), self.vmin, vals)
            vals = jnp.where((vals > self.vmax), self.vmax, vals)
        return vals
def setup_previews():
    """(Re)initialize inline color previews: stop the old preview thread,
    reset per-view scan state, then start a new thread if enabled."""
    global ch_preview_thread
    global unloading
    # Flag teardown-in-progress for other code while views are being reset.
    unloading = True
    if (ch_preview_thread is not None):
        ch_preview_thread.kill()
    for w in sublime.windows():
        for v in w.views():
            # Clear scan state and phantoms so each view refreshes cleanly.
            v.settings().clear_on_change('color_helper.reload')
            v.settings().erase('color_helper.scan')
            v.settings().erase('color_helper.scan_override')
            v.settings().set('color_helper.refresh', True)
            v.erase_phantoms('color_helper')
    unloading = False
    if ch_settings.get('inline_previews', False):
        ch_preview_thread = ChPreviewThread()
        ch_preview_thread.start()
class OptionSeriesFunnelStatesInactive(Options):
    """Options for the 'inactive' state of funnel series.

    NOTE(review): `enabled` and `opacity` are each defined twice (getter then
    setter); @property / @<name>.setter decorators appear stripped from this
    extract — confirm against the generating tool's original output.
    """
    def animation(self) -> 'OptionSeriesFunnelStatesInactiveAnimation':
        # Nested sub-option object, created lazily by the config framework.
        return self._config_sub_data('animation', OptionSeriesFunnelStatesInactiveAnimation)
    def enabled(self):
        # Getter: defaults to True.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def opacity(self):
        # Getter: defaults to 0.2.
        return self._config_get(0.2)
    def opacity(self, num: float):
        self._config(num, js_type=False)
class FixedUInt(int):
    """Unsigned fixed-width integer built on ``int``.

    Subclasses define ``MAX_VALUE``. Checked operators raise ``ValueError``
    when an operand is negative / out of range or the result leaves
    ``[0, MAX_VALUE]``; the ``wrapping_*`` variants mask the result with
    ``MAX_VALUE`` instead.

    NOTE(review): the ``& MAX_VALUE`` masking only wraps correctly when
    ``MAX_VALUE`` is of the form ``2**n - 1`` — confirm for all subclasses.
    """
    MAX_VALUE: ClassVar['FixedUInt']
    __slots__ = ()
    def __init__(self: T, value: int) -> None:
        # Range/type validation only; the int value itself is set by int.__new__.
        if (not isinstance(value, int)):
            raise TypeError()
        if ((value < 0) or (value > self.MAX_VALUE)):
            raise ValueError()
    def __radd__(self: T, left: int) -> T:
        # Addition is commutative, so reflected add reuses __add__.
        return self.__add__(left)
    def __add__(self: T, right: int) -> T:
        if (not isinstance(right, int)):
            return NotImplemented
        result = int.__add__(self, right)
        # Reject negative operands and overflow past MAX_VALUE.
        if ((right < 0) or (result > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, result)
    def wrapping_add(self: T, right: int) -> T:
        # Overflow wraps via the MAX_VALUE mask instead of raising.
        if (not isinstance(right, int)):
            return NotImplemented
        if ((right < 0) or (right > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, (int.__add__(self, right) & self.MAX_VALUE))
    def __iadd__(self: T, right: int) -> T:
        return self.__add__(right)
    def __sub__(self: T, right: int) -> T:
        if (not isinstance(right, int)):
            return NotImplemented
        # `self < right` would underflow below zero.
        if ((right < 0) or (right > self.MAX_VALUE) or (self < right)):
            raise ValueError()
        return int.__new__(self.__class__, int.__sub__(self, right))
    def wrapping_sub(self: T, right: int) -> T:
        # Underflow wraps via the MAX_VALUE mask instead of raising.
        if (not isinstance(right, int)):
            return NotImplemented
        if ((right < 0) or (right > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, (int.__sub__(self, right) & self.MAX_VALUE))
    def __rsub__(self: T, left: int) -> T:
        if (not isinstance(left, int)):
            return NotImplemented
        # Reflected form: `self > left` would underflow (left - self < 0).
        if ((left < 0) or (left > self.MAX_VALUE) or (self > left)):
            raise ValueError()
        return int.__new__(self.__class__, int.__rsub__(self, left))
    def __isub__(self: T, right: int) -> T:
        return self.__sub__(right)
    def __mul__(self: T, right: int) -> T:
        if (not isinstance(right, int)):
            return NotImplemented
        result = int.__mul__(self, right)
        if ((right < 0) or (result > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, result)
    def wrapping_mul(self: T, right: int) -> T:
        if (not isinstance(right, int)):
            return NotImplemented
        if ((right < 0) or (right > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, (int.__mul__(self, right) & self.MAX_VALUE))
    def __rmul__(self: T, left: int) -> T:
        return self.__mul__(left)
    def __imul__(self: T, right: int) -> T:
        return self.__mul__(right)
    def __floordiv__(self: T, right: int) -> T:
        # Division cannot overflow; only the operand range is checked.
        if (not isinstance(right, int)):
            return NotImplemented
        if ((right < 0) or (right > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, int.__floordiv__(self, right))
    def __rfloordiv__(self: T, left: int) -> T:
        if (not isinstance(left, int)):
            return NotImplemented
        if ((left < 0) or (left > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, int.__rfloordiv__(self, left))
    def __ifloordiv__(self: T, right: int) -> T:
        return self.__floordiv__(right)
    def __mod__(self: T, right: int) -> T:
        if (not isinstance(right, int)):
            return NotImplemented
        if ((right < 0) or (right > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, int.__mod__(self, right))
    def __rmod__(self: T, left: int) -> T:
        if (not isinstance(left, int)):
            return NotImplemented
        if ((left < 0) or (left > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, int.__rmod__(self, left))
    def __imod__(self: T, right: int) -> T:
        return self.__mod__(right)
    def __divmod__(self: T, right: int) -> Tuple[(T, T)]:
        # Both quotient and remainder are re-wrapped in this class.
        if (not isinstance(right, int)):
            return NotImplemented
        if ((right < 0) or (right > self.MAX_VALUE)):
            raise ValueError()
        result = super(FixedUInt, self).__divmod__(right)
        return (int.__new__(self.__class__, result[0]), int.__new__(self.__class__, result[1]))
    def __rdivmod__(self: T, left: int) -> Tuple[(T, T)]:
        if (not isinstance(left, int)):
            return NotImplemented
        if ((left < 0) or (left > self.MAX_VALUE)):
            raise ValueError()
        result = super(FixedUInt, self).__rdivmod__(left)
        return (int.__new__(self.__class__, result[0]), int.__new__(self.__class__, result[1]))
    def __pow__(self: T, right: int, modulo: Optional[int]=None) -> T:
        # Optional three-argument pow; the modulo is range-checked too.
        if (modulo is not None):
            if (not isinstance(modulo, int)):
                return NotImplemented
            if ((modulo < 0) or (modulo > self.MAX_VALUE)):
                raise ValueError()
        if (not isinstance(right, int)):
            return NotImplemented
        result = int.__pow__(self, right, modulo)
        if ((right < 0) or (right > self.MAX_VALUE) or (result > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, result)
    def wrapping_pow(self: T, right: int, modulo: Optional[int]=None) -> T:
        if (modulo is not None):
            if (not isinstance(modulo, int)):
                return NotImplemented
            if ((modulo < 0) or (modulo > self.MAX_VALUE)):
                raise ValueError()
        if (not isinstance(right, int)):
            return NotImplemented
        if ((right < 0) or (right > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, (int.__pow__(self, right, modulo) & self.MAX_VALUE))
    def __rpow__(self: T, left: int, modulo: Optional[int]=None) -> T:
        if (modulo is not None):
            if (not isinstance(modulo, int)):
                return NotImplemented
            if ((modulo < 0) or (modulo > self.MAX_VALUE)):
                raise ValueError()
        if (not isinstance(left, int)):
            return NotImplemented
        if ((left < 0) or (left > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, int.__rpow__(self, left, modulo))
    def __ipow__(self: T, right: int, modulo: Optional[int]=None) -> T:
        return self.__pow__(right, modulo)
    def __and__(self: T, right: int) -> T:
        if (not isinstance(right, int)):
            return NotImplemented
        if ((right < 0) or (right > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, int.__and__(self, right))
    def __or__(self: T, right: int) -> T:
        if (not isinstance(right, int)):
            return NotImplemented
        if ((right < 0) or (right > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, int.__or__(self, right))
    def __xor__(self: T, right: int) -> T:
        if (not isinstance(right, int)):
            return NotImplemented
        if ((right < 0) or (right > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, int.__xor__(self, right))
    def __rxor__(self: T, left: int) -> T:
        if (not isinstance(left, int)):
            return NotImplemented
        if ((left < 0) or (left > self.MAX_VALUE)):
            raise ValueError()
        return int.__new__(self.__class__, int.__rxor__(self, left))
    def __ixor__(self: T, right: int) -> T:
        return self.__xor__(right)
    def __invert__(self: T) -> T:
        # Bit-complement within the fixed width (mask keeps it non-negative).
        return int.__new__(self.__class__, (int.__invert__(self) & self.MAX_VALUE))
    def __rshift__(self: T, shift_by: int) -> T:
        # Right shift cannot overflow; negative counts raise via int itself.
        if (not isinstance(shift_by, int)):
            return NotImplemented
        return int.__new__(self.__class__, int.__rshift__(self, shift_by))
    def to_be_bytes(self) -> 'Bytes':
        """Minimal big-endian byte encoding; zero encodes as b'' (length 0)."""
        bit_length = self.bit_length()
        byte_length = ((bit_length + 7) // 8)
        return self.to_bytes(byte_length, 'big')
class PuzzleSystemCmdSet(CmdSet):
    """CmdSet bundling every puzzle-system command."""

    def at_cmdset_creation(self):
        super().at_cmdset_creation()
        # Instantiate and add each puzzle command, preserving the original order.
        for command_class in (CmdCreatePuzzleRecipe, CmdEditPuzzle, CmdArmPuzzle, CmdListPuzzleRecipes, CmdListArmedPuzzles, CmdUsePuzzleParts):
            self.add(command_class())
def _calc_bezier(target: float, a: Tuple[(float, float)], b: Tuple[(float, float)], c: Tuple[(float, float)], p1: Tuple[(float, float)], p2: Tuple[(float, float)]) -> float:
if (target in (0, 1)):
return target
if ((target > 1) or (target < 0)):
return _extrapolate(target, p1, p2)
t = _solve_bezier_x(target, a[0], b[0], c[0])
y = _bezier(t, a[1], b[1], c[1])
return y |
def test_get(app, elasticapm_client):
    """A traced GET / produces one transaction with one span and the expected
    request/response context fields."""
    client = TestClient(app)
    # NOTE(review): the traceparent parent-id field 'b7ad6b' is shorter than
    # the 16-hex-digit W3C format — confirm whether the truncation is
    # intentional for this test.
    response = client.get('/', headers={constants.TRACEPARENT_HEADER_NAME: '00-0af7651916cd43dd8448eb211c80319c-b7ad6b-03', constants.TRACESTATE_HEADER_NAME: 'foo=bar,bar=baz', 'REMOTE_ADDR': '127.0.0.1'})
    assert (response.status_code == 200)
    assert (len(elasticapm_client.events[constants.TRANSACTION]) == 1)
    transaction = elasticapm_client.events[constants.TRANSACTION][0]
    spans = elasticapm_client.spans_for_transaction(transaction)
    assert (len(spans) == 1)
    span = spans[0]
    assert (transaction['name'] == 'GET /')
    assert (transaction['result'] == 'HTTP 2xx')
    assert (transaction['outcome'] == 'success')
    assert (transaction['type'] == 'request')
    assert (transaction['span_count']['started'] == 1)
    # Request/response context captured by the agent.
    request = transaction['context']['request']
    assert (request['method'] == 'GET')
    assert (request['socket'] == {'remote_address': '127.0.0.1'})
    response = transaction['context']['response']
    assert (response['status_code'] == 200)
    assert (response['headers']['content-type'] == 'text/plain; charset=utf-8')
    assert (span['name'] == 'test')
class PostCreate(PostBase):
    """Validation model for creating a post: length checks on title/summary/body.

    NOTE(review): the bare ('title') / ('summary') / ('body') expression
    statements look like stripped decorator arguments — presumably
    @validator('title') etc.; as written they have no effect. Confirm
    against the original file.
    """
    ('title')
    def validate_title(cls: Any, title: str, **kwargs: Any) -> Any:
        # Title: non-empty, at most 100 characters.
        if (len(title) == 0):
            raise ValueError("Title can't be empty")
        elif (len(title) > 100):
            raise ValueError('Title is too long')
        return title
    ('summary')
    def validate_summary(cls: Any, summary: str, **kwargs: Any) -> Any:
        # Summary: non-empty, at most 200 characters.
        if (len(summary) == 0):
            raise ValueError("Summary can't be empty")
        elif (len(summary) > 200):
            raise ValueError('Summary is too long')
        return summary
    ('body')
    def validate_body(cls: Any, body: str, **kwargs: Any):
        # Body: non-empty, no upper bound.
        if (len(body) == 0):
            raise ValueError("Body can't be empty")
        return body
def filter_firewall_dos_policy6_data(json):
    """Project the input onto the option keys accepted by firewall DoS-policy6.

    :param json: raw parameter dict (the name is kept for interface
        compatibility even though it shadows the stdlib module name).
    :return: dict restricted to known options whose values are not None.
    """
    option_list = ['anomaly', 'comments', 'dstaddr', 'interface', 'name', 'policyid', 'service', 'srcaddr', 'status']
    json = remove_invalid_fields(json)
    # Dict comprehension replaces the manual accumulate loop.
    return {attribute: json[attribute] for attribute in option_list if attribute in json and json[attribute] is not None}
class OptionSeriesBellcurveSonificationDefaultspeechoptionsMappingTime(Options):
    """Option wrapper for bellcurve sonification defaultSpeechOptions time mapping.

    NOTE(review): each name below appears twice (getter then setter); the
    @property / @<name>.setter decorators look stripped from this extract —
    as written the second def shadows the first. Confirm against the
    generating tool's original output.
    """
    def mapFunction(self):
        # Getter: no configured default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stores the raw value (not emitted as JavaScript).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def generate_mapcss(options: argparse.Namespace) -> None:
    """Generate the MapCSS 0.2 scheme: draw outlined icons and write the
    .mapcss file into the workspace directory.

    :param options: parsed CLI arguments; uses .icons, .ways and .lifecycle.
    """
    directory: Path = workspace.get_mapcss_path()
    icons_with_outline_path: Path = workspace.get_mapcss_icons_path()
    scheme: Scheme = Scheme.from_file(workspace.DEFAULT_SCHEME_PATH)
    extractor: ShapeExtractor = ShapeExtractor(workspace.ICONS_PATH, workspace.ICONS_CONFIG_PATH)
    collection: IconCollection = IconCollection.from_scheme(scheme, extractor)
    # Icons get a black, half-transparent outline for MapCSS rendering.
    collection.draw_icons(icons_with_outline_path, workspace.ICONS_LICENSE_PATH, color=Color('black'), outline=True, outline_opacity=0.5)
    mapcss_writer: MapCSSWriter = MapCSSWriter(scheme, workspace.MAPCSS_ICONS_DIRECTORY_NAME, options.icons, options.ways, options.lifecycle)
    with workspace.get_mapcss_file_path().open('w+', encoding='utf-8') as output_file:
        mapcss_writer.write(output_file)
    logging.info(f'MapCSS 0.2 scheme is written to {directory}.')
.sphinx(buildername='html', srcdir=os.path.join(SOURCE_DIR, 'heading_slug_func'), freshenv=True)
def test_heading_slug_func(app, status, warning, get_sphinx_app_doctree, get_sphinx_app_output):
    """Build the heading_slug_func project warning-free and regression-check
    its doctree and HTML output."""
    # NOTE(review): the leading ".sphinx(...)" looks like a stripped
    # "@pytest.mark.sphinx" decorator — this extract is not valid Python
    # as-is; confirm against the original file.
    app.build()
    assert ('build succeeded' in status.getvalue())
    warnings = warning.getvalue().strip()
    assert (warnings == '')
    try:
        get_sphinx_app_doctree(app, docname='index', regress=True)
    finally:
        # Always also regress the resolved doctree, even if the raw one failed.
        get_sphinx_app_doctree(app, docname='index', resolve=True, regress=True)
    get_sphinx_app_output(app, filename='index.html', regress_html=True, replace={'Permalink to this headline': 'Permalink to this heading'})
def test_custom_medium_to_gds(tmp_path):
    """Export a structure with a spatially-varying CustomMedium to GDS and
    check the thresholded contour areas against the analytic values."""
    geometry = td.Box(size=(2, 2, 2))
    (nx, ny, nz) = (100, 90, 80)
    x = np.linspace(0, 2, nx)
    y = np.linspace((- 1), 1, ny)
    z = np.linspace((- 1), 1, nz)
    f = np.array([td.C_0])
    (mx, my, mz, _) = np.meshgrid(x, y, z, f, indexing='ij', sparse=True)
    # eps = 1 + 1/(1 + r^2) with r measured from (1, 0, 0), so the region
    # with eps >= 1.5 is the unit ball centered at that point.
    data = (1 + (1 / (((1 + ((mx - 1) ** 2)) + (my ** 2)) + (mz ** 2))))
    eps_diagonal_data = td.ScalarFieldDataArray(data, coords=dict(x=x, y=y, z=z, f=f))
    eps_components = {f'eps_{d}{d}': eps_diagonal_data for d in 'xyz'}
    eps_dataset = td.PermittivityDataset(**eps_components)
    medium = td.CustomMedium(eps_dataset=eps_dataset, name='my_medium')
    structure = td.Structure(geometry=geometry, medium=medium)
    fname = str((tmp_path / 'structure-custom-x.gds'))
    # Slice at x=1 passes through the ball center: full disc, area ~ pi.
    structure.to_gds_file(fname, x=1, permittivity_threshold=1.5, frequency=td.C_0)
    cell = gdstk.read_gds(fname).cells[0]
    assert np.allclose(cell.area(), np.pi, atol=0.01)
    fname = str((tmp_path / 'structure-custom-z.gds'))
    # Slice at z=0: the box clips the ball at x=1, leaving a half disc ~ pi/2.
    structure.to_gds_file(fname, z=0, permittivity_threshold=1.5, frequency=td.C_0)
    cell = gdstk.read_gds(fname).cells[0]
    assert np.allclose(cell.area(), (np.pi / 2), atol=0.03)
    fname = str((tmp_path / 'structure-empty.gds'))
    # Slice outside the geometry must produce no polygons.
    structure.to_gds_file(fname, x=(- 0.1), permittivity_threshold=1.5, frequency=td.C_0)
    cell = gdstk.read_gds(fname).cells[0]
    assert (len(cell.polygons) == 0)
def Commutes_Fissioning(a1, a2, aenv1, aenv2, a1_no_loop_var=False):
    """Build the analysis predicate under which statement groups a1 and a2
    commute for loop fissioning.

    NOTE(review): W/R/RG/Red/All appear to be effect sets (heap writes, heap
    reads, global reads, reductions, all heap accesses) per the ES.* names;
    confirm against the effect-analysis module.
    """
    (W1, R1, RG1, Red1, All1) = getsets([ES.WRITE_H, ES.READ_H, ES.READ_G, ES.REDUCE, ES.ALL_H], a1)
    (W2, R2, RG2, Red2, All2) = getsets([ES.WRITE_H, ES.READ_H, ES.READ_G, ES.REDUCE, ES.ALL_H], a2)
    WG1 = get_changing_globset(aenv1)
    WG2 = get_changing_globset(aenv2)
    # Base condition: a1's heap writes are disjoint from everything a2 touches.
    write_commute12 = ADef(is_empty(LIsct(W1, All2)))
    if a1_no_loop_var:
        # Escape hatch: a1 is idempotent (no reductions, no write/read overlap
        # on heap or globals), so its writes may overlap a2's accesses.
        a1_idempotent = AAnd(ADef(is_empty(Red1)), ADef(is_empty(LIsct(W1, R1))), ADef(is_empty(LIsct(WG1, RG1))))
        write_commute12 = AOr(write_commute12, a1_idempotent)
    # Remaining disjointness conditions between the two groups' effects.
    pred = AAnd(write_commute12, ADef(is_empty(LIsct(W2, All1))), ADef(is_empty(LIsct(Red1, R2))), ADef(is_empty(LIsct(Red2, R1))), ADef(is_empty(LIsct(WG1, RG2))), ADef(is_empty(LIsct(WG2, RG1))))
    return pred
def main():
    """Print a colored text grid of (frame, bit) positions from the input files.

    Each file gets its own color/letter; positions present in more than one
    file are marked '#' with the DUPLICATE color.
    """
    if sys.stdout.isatty():
        bit_colors = ['\x1b[39m', '\x1b[91m', '\x1b[92m', '\x1b[93m', '\x1b[94m', '\x1b[95m', '\x1b[96m', '\x1b[31m', '\x1b[32m', '\x1b[33m', '\x1b[34m', '\x1b[35m', '\x1b[36m']
        colors = {'NONE': '\x1b[0m', 'DUPLICATE': '\x1b[101;97m'}
    else:
        # Not a terminal: emit no escape sequences.
        bit_colors = ['']
        colors = {'NONE': '', 'DUPLICATE': ''}
    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('files', nargs='*', type=str, help='Input files')
    args = parser.parse_args()
    single_file = len(args.files) == 1
    all_bits = []
    # Legend: one line per file with its color, marker and bit count.
    for index, path in enumerate(args.files):
        bits = load_just_bits(path)
        all_bits.append(bits)
        color = bit_colors[index % len(bit_colors)]
        marker = 'O' if single_file else chr(65 + index)
        print(f"{color}{marker}{colors['NONE']}: {path} #{len(bits)}")
    print('')
    max_frames = max(bit[0] for bits in all_bits for bit in bits) + 1
    max_bits = max(bit[1] for bits in all_bits for bit in bits) + 1
    # Column header: each bit index printed vertically as three digit rows.
    for row in range(3):
        print('   ' + ''.join(f'{col:03d}'[row] for col in range(max_bits)))
    print('')
    for frame in range(max_frames):
        line = f'{frame:2d} '
        for col in range(max_bits):
            cell = colors['NONE'] + '-'
            seen = False
            for index, bits in enumerate(all_bits):
                if (frame, col) in bits:
                    if seen:
                        # Second (or later) file hitting this cell: duplicate marker.
                        cell = colors['DUPLICATE'] + '#' + colors['NONE']
                    else:
                        marker = 'O' if single_file else chr(65 + index)
                        cell = bit_colors[index % len(bit_colors)] + marker
                    seen = True
            line += cell
            line += colors['NONE']
        print(line)
def extractRainingblackWordpressCom(item):
    """Parse a 'rainingblack.wordpress.com' feed item into a release message.

    Returns None for previews / items without chapter or volume info, a
    release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag-to-look-for, series name, translation type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return _buildReleaseMessage(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
(scope='function', name='data_category')
def fixture_data_category(test_config: FidesConfig) -> Generator:
    """Yield a throwaway DataCategory and delete it server-side afterwards."""
    # NOTE(review): the bare tuple above looks like stripped arguments of
    # "@pytest.fixture(scope='function', name='data_category')"; confirm
    # against the original file.
    fides_key = 'foo'
    (yield DataCategory(fides_key=fides_key, parent_key=None))
    # Teardown: remove the resource the test created on the server.
    _api.delete(url=test_config.cli.server_url, resource_type='data_category', resource_id=fides_key, headers=CONFIG.user.auth_header)
class EditorAreaPaneTestCase(unittest.TestCase):
    """Tests for EditorAreaPane editor-factory registration.

    NOTE(review): the bare "(USING_WX, '...')" expressions below look like
    stripped "@unittest.skipIf(USING_WX, ...)" decorators; confirm against
    the original file.
    """
    (USING_WX, 'EditorAreaPane is not implemented in WX')
    def test_create_editor(self):
        # A registered factory for int objects is used by create_editor.
        area = EditorAreaPane()
        area.register_factory(Editor, (lambda obj: isinstance(obj, int)))
        self.assertTrue(isinstance(area.create_editor(0), Editor))
    (USING_WX, 'EditorAreaPane is not implemented in WX')
    def test_factories(self):
        # get_factory resolves by predicate; unregistering removes the match.
        area = EditorAreaPane()
        area.register_factory(Editor, (lambda obj: isinstance(obj, int)))
        self.assertEqual(area.get_factory(0), Editor)
        self.assertEqual(area.get_factory('foo'), None)
        area.unregister_factory(Editor)
        self.assertEqual(area.get_factory(0), None)
class OptionSeriesScatterSonificationDefaultspeechoptionsMappingTime(Options):
    """Option wrapper for scatter sonification defaultSpeechOptions time mapping.

    NOTE(review): each name below appears twice (getter then setter); the
    @property / @<name>.setter decorators look stripped from this extract —
    as written the second def shadows the first. Confirm against the
    generating tool's original output.
    """
    def mapFunction(self):
        # Getter: no configured default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stores the raw value (not emitted as JavaScript).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestFetchUpdates(UpdateInfoMetadataTestCase):
    """Tests for update fetching in UpdateInfoMetadata.

    NOTE(review): the bare "('bodhi.server.metadata.log.warning')" expression
    looks like a stripped "@mock.patch(...)" decorator (the method receives a
    `warning` mock); confirm against the original file.
    """
    ('bodhi.server.metadata.log.warning')
    def test_build_unassociated(self, warning):
        # A build detached from its update should be logged and excluded.
        update = self.db.query(Update).one()
        update.date_stable = update.date_testing = None
        u = base.create_update(self.db, ['TurboGears-1.0.2.2-4.fc17'])
        u.builds[0].update = None
        self.db.flush()
        md = UpdateInfoMetadata(update.release, update.request, self.db, self.temprepo, close_shelf=False)
        warning.assert_called_once_with('TurboGears-1.0.2.2-4.fc17 does not have a corresponding update')
        assert (md.updates == set([]))
def main():
    """Poll the first connected gamepad with pygame and translate its events
    into the module-global `action` code consumed elsewhere.

    Buttons, axes and the hat are each mapped to small integer action codes
    (0 means "no action"); buttons/triggers 4/5 additionally use a 2-second
    long-press timer via `start_time`. Runs until a pygame QUIT event.
    """
    global action
    pygame.init()
    pygame.joystick.init()
    try:
        joystick = pygame.joystick.Joystick(0)
    except pygame.error:
        # Was a bare `except:` — narrowed so KeyboardInterrupt and real bugs
        # are not swallowed; pygame raises pygame.error when no joystick exists.
        print('Please connect the handle first.')
        return
    joystick.init()
    done = False
    start_time = 0
    while (not done):
        for event_ in pygame.event.get():
            if (event_.type == pygame.QUIT):
                done = True
            elif ((event_.type == pygame.JOYBUTTONDOWN) or (event_.type == pygame.JOYBUTTONUP)):
                # Button index -> action code mapping.
                buttons = joystick.get_numbuttons()
                for i in range(buttons):
                    button = joystick.get_button(i)
                    if (i == 7):
                        if (button == 1):
                            action = 7
                            break
                        else:
                            action = 0
                    if (i == 1):
                        if (button == 1):
                            action = 11
                            break
                    if (i == 0):
                        if (button == 1):
                            action = 10
                            break
                    if (i == 3):
                        if (button == 1):
                            action = 9
                            break
                        if ((action == 9) and (button == 0)):
                            action = 0
                            break
                    if (i == 2):
                        if (button == 1):
                            action = 8
                            break
                        if ((action == 8) and (button == 0)):
                            action = 0
                            break
                    if (i == 4):
                        # Long-press button: only resets after >2s hold.
                        if (button == 1):
                            action = 18
                            start_time = time.time()
                            break
                        if ((start_time != 0) and (button == 0)):
                            if ((time.time() - start_time) > 2):
                                start_time = 0
                                break
                            else:
                                start_time = 0
                                action = 0
                    if (i == 5):
                        if (button == 1):
                            action = 21
                            start_time = time.time()
                            break
                        if ((start_time != 0) and (button == 0)):
                            if ((time.time() - start_time) > 2):
                                start_time = 0
                                break
                            else:
                                start_time = 0
                                action = 0
            elif (event_.type == pygame.JOYAXISMOTION):
                # Axis index -> action code mapping (sticks and triggers).
                axes = joystick.get_numaxes()
                for i in range(axes):
                    axis = joystick.get_axis(i)
                    if (i == 1):
                        if (axis < (- 3.e-05)):
                            action = 1
                            break
                        elif (axis > (- 3.e-05)):
                            action = 2
                            break
                        else:
                            action = 0
                    if (i == 0):
                        if (axis < 0):
                            action = 3
                            break
                        elif (axis > 0):
                            action = 4
                            break
                        else:
                            action = 0
                    if (i == 2):
                        if (axis < 0):
                            action = 17
                            break
                        elif (axis > 0):
                            action = 16
                            break
                        else:
                            action = 0
                    if (i == 3):
                        if (axis < (- 3.e-05)):
                            action = 5
                            break
                        elif (axis > (- 3.e-05)):
                            action = 6
                            break
                        else:
                            action = 0
                    if (i == 4):
                        # Analog trigger treated as a long-press source.
                        if (axis > 0.9):
                            action = 19
                            start_time = time.time()
                            break
                        if ((start_time != 0) and (axis == (- 1.0))):
                            if ((time.time() - start_time) > 2):
                                start_time = 0
                                break
                            else:
                                start_time = 0
                                action = 0
                    if (i == 5):
                        if (axis > 0.9):
                            action = 20
                            start_time = time.time()
                            break
                        if ((start_time != 0) and (axis == (- 1.0))):
                            if ((time.time() - start_time) > 2):
                                start_time = 0
                                break
                            else:
                                start_time = 0
                                action = 0
            elif (event_.type == pygame.JOYHATMOTION):
                # D-pad (hat) direction -> action codes 12..15.
                hat = joystick.get_hat(0)
                if (hat == (0, 1)):
                    action = 12
                elif (hat == (0, (- 1))):
                    action = 13
                elif (hat == ((- 1), 0)):
                    action = 14
                elif (hat == (1, 0)):
                    action = 15
                elif (hat == (0, 0)):
                    action = 0
    pygame.quit()
class LDPBase(metaclass=abc.ABCMeta):
    """Abstract base for local-differential-privacy mechanisms."""
    def randomize(self, value):
        # Subclasses implement the randomized response for `value`.
        raise NotImplementedError
    def _check_epsilon(cls, epsilon):
        # Validate that the privacy budget is non-negative and return it.
        # NOTE(review): first parameter is `cls` — presumably a stripped
        # @classmethod decorator; confirm against the original file.
        if (not (epsilon >= 0)):
            raise ValueError('ERR: the range of epsilon={} is wrong.'.format(epsilon))
        return epsilon
    def _check_value(self, value):
        # Subclasses validate mechanism-specific input values.
        raise NotImplementedError
def match_ethernet_dst_address(self, of_ports, priority=None):
    """Install a flow that matches only on the Ethernet destination address.

    Returns (matching_packet, match) so the caller can verify forwarding.
    """
    pkt_matchdst = simple_eth_packet(eth_dst='00:01:01:01:01:01')
    match = parse.packet_to_flow_match(pkt_matchdst)
    self.assertTrue((match is not None), 'Could not generate flow match from pkt')
    # Wildcard every field except the datalink destination.
    match.wildcards = (ofp.OFPFW_ALL ^ ofp.OFPFW_DL_DST)
    match_send_flowadd(self, match, priority, of_ports[1])
    return (pkt_matchdst, match)
class TestAstNodeSerialization():
    """Round-trip (serialize then deserialize) tests for every AST node type.

    NOTE(review): the bare ".parametrize(...)" lines look like stripped
    "@pytest.mark.parametrize" decorators, and `save_and_load` takes no
    `self` yet is called via `self.` — presumably a stripped @staticmethod.
    Confirm against the original file.
    """
    def save_and_load(node: AbstractSyntaxTreeNode):
        # Serialize the node and immediately deserialize the result.
        serializer = AstNodeSerializer()
        data = serializer.serialize(node)
        return serializer.deserialize(data)
    .parametrize('node', [SeqNode(true_value(LogicCondition.generate_new_context())), SeqNode(logic_cond('x1', LogicCondition.generate_new_context()))])
    def test_sequence_node(self, node: SeqNode):
        # Attaching the node to a tree first mirrors real usage.
        AbstractSyntaxTree(root=node, condition_map={})
        result = self.save_and_load(node)
        assert (node == result)
    .parametrize('context, node', [((context := LogicCondition.generate_new_context()), CodeNode([], true_value(context))), ((context := LogicCondition.generate_new_context()), CodeNode([], false_value(context))), ((context := LogicCondition.generate_new_context()), CodeNode([Assignment(var('a'), const(1))], true_value(context)))])
    def test_code_node(self, context, node: CodeNode):
        assert (node == self.save_and_load(node))
    .parametrize('node', [ConditionNode(logic_cond('x1'), true_value()), ConditionNode(true_value(), true_value()), ConditionNode(false_value(), true_value())])
    def test_condition_node(self, node: ConditionNode):
        assert (node == self.save_and_load(node))
    .parametrize('node', [TrueNode(true_value()), TrueNode(false_value())])
    def test_true_node(self, node: TrueNode):
        assert (node == self.save_and_load(node))
    .parametrize('node', [FalseNode(true_value()), FalseNode(false_value())])
    def test_false_node(self, node: FalseNode):
        assert (node == self.save_and_load(node))
    .parametrize('node', [WhileLoopNode(logic_cond('x1'), true_value()), WhileLoopNode(logic_cond('x1'), false_value())])
    def test_while_loop_node(self, node: WhileLoopNode):
        assert (node == self.save_and_load(node))
    .parametrize('node', [DoWhileLoopNode(logic_cond('x1'), true_value()), DoWhileLoopNode(logic_cond('x1'), false_value())])
    def test_do_while_loop_node(self, node: DoWhileLoopNode):
        assert (node == self.save_and_load(node))
    .parametrize('node', [ForLoopNode(Assignment(var('i'), const(0)), logic_cond('x1'), Assignment(var('i'), BinaryOperation(OperationType.plus, [var('i'), const(1)])), true_value()), ForLoopNode(Assignment(var('i'), const(0)), logic_cond('x1'), Assignment(var('i'), BinaryOperation(OperationType.plus, [var('i'), const(1)])), false_value())])
    def test_for_loop_node(self, node: ForLoopNode):
        assert (node == self.save_and_load(node))
    .parametrize('node', [SwitchNode(var('a'), true_value()), SwitchNode(const(1), true_value()), SwitchNode(var('a'), false_value())])
    def test_switch_node(self, node: SwitchNode):
        AbstractSyntaxTree(root=node, condition_map={})
        assert (node == self.save_and_load(node))
    .parametrize('node', [CaseNode(var('a'), const(0), true_value()), CaseNode(var('a'), const(0), false_value())])
    def test_case_node(self, node: CaseNode):
        assert (node == self.save_and_load(node))
class MicrosoftAudioApi(AudioInterface):
    def audio__text_to_speech(self, language: str, text: str, option: str, voice_id: str, audio_format: str, speaking_rate: int, speaking_pitch: int, speaking_volume: int, sampling_rate: int) -> ResponseType[TextToSpeechDataClass]:
        """Synthesize `text` with Azure Speech; return base64 audio plus an
        uploaded resource URL.

        NOTE(review): `language`, `option` and `sampling_rate` are unused in
        this body (only the voice and output format are configured) — confirm
        whether that is intentional for this provider.
        """
        speech_config = speechsdk.SpeechConfig(subscription=self.api_settings['speech']['subscription_key'], region=self.api_settings['speech']['service_region'])
        speech_config.speech_synthesis_voice_name = voice_id
        # Resolve a supported output format name from the SDK's enum members.
        (ext, audio_format) = get_right_audio_support_and_sampling_rate(audio_format, 0, speechsdk.SpeechSynthesisOutputFormat._member_names_)
        speech_config.set_speech_synthesis_output_format(getattr(speechsdk.SpeechSynthesisOutputFormat, audio_format))
        # Rate/pitch/volume adjustments are applied by wrapping the text in SSML.
        text = generate_right_ssml_text(text, voice_id, speaking_rate, speaking_pitch, speaking_volume)
        speech_synthesizer = speechsdk.SpeechSynthesizer(speech_config=speech_config)
        response = (speech_synthesizer.speak_text_async(text).get() if (not is_ssml(text)) else speech_synthesizer.speak_ssml_async(text).get())
        if (response.reason == speechsdk.ResultReason.Canceled):
            # Surface the SDK cancellation reason as a provider error.
            cancellation_details = response.cancellation_details
            raise ProviderException(str(cancellation_details.error_details))
        audio_content = BytesIO(response.audio_data)
        audio = base64.b64encode(audio_content.read()).decode('utf-8')
        voice_type = 1
        # Rewind before uploading: b64encode above consumed the stream.
        audio_content.seek(0)
        resource_url = upload_file_bytes_to_s3(audio_content, f'.{ext}', USER_PROCESS)
        standardized_response = TextToSpeechDataClass(audio=audio, voice_type=voice_type, audio_resource_url=resource_url)
        return ResponseType[TextToSpeechDataClass](original_response={}, standardized_response=standardized_response)
def audio__speech_to_text_async__launch_job(self, file: str, language: str, speakers: int, profanity_filter: bool, vocabulary: Optional[List[str]], audio_attributes: tuple, model: str=None, file_url: str='', provider_params=dict()) -> AsyncLaunchJobResponseType:
(export_format, channels, frame_rate) = audio_attributes
if (not language):
raise LanguageException('Language not provided')
content_url = file_url
if (not content_url):
content_url = upload_file_to_s3(file, ((Path(file).stem + '.') + export_format))
headers = self.headers['speech']
headers['Content-Type'] = 'application/json'
config = {'contentUrls': [content_url], 'properties': {'wordLevelTimestampsEnabled': True, 'profanityFilterMode': 'None'}, 'locale': language, 'displayName': 'test batch transcription'}
if (int(channels) == 1):
config['properties'].update({'diarizationEnabled': True})
if profanity_filter:
config['properties'].update({'profanityFilterMode': 'Masked'})
config.update(provider_params)
response = requests.post(url=self.url['speech'], headers=headers, data=json.dumps(config))
if (response.status_code == 201):
result_location = response.headers['Location']
provider_id = result_location.split('/')[(- 1)]
return AsyncLaunchJobResponseType(provider_job_id=provider_id)
else:
raise ProviderException(response.json().get('message'), code=response.status_code)
def audio__speech_to_text_async__get_job_result(self, provider_job_id: str) -> AsyncBaseResponseType[SpeechToTextAsyncDataClass]:
headers = self.headers['speech']
response = requests.get(url=f"{self.url['speech']}/{provider_job_id}/files", headers=headers)
original_response = None
if (response.status_code == 200):
data = response.json()['values']
if data:
files_urls = [entry['links']['contentUrl'] for entry in data if (entry['kind'] == 'Transcription')]
text = ''
diarization_entries = []
speakers = set()
for file_url in files_urls:
response = requests.get(file_url, headers=headers)
original_response = response.json()
if (response.status_code != 200):
error = original_response.get('message')
raise ProviderException(error)
if (original_response['combinedRecognizedPhrases'] and (len(original_response['combinedRecognizedPhrases']) > 0)):
data = original_response['combinedRecognizedPhrases'][0]
text += data['display']
for recognized_status in original_response['recognizedPhrases']:
if (recognized_status['recognitionStatus'] == 'Success'):
if ('speaker' in recognized_status):
speaker = recognized_status['speaker']
for word_info in recognized_status['nBest'][0]['words']:
speakers.add(speaker)
start_time = convert_pt_date_from_string(word_info['offset'])
end_time = (start_time + convert_pt_date_from_string(word_info['duration']))
diarization_entries.append(SpeechDiarizationEntry(segment=word_info['word'], speaker=speaker, start_time=str(start_time), end_time=str(end_time), confidence=float(word_info['confidence'])))
diarization = SpeechDiarization(total_speakers=len(speakers), entries=diarization_entries)
if (len(speakers) == 0):
diarization.error_message = 'Use mono audio files for diarization'
standardized_response = SpeechToTextAsyncDataClass(text=text, diarization=diarization)
return AsyncResponseType[SpeechToTextAsyncDataClass](original_response=original_response, standardized_response=standardized_response, provider_job_id=provider_job_id)
else:
return AsyncPendingResponseType[SpeechToTextAsyncDataClass](provider_job_id=provider_job_id)
else:
error = response.json().get('message')
if error:
if ('entity cannot be found' in error):
raise AsyncJobException(reason=AsyncJobExceptionReason.DEPRECATED_JOB_ID, code=response.status_code)
raise ProviderException(error, code=response.status_code)
raise ProviderException(response.json(), code=response.status_code) |
class PGEncryptedString(TypeDecorator):
    """SQLAlchemy column type storing JSON values encrypted at rest.

    Encryption/decryption happens inside PostgreSQL via pgcrypto's
    pgp_sym_encrypt/pgp_sym_decrypt, using a symmetric passphrase taken
    from the application config. Python-side processors therefore pass
    values through untouched.
    """

    impl = BYTEA
    python_type = String
    cache_ok = True

    def __init__(self):
        super().__init__()
        # Symmetric key fed to pgp_sym_encrypt / pgp_sym_decrypt.
        self.passphrase = CONFIG.user.encryption_key

    def bind_expression(self, bindparam):
        # Serialize the bound value as JSON text, then encrypt server-side.
        bindparam = type_coerce(bindparam, JSON)
        return func.pgp_sym_encrypt(cast(bindparam, Text), self.passphrase)

    def column_expression(self, column):
        # Decrypt server-side and let SQLAlchemy parse the JSON result.
        return cast(func.pgp_sym_decrypt(column, self.passphrase), JSON)

    def process_bind_param(self, value, dialect):
        # Bug fix: this previously `pass`ed (returned None), which replaced
        # every bound value with NULL before bind_expression could encrypt
        # it. The value must flow through unchanged.
        return value

    def process_literal_param(self, value, dialect):
        # Pass through; literal rendering gets the raw value.
        return value

    def process_result_value(self, value, dialect):
        # column_expression already decrypted/parsed; pass through.
        return value
def parse_disc(disc_id, device):
    """Build a list of Track objects for every audio track on a CD.

    Args:
        disc_id: a libdiscid disc object (id, freedb_id, mcn, tracks).
        device: CD device path, embedded in each track's cdda:// URI.

    Returns:
        list of Track with MusicBrainz/freedb ids, track numbers, lengths
        and ISRCs (when present) applied as tags.
    """
    xl_tracks = []
    disc_tags = dict()
    disc_tags['musicbrainz_albumid'] = disc_id.id
    disc_tags['__freedb_disc_id'] = disc_id.freedb_id
    # Bug fix: the previous test `'' not in disc_id.mcn` was always False
    # (every string contains the empty string), so __mcn was never stored.
    # Store the MCN when present and not an all-zero placeholder.
    # NOTE(review): the all-zero interpretation is inferred — confirm against
    # the original condition.
    if (disc_id.mcn and disc_id.mcn.strip('0')):
        disc_tags['__mcn'] = disc_id.mcn
    for discid_track in disc_id.tracks:
        track_tags = disc_tags.copy()
        # "n/total" track numbering.
        track_tags['tracknumber'] = '{0}/{1}'.format(discid_track.number, len(disc_id.tracks))
        track_tags['title'] = ('Track %d' % discid_track.number)
        track_tags['__length'] = discid_track.seconds
        if discid_track.isrc:
            track_tags['isrc'] = discid_track.isrc
        track_uri = ('cdda://%d/#%s' % (discid_track.number, device))
        # scan=False: tags are supplied here, not read from the medium.
        track = Track(uri=track_uri, scan=False)
        track.set_tags(**track_tags)
        xl_tracks.append(track)
    return xl_tracks
class TestParse(unittest.TestCase):
    """Parsing tests for OpenFlow 1.0 message headers and bodies."""

    def test_parse_header(self):
        import loxi
        # Header bytes: version=1, type=4, length=0xAFE8 (45032),
        # xid=0x12345678 ('\x12\x34\x56\x78' == '\x124Vx').
        # Bug fix: the length bytes had been garbled to ' e' (0x2065) while
        # the assertion below expects 45032, and the xid operand was missing
        # entirely (a syntax error).
        (msg_ver, msg_type, msg_len, msg_xid) = ofp.message.parse_header('\x01\x04\xaf\xe8\x124Vx')
        self.assertEquals(1, msg_ver)
        self.assertEquals(4, msg_type)
        self.assertEquals(45032, msg_len)
        self.assertEquals(0x12345678, msg_xid)
        # A 7-byte buffer is shorter than the fixed header and must raise.
        with self.assertRaisesRegexp(loxi.ProtocolError, 'too short'):
            ofp.message.parse_header('\x01\x04\xaf\xe8\x124V')

    def test_parse_message(self):
        import loxi
        import loxi.of10 as ofp
        # Minimal 8-byte hello (type 0) with xid 0x12345678.
        buf = '\x01\x00\x00\x08\x124Vx'
        msg = ofp.message.parse_message(buf)
        # Bug fix: the right-hand operand was missing (syntax error).
        assert (msg.xid == 0x12345678)
        # Round-trip every concrete (leaf) message class through pack/parse.
        test_klasses = [x for x in ofp.message.__dict__.values() if ((type(x) == type) and issubclass(x, ofp.message.message) and (not hasattr(x, 'subtypes')))]
        for klass in test_klasses:
            self.assertIsInstance(ofp.message.parse_message(klass(xid=1).pack()), klass)

    def test_parse_unknown_message(self):
        import loxi
        import loxi.of10 as ofp
        # Bug fix: the buffer had only 7 bytes and a known type; an
        # unassigned type byte (0xfe) in a valid 8-byte frame is needed to
        # exercise the unknown-message path.
        buf = '\x01\xfe\x00\x08\x124Vx'
        msg = ofp.message.parse_message(buf)
        # Unknown types still yield a generic message instance.
        self.assertIsInstance(msg, ofp.message.message)
def extractNovelEndeavorsCom(item):
    """Parser for 'Novel Endeavors' feed items.

    Returns a release message for recognized series tags, None for items
    without chapter/volume info (or previews), and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Drop items carrying no chapter/volume information, and previews.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    releases = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, release_type in releases:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=release_type)
    return False
class OptionSeriesWordcloudSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Config accessors for the sonification pitch mapping of a wordcloud
    series (`mapping.pitch` options).

    Each option is exposed as a getter/setter pair sharing one name,
    following this module's Options convention (see the sibling filter
    class): the getter returns the stored value or the documented default.
    """

    def mapFunction(self):
        # Getter; no default mapping function.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter; stored verbatim (not emitted as JS code).
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter; defaults to mapping pitch from the 'y' value.
        return self._config_get('y')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter; default upper note is 'c6'.
        return self._config_get('c6')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        # Getter; default lower note is 'c2'.
        return self._config_get('c2')

    def min(self, text: str):
        self._config(text, js_type=False)

    def scale(self):
        # Getter; no default scale.
        return self._config_get(None)

    def scale(self, value: Any):
        self._config(value, js_type=False)

    def within(self):
        # Getter; defaults to mapping within the 'yAxis' extremes.
        return self._config_get('yAxis')

    def within(self, text: str):
        self._config(text, js_type=False)
def check_custom_header(url, header_name):
    """CORS preflight probe: send an OPTIONS request asking for
    ``header_name`` and report whether the server allows it.

    Returns:
        True when Access-Control-Allow-Headers echoes ``header_name``
        exactly; False when it differs or the header is absent.
    """
    request_header = {'Access-Control-Request-Headers': header_name}
    # Bug fix: header_name was previously passed positionally to
    # requests.options (a TypeError); the preflight header dict must be
    # supplied via the `headers=` keyword.
    req_custom_header = requests.options(url, headers=request_header, verify=False)
    try:
        return (req_custom_header.headers['Access-Control-Allow-Headers'] == header_name)
    except KeyError:
        # No Allow-Headers header in the response: not allowed.
        return False
class Context():
    """Context manager that registers itself on the module's context stack
    (via set_context/pop_context) for the duration of a `with` block."""

    def __init__(self, interpreter, tokenizer=None, truncation_threshold=(- 3e+38)):
        self.interpreter = interpreter
        self.tokenizer = tokenizer
        # Default is a very large negative value, i.e. effectively "never
        # truncate" unless the caller tightens it.
        self.truncation_threshold = truncation_threshold

    def __enter__(self):
        set_context(self)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        pop_context()
        # Never swallow exceptions raised inside the block.
        return False

    @classmethod
    def get(cls):
        """Return the innermost active Context, or None when none is active.

        Bug fix: the method takes ``cls`` but lacked the @classmethod
        decorator, so ``Context.get()`` raised a missing-argument TypeError.
        """
        ctx = _context.get()
        if (len(ctx) == 0):
            return None
        return ctx[(- 1)]
class CreateTransform(Runner):
    """Runner that creates an Elasticsearch transform via the Transform API."""

    async def __call__(self, es, params):
        # Both parameters are required; `mandatory` raises a descriptive
        # error naming this runner when one is missing.
        transform_id = mandatory(params, 'transform-id', self)
        transform_body = mandatory(params, 'body', self)
        await es.transform.put_transform(
            transform_id=transform_id,
            body=transform_body,
            defer_validation=params.get('defer-validation', False),
        )

    def __repr__(self, *args, **kwargs):
        return 'create-transform'
def arrow_actor(color=colors.peacock, opacity=1.0, resolution=24):
    """Create a TVTK arrow actor.

    Args:
        color: actor color (defaults to peacock).
        opacity: surface opacity in [0, 1].
        resolution: tessellation resolution for both tip and shaft.

    Returns:
        tvtk.Actor rendering the arrow geometry.
    """
    arrow_source = tvtk.ArrowSource(tip_resolution=resolution,
                                    shaft_resolution=resolution)
    arrow_mapper = tvtk.PolyDataMapper()
    configure_input_data(arrow_mapper, arrow_source.output)
    surface = tvtk.Property(opacity=opacity, color=color)
    arrow = tvtk.Actor(mapper=arrow_mapper, property=surface)
    # Execute the source pipeline so the actor has geometry to render.
    arrow_source.update()
    return arrow
class MyDependencyState(depgraph.DependencyState):
    """DependencyState specialisation that additionally tracks pending links."""

    def __init__(self, *args, **kwargs):
        # Bug fix: `super(depgraph.DependencyState, self)` starts the MRO
        # lookup *after* DependencyState, skipping its __init__ entirely.
        # The subclass itself must be the first argument.
        super(MyDependencyState, self).__init__(*args, **kwargs)
        self.pending_links = set()

    def get_done_state(self):
        # Hashable snapshot of the state, used to detect revisits.
        return (self.loc_key, frozenset(self.pending))

    def extend(self, loc_key):
        """Return a copy of this state advanced to ``loc_key``.

        The mutable containers (links, history, pending_links) are copied so
        the new state can diverge without mutating this one.
        """
        new_state = self.__class__(loc_key, self.pending)
        new_state.links = set(self.links)
        new_state.history = (self.history + [loc_key])
        new_state.pending_links = set(self.pending_links)
        return new_state
def filter_firewall_traffic_class_data(json):
    """Project the payload down to this module's recognized option fields.

    Keys that are absent or mapped to None are dropped; the payload is first
    cleansed via remove_invalid_fields.
    """
    json = remove_invalid_fields(json)
    wanted_fields = ['class_id', 'class_name']
    return {
        field: json[field]
        for field in wanted_fields
        if field in json and json[field] is not None
    }
class Cyclopolis(BikeShareSystem):
    """pybikes adapter for Cyclopolis bike-share networks.

    Station data is scraped from an HTML feed whose map markers embed the
    coordinates and a <br>-separated info popup per station.
    """

    sync = True
    meta = {'system': 'Cyclopolis', 'company': ['Cyclopolis Systems']}

    def __init__(self, tag, mapstyle, feed_url, meta):
        super(Cyclopolis, self).__init__(tag, meta)
        self.feed_url = feed_url
        # 'google' selects the Google-maps lat/lng regex; anything else
        # falls back to the mapbox-style regex.
        self.mapstyle = mapstyle

    def update(self, scraper=None):
        """Re-scrape the feed and rebuild self.stations."""
        if (scraper is None):
            scraper = utils.PyBikesScraper()
        stations = []
        html = scraper.request(self.feed_url)
        LAT_LNG_RGX = (LAT_LNG_RGX_GOOGLE if (self.mapstyle == 'google') else LAT_LNG_RGX_MAPBOX)
        # Pair each coordinate match with its corresponding info popup;
        # assumes both regexes match in the same station order — TODO confirm.
        data = zip(re.findall(LAT_LNG_RGX, html, re.DOTALL), re.findall(DATA_RGX, html, re.DOTALL))
        for (lat_lng, info) in data:
            latitude = float(lat_lng[0])
            longitude = float(lat_lng[1])
            # Drop (possibly backslash-escaped) <b> tags, then split the
            # popup on <br>, <br/> or <br /> variants.
            fields = re.split('<br.?/?>', re.sub('<\\\\?/?b>', '', info))
            extra = {}
            if (len(fields) == 4):
                # Popup without a slot count.
                (name, raw_bikes, raw_free, status) = fields
            else:
                (name, raw_bikes, raw_slots, raw_free, status) = fields
                slots = int(raw_slots.split(':')[(- 1)])
                extra['slots'] = slots
            # Counts default to 0 when the field isn't numeric.
            try:
                bikes = int(raw_bikes.split(': ')[(- 1)])
            except ValueError:
                bikes = 0
            try:
                free = int(raw_free.split(':')[(- 1)])
            except ValueError:
                free = 0
            if (status == 'offline'):
                extra['closed'] = True
            station = CyclopolisStation(name, latitude, longitude, bikes, free, extra)
            stations.append(station)
        self.stations = stations
class AsyncContractCaller(BaseContractCaller):
    """Async variant of the contract caller.

    On construction, every function in the contract ABI is exposed as an
    attribute of this object, pre-bound to ``call_function`` with the given
    transaction/block/ccip settings.
    """

    w3: 'AsyncWeb3'

    def __init__(self, abi: ABI, w3: 'AsyncWeb3', address: ChecksumAddress, transaction: Optional[TxParams]=None, block_identifier: BlockIdentifier=None, ccip_read_enabled: Optional[bool]=None, decode_tuples: Optional[bool]=False) -> None:
        super().__init__(abi, w3, address, decode_tuples=decode_tuples)
        if self.abi:
            if (transaction is None):
                transaction = {}
            self._functions = filter_by_type('function', self.abi)
            for func in self._functions:
                fn = AsyncContractFunction.factory(func['name'], w3=w3, contract_abi=self.abi, address=self.address, function_identifier=func['name'], decode_tuples=decode_tuples)
                # Resolve the block identifier up front, without issuing an
                # extra RPC call.
                block_id = parse_block_identifier_no_extra_call(w3, block_identifier)
                # Bind call_function so `caller.some_fn(args)` performs the call.
                caller_method = partial(self.call_function, fn, transaction=transaction, block_identifier=block_id, ccip_read_enabled=ccip_read_enabled)
                setattr(self, func['name'], caller_method)

    def __call__(self, transaction: Optional[TxParams]=None, block_identifier: BlockIdentifier=None, ccip_read_enabled: Optional[bool]=None) -> 'AsyncContractCaller':
        """Return a fresh caller carrying per-call overrides (transaction
        params, block identifier, CCIP-read flag)."""
        if (transaction is None):
            transaction = {}
        return type(self)(self.abi, self.w3, self.address, transaction=transaction, block_identifier=block_identifier, ccip_read_enabled=ccip_read_enabled, decode_tuples=self.decode_tuples)
class TestSuperFencesCustomLegacyArithmatexGeneric(util.MdCase):
    """Verify the legacy generic Arithmatex custom-fence format still renders
    and emits exactly one DeprecationWarning."""

    extension = ['pymdownx.superfences']
    # Register a 'math' fence using the legacy generic Arithmatex formatter.
    extension_configs = {'pymdownx.superfences': {'custom_fences': [{'name': 'math', 'class': 'arithmatex', 'format': arithmatex.fence_generic_format}]}}

    def test_legacy_arithmatex_generic(self):
        # Record warnings so the deprecation can be asserted on afterwards.
        with warnings.catch_warnings(record=True) as w:
            self.check_markdown('\n ```math\n E(\\mathbf{v}, \\mathbf{h}) = -\\sum_{i,j}w_{ij}v_i h_j - \\sum_i b_i v_i - \\sum_j c_j h_j\n ```\n ', '\n <div class="arithmatex">\\[\n E(\\mathbf{v}, \\mathbf{h}) = -\\sum_{i,j}w_{ij}v_i h_j - \\sum_i b_i v_i - \\sum_j c_j h_j\n \\]</div>\n ', True)
            # Exactly one warning, and it is the deprecation of this format.
            self.assertTrue((len(w) == 1))
            self.assertTrue(issubclass(w[(- 1)].category, DeprecationWarning))
class HTTPURLHandler(PathHandler):
    """PathHandler that downloads http/https/ftp URLs and caches them
    locally, so remote resources can be opened like local files."""

    # Cap on cached file name length; longer names are truncated and
    # uniquified with a uuid suffix.
    MAX_FILENAME_LEN = 250

    def __init__(self) -> None:
        super().__init__()
        # Maps a URL to the local path of its cached download.
        self.cache_map: Dict[str, str] = {}

    def _get_supported_prefixes(self) -> List[str]:
        # Bug fix: implicit string concatenation had collapsed this list to
        # [' ftp://']; the http/https entries were missing, so this handler
        # never matched HTTP(S) URLs.
        return ['http://', 'https://', 'ftp://']

    def _get_local_path(self, path: str, force: bool=False, cache_dir: Optional[str]=None, **kwargs: Any) -> str:
        """Download ``path`` (if not already cached, or if ``force``) and
        return the local file path of the cached copy."""
        self._check_kwargs(kwargs)
        if (force or (path not in self.cache_map) or (not os.path.exists(self.cache_map[path]))):
            logger = logging.getLogger(__name__)
            parsed_url = urlparse(path)
            # Mirror the URL's directory structure under the cache dir.
            dirname = os.path.join(get_cache_dir(cache_dir), os.path.dirname(parsed_url.path.lstrip('/')))
            filename = path.split('/')[(- 1)]
            if parsed_url.query:
                # Strip the query string from the cached file name.
                filename = filename.split('?').pop(0)
            if (len(filename) > self.MAX_FILENAME_LEN):
                filename = ((filename[:100] + '_') + uuid.uuid4().hex)
            cached = os.path.join(dirname, filename)
            # Serialize concurrent downloads of the same URL.
            with file_lock(cached):
                if (not os.path.isfile(cached)):
                    logger.info('Downloading {} ...'.format(path))
                    cached = download(path, dirname, filename=filename)
            logger.info('URL {} cached in {}'.format(path, cached))
            self.cache_map[path] = cached
        return self.cache_map[path]

    def _open(self, path: str, mode: str='r', buffering: int=(- 1), **kwargs: Any) -> Union[(IO[str], IO[bytes])]:
        """Open the cached local copy of ``path``; only read modes and the
        default buffering are supported."""
        self._check_kwargs(kwargs)
        assert (mode in ('r', 'rb')), '{} does not support open with {} mode'.format(self.__class__.__name__, mode)
        assert (buffering == (- 1)), f'{self.__class__.__name__} does not support the `buffering` argument'
        local_path = self._get_local_path(path, force=False)
        return open(local_path, mode)
# NOTE(review): the numeric literals in this test appear garbled — every
# frequency has been reduced to `.0` (0.0), so the `.0 / (.0 / 2)` cutoff
# expressions divide by zero at runtime. Recover the original sampling
# frequency and band edges (e.g. fs and [low, high]) from version control.
def test_butterworth_returns_correct_value_with_bandstop_filter_type_and_float32_precision(trace):
    # Reference filter: 3rd-order Butterworth band-stop, cutoffs normalized
    # to the Nyquist frequency (fs / 2), coefficients cast to float32.
    (b, a) = signal.butter(3, [(.0 / (.0 / 2)), (.0 / (.0 / 2))], 'bandstop')
    b = b.astype('float32')
    a = a.astype('float32')
    expected = signal.lfilter(b, a, trace)
    # Library under test must match scipy's lfilter within 1e-6 percent.
    result = scared.signal_processing.butterworth(trace, .0, [.0, .0], filter_type=scared.signal_processing.FilterType.BAND_STOP)
    assert (max_diff_percent(expected, result) < 1e-06)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.