code stringlengths 281 23.7M |
|---|
def test_disposing_handler5():
    """A disposed component must become collectable once external refs drop."""
    comp_a = MyComponent4()
    comp_b = MyComponent4()
    comp_a.set_other(comp_b)
    loop.iter()
    ref_b = weakref.ref(comp_b)
    del comp_b
    gc.collect()
    # comp_a still holds a reference, so the weakref stays alive.
    assert ref_b() is not None
    ref_b().dispose()
    comp_a.set_other(None)
    loop.iter()
    gc.collect()
    # With the link severed and the component disposed, it must be gone.
    assert ref_b() is None
def add_moredoc(app, objtype):
    """Register an ``automore<objtype>`` autodoc directive for *objtype*.

    Creates a documenter mixing MoreInfoDocumenter into the registered
    documenter class, then mirrors the registration on the Python domain
    side with a NoDupes directive subclass.
    """
    base_documenter = autodoc.AutoDirective._registry[objtype]
    more_documenter = type('More' + base_documenter.__name__, (MoreInfoDocumenter, base_documenter), {})
    autodoc.AutoDirective._registry['more' + objtype] = more_documenter
    app.add_directive('automore' + objtype, autodoc.AutoDirective)

    # The directive type may differ from the object type (e.g. attributes).
    dirname = getattr(base_documenter, 'directivetype', base_documenter.objtype)
    base_directive = python.PythonDomain.directives[dirname]
    nodupes_directive = type('NoDupes' + base_directive.__name__, (NoDupesObjectDirective, base_directive), {})
    python.PythonDomain.directives['more' + dirname] = nodupes_directive
class AnonymizerStageService(PrivateComputationStageService):
    """Skeleton private-computation stage service for the anonymizer step.

    ``_start_containers`` does not launch anything yet; it returns a dummy
    STARTED container so the surrounding stage machinery can be exercised.
    """

    def __init__(self, onedocker_svc: OneDockerService, onedocker_binary_config_map: DefaultDict[(str, OneDockerBinaryConfig)]) -> None:
        self._onedocker_svc = onedocker_svc
        self._onedocker_binary_config_map = onedocker_binary_config_map
        self._logger: logging.Logger = logging.getLogger(__name__)

    async def run_async(self, pc_instance: PrivateComputationInstance, server_certificate_provider: CertificateProvider, ca_certificate_provider: CertificateProvider, server_certificate_path: str, ca_certificate_path: str, server_ips: Optional[List[str]]=None, server_hostnames: Optional[List[str]]=None, server_private_key_ref_provider: Optional[PrivateKeyReferenceProvider]=None) -> PrivateComputationInstance:
        """Start the (dummy) anonymizer containers and record stage state."""
        self._logger.info('Running anonymizer')
        containers = await self._start_containers(pc_instance, server_ips=server_ips)
        state = StageStateInstance(pc_instance.infra_config.instance_id, pc_instance.current_stage.name, containers=containers, status=StageStateInstanceStatus.STARTED)
        pc_instance.infra_config.instances.append(state)
        return pc_instance

    async def _start_containers(self, pc_instance: PrivateComputationInstance, server_ips: Optional[List[str]]=None) -> List[ContainerInstance]:
        """Pretend to launch containers; returns one dummy STARTED instance."""
        self._logger.info("Starting anonymizer containers (well, not actually - I'm a skeleton)")
        return [ContainerInstance('dummy_container_dne', ip_address='127.0.0.1', status=ContainerInstanceStatus.STARTED)]

    def _dummy_get_status(self, pc_instance: PrivateComputationInstance) -> PrivateComputationInstanceStatus:
        """Stub: advance the reported status by one step per lifecycle phase."""
        self._logger.info("Fetching anonymizer status (well, not actually - I'm a skeleton)")
        flow = pc_instance.infra_config.stage_flow
        current = pc_instance.infra_config.status
        if flow.is_initialized_status(current):
            return pc_instance.current_stage.started_status
        if flow.is_started_status(current):
            return pc_instance.current_stage.completed_status
        return current

    def get_status(self, pc_instance: PrivateComputationInstance) -> PrivateComputationInstanceStatus:
        return self._dummy_get_status(pc_instance)

    def stop_service(self, pc_instance: PrivateComputationInstance) -> None:
        stop_stage_service(pc_instance, self._onedocker_svc)
class DirParamType(click.ParamType):
    """Click parameter type converting an existing directory path into a
    FlyteDirectory."""

    name = 'directory path'

    def convert(self, value: typing.Any, param: typing.Optional[click.Parameter], ctx: typing.Optional[click.Context]) -> typing.Any:
        """Validate *value* as an existing directory and wrap it.

        Raises:
            click.BadParameter: if the path does not exist or is not a directory.
        """
        p = pathlib.Path(value)
        # FIX: ctx is Optional — guard before dereferencing ctx.obj,
        # which previously raised AttributeError when ctx was None.
        is_remote = bool(ctx is not None and getattr(ctx.obj, 'is_remote', False))
        # Remote runs let Flyte choose the remote directory (None); local
        # runs pass False to disable remote staging (original behavior).
        remote_directory = None if is_remote else False
        if p.exists() and p.is_dir():
            return FlyteDirectory(path=value, remote_directory=remote_directory)
        raise click.BadParameter(f'parameter should be a valid directory path, {value}')
def test_gradient_computation():
    """preprocess_adj_to_adj_hat must propagate gradients only when asked."""
    in_matrix, result_matrix = construct_pre_processing_matrix()

    # No scaling parameter: output is detached from any autograd graph.
    assert GraphConvBlock.preprocess_adj_to_adj_hat(in_matrix).requires_grad is False

    # A non-differentiable parameter keeps the output grad-free.
    scale = torch.tensor(1.0, requires_grad=False)
    assert GraphConvBlock.preprocess_adj_to_adj_hat(in_matrix, scale).requires_grad is False

    # A differentiable parameter makes the output differentiable,
    # and the backward pass must produce the expected gradient.
    scale = torch.tensor(1.0, requires_grad=True)
    adj_hat = GraphConvBlock.preprocess_adj_to_adj_hat(in_matrix, scale)
    assert adj_hat.requires_grad is True
    loss = sum(sum(adj_hat - result_matrix))
    loss.backward()
    assert torch.isclose(scale.grad, torch.tensor(0.1078), rtol=0.001)

    # Disabled grad mode wins over requires_grad=True.
    scale = torch.tensor(1.0, requires_grad=True)
    with torch.set_grad_enabled(False):
        adj_hat = GraphConvBlock.preprocess_adj_to_adj_hat(in_matrix, scale)
    assert adj_hat.requires_grad is False
class InclinedDiskModel(FunctionModel2DScalarDeformedRadial):
    """Exponential (Sersic n=1) disk with inclination and position angle."""

    def __init__(self, inc=0, pa=0, degrees=True, **kwargs):
        """Set up the deformed-radial model and store orientation.

        inc/pa are interpreted in degrees when *degrees* is True,
        otherwise in radians (routed to the base class's radian attrs).
        """
        super(InclinedDiskModel, self).__init__('sersic', **kwargs)
        self.n = 1
        if degrees:
            self.incdeg = inc
            self.padeg = pa
        else:
            self.inc = inc
            self.pa = pa
def find_free_port(preferred_port=None):
    """Return *preferred_port* when it is free, else an OS-assigned free port.

    Args:
        preferred_port: port to try first; ignored if already in use.

    Returns:
        int: a TCP port number that was free at the time of the check.
    """
    if preferred_port is not None and not is_port_in_use(preferred_port):
        return preferred_port
    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
        # FIX: SO_REUSEADDR must be set BEFORE bind() to have any effect;
        # the original set it after binding, where it was a no-op.
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind(('', 0))  # port 0 = let the kernel pick a free port
        return s.getsockname()[1]
class OptionSeriesPyramidSonificationContexttracksPointgrouping(Options):
    """Point-grouping options for sonification context tracks.

    FIX: the original defined each getter and setter as two plain methods
    with the same name, so the setter definition silently shadowed the
    getter (dead code). They are restored as @property/@setter pairs.
    """

    @property
    def algorithm(self):
        # Grouping algorithm; defaults to 'minmax'.
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        # Whether point grouping is active; defaults to True.
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        # Timespan (ms) of each group; defaults to 15.
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        # Point property used for grouping; defaults to 'y'.
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class OptionPlotoptionsPolygonOnpointConnectoroptions(Options):
    """Connector options for polygon on-point configuration.

    FIX: getters and setters shared a name without @property decorators,
    so each setter definition replaced its getter. Restored as properties.
    """

    @property
    def dashstyle(self):
        return self._config_get(None)

    @dashstyle.setter
    def dashstyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def stroke(self):
        return self._config_get(None)

    @stroke.setter
    def stroke(self, text: str):
        self._config(text, js_type=False)

    @property
    def width(self):
        # Connector line width; defaults to 1.
        return self._config_get(1)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
class SOLARAdapter(NewHFChatModelAdapter):
    """Adapter that matches SOLAR instruct model checkpoints."""

    support_4bit: bool = True
    support_8bit: bool = False

    def do_match(self, lower_model_name_or_path: Optional[str]=None):
        """Match names containing both 'solar-' and 'instruct' (lowercased)."""
        name = lower_model_name_or_path
        if not name:
            # Preserve original short-circuit result (None or '').
            return name
        return ('solar-' in name) and ('instruct' in name)
class TestEmptySearch():
    """An OEF search with an empty query must return zero agents."""

    def setup_class(cls):
        # One local node with a single connected multiplexer/dialogue pair.
        cls.node = LocalNode()
        cls.node.start()
        cls.address_1 = 'address_1'
        cls.public_key_1 = 'public_key_1'
        connection = _make_local_connection(cls.address_1, cls.public_key_1, cls.node)
        cls.multiplexer = Multiplexer([connection])
        cls.multiplexer.connect()
        cls.dialogues = OefSearchDialogues(cls.address_1)

    def test_empty_search_result(self):
        """Send SEARCH_SERVICES with no constraints; expect an empty result."""
        request, dialogue = self.dialogues.create(counterparty=OEF_LOCAL_NODE_SEARCH_ADDRESS, performative=OefSearchMessage.Performative.SEARCH_SERVICES, query=Query(constraints=[], model=None))
        self.multiplexer.put(Envelope(to=request.to, sender=request.sender, message=request))
        reply = self.multiplexer.get(block=True, timeout=2.0)
        assert reply.protocol_specification_id == OefSearchMessage.protocol_specification_id
        result = cast(OefSearchMessage, reply.message)
        # The reply must belong to the dialogue we opened.
        assert self.dialogues.update(result) == dialogue
        assert result.performative == OefSearchMessage.Performative.SEARCH_RESULT
        assert result.agents == ()

    def teardown_class(cls):
        cls.multiplexer.disconnect()
        cls.node.stop()
def _author(forenames=None, surname=LAST_NAME_1, email=EMAIL_1, affiliation=None):
    """Build a TEI <author> element with persName, optional email/affiliation.

    The first forename is tagged type='first', the rest 'middle'. Falsy
    surname/email are omitted; affiliation is appended only when given.
    """
    given_names = [FIRST_NAME_1] if forenames is None else forenames
    pers_name = TEI_E.persName()
    author_el = TEI_E.author()
    author_el.append(pers_name)
    for idx, given in enumerate(given_names):
        name_type = 'first' if idx == 0 else 'middle'
        pers_name.append(TEI_E.forename(given, type=name_type))
    if surname:
        pers_name.append(TEI_E.surname(surname))
    if email:
        author_el.append(TEI_E.email(email))
    if affiliation is not None:
        author_el.append(affiliation)
    return author_el
class DemoController(Handler):
    """Traits UI handler wiring save/load dialog buttons to the demo view."""

    view = Instance(DemoView)

    def init(self, info):
        """Capture the edited object so the button handlers can reach it."""
        self.view = info.object
        return True

    def save(self, ui_info):
        """Open the save-file dialog; persist only if it was accepted."""
        ui = self.view.edit_traits(view='save_file_view')
        # Idiom fix: test truthiness instead of `== True`.
        if ui.result:
            self.view._save()

    def load(self, ui_info):
        """Open the load-file dialog; load only if it was accepted."""
        ui = self.view.edit_traits(view='load_file_view')
        if ui.result:
            self.view._load()
class OptionSeriesWindbarbSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Lowpass-frequency mapping options for windbarb sonification.

    FIX: getter/setter pairs shared names without @property decorators, so
    each setter silently shadowed its getter. Restored as properties.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def get_rules():
    """Load and validate every ``rules/*.yaml`` ruleset next to this module.

    Returns:
        tuple: (rulesets, specials) — a list of validated rulesets and a
        dict of special-case entries merged from all files. Files that fail
        to parse or validate are reported and skipped.
    """
    cwd = os.path.split(__file__)[0]
    rulepath = os.path.join(cwd, 'rules')
    items = os.listdir(rulepath)
    items.sort()
    ret = []
    specials = {}
    for item in [os.path.join(rulepath, item) for item in items if item.endswith('.yaml')]:
        with open(item, 'r', encoding='utf-8') as fp:
            try:
                text = fp.read()
                # YAML forbids tabs for indentation; normalize to spaces.
                text = text.replace('\t', '    ')
                dat = yaml.load(text, Loader=yaml.SafeLoader)
                (rules, special) = load_validate_rules(item, dat)
                if rules:
                    ret.append(rules)
                if special:
                    for (key, val) in special.items():
                        specials[key] = val
                assert ('starturls' in rules)
            except (yaml.scanner.ScannerError, yaml.parser.ParserError):
                print('ERROR!')
                print("Attempting to load file: '{}'".format(item))
                traceback.print_exc()
            except ValidationError:
                print('ERROR!')
                print("Validation error when trying to load file: '{}'".format(item))
                traceback.print_exc()
                print(dat)
            except AssertionError:
                print('ERROR!')
                print("Validation error when trying to load file: '{}'".format(item))
                traceback.print_exc()
    # Idiom fix: `is None` instead of `== None`.
    assert [True for ruleset in ret if (('starturls' in ruleset) and (ruleset['starturls'] is None))], 'You must have a base ruleset for matching generic sites (with a baseurl value of `None`)'
    print('Loaded rulesets ({}):'.format(len(ret)))
    return (ret, specials)
def create_scope_ws(path='/', query_string='', headers=None, host=DEFAULT_HOST, scheme=None, port=None, remote_addr=None, root_path=None, include_server=True, subprotocols=None, spec_version='2.1') -> Dict[(str, Any)]:
    """Build an ASGI websocket scope by specializing a base HTTP scope."""
    ws_scope = create_scope(path=path, query_string=query_string, headers=headers, host=host, scheme=(scheme or 'ws'), port=port, remote_addr=remote_addr, root_path=root_path, include_server=include_server)
    # Websocket scopes carry their own type/spec version and no HTTP method.
    ws_scope['type'] = ScopeType.WS
    ws_scope['asgi']['spec_version'] = spec_version
    del ws_scope['method']
    if subprotocols is not None:
        ws_scope['subprotocols'] = subprotocols
    return ws_scope
@_view(['GET'])
@_classes([IsAuthenticated])
def open(request):
    """Claim the JupyterHub auth queue for the requesting user.

    FIX 1: the two decorator lines lacked their leading ``@`` and were bare
    (no-op) call expressions.
    FIX 2: the busy-timeout compared '%m/%d/%Y, %H:%M:%S' strings
    lexicographically, which mis-orders dates across month/year boundaries;
    datetimes are now compared directly.
    """
    queue = Jupyter_queue.objects.all()
    if len(queue) > 0:
        head = queue[0]
        if head.user.id == request.user.id:
            # Requesting user already holds the queue slot.
            return Response({'status': 'Success', 'message': 'opened Successfully'}, status=status.HTTP_200_OK)
        # Match the stored timestamp's tz-awareness to avoid naive/aware mixing.
        deadline = head.created_at + timedelta(seconds=10)
        now = datetime.now(tz=head.created_at.tzinfo)
        if now > deadline:
            # Stale holder: evict and fall through to claim the slot.
            head.delete()
        else:
            return Response({'status': 'Error', 'message': 'Juputerhub Auth Is Busy'}, status=status.HTTP_406_NOT_ACCEPTABLE)
    Jupyter_queue.objects.create(user=request.user)
    return Response({'status': 'Success', 'message': 'opened Successfully'}, status=status.HTTP_200_OK)
class Frame():
    """One frame of ten-pin bowling: records throws and scores with bonuses.

    FIX: ``total_pins`` is a method, but every comparison used it without
    calling it — ``self.total_pins == 10`` was always False (method object
    vs int) and ``self.total_pins + pins`` raised TypeError. All uses now
    call ``self.total_pins()``.
    """

    def __init__(self, idx):
        self.idx = idx        # frame position in the game
        self.throws = []      # pins knocked down per throw

    def total_pins(self):
        """Total pins knocked down in this frame."""
        return sum(self.throws)

    def is_strike(self):
        """True when all 10 pins fell on the first throw."""
        return (self.total_pins() == 10) and (len(self.throws) == 1)

    def is_spare(self):
        """True when all 10 pins fell across two throws."""
        return (self.total_pins() == 10) and (len(self.throws) == 2)

    def is_open(self):
        """True when two throws left pins standing."""
        return (self.total_pins() < 10) and (len(self.throws) == 2)

    def is_closed(self):
        """True when the frame accepts no more throws."""
        return (self.total_pins() == 10) or (len(self.throws) == 2)

    def throw(self, pins):
        """Record a throw; a frame can never exceed 10 pins total.

        Raises:
            ValueError: if adding *pins* would exceed 10.
        """
        if (self.total_pins() + pins) > 10:
            raise ValueError("a frame's rolls cannot exceed 10")
        self.throws.append(pins)

    def score(self, next_throws):
        """Frame score including strike (next two) / spare (next one) bonuses."""
        result = self.total_pins()
        if self.is_strike():
            result += sum(next_throws[:2])
        elif self.is_spare():
            result += sum(next_throws[:1])
        return result
class RCareWorldGymWrapper(RCareWorldBaseEnv, gym.Env):
    """Gym-compatible wrapper around RCareWorldBaseEnv.

    FIX: ``custom_channels`` and ``assets`` used mutable default arguments
    (``=[]``), which are shared across all instances; replaced with None
    sentinels that materialize a fresh list per call (same effective default).
    """

    def __init__(self, executable_file: str=None, scene_file: str=None, custom_channels: list=None, assets: list=None, **kwargs):
        RCareWorldBaseEnv.__init__(
            self,
            executable_file=executable_file,
            scene_file=scene_file,
            custom_channels=custom_channels if custom_channels is not None else [],
            assets=assets if assets is not None else [],
            **kwargs,
        )

    def close(self):
        RCareWorldBaseEnv.close(self)
class GetDeletedDbTest(TestModelMixin, TestBase):
    """get_deleted must only see revisions stored on the matching database."""

    databases = {'default', 'mysql', 'postgres'}

    def _assert_deleted_only_on(self, alias):
        # Create+delete a revisioned object on `alias`, then verify the
        # deletion is visible only through that database.
        with reversion.create_revision(using=alias):
            obj = TestModel.objects.create()
        obj.delete()
        self.assertEqual(Version.objects.get_deleted(TestModel).count(), 0)
        self.assertEqual(Version.objects.using(alias).get_deleted(TestModel).count(), 1)

    def testGetDeletedDb(self):
        self._assert_deleted_only_on('postgres')

    def testGetDeletedDbMySql(self):
        self._assert_deleted_only_on('mysql')
class BackgroundThread(object):
    """Polls a Wiegand reader on a daemon thread and decodes key/card events.

    Events are reported through ``callbackaddr(kind, payload)``:
    kind 1 = control key (ESC/ENT pressed with an empty PIN buffer),
    kind 2 = a completed PIN string, kind 3 = a card number.

    FIX: the two bare ``except:`` clauses also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception``.
    """

    KEYPAD_ESC = 10  # keypad code for the ESC key
    KEYPAD_ENT = 11  # keypad code for the ENT key

    def __init__(self, callbackaddr, interval=0.005, WiegandOBJ=None):
        self.interval = interval          # polling period in seconds
        self.pin = ''                     # digits accumulated so far
        self.enablerun = True
        self.callbackfunc = callbackaddr
        self.WR = WiegandOBJ
        # Daemon thread so the poller never blocks interpreter shutdown.
        thread = threading.Thread(target=self.run, args=())
        thread.daemon = True
        thread.start()

    def analyzekey(self, keycode):
        """Handle one 4-bit keypad code given as a binary string."""
        key = int(keycode, 2)
        if (key >= 0) and (key < 10):
            # Digit: append its ASCII character to the pending PIN.
            self.pin = ''.join([self.pin, chr(48 + key)])
        elif key == self.KEYPAD_ESC:
            if len(self.pin) > 0:
                # ESC with pending digits just clears the buffer.
                self.pin = ''
            else:
                self.callbackfunc(1, 'ESC')
        elif key == self.KEYPAD_ENT:
            if len(self.pin) > 0:
                self.callbackfunc(2, str(self.pin))
                self.pin = ''
            else:
                self.callbackfunc(1, 'ENT')

    def run(self):
        """Poll loop: read pending Wiegand frames, dispatch by bit length."""
        while self.enablerun:
            if self.WR.GetPendingBitCount() > 0:
                try:
                    (wstr, wbl) = self.WR.ReadData()
                    if (wbl > 3) and (wbl < 5):
                        # 4 bits: a single keypad press.
                        self.analyzekey(wstr)
                    elif (wbl > 6) and (wbl < 9):
                        # 8 bits: two 4-bit keypad codes.
                        self.analyzekey(wstr[:4])
                        self.analyzekey(wstr[4:8])
                    elif wbl > 20:
                        # Long frame: treat as a card read.
                        self.callbackfunc(3, str(self.binaryToInt(wstr, wbl)))
                except Exception:
                    # Keep the reader loop alive on malformed frames.
                    pass
            time.sleep(self.interval)

    def stoprun(self):
        """Ask the poll loop to exit after its current iteration."""
        self.enablerun = False

    def clearbuffer(self):
        """Discard any partially entered PIN."""
        self.pin = ''

    def binaryToInt(self, binary_string, blen):
        """Convert a Wiegand frame to int, dropping the first and last
        (parity) bits; returns 0 for unparseable input."""
        binary_string = binary_string[1:(blen - 1)]
        try:
            result = int(binary_string, 2)
        except Exception:
            result = 0
        return result
def build_config(name):
    """Generate the litedram core for examples/<name>.yml; return error count.

    Success is measured by the generated Verilog netlist existing; the
    working directory is recreated before and removed after the run.
    """
    errors = 0
    os.system(f'rm -rf examples/{name}')
    os.system(f'mkdir -p examples/{name} && cd examples/{name} && python3 ../../litedram/gen.py ../{name}.yml')
    produced = os.path.isfile(f'examples/{name}/build/gateware/litedram_core.v')
    errors += (not produced)
    os.system(f'rm -rf examples/{name}')
    return errors
def grouped_data_per_team(data):
    """Group per-user values into per-team buckets.

    Users listed in app.config['USAGE_TREEMAP_TEAMS'] are bucketed under
    their team name; everyone else gets a bucket keyed by their own name.
    """
    # Invert the team -> members mapping into member -> team.
    member_to_team = {
        member: team_name
        for team_name, members in app.config['USAGE_TREEMAP_TEAMS'].items()
        for member in members
    }
    grouped = {}
    for user, value in data.items():
        bucket_key = member_to_team.get(user, user)
        grouped.setdefault(bucket_key, {})[user] = value
    return grouped
class TestDefaultAccountEv(BaseEvenniaTest):
    """Tests for DefaultAccount character handling, puppeting and timers.

    FIX: the two ``('evennia.accounts.accounts.time.time', return_value=10000)``
    lines were ``@patch(...)`` decorators that had lost their ``@patch(``
    prefix (the decorated tests take a ``mock_time`` argument); restored.
    """

    def test_characters_property(self):
        # None entries in _playable_characters must be filtered out and
        # pruned from the stored list.
        self.account.db._playable_characters = [self.char1, None]
        self.assertEqual(self.account.characters.all(), [self.char1])
        self.assertEqual(self.account.db._playable_characters, [self.char1])

    def test_add_character_to_playable_list(self):
        self.assertEqual(self.account.characters.all(), [])
        self.account.characters.add(self.char1)
        self.assertEqual(self.account.characters.all(), [self.char1])

    def test_remove_character_from_playable_list(self):
        self.account.characters.add(self.char1)
        self.assertEqual(self.account.characters.all(), [self.char1])
        self.account.characters.remove(self.char1)
        self.assertEqual(self.account.characters.all(), [])

    def test_puppet_success(self):
        self.account.msg = MagicMock()
        with patch('evennia.accounts.accounts._MULTISESSION_MODE', 2):
            self.account.puppet_object(self.session, self.char1)
            self.account.msg.assert_called_with('You are already puppeting this object.')

    @patch('evennia.accounts.accounts.time.time', return_value=10000)
    def test_idle_time(self, mock_time):
        self.session.cmd_last_visible = (10000 - 10)
        idle = self.account.idle_time
        self.assertEqual(idle, 10)
        # With no sessions attached, idle_time is None.
        with patch('evennia.SESSION_HANDLER.sessions_from_account', return_value=[]) as mock_sessh:
            idle = self.account.idle_time
            self.assertEqual(idle, None)

    @patch('evennia.accounts.accounts.time.time', return_value=10000)
    def test_connection_time(self, mock_time):
        self.session.conn_time = (10000 - 10)
        conn = self.account.connection_time
        self.assertEqual(conn, 10)
        with patch('evennia.SESSION_HANDLER.sessions_from_account', return_value=[]) as mock_sessh:
            idle = self.account.connection_time
            self.assertEqual(idle, None)

    def test_create_account(self):
        acct = create.account('TestAccount3', '', 'testpassword123', locks='test:all()', tags=[('tag1', 'category1'), ('tag2', 'category2', 'data1'), ('tag3', None)], attributes=[('key1', 'value1', 'category1', 'edit:false()', True), ('key2', 'value2')])
        acct.save()
        self.assertTrue(acct.pk)

    def test_at_look(self):
        ret = self.account.at_look()
        self.assertTrue(('Out-of-Character' in ret))
        ret = self.account.at_look(target=self.obj1)
        self.assertTrue(('Obj' in ret))
        ret = self.account.at_look(session=self.session)
        self.assertTrue(('*' in ret))
        ret = self.account.at_look(target=self.obj1, session=self.session)
        self.assertTrue(('Obj' in ret))
        ret = self.account.at_look(target='Invalid', session=self.session)
        self.assertEqual(ret, 'Invalid has no in-game appearance.')

    def test_msg(self):
        # NOTE(review): body appears truncated in the source dump; the bare
        # attribute access is kept as-is pending the original implementation.
        self.account.msg
class FirewallDConfigZone(DbusServiceObject):
persistent = True
default_polkit_auth_required = config.dbus.PK_ACTION_CONFIG
_exceptions
    def __init__(self, parent, conf, zone, item_id, *args, **kwargs):
        """D-Bus config object wrapping a single firewalld zone.

        args[0] and args[1] are the bus name and object path forwarded to
        the DbusServiceObject base class.
        """
        super(FirewallDConfigZone, self).__init__(*args, **kwargs)
        self.parent = parent
        self.config = conf
        self.obj = zone          # the wrapped zone config object
        self.item_id = item_id   # numeric id used for log prefixes
        self.busname = args[0]
        self.path = args[1]
        self._log_prefix = ('config.zone.%d' % self.item_id)
        # Register this object's properties for D-Bus introspection.
        dbus_introspection_prepare_properties(self, config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
_handle_exceptions
def __del__(self):
pass
_handle_exceptions
def unregister(self):
self.remove_from_connection()
_handle_exceptions
    def _get_property(self, property_name):
        """Map a D-Bus property name to a typed value from the zone object.

        Raises a DBusException (InvalidArgs) for unknown property names.
        """
        if (property_name == 'name'):
            return dbus.String(self.obj.name)
        elif (property_name == 'filename'):
            return dbus.String(self.obj.filename)
        elif (property_name == 'path'):
            return dbus.String(self.obj.path)
        elif (property_name == 'default'):
            return dbus.Boolean(self.obj.default)
        elif (property_name == 'builtin'):
            return dbus.Boolean(self.obj.builtin)
        else:
            raise dbus.exceptions.DBusException(("org.freedesktop.DBus.Error.InvalidArgs: Property '%s' does not exist" % property_name))
_service_method(dbus.PROPERTIES_IFACE, in_signature='ss', out_signature='v')
_handle_exceptions
    def Get(self, interface_name, property_name, sender=None):
        """org.freedesktop.DBus.Properties.Get for the zone config interface.

        Only DBUS_INTERFACE_CONFIG_ZONE is served; any other interface
        raises UnknownInterface.
        """
        interface_name = dbus_to_python(interface_name, str)
        property_name = dbus_to_python(property_name, str)
        log.debug1("%s.Get('%s', '%s')", self._log_prefix, interface_name, property_name)
        if (interface_name != config.dbus.DBUS_INTERFACE_CONFIG_ZONE):
            raise dbus.exceptions.DBusException(("org.freedesktop.DBus.Error.UnknownInterface: Interface '%s' does not exist" % interface_name))
        return self._get_property(property_name)
_service_method(dbus.PROPERTIES_IFACE, in_signature='s', out_signature='a{sv}')
_handle_exceptions
    def GetAll(self, interface_name, sender=None):
        """org.freedesktop.DBus.Properties.GetAll: return all zone properties.

        Only DBUS_INTERFACE_CONFIG_ZONE is served; other interfaces raise
        UnknownInterface.
        """
        interface_name = dbus_to_python(interface_name, str)
        log.debug1("%s.GetAll('%s')", self._log_prefix, interface_name)
        if (interface_name != config.dbus.DBUS_INTERFACE_CONFIG_ZONE):
            raise dbus.exceptions.DBusException(("org.freedesktop.DBus.Error.UnknownInterface: Interface '%s' does not exist" % interface_name))
        ret = {}
        # Fixed property set; values are produced by _get_property.
        for x in ['name', 'filename', 'path', 'default', 'builtin']:
            ret[x] = self._get_property(x)
        return dbus.Dictionary(ret, signature='sv')
_polkit_require_auth(config.dbus.PK_ACTION_CONFIG)
_service_method(dbus.PROPERTIES_IFACE, in_signature='ssv')
_handle_exceptions
    def Set(self, interface_name, property_name, new_value, sender=None):
        """org.freedesktop.DBus.Properties.Set — always rejected.

        All zone properties are read-only: after the interface check this
        unconditionally raises PropertyReadOnly.
        """
        interface_name = dbus_to_python(interface_name, str)
        property_name = dbus_to_python(property_name, str)
        new_value = dbus_to_python(new_value)
        log.debug1("%s.Set('%s', '%s', '%s')", self._log_prefix, interface_name, property_name, new_value)
        self.parent.accessCheck(sender)
        if (interface_name != config.dbus.DBUS_INTERFACE_CONFIG_ZONE):
            raise dbus.exceptions.DBusException(("org.freedesktop.DBus.Error.UnknownInterface: Interface '%s' does not exist" % interface_name))
        raise dbus.exceptions.DBusException(("org.freedesktop.DBus.Error.PropertyReadOnly: Property '%s' is read-only" % property_name))
.signal(dbus.PROPERTIES_IFACE, signature='sa{sv}as')
def PropertiesChanged(self, interface_name, changed_properties, invalidated_properties):
interface_name = dbus_to_python(interface_name, str)
changed_properties = dbus_to_python(changed_properties)
invalidated_properties = dbus_to_python(invalidated_properties)
log.debug1("%s.PropertiesChanged('%s', '%s', '%s')", self._log_prefix, interface_name, changed_properties, invalidated_properties)
_polkit_require_auth(config.dbus.PK_ACTION_INFO)
_service_method(dbus.INTROSPECTABLE_IFACE, out_signature='s')
_handle_exceptions
def Introspect(self, sender=None):
log.debug2('%s.Introspect()', self._log_prefix)
data = super(FirewallDConfigZone, self).Introspect(self.path, self.busname.get_bus())
return dbus_introspection_add_properties(self, data, config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='(sssbsasa(ss)asba(ssss)asasasasa(ss)b)')
_handle_exceptions
    def getSettings(self, sender=None):
        """Return the zone settings in the legacy tuple form.

        Index 4 (the target) is normalized: the internal DEFAULT_ZONE_TARGET
        value is presented to callers as the string 'default'.
        """
        log.debug1('%s.getSettings()', self._log_prefix)
        settings = self.config.get_zone_config(self.obj)
        if (settings[4] == DEFAULT_ZONE_TARGET):
            _settings = list(settings)
            _settings[4] = 'default'
            settings = tuple(_settings)
        return settings
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='a{sv}')
_handle_exceptions
    def getSettings2(self, sender=None):
        """Return the zone settings as a dict (newer API form).

        Like getSettings(), the internal DEFAULT_ZONE_TARGET is presented
        to callers as the string 'default'.
        """
        log.debug1('%s.getSettings2()', self._log_prefix)
        settings = self.config.get_zone_config_dict(self.obj)
        if (settings['target'] == DEFAULT_ZONE_TARGET):
            settings['target'] = 'default'
        return settings
    def _checkDuplicateInterfacesSources(self, settings):
        """Reject settings that claim an interface/source owned by another zone.

        Only entries newly added relative to the current config are checked;
        *settings* may be the legacy tuple or the dict form.

        Raises FirewallError(ZONE_CONFLICT) on the first conflicting entry.
        """
        old_settings = self.config.get_zone_config_dict(self.obj)
        old_ifaces = (set(old_settings['interfaces']) if ('interfaces' in old_settings) else set())
        old_sources = (set(old_settings['sources']) if ('sources' in old_settings) else set())
        if isinstance(settings, tuple):
            # Legacy tuple form: locate fields by index.
            added_ifaces = (set(settings[Zone.index_of('interfaces')]) - old_ifaces)
            added_sources = (set(settings[Zone.index_of('sources')]) - old_sources)
        else:
            # Dict form: keys may be absent entirely.
            new_ifaces = (set(settings['interfaces']) if ('interfaces' in settings) else set())
            new_sources = (set(settings['sources']) if ('sources' in settings) else set())
            added_ifaces = (new_ifaces - old_ifaces)
            added_sources = (new_sources - old_sources)
        for iface in added_ifaces:
            if self.parent.getZoneOfInterface(iface):
                raise FirewallError(errors.ZONE_CONFLICT, iface)
        for source in added_sources:
            if self.parent.getZoneOfSource(source):
                raise FirewallError(errors.ZONE_CONFLICT, source)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='(sssbsasa(ss)asba(ssss)asasasasa(ss)b)')
_handle_exceptions
    def update(self, settings, sender=None):
        """Replace the zone's settings from the legacy tuple form.

        A 'default' target (index 4) is translated back to the internal
        DEFAULT_ZONE_TARGET before storing; emits Updated on success.
        """
        settings = dbus_to_python(settings)
        log.debug1("%s.update('...')", self._log_prefix)
        self.parent.accessCheck(sender)
        if (settings[4] == 'default'):
            _settings = list(settings)
            _settings[4] = DEFAULT_ZONE_TARGET
            settings = tuple(_settings)
        # Refuse interfaces/sources already bound to another zone.
        self._checkDuplicateInterfacesSources(settings)
        self.obj = self.config.set_zone_config(self.obj, settings)
        self.Updated(self.obj.name)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='a{sv}')
_handle_exceptions
    def update2(self, settings, sender=None):
        """Replace the zone's settings from the dict form (newer API).

        A 'default' target is translated back to the internal
        DEFAULT_ZONE_TARGET before storing; emits Updated on success.
        """
        settings = dbus_to_python(settings)
        log.debug1("%s.update2('...')", self._log_prefix)
        self.parent.accessCheck(sender)
        if (('target' in settings) and (settings['target'] == 'default')):
            settings['target'] = DEFAULT_ZONE_TARGET
        # Refuse interfaces/sources already bound to another zone.
        self._checkDuplicateInterfacesSources(settings)
        self.obj = self.config.set_zone_config_dict(self.obj, settings)
        self.Updated(self.obj.name)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
_handle_exceptions
def loadDefaults(self, sender=None):
log.debug1('%s.loadDefaults()', self._log_prefix)
self.parent.accessCheck(sender)
self.obj = self.config.load_zone_defaults(self.obj)
self.Updated(self.obj.name)
.signal(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, signature='s')
_handle_exceptions
def Updated(self, name):
log.debug1(("%s.Updated('%s')" % (self._log_prefix, name)))
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
_handle_exceptions
def remove(self, sender=None):
log.debug1('%s.removeZone()', self._log_prefix)
self.parent.accessCheck(sender)
self.config.remove_zone(self.obj)
self.parent.removeZone(self.obj)
.signal(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, signature='s')
_handle_exceptions
def Removed(self, name):
log.debug1(("%s.Removed('%s')" % (self._log_prefix, name)))
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
def rename(self, name, sender=None):
name = dbus_to_python(name, str)
log.debug1("%s.rename('%s')", self._log_prefix, name)
self.parent.accessCheck(sender)
self.obj = self.config.rename_zone(self.obj, name)
self.Renamed(name)
.signal(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, signature='s')
_handle_exceptions
def Renamed(self, name):
log.debug1(("%s.Renamed('%s')" % (self._log_prefix, name)))
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='s')
_handle_exceptions
def getVersion(self, sender=None):
log.debug1('%s.getVersion()', self._log_prefix)
return self.getSettings()[0]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
def setVersion(self, version, sender=None):
version = dbus_to_python(version, str)
log.debug1("%s.setVersion('%s')", self._log_prefix, version)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[0] = version
self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='s')
_handle_exceptions
def getShort(self, sender=None):
log.debug1('%s.getShort()', self._log_prefix)
return self.getSettings()[1]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
def setShort(self, short, sender=None):
short = dbus_to_python(short, str)
log.debug1("%s.setShort('%s')", self._log_prefix, short)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[1] = short
self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='s')
_handle_exceptions
def getDescription(self, sender=None):
log.debug1('%s.getDescription()', self._log_prefix)
return self.getSettings()[2]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
def setDescription(self, description, sender=None):
description = dbus_to_python(description, str)
log.debug1("%s.setDescription('%s')", self._log_prefix, description)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[2] = description
self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='s')
_handle_exceptions
def getTarget(self, sender=None):
log.debug1('%s.getTarget()', self._log_prefix)
settings = self.getSettings()
return (settings[4] if (settings[4] != DEFAULT_ZONE_TARGET) else 'default')
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
def setTarget(self, target, sender=None):
target = dbus_to_python(target, str)
log.debug1("%s.setTarget('%s')", self._log_prefix, target)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[4] = (target if (target != 'default') else DEFAULT_ZONE_TARGET)
self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='as')
_handle_exceptions
def getServices(self, sender=None):
log.debug1('%s.getServices()', self._log_prefix)
return self.getSettings()[5]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='as')
_handle_exceptions
def setServices(self, services, sender=None):
services = dbus_to_python(services, list)
log.debug1("%s.setServices('[%s]')", self._log_prefix, ','.join(services))
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[5] = services
self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
def addService(self, service, sender=None):
service = dbus_to_python(service, str)
log.debug1("%s.addService('%s')", self._log_prefix, service)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if (service in settings[5]):
raise FirewallError(errors.ALREADY_ENABLED, service)
settings[5].append(service)
self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
def removeService(self, service, sender=None):
service = dbus_to_python(service, str)
log.debug1("%s.removeService('%s')", self._log_prefix, service)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
if (service not in settings[5]):
raise FirewallError(errors.NOT_ENABLED, service)
settings[5].remove(service)
self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s', out_signature='b')
_handle_exceptions
def queryService(self, service, sender=None):
service = dbus_to_python(service, str)
log.debug1("%s.queryService('%s')", self._log_prefix, service)
return (service in self.getSettings()[5])
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='a(ss)')
_handle_exceptions
def getPorts(self, sender=None):
log.debug1('%s.getPorts()', self._log_prefix)
return self.getSettings()[6]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='a(ss)')
_handle_exceptions
def setPorts(self, ports, sender=None):
_ports = []
for port in dbus_to_python(ports, list):
if isinstance(port, list):
_ports.append(tuple(port))
else:
_ports.append(port)
ports = _ports
log.debug1("%s.setPorts('[%s]')", self._log_prefix, ','.join((("('%s, '%s')" % (port[0], port[1])) for port in ports)))
self.parent.accessCheck(sender)
settings = list(self.getSettings())
settings[6] = ports
self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='ss')
_handle_exceptions
    def addPort(self, port, protocol, sender=None):
        """Add a port (or port range) for *protocol* to the zone config.

        Raises FirewallError(ALREADY_ENABLED) when the port is already
        covered. Adjacent/overlapping ranges for the same protocol are
        coalesced into a single entry before the settings are stored.
        """
        port = dbus_to_python(port, str)
        protocol = dbus_to_python(protocol, str)
        log.debug1("%s.addPort('%s', '%s')", self._log_prefix, port, protocol)
        self.parent.accessCheck(sender)
        settings = list(self.getSettings())
        # Only entries with the same protocol participate in the checks.
        existing_port_ids = list(filter((lambda x: (x[1] == protocol)), settings[6]))
        for port_id in existing_port_ids:
            if portInPortRange(port, port_id[0]):
                raise FirewallError(errors.ALREADY_ENABLED, ('%s:%s' % (port, protocol)))
        # Merge the new port with any adjacent/overlapping existing ranges.
        (added_ranges, removed_ranges) = coalescePortRange(port, [_port for (_port, _protocol) in existing_port_ids])
        for range in removed_ranges:
            settings[6].remove((portStr(range, '-'), protocol))
        for range in added_ranges:
            settings[6].append((portStr(range, '-'), protocol))
        self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='ss')
_handle_exceptions
def removePort(self, port, protocol, sender=None):
port = dbus_to_python(port, str)
protocol = dbus_to_python(protocol, str)
log.debug1("%s.removePort('%s', '%s')", self._log_prefix, port, protocol)
self.parent.accessCheck(sender)
settings = list(self.getSettings())
existing_port_ids = list(filter((lambda x: (x[1] == protocol)), settings[6]))
for port_id in existing_port_ids:
if portInPortRange(port, port_id[0]):
break
else:
raise FirewallError(errors.NOT_ENABLED, ('%s:%s' % (port, protocol)))
(added_ranges, removed_ranges) = breakPortRange(port, [_port for (_port, _protocol) in existing_port_ids])
for range in removed_ranges:
settings[6].remove((portStr(range, '-'), protocol))
for range in added_ranges:
settings[6].append((portStr(range, '-'), protocol))
self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='ss', out_signature='b')
_handle_exceptions
def queryPort(self, port, protocol, sender=None):
port = dbus_to_python(port, str)
protocol = dbus_to_python(protocol, str)
log.debug1("%s.queryPort('%s', '%s')", self._log_prefix, port, protocol)
for (_port, _protocol) in self.getSettings()[6]:
if (portInPortRange(port, _port) and (protocol == _protocol)):
return True
return False
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='as')
_handle_exceptions
# D-Bus: return the zone's protocol list (settings index 13).
def getProtocols(self, sender=None):
    log.debug1('%s.getProtocols()', self._log_prefix)
    return self.getSettings()[13]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='as')
_handle_exceptions
# D-Bus: replace the zone's protocol list wholesale.
def setProtocols(self, protocols, sender=None):
    protocols = dbus_to_python(protocols, list)
    log.debug1("%s.setProtocols('[%s]')", self._log_prefix, ','.join(protocols))
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    settings[13] = protocols
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
# D-Bus: add a single protocol; error if already present.
def addProtocol(self, protocol, sender=None):
    protocol = dbus_to_python(protocol, str)
    log.debug1("%s.addProtocol('%s')", self._log_prefix, protocol)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if (protocol in settings[13]):
        raise FirewallError(errors.ALREADY_ENABLED, protocol)
    settings[13].append(protocol)
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
# D-Bus: remove a single protocol; error if not present.
def removeProtocol(self, protocol, sender=None):
    protocol = dbus_to_python(protocol, str)
    log.debug1("%s.removeProtocol('%s')", self._log_prefix, protocol)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if (protocol not in settings[13]):
        raise FirewallError(errors.NOT_ENABLED, protocol)
    settings[13].remove(protocol)
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s', out_signature='b')
_handle_exceptions
# D-Bus: membership test on the protocol list.
def queryProtocol(self, protocol, sender=None):
    protocol = dbus_to_python(protocol, str)
    log.debug1("%s.queryProtocol('%s')", self._log_prefix, protocol)
    return (protocol in self.getSettings()[13])
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='a(ss)')
_handle_exceptions
# D-Bus: return the zone's source-port (port, protocol) pairs (settings index 14).
def getSourcePorts(self, sender=None):
    log.debug1('%s.getSourcePorts()', self._log_prefix)
    return self.getSettings()[14]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='a(ss)')
_handle_exceptions
# D-Bus: replace the source-port list wholesale.
def setSourcePorts(self, ports, sender=None):
    _ports = []
    # Normalize D-Bus lists to tuples so they match the stored entries.
    for port in dbus_to_python(ports, list):
        if isinstance(port, list):
            _ports.append(tuple(port))
        else:
            _ports.append(port)
    ports = _ports
    log.debug1("%s.setSourcePorts('[%s]')", self._log_prefix, ','.join((("('%s, '%s')" % (port[0], port[1])) for port in ports)))
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    settings[14] = ports
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='ss')
_handle_exceptions
# D-Bus: add a source port (or range), merging with existing ranges.
def addSourcePort(self, port, protocol, sender=None):
    port = dbus_to_python(port, str)
    protocol = dbus_to_python(protocol, str)
    log.debug1("%s.addSourcePort('%s', '%s')", self._log_prefix, port, protocol)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    existing_port_ids = list(filter((lambda x: (x[1] == protocol)), settings[14]))
    for port_id in existing_port_ids:
        if portInPortRange(port, port_id[0]):
            raise FirewallError(errors.ALREADY_ENABLED, ('%s:%s' % (port, protocol)))
    # Merge the new range with existing ones; drop entries made redundant.
    (added_ranges, removed_ranges) = coalescePortRange(port, [_port for (_port, _protocol) in existing_port_ids])
    for range in removed_ranges:
        settings[14].remove((portStr(range, '-'), protocol))
    for range in added_ranges:
        settings[14].append((portStr(range, '-'), protocol))
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='ss')
_handle_exceptions
# D-Bus: remove a source port (or sub-range), splitting ranges as needed.
def removeSourcePort(self, port, protocol, sender=None):
    port = dbus_to_python(port, str)
    protocol = dbus_to_python(protocol, str)
    log.debug1("%s.removeSourcePort('%s', '%s')", self._log_prefix, port, protocol)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    existing_port_ids = list(filter((lambda x: (x[1] == protocol)), settings[14]))
    # for/else: only raise when no stored range covers the requested port.
    for port_id in existing_port_ids:
        if portInPortRange(port, port_id[0]):
            break
    else:
        raise FirewallError(errors.NOT_ENABLED, ('%s:%s' % (port, protocol)))
    (added_ranges, removed_ranges) = breakPortRange(port, [_port for (_port, _protocol) in existing_port_ids])
    for range in removed_ranges:
        settings[14].remove((portStr(range, '-'), protocol))
    for range in added_ranges:
        settings[14].append((portStr(range, '-'), protocol))
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='ss', out_signature='b')
_handle_exceptions
# D-Bus: true when `port` falls inside any stored source-port range.
def querySourcePort(self, port, protocol, sender=None):
    port = dbus_to_python(port, str)
    protocol = dbus_to_python(protocol, str)
    log.debug1("%s.querySourcePort('%s', '%s')", self._log_prefix, port, protocol)
    for (_port, _protocol) in self.getSettings()[14]:
        if (portInPortRange(port, _port) and (protocol == _protocol)):
            return True
    return False
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='as')
_handle_exceptions
# D-Bus: return the zone's blocked ICMP types (settings index 7).
def getIcmpBlocks(self, sender=None):
    log.debug1('%s.getIcmpBlocks()', self._log_prefix)
    return self.getSettings()[7]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='as')
_handle_exceptions
# D-Bus: replace the ICMP block list wholesale.
def setIcmpBlocks(self, icmptypes, sender=None):
    icmptypes = dbus_to_python(icmptypes, list)
    log.debug1("%s.setIcmpBlocks('[%s]')", self._log_prefix, ','.join(icmptypes))
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    settings[7] = icmptypes
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
# D-Bus: block one ICMP type; error if already blocked.
def addIcmpBlock(self, icmptype, sender=None):
    icmptype = dbus_to_python(icmptype, str)
    log.debug1("%s.addIcmpBlock('%s')", self._log_prefix, icmptype)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if (icmptype in settings[7]):
        raise FirewallError(errors.ALREADY_ENABLED, icmptype)
    settings[7].append(icmptype)
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
# D-Bus: unblock one ICMP type; error if not blocked.
def removeIcmpBlock(self, icmptype, sender=None):
    icmptype = dbus_to_python(icmptype, str)
    log.debug1("%s.removeIcmpBlock('%s')", self._log_prefix, icmptype)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if (icmptype not in settings[7]):
        raise FirewallError(errors.NOT_ENABLED, icmptype)
    settings[7].remove(icmptype)
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s', out_signature='b')
_handle_exceptions
# D-Bus: membership test on the ICMP block list.
def queryIcmpBlock(self, icmptype, sender=None):
    icmptype = dbus_to_python(icmptype, str)
    log.debug1("%s.queryIcmpBlock('%s')", self._log_prefix, icmptype)
    return (icmptype in self.getSettings()[7])
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='b')
_handle_exceptions
# D-Bus: return the icmp-block-inversion flag (settings index 15).
def getIcmpBlockInversion(self, sender=None):
    log.debug1('%s.getIcmpBlockInversion()', self._log_prefix)
    return self.getSettings()[15]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='b')
_handle_exceptions
# D-Bus: set the inversion flag unconditionally.
def setIcmpBlockInversion(self, flag, sender=None):
    flag = dbus_to_python(flag, bool)
    log.debug1("%s.setIcmpBlockInversion('%s')", self._log_prefix, flag)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    settings[15] = flag
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
_handle_exceptions
# D-Bus: enable the inversion flag; error if already enabled.
def addIcmpBlockInversion(self, sender=None):
    log.debug1('%s.addIcmpBlockInversion()', self._log_prefix)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if settings[15]:
        raise FirewallError(errors.ALREADY_ENABLED, 'icmp-block-inversion')
    settings[15] = True
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
_handle_exceptions
# D-Bus: disable the inversion flag; error if already disabled.
def removeIcmpBlockInversion(self, sender=None):
    log.debug1('%s.removeIcmpBlockInversion()', self._log_prefix)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if (not settings[15]):
        raise FirewallError(errors.NOT_ENABLED, 'icmp-block-inversion')
    settings[15] = False
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='b')
_handle_exceptions
# D-Bus: query the inversion flag.
def queryIcmpBlockInversion(self, sender=None):
    log.debug1('%s.queryIcmpBlockInversion()', self._log_prefix)
    return self.getSettings()[15]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='b')
_handle_exceptions
# D-Bus: return the masquerade flag (settings index 8).
def getMasquerade(self, sender=None):
    log.debug1('%s.getMasquerade()', self._log_prefix)
    return self.getSettings()[8]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='b')
_handle_exceptions
# D-Bus: set the masquerade flag unconditionally.
def setMasquerade(self, masquerade, sender=None):
    masquerade = dbus_to_python(masquerade, bool)
    log.debug1("%s.setMasquerade('%s')", self._log_prefix, masquerade)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    settings[8] = masquerade
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
_handle_exceptions
# D-Bus: enable masquerading; error if already enabled.
def addMasquerade(self, sender=None):
    log.debug1('%s.addMasquerade()', self._log_prefix)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if settings[8]:
        raise FirewallError(errors.ALREADY_ENABLED, 'masquerade')
    settings[8] = True
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE)
_handle_exceptions
# D-Bus: disable masquerading; error if already disabled.
def removeMasquerade(self, sender=None):
    log.debug1('%s.removeMasquerade()', self._log_prefix)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if (not settings[8]):
        raise FirewallError(errors.NOT_ENABLED, 'masquerade')
    settings[8] = False
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='b')
_handle_exceptions
# D-Bus: query the masquerade flag.
def queryMasquerade(self, sender=None):
    log.debug1('%s.queryMasquerade()', self._log_prefix)
    return self.getSettings()[8]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='a(ssss)')
_handle_exceptions
# D-Bus: return the forward-port entries (settings index 9) as
# (port, protocol, toport, toaddr) 4-tuples.
def getForwardPorts(self, sender=None):
    log.debug1('%s.getForwardPorts()', self._log_prefix)
    return self.getSettings()[9]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='a(ssss)')
_handle_exceptions
# D-Bus: replace the forward-port list wholesale.
def setForwardPorts(self, ports, sender=None):
    _ports = []
    # Normalize D-Bus lists to tuples so they match the stored entries.
    for port in dbus_to_python(ports, list):
        if isinstance(port, list):
            _ports.append(tuple(port))
        else:
            _ports.append(port)
    ports = _ports
    log.debug1("%s.setForwardPorts('[%s]')", self._log_prefix, ','.join((("('%s, '%s', '%s', '%s')" % (port[0], port[1], port[2], port[3])) for port in ports)))
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    settings[9] = ports
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='ssss')
_handle_exceptions
# D-Bus: add one forward-port entry; error if the exact entry exists.
def addForwardPort(self, port, protocol, toport, toaddr, sender=None):
    port = dbus_to_python(port, str)
    protocol = dbus_to_python(protocol, str)
    toport = dbus_to_python(toport, str)
    toaddr = dbus_to_python(toaddr, str)
    log.debug1("%s.addForwardPort('%s', '%s', '%s', '%s')", self._log_prefix, port, protocol, toport, toaddr)
    self.parent.accessCheck(sender)
    fwp_id = (port, protocol, str(toport), str(toaddr))
    settings = list(self.getSettings())
    if (fwp_id in settings[9]):
        raise FirewallError(errors.ALREADY_ENABLED, ('%s:%s:%s:%s' % (port, protocol, toport, toaddr)))
    settings[9].append(fwp_id)
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='ssss')
_handle_exceptions
# D-Bus: remove one forward-port entry; error if not present.
def removeForwardPort(self, port, protocol, toport, toaddr, sender=None):
    port = dbus_to_python(port, str)
    protocol = dbus_to_python(protocol, str)
    toport = dbus_to_python(toport, str)
    toaddr = dbus_to_python(toaddr, str)
    log.debug1("%s.removeForwardPort('%s', '%s', '%s', '%s')", self._log_prefix, port, protocol, toport, toaddr)
    self.parent.accessCheck(sender)
    fwp_id = (port, protocol, str(toport), str(toaddr))
    settings = list(self.getSettings())
    if (fwp_id not in settings[9]):
        raise FirewallError(errors.NOT_ENABLED, ('%s:%s:%s:%s' % (port, protocol, toport, toaddr)))
    settings[9].remove(fwp_id)
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='ssss', out_signature='b')
_handle_exceptions
# D-Bus: exact-match membership test on the forward-port list.
def queryForwardPort(self, port, protocol, toport, toaddr, sender=None):
    port = dbus_to_python(port, str)
    protocol = dbus_to_python(protocol, str)
    toport = dbus_to_python(toport, str)
    toaddr = dbus_to_python(toaddr, str)
    log.debug1("%s.queryForwardPort('%s', '%s', '%s', '%s')", self._log_prefix, port, protocol, toport, toaddr)
    fwp_id = (port, protocol, str(toport), str(toaddr))
    return (fwp_id in self.getSettings()[9])
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='as')
_handle_exceptions
# D-Bus: return the zone's bound interfaces (settings index 10).
def getInterfaces(self, sender=None):
    log.debug1('%s.getInterfaces()', self._log_prefix)
    return self.getSettings()[10]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='as')
_handle_exceptions
# D-Bus: replace the interface list wholesale (no ifcfg sync here).
def setInterfaces(self, interfaces, sender=None):
    interfaces = dbus_to_python(interfaces, list)
    log.debug1("%s.setInterfaces('[%s]')", self._log_prefix, ','.join(interfaces))
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    settings[10] = interfaces
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
# D-Bus: bind one interface, then record the zone in the interface's
# ifcfg file so the binding survives network restarts.
def addInterface(self, interface, sender=None):
    interface = dbus_to_python(interface, str)
    log.debug1("%s.addInterface('%s')", self._log_prefix, interface)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if (interface in settings[10]):
        raise FirewallError(errors.ALREADY_ENABLED, interface)
    settings[10].append(interface)
    self.update(settings)
    ifcfg_set_zone_of_interface(self.obj.name, interface)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
# D-Bus: unbind one interface and clear the zone from its ifcfg file.
def removeInterface(self, interface, sender=None):
    interface = dbus_to_python(interface, str)
    log.debug1("%s.removeInterface('%s')", self._log_prefix, interface)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if (interface not in settings[10]):
        raise FirewallError(errors.NOT_ENABLED, interface)
    settings[10].remove(interface)
    self.update(settings)
    ifcfg_set_zone_of_interface('', interface)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s', out_signature='b')
_handle_exceptions
# D-Bus: membership test on the interface list.
def queryInterface(self, interface, sender=None):
    interface = dbus_to_python(interface, str)
    log.debug1("%s.queryInterface('%s')", self._log_prefix, interface)
    return (interface in self.getSettings()[10])
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='as')
_handle_exceptions
# D-Bus: return the zone's source addresses (settings index 11).
def getSources(self, sender=None):
    log.debug1('%s.getSources()', self._log_prefix)
    return self.getSettings()[11]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='as')
_handle_exceptions
# D-Bus: replace the source list wholesale.
def setSources(self, sources, sender=None):
    sources = dbus_to_python(sources, list)
    log.debug1("%s.setSources('[%s]')", self._log_prefix, ','.join(sources))
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    settings[11] = sources
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
# D-Bus: add one source address; error if already present.
def addSource(self, source, sender=None):
    source = dbus_to_python(source, str)
    log.debug1("%s.addSource('%s')", self._log_prefix, source)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if (source in settings[11]):
        raise FirewallError(errors.ALREADY_ENABLED, source)
    settings[11].append(source)
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
# D-Bus: remove one source address; error if not present.
def removeSource(self, source, sender=None):
    source = dbus_to_python(source, str)
    log.debug1("%s.removeSource('%s')", self._log_prefix, source)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    if (source not in settings[11]):
        raise FirewallError(errors.NOT_ENABLED, source)
    settings[11].remove(source)
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s', out_signature='b')
_handle_exceptions
# D-Bus: membership test on the source list.
def querySource(self, source, sender=None):
    source = dbus_to_python(source, str)
    log.debug1("%s.querySource('%s')", self._log_prefix, source)
    return (source in self.getSettings()[11])
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, out_signature='as')
_handle_exceptions
# D-Bus: return the zone's rich rules as strings (settings index 12).
def getRichRules(self, sender=None):
    log.debug1('%s.getRichRules()', self._log_prefix)
    return self.getSettings()[12]
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='as')
_handle_exceptions
# D-Bus: replace the rich-rule list wholesale; each rule string is parsed
# and re-serialized via Rich_Rule so invalid rules raise and stored rules
# are in canonical form.
def setRichRules(self, rules, sender=None):
    rules = dbus_to_python(rules, list)
    log.debug1("%s.setRichRules('[%s]')", self._log_prefix, ','.join(rules))
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    rules = [str(Rich_Rule(rule_str=r)) for r in rules]
    settings[12] = rules
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
# D-Bus: add one rich rule (canonicalized); error if already present.
def addRichRule(self, rule, sender=None):
    rule = dbus_to_python(rule, str)
    log.debug1("%s.addRichRule('%s')", self._log_prefix, rule)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    rule_str = str(Rich_Rule(rule_str=rule))
    if (rule_str in settings[12]):
        raise FirewallError(errors.ALREADY_ENABLED, rule)
    settings[12].append(rule_str)
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s')
_handle_exceptions
# D-Bus: remove one rich rule (matched in canonical form); error if absent.
def removeRichRule(self, rule, sender=None):
    rule = dbus_to_python(rule, str)
    log.debug1("%s.removeRichRule('%s')", self._log_prefix, rule)
    self.parent.accessCheck(sender)
    settings = list(self.getSettings())
    rule_str = str(Rich_Rule(rule_str=rule))
    if (rule_str not in settings[12]):
        raise FirewallError(errors.NOT_ENABLED, rule)
    settings[12].remove(rule_str)
    self.update(settings)
_service_method(config.dbus.DBUS_INTERFACE_CONFIG_ZONE, in_signature='s', out_signature='b')
_handle_exceptions
# D-Bus: membership test on the rich-rule list (canonical form).
def queryRichRule(self, rule, sender=None):
    rule = dbus_to_python(rule, str)
    log.debug1("%s.queryRichRule('%s')", self._log_prefix, rule)
    rule_str = str(Rich_Rule(rule_str=rule))
    return (rule_str in self.getSettings()[12])
class WebsocketProvider(JSONBaseProvider):
    """JSON-RPC provider that sends requests over a persistent websocket.

    All instances share one background event loop (class attribute
    ``_loop``); ``make_request`` blocks the calling thread until the
    websocket round-trip completes.
    """
    logger = logging.getLogger('web3.providers.WebsocketProvider')
    # Shared event loop, created lazily by the first instantiation.
    _loop = None
    def __init__(self, endpoint_uri: Optional[Union[(URI, str)]]=None, websocket_kwargs: Optional[Any]=None, websocket_timeout: int=DEFAULT_WEBSOCKET_TIMEOUT) -> None:
        """Set up a lazy persistent websocket connection.

        :param endpoint_uri: ws:// or wss:// endpoint; falls back to the
            package default endpoint when None.
        :param websocket_kwargs: extra kwargs for the websocket library;
            keys listed in RESTRICTED_WEBSOCKET_KWARGS are rejected.
        :param websocket_timeout: per-send/per-recv timeout in seconds.
        :raises Web3ValidationError: if restricted kwargs are supplied.
        """
        # NOTE(review): this assumes URI(None) stays None (NewType-style
        # cast) so the default-endpoint fallback below still fires — confirm.
        self.endpoint_uri = URI(endpoint_uri)
        self.websocket_timeout = websocket_timeout
        if (self.endpoint_uri is None):
            self.endpoint_uri = get_default_endpoint()
        if (WebsocketProvider._loop is None):
            WebsocketProvider._loop = _get_threaded_loop()
        if (websocket_kwargs is None):
            websocket_kwargs = {}
        else:
            # Reject kwargs that this class manages itself (e.g. the URI).
            found_restricted_keys = set(websocket_kwargs).intersection(RESTRICTED_WEBSOCKET_KWARGS)
            if found_restricted_keys:
                raise Web3ValidationError(f'{RESTRICTED_WEBSOCKET_KWARGS} are not allowed in websocket_kwargs, found: {found_restricted_keys}')
        self.conn = PersistentWebSocket(self.endpoint_uri, websocket_kwargs)
        super().__init__()
    def __str__(self) -> str:
        return f'WS connection {self.endpoint_uri}'
    async def coro_make_request(self, request_data: bytes) -> RPCResponse:
        """Send one encoded request and decode the reply, each under the
        configured timeout."""
        async with self.conn as conn:
            (await asyncio.wait_for(conn.send(request_data), timeout=self.websocket_timeout))
            return json.loads((await asyncio.wait_for(conn.recv(), timeout=self.websocket_timeout)))
    def make_request(self, method: RPCEndpoint, params: Any) -> RPCResponse:
        """Synchronously perform one JSON-RPC call on the shared loop."""
        self.logger.debug(f'Making request WebSocket. URI: {self.endpoint_uri}, Method: {method}')
        request_data = self.encode_rpc_request(method, params)
        # Schedule the coroutine on the shared loop and block for the result.
        future = asyncio.run_coroutine_threadsafe(self.coro_make_request(request_data), WebsocketProvider._loop)
        return future.result()
class OSDWindow(Gtk.Window):
    """On-screen-display window showing the current track.

    The window auto-hides after ``display_duration`` seconds (optionally
    fading out when ``use_alpha`` is set) unless the pointer is over it,
    and can instead be put into a decorated move/resize mode.
    """
    # GLib source id of the pending auto-hide timeout, or None.
    __hide_id = None
    # GLib source id of the running fade-out timer, or None.
    __fadeout_id = None
    # Whether the window hides itself after the display duration.
    __autohide = True
    # Options mapping (e.g. 'use_alpha', 'display_duration').
    __options = None
    # Fallback geometry used by restore_geometry_and_show().
    geometry = dict(x=20, y=20, width=300, height=120)
    def __init__(self, css_provider, options, allow_resize_move):
        """Build the OSD window.

        :param css_provider: Gtk.CssProvider applied to the window style.
        :param options: mapping of OSD options read throughout the class.
        :param allow_resize_move: True puts the window in a decorated
            move/resize mode and disables auto-hide.
        """
        Gtk.Window.__init__(self, type=Gtk.WindowType.TOPLEVEL)
        self.__options = options
        self.set_title('Exaile OSD')
        self.set_keep_above(True)
        self.stick()
        self.set_skip_pager_hint(True)
        self.set_skip_taskbar_hint(True)
        self.set_deletable(False)
        self.connect('delete-event', (lambda _widget, _event: self.hide_immediately))
        self.connect('screen-changed', self.__on_screen_changed)
        style_context = self.get_style_context()
        style_context.add_provider(css_provider, Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION)
        self.info_area = info.TrackInfoPane(player.PLAYER)
        self.info_area.set_default_text(_('No track played yet'))
        self.info_area.set_auto_update(True)
        self.info_area.cover.set_property('visible', True)
        self.info_area.info_label.set_selectable(False)
        self.info_area.show_all()
        self.add(self.info_area)
        self.__setup_resize_move_related_stuff(allow_resize_move)
        # Keep the window visible while the pointer hovers over it.
        self.add_events(Gdk.EventMask.LEAVE_NOTIFY_MASK)
        self.connect('leave-notify-event', self.__on_leave_notify_event)
        self.add_events(Gdk.EventMask.STRUCTURE_MASK)
        self.connect('window-state-event', self.__on_window_state_event)
        self.info_area.set_display_progress(True)
        # Initialize the visual/alpha support for the current screen.
        self.__on_screen_changed(self, None)
    def __setup_resize_move_related_stuff(self, allow_resize_move):
        """Configure decoration, focus and hints for the chosen mode."""
        self.set_decorated(False)
        self.set_resizable(allow_resize_move)
        if allow_resize_move:
            self.set_type_hint(Gdk.WindowTypeHint.NORMAL)
            self.set_title(_('Move or resize OSD'))
            # Only show a resize handle and a title bar, no close button etc.
            self.connect('realize', (lambda _widget: self.get_window().set_decorations((Gdk.WMDecoration.RESIZEH | Gdk.WMDecoration.TITLE))))
        else:
            self.set_type_hint(Gdk.WindowTypeHint.NOTIFICATION)
        self.set_accept_focus(allow_resize_move)
        self.__autohide = (not allow_resize_move)
    def __on_window_state_event(self, _widget, win_state):
        """Undo window-manager state changes the OSD must not stay in."""
        illegal_states = ((((Gdk.WindowState.FULLSCREEN | Gdk.WindowState.ICONIFIED) | Gdk.WindowState.TILED) | Gdk.WindowState.MAXIMIZED) | Gdk.WindowState.BELOW)
        if ((win_state.changed_mask & illegal_states) and (win_state.new_window_state & illegal_states)):
            GLib.idle_add(self.restore_geometry_and_show)
            return Gdk.EVENT_STOP
        else:
            return Gdk.EVENT_PROPAGATE
    def destroy_osd(self):
        """Persist geometry (if visible), cancel timers and destroy."""
        if self.is_visible():
            xl_settings.set_option('plugin/osd/position', list(self.get_position()))
            (width, height) = self.get_size()
            xl_settings.set_option('plugin/osd/width', width)
            xl_settings.set_option('plugin/osd/height', height)
            self.hide_immediately()
        if self.__fadeout_id:
            GLib.source_remove(self.__fadeout_id)
            self.__fadeout_id = None
        if self.__hide_id:
            GLib.source_remove(self.__hide_id)
            self.__hide_id = None
        Gtk.Window.destroy(self)
    def __start_fadeout(self):
        """Timeout callback: begin fading out, or hide immediately.

        Postponed (by re-arming the display timer) while the pointer is
        over the window.
        """
        self.__hide_id = None
        gdk_display = self.get_window().get_display()
        # Gdk.Seat replaced Gdk.DeviceManager in Gtk+ 3.20.
        if ((Gtk.get_major_version() > 3) or ((Gtk.get_major_version() == 3) and (Gtk.get_minor_version() >= 20))):
            gdk_seat = gdk_display.get_default_seat()
            gdk_device = gdk_seat.get_pointer()
        else:
            gdk_device_manager = gdk_display.get_device_manager()
            gdk_device = gdk_device_manager.get_client_pointer()
        (window, _posx, _posy) = gdk_device.get_window_at_position()
        if (window and (window is self.get_window())):
            # Pointer is over the OSD: keep it visible and retry later.
            self.show_for_a_while()
            return
        if (self.__options['use_alpha'] is True):
            if (self.__fadeout_id is None):
                self.__fadeout_id = GLib.timeout_add(30, self.__do_fadeout_step)
        else:
            Gtk.Window.hide(self)
        return False
    def show_for_a_while(self):
        """Show the window at full opacity; re-arm auto-hide if enabled."""
        if self.__fadeout_id:
            GLib.source_remove(self.__fadeout_id)
            self.__fadeout_id = None
        if (Gtk.Widget.get_opacity(self) < 1):
            Gtk.Widget.set_opacity(self, 1)
        if self.__hide_id:
            do_assert((self.__fadeout_id is None))
            GLib.source_remove(self.__hide_id)
            self.__hide_id = None
        if self.__autohide:
            self.__hide_id = GLib.timeout_add_seconds(self.__options['display_duration'], self.__start_fadeout)
        Gtk.Window.present(self)
    def restore_geometry_and_show(self):
        """Apply the stored geometry, show, and clamp to the work area."""
        geo = self.geometry
        self.set_default_size(geo['width'], geo['height'])
        self.move(geo['x'], geo['y'])
        self.show_for_a_while()
        allocation = Gdk.Rectangle()
        allocation.x = geo['x']
        allocation.y = geo['y']
        allocation.width = geo['width']
        allocation.height = geo['height']
        # NOTE(review): super(Gtk.Window, self) skips Gtk.Window's own
        # overrides when sanitizing — presumably intentional; confirm.
        _sanitize_window_geometry(super(Gtk.Window, self), allocation, 10, 0.2, 0.2)
    def set_autohide(self, do_autohide):
        """Enable/disable auto-hide; enabling also (re)shows the window."""
        self.__autohide = do_autohide
        if do_autohide:
            do_assert((self.__hide_id is None))
            do_assert((self.__fadeout_id is None))
            GLib.idle_add(self.show_for_a_while)
    def __do_fadeout_step(self):
        """Fade timer callback: lower opacity; hide when (almost) fully
        transparent. Returns True to keep the timer running."""
        do_assert((self.__hide_id is None))
        if (Gtk.Widget.get_opacity(self) > 0.001):
            Gtk.Widget.set_opacity(self, (Gtk.Widget.get_opacity(self) - 0.05))
            return True
        else:
            self.__fadeout_id = None
            Gtk.Window.hide(self)
            return False
    def __on_screen_changed(self, _widget, _oldscreen):
        """Pick an RGBA visual if available; otherwise disable alpha."""
        screen = self.get_screen()
        visual = screen.get_rgba_visual()
        if (visual is None):
            visual = screen.get_system_visual()
            self.__options['use_alpha'] = False
            LOGGER.warning('OSD: Disabling alpha channel because the Gtk+ backend does not support it.')
        self.set_visual(visual)
    # The following bare string is dead code kept for reference by the
    # original authors (a disabled __on_size_allocate implementation).
    '\n    def __on_size_allocate(self, _widget, _allocation):\n        """\n        Applies the non-rectangular shape\n        """\n        # TODO: make this work again\n        # Bug in pycairo: cairo_region_* functions are not available before\n        # version 1.11.0, see # we might want to enable this code below once pycairo is distributed on\n        # most Linux distros.\n        # cairo_region = cairo.Region.create_rectangle(allocation)\n        # as a result, calling\n        # self.get_window().shape_combine_region(cairo_region, 0, 0)\n        # is impossible. Thus, it is impossible to shape the window.\n        # Instead, we have to work around this issue by leaving parts\n        # of the window undrawn.\n\n        # leave the old code here for reference:\n        width, height = allocation.width, allocation.height\n        mask = Gdk.Pixmap(None, width, height, 1)\n        context = mask.cairo_create()\n\n        context.set_source_rgb(0, 0, 0)\n        context.set_operator(cairo.OPERATOR_CLEAR)\n        context.paint()\n\n        radius = self.__options[\'border_radius\']\n        inner = (radius, radius, width - radius, height - radius)\n\n        context.set_source_rgb(1, 1, 1)\n        context.set_operator(cairo.OPERATOR_SOURCE)\n        # Top left corner\n        context.arc(inner.x, inner.y, radius, 1.0 * pi, 1.5 * pi)\n        # Top right corner\n        context.arc(inner.width, inner.y, radius, 1.5 * pi, 2.0 * pi)\n        # Bottom right corner\n        context.arc(inner.width, inner.height, radius, 0.0 * pi, 0.5 * pi)\n        # Bottom left corner\n        context.arc(inner.x, inner.height, radius, 0.5 * pi, 1.0 * pi)\n        context.fill()\n\n        self.shape_combine_mask(mask, 0, 0)\n    '
    def __on_leave_notify_event(self, _widget, event_crossing):
        """Pointer left the window: restart the visibility countdown."""
        if (event_crossing.detail == Gdk.NotifyType.NONLINEAR):
            self.show_for_a_while()
        return Gdk.EVENT_PROPAGATE
    def hide_immediately(self):
        """Cancel all pending timers and hide without fading."""
        if self.__fadeout_id:
            GLib.source_remove(self.__fadeout_id)
            self.__fadeout_id = None
        if self.__hide_id:
            GLib.source_remove(self.__hide_id)
            self.__hide_id = None
        Gtk.Window.hide(self)
class NameExcludeTestCase(IncludeExcludeMixIn, unittest.TestCase):
    """Exercise transform.name_exclude() against the shared fixtures."""

    def test_filter_id(self):
        # Excluding by id pattern drops the first two sequences.
        remaining = list(transform.name_exclude(self.sequences, 'sequenceid[12]'))
        self.assertEqual(3, len(remaining))
        self.assertEqual(self.sequences[2:], remaining)

    def test_filter_description(self):
        # Excluding by description pattern drops the last two sequences.
        remaining = list(transform.name_exclude(self.sequences, 'sequence id 4|test seq'))
        self.assertEqual(self.sequences[:3], remaining)
def test_get_dataframe():
    """Restart properties loaded on a grid expose the expected means via get_dataframe()."""
    grid = xtgeo.grid_from_file(GFILE1, fformat='egrid')
    props = xtgeo.gridproperties_from_file(
        RFILE1,
        fformat='unrst',
        names=['SOIL', 'SWAT', 'PRESSURE'],
        dates=[],
        grid=grid,
    )
    frame = props.get_dataframe(activeonly=True, ijk=True, xyz=False)
    print(frame.head())
    assert frame['SWAT_'].mean() == pytest.approx(0.87802, abs=0.001)
    assert frame['PRESSURE_'].mean() == pytest.approx(334.523, abs=0.005)
def test_multiple_tickets_discount(db):
    """Percent code applies to tickets A/B; the amount code on C is not the
    one supplied, so C gets no discount; the free ticket D costs nothing."""
    ticket_a = TicketSubFactory(price=50.0)
    ticket_b = TicketSubFactory(price=495.8, event=ticket_a.event)
    ticket_c = TicketSubFactory(price=321.3, event=ticket_a.event)
    ticket_d = TicketSubFactory(price=500.0, event=ticket_a.event, type='free')
    discount = DiscountCodeTicketSubFactory(type='percent', value=50.0, tickets=[ticket_a, ticket_b])
    DiscountCodeTicketSubFactory(type='amount', value=100.0, tickets=[ticket_c])
    db.session.commit()
    order = calculate_order_amount(
        _create_ticket_dict([ticket_a, ticket_b, ticket_c, ticket_d], [2, 3, 1, 2]),
        discount_code=discount.id,
    )
    assert order['total'] == 1115.0
    assert order['discount'] == 793.7
    # (quantity, price, sub_total, expected discount fields) per ticket row.
    discounted_rows = [
        (2, 50.0, 50.0, {'total': 50.0, 'amount': 25.0, 'percent': 50.0}),
        (3, 495.8, 743.7, {'total': 743.7, 'amount': 247.9, 'percent': 50.0}),
    ]
    for row, (quantity, price, sub_total, expected_discount) in zip(order['tickets'], discounted_rows):
        assert row['quantity'] == quantity
        assert row['price'] == price
        assert row['sub_total'] == sub_total
        for key, value in expected_discount.items():
            assert row['discount'][key] == value
    # Ticket C: full price, no discount applied.
    assert order['tickets'][2]['quantity'] == 1
    assert order['tickets'][2]['price'] == 321.3
    assert order['tickets'][2]['sub_total'] == 321.3
    assert order['tickets'][2]['discount'] is None
def input_bot_token(data: DataModel, default=None):
    """Prompt until a Telegram bot token that passes a get_me() check is
    entered; an empty answer returns ``default`` when one was given."""
    prompt = _('Your Telegram Bot token: ')
    if default:
        prompt += f'[{default}] '
    while True:
        answer = input(prompt)
        if not answer:
            if default:
                return default
            print(_('Bot token is required. Please try again.'))
            continue
        try:
            # Validate the token by asking Telegram who the bot is.
            Bot(answer, request=data.request).get_me()
            return answer
        except TelegramError as error:
            print_wrapped(str(error))
            print()
            print(_('Please try again.'))
class OptionSeriesNetworkgraphSonificationTracksMappingHighpassFrequency(Options):
    """Accessor shims for the networkgraph sonification highpassFrequency
    mapping options (mapFunction, mapTo, max, min, within).

    NOTE(review): each getter/setter pair shares a name with no visible
    @property / @<name>.setter decorators, so as written each second def
    silently replaces the first. The decorators were presumably lost in
    extraction — confirm against the original source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def create_and_populate_bq_table(client, name, schema, table_data):
    """Create (or fetch) BigQuery table ``name`` with ``schema`` and load
    ``table_data`` into it via a temporary CSV file.

    :param client: BigQuery client wrapper with get_or_create_table().
    :param name: table name.
    :param schema: schema passed through to table creation and row encoding.
    :param table_data: iterable of dict-like items; nothing is loaded when empty.
    """
    table = client.get_or_create_table(name, schema)
    if not table_data:
        return
    # newline='' is required by the csv module so the writer controls line
    # endings itself; without it rows can be corrupted on platforms that
    # translate newlines.
    with tempfile.NamedTemporaryFile('wt', newline='') as f:
        writer = csv.writer(f)
        for item in table_data:
            writer.writerow(dict_to_row(item, schema))
        # Flush buffered rows to disk so the loader, which reopens the file
        # by name, sees all of them (the original relied on seek(0) for this).
        f.flush()
        table.insert_rows_from_csv(f.name, schema)
_renderer(wrap_type=TestNumberOfColumnsWithMissingValues)
class TestNumberOfColumnsWithMissingValuesRenderer(BaseTestMissingValuesRenderer):
    """HTML renderer for the 'number of columns with missing values' test."""

    def render_html(self, obj: TestNumberOfColumnsWithMissingValues) -> TestHtmlInfo:
        """Extend the base HTML with a per-column missing-values table.

        The parameter annotation now matches the registered wrap_type
        (it previously named the unrelated TestNumberOfMissingValues test).
        """
        info = super().render_html(obj)
        metric_result = obj.metric.get_result()
        return self.get_table_with_missing_values_and_percents_by_column(info, metric_result, 'number_of_columns_with_missing_values')
# NOTE(review): the line below appears to be a mangled decorator — most
# likely @mock.patch('flytekit.configuration.plugin.FlyteRemote',
# spec=FlyteRemote) — whose '@mock.patch' prefix was lost in extraction;
# as written it is not valid Python. Confirm against the original source.
('flytekit.configuration.plugin.FlyteRemote', spec=FlyteRemote)
# CLI test: `pyflyte backfill` over a 5-day window launches an execution.
def test_pyflyte_backfill(mock_remote):
    mock_remote.generate_console_url.return_value = 'ex'
    runner = CliRunner()
    with runner.isolated_filesystem():
        result = runner.invoke(pyflyte.main, ['backfill', '--parallel', '-p', 'flytesnacks', '-d', 'development', '--from-date', 'now', '--backfill-window', '5 day', 'daily'])
        assert (result.exit_code == 0)
        assert ('Execution launched' in result.output)
def fortios_extender(data, fos):
    """Dispatch the extender lte-carrier-list operation to FortiOS.

    Returns the Ansible-style tuple ``(failed, changed, response, diff)``.
    """
    fos.do_member_operation('extender', 'lte-carrier-list')
    if data['extender_lte_carrier_list']:
        resp = extender_lte_carrier_list(data, fos)
    else:
        # fail_json() terminates the module run; if it ever returned,
        # `resp` below would be unbound.
        fos._module.fail_json(msg=('missing task body: %s' % 'extender_lte_carrier_list'))
    # Evaluate the status check once instead of twice.
    successful = is_successful_status(resp)
    changed = successful and (resp['revision_changed'] if ('revision_changed' in resp) else True)
    return ((not successful), changed, resp, {})
class OptionSeriesHeatmapSonificationTracksMappingTime(Options):
    """Accessors for the ``time`` mapping options of a heatmap sonification
    track.

    NOTE(review): each getter/setter pair here originally carried
    ``@property`` / ``@<name>.setter`` decorators; without them the second
    ``def`` silently shadowed the first, making every getter unreachable.
    The decorators are restored below -- confirm against the original source.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def __cookVacmViewInfo(snmpEngine, viewName, subTree):
    """Resolve the vacmViewTreeFamilyEntry MIB node and the row index for
    the (viewName, subTree) pair."""
    mib_builder = snmpEngine.msgAndPduDsp.mibInstrumController.mibBuilder
    (tree_family_entry,) = mib_builder.importSymbols('SNMP-VIEW-BASED-ACM-MIB', 'vacmViewTreeFamilyEntry')
    row_index = tree_family_entry.getInstIdFromIndices(viewName, subTree)
    return (tree_family_entry, row_index)
def create_file_object_entry(file_object: FileObject) -> FileObjectEntry:
    """Build a fresh FileObjectEntry ORM row from *file_object*."""
    # NOTE(review): the return value of sanitize() is discarded here, exactly
    # as in the original -- presumably it mutates the mapping in place;
    # confirm against its definition.
    sanitize(file_object.virtual_file_path)
    column_values = dict(
        uid=file_object.uid,
        sha256=file_object.sha256,
        file_name=file_object.file_name,
        root_firmware=[],
        parent_files=[],
        included_files=[],
        depth=file_object.depth,
        size=file_object.size,
        comments=file_object.comments,
        is_firmware=isinstance(file_object, Firmware),
        firmware=None,
        analyses=[],
    )
    return FileObjectEntry(**column_values)
def rows_by_other_config(manager, system_id, key, value, table='Bridge', fn=None):
    """Return rows of *table* whose ``other_config[key] == value``.

    When *fn* is given and at least one row matched, *fn* is applied to each
    matched row and the mapped list is returned instead of the raw rows.
    """
    def has_pair(row):
        # Both checks kept: `value` may legitimately be None.
        return (key in row.other_config) and (row.other_config.get(key) == value)

    matched = match_rows(manager, system_id, table, has_pair)
    if matched and fn is not None:
        return [fn(row) for row in matched]
    return matched
class TestZ3LogicCondition():
    """Unit tests for Z3LogicCondition, a wrapper around z3 boolean terms.

    NOTE(review): every leading ``.parametrize(...)`` line below appears to
    be a ``@pytest.mark.parametrize(...)`` decorator whose ``@pytest.mark``
    prefix was lost when this file was extracted -- as written these lines
    are not valid Python.  Confirm against the original source before use.
    """
    .parametrize('z3_term, length', [(Z3LogicCondition.initialize_true(context), 0), (Z3LogicCondition.initialize_false(context), 0), (z3_x[1].copy(), 1), ((~ z3_x[1].copy()), 1), ((z3_x[1].copy() | z3_x[2].copy()), 2), ((z3_x[1].copy() & z3_x[2].copy()), 2), (((z3_x[1].copy() & z3_x[2].copy()) | z3_x[3].copy()), 3), (((z3_x[1].copy() & z3_x[2].copy()) | (z3_x[1].copy() & z3_x[3].copy())), 4)])
    def test_len(self, z3_term, length):
        assert (len(z3_term) == length)
    .parametrize('z3_term, result', (_get_is_instance_test_case(symbol=True, neg_symbol=True) + [((~ (z3_x[1].copy() | z3_x[2].copy())), False)]))
    def test_is_literal(self, z3_term, result):
        assert (z3_term.is_literal == result)
    .parametrize('z3_term, result', [(z3_x[1].copy(), True), ((~ z3_x[1].copy()), True), ((z3_x[1].copy() | z3_x[2].copy()), True), (((~ z3_x[1].copy()) | z3_x[2].copy()), True), ((((~ z3_x[1].copy()) | z3_x[2].copy()) | z3_x[3].copy()).simplify(), True), ((z3_x[1].copy() & z3_x[2].copy()), False), (((z3_x[1].copy() | z3_x[2].copy()) & z3_x[3].copy()), False), (((z3_x[1].copy() & z3_x[2].copy()) | z3_x[3].copy()), False)])
    def test_is_disjunction_of_literals(self, z3_term, result):
        assert (z3_term.is_disjunction_of_literals == result)
    .parametrize('z3_term, result', [(z3_x[1].copy(), True), ((~ z3_x[1].copy()), True), ((z3_x[1].copy() | z3_x[2].copy()), True), (((~ z3_x[1].copy()) | z3_x[2].copy()), True), ((((~ z3_x[1].copy()) | z3_x[2].copy()) | z3_x[3].copy()).simplify(), True), ((z3_x[1].copy() & z3_x[2].copy()), True), (((z3_x[1].copy() | z3_x[2].copy()) & z3_x[3].copy()), True), (((z3_x[1].copy() | (~ z3_x[2].copy())) & (~ z3_x[3].copy())), True), (((z3_x[1].copy() & z3_x[2].copy()) | z3_x[3].copy()), False), ((((z3_x[1].copy() & z3_x[2].copy()) | z3_x[3].copy()) & z3_x[4].copy()), False)])
    def test_is_cnf_form(self, z3_term, result):
        assert (z3_term.is_cnf_form == result)
    .parametrize('term1, term2, result', [(((((((z3_x[1].copy() & (~ z3_x[1].copy())) | (~ z3_x[2].copy())) | (z3_x[3].copy() & (z3_x[4].copy() | (~ z3_x[4].copy())))) | (~ ((z3_x[5].copy() & z3_x[2].copy()) & (~ z3_x[1].copy())))) | ((~ (z3_x[5].copy() & (~ z3_x[5].copy()))) & z3_x[1].copy())) | (~ (z3_x[3].copy() | (~ z3_x[3].copy())))), (((z3_x[1].copy() | (~ z3_x[5].copy())) | (~ z3_x[2].copy())) | z3_x[3].copy()), True), ((((z3_x[1].copy() | (z3_x[2].copy() & (~ z3_x[1].copy()))) | (z3_x[3].copy() & (~ (z3_x[1].copy() | z3_x[2].copy())))) | ((z3_x[5].copy() & z3_x[4].copy()) & (~ z3_x[1].copy()))), (((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) | (z3_x[5].copy() & z3_x[4].copy())), True), ((((z3_x[1].copy() | (z3_x[2].copy() & (~ z3_x[1].copy()))) | (z3_x[3].copy() & (~ (z3_x[1].copy() | z3_x[2].copy())))) | ((z3_x[5].copy() & z3_x[4].copy()) & (~ z3_x[1].copy()))), ((((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) | z3_x[5].copy()) & (((z3_x[1].copy() | z3_x[4].copy()) | z3_x[2].copy()) | z3_x[3].copy())), True), ((z3_x[1].copy() & z3_x[2].copy()), ((z3_x[1].copy() & z3_x[2].copy()) & z3_x[3].copy()), False), ((z3_x[1].copy() & z3_x[2].copy()), ((z3_x[1].copy() & z3_x[2].copy()) | z3_x[1].copy()), False)])
    def test_is_equivalent_to(self, term1, term2, result):
        assert (term1.is_equivalent_to(term2) == result)
    .parametrize('term1, term2, result', [(z3_x[1].copy(), (z3_x[1].copy() | z3_x[2].copy()), True), (z3_x[1].copy(), (z3_x[1].copy() & z3_x[2].copy()), False), (((z3_x[1].copy() | z3_x[2].copy()) & ((~ z3_x[1].copy()) | z3_x[3].copy())), (((z3_x[1].copy() & z3_x[3].copy()) | ((~ z3_x[1].copy()) & z3_x[2].copy())) | (z3_x[1].copy() & z3_x[4].copy())), True), (((z3_x[1].copy() | z3_x[2].copy()) & ((~ z3_x[1].copy()) | z3_x[3].copy())), (((z3_x[1].copy() & z3_x[3].copy()) | (z3_x[1].copy() & z3_x[2].copy())) | (z3_x[1].copy() & z3_x[4].copy())), False)])
    def test_does_imply(self, term1, term2, result):
        assert (term1.does_imply(term2) == result)
    .parametrize('term1, term2, result', [(true_value, false_value, False), (false_value, true_value, False), ((z3_x[1].copy() & (~ z3_x[1].copy())), true_value, False), ((z3_x[1].copy() | (~ z3_x[1].copy())), false_value, False), (z3_x[1].copy(), (~ z3_x[1].copy()), True), ((z3_x[1].copy() | z3_x[2].copy()), ((~ z3_x[1].copy()) & (~ z3_x[2].copy())), True), ((z3_x[1].copy() & z3_x[2].copy()), (~ (z3_x[1].copy() & z3_x[2].copy())), True), ((z3_x[1].copy() | z3_x[2].copy()), (((~ z3_x[1].copy()) & (~ z3_x[2].copy())) | z3_x[1].copy()), False), ((z3_x[1].copy() & z3_x[2].copy()), (((~ z3_x[1].copy()) | (~ z3_x[2].copy())) & z3_x[1].copy()), False)])
    def test_is_complementary_to(self, term1, term2, result):
        assert (term1.is_complementary_to(term2) == result)
    .parametrize('z3_term', [BoolVal(True, ctx=context), BoolVal(False, ctx=context), b_x[1], Not(b_x[1]), And(b_x[1], b_x[2]), Or(b_x[1], b_x[2]), And(Or(b_x[1], b_x[2]), b_x[3])])
    def test_init(self, z3_term):
        cond = Z3LogicCondition(z3_term)
        assert (cond._condition == z3_term)
    def test_initialize_symbol(self):
        new_context = Context()
        cond = Z3LogicCondition.initialize_symbol('x1', new_context)
        assert (cond._condition == Bool('x1', ctx=new_context))
    def test_initialize_true(self):
        new_context = Context()
        cond = Z3LogicCondition.initialize_true(new_context)
        assert (cond._condition == BoolVal(True, ctx=new_context))
    def test_initialize_false(self):
        new_context = Context()
        cond = Z3LogicCondition.initialize_false(new_context)
        assert (cond._condition == BoolVal(False, ctx=new_context))
    .parametrize('term1, term2', _get_operation_instances())
    def test_and(self, term1, term2):
        cond = (Z3LogicCondition(term1) & Z3LogicCondition(term2))
        assert (cond._condition == And(term1, term2))
    .parametrize('term1, term2', _get_operation_instances())
    def test_or(self, term1, term2):
        cond = (Z3LogicCondition(term1) | Z3LogicCondition(term2))
        assert (cond._condition == Or(term1, term2))
    .parametrize('term1, term2', _get_operation_instances())
    def test_negate(self, term1, term2):
        cond = (~ Z3LogicCondition(term1))
        assert (cond._condition == Not(term1))
    .parametrize('z3_term, string', [(BoolVal(True, ctx=context), 'true'), (BoolVal(False, ctx=context), 'false'), (Or(And(b_x[1]), Not(b_x[2]), And(b_x[3], Or(Not(b_x[4]))), Not(And(b_x[5], b_x[2], Not(b_x[1]))), And(Not(And(b_x[5], Not(b_x[5]))), b_x[1]), Not(Or(b_x[3], Not(b_x[3])))), '(x1 | !x2 | (x3 & !x4) | !(x5 & x2 & !x1) | (!(x5 & !x5) & x1) | !(x3 | !x3))'), (Or(And(b_x[1], Not(b_x[1])), Not(b_x[2]), And(b_x[3], Or(b_x[4], Not(b_x[4]))), Not(And(b_x[5], b_x[2], Not(b_x[1]))), And(Not(And(b_x[5], Not(b_x[5]))), b_x[1]), Not(Or(b_x[3], Not(b_x[3])))), '((x1 & !x1) | !x2 | (x3 & (x4 | !x4)) | !(x5 & x2 & !x1) | (!(x5 & !x5) & x1) | !(x3 | !x3))')])
    def test_string(self, z3_term, string):
        cond = Z3LogicCondition(z3_term)
        assert (str(cond) == string)
    .parametrize('term, result', _get_is_instance_test_case(true_val=True))
    def test_is_true(self, term, result):
        assert (term.is_true == result)
    .parametrize('term, result', _get_is_instance_test_case(false_val=True))
    def test_is_false(self, term, result):
        assert (term.is_false == result)
    .parametrize('term, result', _get_is_instance_test_case(or_f=True))
    def test_is_disjunction(self, term, result):
        assert (term.is_disjunction == result)
    .parametrize('term, result', _get_is_instance_test_case(and_f=True))
    def test_is_conjunction(self, term, result):
        assert (term.is_conjunction == result)
    .parametrize('term, result', _get_is_instance_test_case(neg_symbol=True))
    def test_is_negation(self, term, result):
        assert (term.is_negation == result)
    .parametrize('term, operands', [(true_value, []), (false_value, []), (z3_x[1].copy(), []), ((z3_x[1].copy() | z3_x[2].copy()), [z3_x[1].copy(), z3_x[2].copy()]), ((z3_x[1].copy() & z3_x[2].copy()), [z3_x[1].copy(), z3_x[2].copy()]), ((~ z3_x[1].copy()), [z3_x[1].copy()]), (((z3_x[1].copy() | z3_x[2].copy()) & z3_x[3].copy()), [(z3_x[1].copy() | z3_x[2].copy()), z3_x[3].copy()])])
    def test_operands(self, term, operands):
        assert ([str(op) for op in term.operands] == [str(op) for op in operands])
    .parametrize('term, result', [(BoolVal(True, ctx=context), False), (BoolVal(False, ctx=context), False), (Not(b_x[1]), False), (And(b_x[1], b_x[2]), False), (Or(Not(b_x[1]), b_x[1]), False), (b_x[1], True)])
    def test_is_symbol(self, term, result):
        cond = Z3LogicCondition(term)
        assert (cond.is_symbol == result)
    .parametrize('term1, term2, result', [(b_x[1], b_x[2], False), (b_x[1], Not(b_x[1]), False), (b_x[1], And(b_x[1]), False), (b_x[1], Or(b_x[1]), False), (BitVecVal(1, 32), BitVecVal(3, 32), False), (BitVecVal(2, 32), BitVecVal(2, 32), True), (And(b_x[1], b_x[2], b_x[2]), And(b_x[1], b_x[1], b_x[2]), False), (And(b_x[1], And(b_x[2], b_x[3])), And(And(b_x[1], b_x[2]), b_x[3]), True), (And(b_x[1], b_x[2], b_x[2]), And(b_x[1], b_x[2]), False), (And(b_x[1], b_x[2]), And(b_x[1], b_x[1], b_x[2]), False), (And(b_x[1], b_x[2]), And(b_x[2], b_x[1]), True), (And(b_x[1], Or(b_x[2], b_x[3])), And(Or(b_x[3], b_x[2]), b_x[1]), True)])
    def test_is_equal_to(self, term1, term2, result):
        cond1 = Z3LogicCondition(term1)
        cond2 = Z3LogicCondition(term2)
        assert (cond1.is_equal_to(cond2) == result)
    .parametrize('term1, term2, result', [(b_x[1], Bool(f'x2', ctx=Context()), False), (b_x[1], Bool(f'x1', ctx=Context()), True), (b_x[1], Not(Bool(f'x1', ctx=Context())), False), (BoolVal(True, ctx=context), BoolVal(True, Context()), True), (BoolVal(False, ctx=context), BoolVal(False, Context()), True), (BoolVal(False, ctx=context), BoolVal(True, Context()), False), (And(b_x[1], And(b_x[2], b_x[3])), And(And(Bool(f'x1', ctx=(new_ctx := Context())), Bool(f'x2', new_ctx)), Bool(f'x3', new_ctx)), True), (And(b_x[1], b_x[2]), And(Bool(f'x2', (new_ctx := Context())), Bool(f'x1', new_ctx)), True), (And(b_x[1], Or(b_x[2], b_x[3])), And(Or(Bool(f'x3', (new_ctx := Context())), Bool(f'x2', new_ctx)), Bool(f'x1', new_ctx)), True)])
    def test_is_equal_to_different_context(self, term1, term2, result):
        cond1 = Z3LogicCondition(term1)
        cond2 = Z3LogicCondition(term2)
        assert ((cond1.is_equal_to(cond2) == result) and (cond1.context != cond2.context))
    .parametrize('term, cnf_term', _get_normal_forms('cnf'))
    def test_to_cnf(self, term, cnf_term):
        assert term.to_cnf().is_equal_to(cnf_term)
    .parametrize('term, dnf_term', _get_normal_forms('dnf'))
    def test_to_dnf(self, term, dnf_term):
        assert term.to_dnf().is_equal_to(dnf_term)
    .parametrize('term, simplified', [((((z3_x[1].copy() & (~ z3_x[2].copy())) & (z3_x[3].copy() | (~ (z3_x[4].copy() & z3_x[2].copy())))) & (~ ((z3_x[5].copy() & z3_x[2].copy()) & (~ z3_x[1].copy())))), (z3_x[1].copy() & (~ z3_x[2].copy()))), ((((z3_x[1].copy() | (z3_x[2].copy() & (~ z3_x[1].copy()))) | (z3_x[3].copy() & (~ (z3_x[1].copy() | z3_x[2].copy())))) | ((z3_x[5].copy() & z3_x[4].copy()) & (~ z3_x[1].copy()))), (((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) | (z3_x[5].copy() & z3_x[4].copy()))), (((((((z3_x[1].copy() & (~ z3_x[1].copy())) | (~ z3_x[2].copy())) | (z3_x[3].copy() & (z3_x[4].copy() | (~ z3_x[4].copy())))) | (~ ((z3_x[5].copy() & z3_x[2].copy()) & (~ z3_x[1].copy())))) | ((~ (z3_x[5].copy() & (~ z3_x[5].copy()))) & z3_x[1].copy())) | (~ (z3_x[3].copy() | (~ z3_x[3].copy())))), (((z3_x[1].copy() | (~ z3_x[5].copy())) | (~ z3_x[2].copy())) | z3_x[3].copy()))])
    def test_simplify(self, term, simplified):
        cond = term.simplify()
        assert cond.is_equal_to(simplified)
    .parametrize('term, result', [(true_value, []), (false_value, []), (z3_x[1].copy(), [z3_x[1].copy()]), ((~ z3_x[1].copy()), [z3_x[1].copy()]), ((((z3_x[1].copy() & (~ z3_x[2].copy())) & (z3_x[3].copy() | (~ (z3_x[4].copy() & z3_x[2].copy())))) & (~ ((z3_x[5].copy() & z3_x[2].copy()) & (~ z3_x[1].copy())))), [z3_x[1].copy(), z3_x[2].copy(), z3_x[3].copy(), z3_x[4].copy(), z3_x[2].copy(), z3_x[5].copy(), z3_x[2].copy(), z3_x[1].copy()])])
    def test_get_symbols(self, term, result):
        assert ([str(symbol) for symbol in term.get_symbols()] == [str(symbol) for symbol in result])
    .parametrize('term, result', [(true_value, []), (false_value, []), (z3_x[1].copy(), [z3_x[1].copy()]), ((~ z3_x[1].copy()), [(~ z3_x[1].copy())]), ((z3_x[1].copy() | z3_x[2].copy()), [z3_x[1].copy(), z3_x[2].copy()]), (((~ z3_x[1].copy()) | z3_x[2].copy()), [(~ z3_x[1].copy()), z3_x[2].copy()]), ((z3_x[1].copy() & z3_x[2].copy()), [z3_x[1].copy(), z3_x[2].copy()]), ((((z3_x[1].copy() & (~ z3_x[2].copy())) & (z3_x[3].copy() | (~ (z3_x[4].copy() & z3_x[2].copy())))) & (~ ((z3_x[5].copy() & z3_x[2].copy()) & (~ z3_x[1].copy())))), [z3_x[1].copy(), (~ z3_x[2].copy()), z3_x[3].copy(), z3_x[4].copy(), z3_x[2].copy(), z3_x[5].copy(), z3_x[2].copy(), (~ z3_x[1].copy())])])
    def test_get_literals(self, term, result):
        assert ([str(literal) for literal in term.get_literals()] == [str(literal) for literal in result])
    def test_get_literals_error(self):
        # A non-boolean sub-term (bit-vector comparison) must be rejected.
        term = Z3LogicCondition(Or(And(b_x[1], (BitVec('a', 32, context) < const_5)), b_x[3]))
        with pytest.raises(AssertionError):
            list(term.get_literals())
    .parametrize('term, condition, result', [(true_value, z3_x[2].copy(), true_value), (false_value, z3_x[2].copy(), false_value), (z3_x[2].copy(), z3_x[2].copy(), true_value), (z3_x[2].copy(), z3_x[3].copy(), z3_x[2].copy()), ((z3_x[1].copy() | z3_x[2].copy()), z3_x[2].copy(), true_value)])
    def test_substitute_by_true_basics(self, term, condition, result):
        assert (term.substitute_by_true(condition).simplify() == result)
    .parametrize('condition, result', [((((((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) & (z3_x[4].copy() | z3_x[5].copy())) & z3_x[6].copy()) & z3_x[7].copy()), true_value), (z3_x[6].copy(), ((((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) & (z3_x[4].copy() | z3_x[5].copy())) & z3_x[7].copy())), ((z3_x[4].copy() | z3_x[5].copy()), ((((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) & z3_x[6].copy()) & z3_x[7].copy())), ((z3_x[6].copy() & (z3_x[4].copy() | z3_x[5].copy())), (((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) & z3_x[7].copy())), ((z3_x[6].copy() & z3_x[7].copy()), (((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) & (z3_x[4].copy() | z3_x[5].copy()))), ((z3_x[1].copy() | z3_x[2].copy()), (((z3_x[4].copy() | z3_x[5].copy()) & z3_x[6].copy()) & z3_x[7].copy())), (((((((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) & (z3_x[4].copy() | z3_x[5].copy())) & z3_x[6].copy()) & z3_x[7].copy()) & z3_x[8].copy()), true_value)])
    def test_substitute_by_true(self, condition, result):
        term = (((((z3_x[1].copy() | z3_x[2].copy()) | z3_x[3].copy()) & (z3_x[4].copy() | z3_x[5].copy())) & z3_x[6].copy()) & z3_x[7].copy())
        term.substitute_by_true(condition)
        assert (term.simplify() == result.simplify())
    def test_substitute_by_no_equivalence(self):
        term = (((z3_x[1].copy() | z3_x[2].copy()) & ((~ z3_x[1].copy()) | z3_x[3].copy())) & (z3_x[4].copy() | z3_x[5].copy()))
        term.substitute_by_true(z3_x[1].copy())
        result = (z3_x[3].copy() & (z3_x[4].copy() | z3_x[5].copy()))
        assert (term.simplify() == result.simplify())
    .parametrize('term, conditions, result', [((z3_x[1].copy() & z3_x[2].copy()), [Condition(OperationType.equal, [var_a, constant_5]), Condition(OperationType.less_or_equal_us, [var_a, constant_10])], z3_x[1].copy()), ((z3_x[1].copy() & z3_x[2].copy()), [Condition(OperationType.less, [var_a, constant_5]), Condition(OperationType.less_or_equal_us, [var_a, constant_10])], (z3_x[1].copy() & z3_x[2].copy())), ((z3_x[1].copy() & z3_x[2].copy()), [Condition(OperationType.less, [var_a, constant_20]), Condition(OperationType.less_or_equal_us, [var_a, constant_10])], z3_x[2].copy()), ((z3_x[1].copy() & (~ z3_x[2].copy())), [Condition(OperationType.less, [var_a, constant_20]), Condition(OperationType.greater_us, [var_a, constant_10])], (~ z3_x[2].copy()))])
    def test_remove_redundancy(self, term, conditions, result):
        # Minimal in-test ConditionHandler that maps pseudo conditions to
        # fresh z3 symbols, so remove_redundancy() can be exercised.
        class MockConditionHandler(ConditionHandler):
            def add_condition(self, condition: Condition) -> LogicCondition:
                symbol = self._get_next_symbol()
                z3_condition = PseudoZ3LogicCondition.initialize_from_condition(condition, self._logic_context).simplify()
                condition_symbol = ConditionSymbol(condition, symbol, z3_condition)
                self._condition_map[symbol] = condition_symbol
                return symbol
            def _get_next_symbol(self) -> Z3LogicCondition:
                self._symbol_counter += 1
                return Z3LogicCondition.initialize_symbol(f'x{self._symbol_counter}', self._logic_context)
        condition_handler = MockConditionHandler()
        condition_handler._logic_context = term.context
        for cond in conditions:
            condition_handler.add_condition(cond)
        assert (term.remove_redundancy(condition_handler) == result)
    .parametrize('term, bound1, bound2, result', [(And(b_x[1], b_x[2], b_x[3], b_x[4]), 3, 10, True), (Or(b_x[1], And(b_x[2], Not(b_x[1])), And(b_x[3], Not(Or(b_x[1], b_x[2]))), And(And(b_x[5], b_x[4], Not(b_x[1])))), 5, 10, True), (Or(b_x[1], And(b_x[2], Not(b_x[1])), And(b_x[3], Not(Or(b_x[1], b_x[2]))), And(And(b_x[5], b_x[4], Not(b_x[1])))), 6, 10, False), (Or(b_x[1], And(b_x[2], Not(b_x[1])), And(b_x[3], Not(Or(b_x[1], b_x[2]))), And(And(b_x[5], b_x[4], Not(b_x[1])))), 6, 8, True), (Or(b_x[1], And(b_x[2], Not(b_x[1])), And(b_x[3], Not(Or(b_x[1], b_x[2]))), And(And(b_x[5], b_x[4], Not(b_x[1])))), 6, 9, False)])
    def test_too_large_to_simplify(self, term, bound1, bound2, result):
        assert (Z3Implementation(True, bound1, bound2)._too_large_to_fully_simplify(term) == result)
    .parametrize('term, new_term', [(b_x[1], b_x[1]), (BoolVal(True, ctx=context), BoolVal(True, ctx=context)), (BoolVal(False, ctx=context), BoolVal(False, ctx=context)), (Not(And((BitVec('a', 32, context) < BitVecVal(4, 32, context)), Or((BitVec('a', 32, context) > const10), Not((BitVec('a', 32, context) == const_20))))), Or(Not((BitVec('a', 32, context) < BitVecVal(4, 32, context))), And(Not((BitVec('a', 32, context) > const10)), (BitVec('a', 32, context) == const_20)))), (Not(And(b_x[1], b_x[2], Not(b_x[3]))), Or(Not(b_x[1]), Not(b_x[2]), b_x[3])), (Not(Or(b_x[1], b_x[2], Not(b_x[3]))), And(Not(b_x[1]), Not(b_x[2]), b_x[3])), (Not(Not(And(b_x[1], b_x[2], Not(b_x[3])))), And(b_x[1], b_x[2], Not(b_x[3]))), (And(b_x[1], Not(b_x[2]), Or(b_x[3], Not(And(b_x[4], b_x[2]))), Not(And(b_x[5], b_x[2], Not(b_x[1])))), And(b_x[1], Not(b_x[2]), Or(b_x[3], Or(Not(b_x[4]), Not(b_x[2]))), Or(Not(b_x[5]), Not(b_x[2]), b_x[1]))), (Or(And(BoolVal(False, ctx=context)), Not(b_x[2]), And(b_x[3], BoolVal(True, ctx=context)), Not(And(b_x[5], b_x[2], Not(b_x[1]))), And(Not(BoolVal(False, ctx=context)), b_x[1]), Not(BoolVal(True, ctx=context))), Or(And(BoolVal(False, ctx=context)), Not(b_x[2]), And(b_x[3], BoolVal(True, ctx=context)), Or(Not(b_x[5]), Not(b_x[2]), b_x[1]), And(BoolVal(True, ctx=context), b_x[1]), BoolVal(False, ctx=context)))])
    def test_resolve_negation(self, term, new_term):
        assert (Z3Implementation(True)._resolve_negation(term) == new_term)
    .parametrize('term, result', [(Z3LogicCondition(Or(And(b_x[1]), Not(b_x[2]), And(b_x[3], Or(Not(b_x[4]))), Not(And(b_x[5], b_x[2], Not(b_x[1]))), And(Not(And(b_x[5], Not(b_x[5]))), b_x[1]), Not(Or(b_x[3], Not(b_x[3]))))), '(a < 0x1 | b == 0x2 | (c <= 0x3 & d <= 0x4) | !(e >= 0x5 & b != 0x2 & a >= 0x1) | (!(e >= 0x5 & e < 0x5) & a < 0x1) | !(c <= 0x3 | c > 0x3))'), (Z3LogicCondition(Or(And(b_x[1], Not(b_x[1])), Not(b_x[2]), And(b_x[3], Or(b_x[4], Not(b_x[4]))), Not(And(b_x[5], b_x[2], Not(b_x[1]))), And(Not(And(b_x[5], Not(b_x[5]))), b_x[1]), Not(Or(b_x[3], Not(b_x[3]))))), '((a < 0x1 & a >= 0x1) | b == 0x2 | (c <= 0x3 & (d > 0x4 | d <= 0x4)) | !(e >= 0x5 & b != 0x2 & a >= 0x1) | (!(e >= 0x5 & e < 0x5) & a < 0x1) | !(c <= 0x3 | c > 0x3))')])
    def test_rich_string_representation(self, term, result):
        condition_map = {z3_x[1].copy(): Condition(OperationType.less, [Variable('a'), Constant(1)]), z3_x[2].copy(): Condition(OperationType.not_equal, [Variable('b'), Constant(2)]), z3_x[3].copy(): Condition(OperationType.less_or_equal, [Variable('c'), Constant(3)]), z3_x[4].copy(): Condition(OperationType.greater, [Variable('d'), Constant(4)]), z3_x[5].copy(): Condition(OperationType.greater_or_equal, [Variable('e'), Constant(5)])}
        assert (term.rich_string_representation(condition_map) == result)
def test_custom_training():
    """A CUSTOM-algorithm SageMaker task behaves like a plain python task."""
    # NOTE(review): the decorator below was truncated to a bare parenthesized
    # expression in this copy; "@task" is restored from the flytekit
    # task_config usage -- confirm against the original source.
    @task(task_config=SagemakerTrainingJobConfig(training_job_resource_config=TrainingJobResourceConfig(instance_type='ml-xlarge', volume_size_in_gb=1), algorithm_specification=AlgorithmSpecification(algorithm_name=AlgorithmName.CUSTOM)))
    def my_custom_trainer(x: int) -> int:
        return x
    assert (my_custom_trainer.python_interface.inputs == {'x': int})
    assert (my_custom_trainer.python_interface.outputs == {'o0': int})
    assert (my_custom_trainer(x=10) == 10)
    assert (my_custom_trainer.get_custom(_get_reg_settings()) == {'algorithmSpecification': {}, 'trainingJobResourceConfig': {'instanceCount': '1', 'instanceType': 'ml-xlarge', 'volumeSizeInGb': '1'}})
class RequestInformation():
    """Book-keeping for a single in-flight RPC request.

    Stores the RPC method and its params, the formatters applied to the
    response, and (for subscriptions) the subscription id.
    """

    def __init__(self, method: 'RPCEndpoint', params: Any, response_formatters: Tuple[(Callable[(..., Any)], ...)], subscription_id: str=None):
        # NOTE(review): subscription_id may be None, so Optional[str] would be
        # the more precise annotation -- check the file's typing imports
        # before changing it.
        self.method = method
        self.params = params
        self.response_formatters = response_formatters
        self.subscription_id = subscription_id
        # Middleware hooks applied to the raw response, in registration order.
        self.middleware_response_processors: List[Callable[(..., Any)]] = []
class FaqSchema(SoftDeletionSchema):
    """JSON:API (marshmallow-jsonapi) schema for FAQ entries."""

    class Meta():
        # JSON:API resource configuration: type name, self links and
        # dasherized field names.
        type_ = 'faq'
        self_view = 'v1.faq_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize
    id = fields.Str(dump_only=True)
    question = fields.Str(required=True)
    answer = fields.Str(required=True)
    # Parent event this FAQ belongs to.
    event = Relationship(self_view='v1.faq_event', self_view_kwargs={'id': '<id>'}, related_view='v1.event_detail', related_view_kwargs={'faq_id': '<id>'}, schema='EventSchemaPublic', type_='event')
    # Optional grouping of FAQs by type.
    faq_type = Relationship(self_view='v1.faq_faq_type', self_view_kwargs={'id': '<id>'}, related_view='v1.faq_type_detail', related_view_kwargs={'faq_id': '<id>'}, schema='FaqTypeSchemaPublic', type_='faq-type')
class AttributionSpec(AbstractObject):
    """Ads attribution spec: an event type plus an attribution window
    (in days)."""

    def __init__(self, api=None):
        super(AttributionSpec, self).__init__()
        self._isAttributionSpec = True
        self._api = api

    class Field(AbstractObject.Field):
        # API field names for this object.
        event_type = 'event_type'
        window_days = 'window_days'

    _field_types = {'event_type': 'string', 'window_days': 'int'}

    # NOTE(review): @classmethod restored -- the method takes `cls` and was
    # plainly meant to be a classmethod; the decorator appears to have been
    # lost in this copy.
    @classmethod
    def _get_field_enum_info(cls):
        # This object has no enum-typed fields.
        field_enum_info = {}
        return field_enum_info
def get_scene_preferences():
    """Read the tvtk scene preferences and return them as a plain dict.

    Each stored preference is a string literal that is parsed back into a
    Python value with ``ast.literal_eval``.
    """
    pref = preference_manager.preferences
    pref_paths = {
        'stereo': 'tvtk.scene.stereo',
        'magnification': 'tvtk.scene.magnification',
        'foreground': 'tvtk.scene.foreground_color',
        'background': 'tvtk.scene.background_color',
    }
    return {name: ast.literal_eval(pref.get(path)) for name, path in pref_paths.items()}
def getgw(iface):
    """Return the default-gateway IP for *iface*, or '' when none is found.

    Parses ``route -n`` output: the gateway row has destination '0.0.0.0'
    and a non-zero gateway column.
    """
    # NOTE(review): iface is interpolated into a shell pipeline -- ensure it
    # never comes from untrusted input, or switch to subprocess with a list
    # argv instead of a shell string.
    result = ''
    try:
        output = os.popen(('route -n | grep ' + str(iface)))
        for line in output:
            fields = line.split()
            # Guard against short/unexpected lines before indexing (the
            # original bare `except:` used to mask IndexErrors here).
            if ((len(fields) >= 3) and (fields[2].strip() == '0.0.0.0') and (fields[1].strip() != '0.0.0.0')):
                result = fields[1]
                break
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        result = ''
    return result
class SX1262(SX126X):
    """High-level driver for the Semtech SX1262 LoRa/GFSK transceiver.

    Thin convenience layer over the SX126X base class: configures the modem
    (``begin`` for LoRa, ``beginFSK`` for GFSK), handles image calibration
    and PA limits, and offers blocking or IRQ-callback-driven
    ``send()``/``recv()``.
    """

    # IRQ flags and GFSK option constants re-exported for callers.
    TX_DONE = SX126X_IRQ_TX_DONE
    RX_DONE = SX126X_IRQ_RX_DONE
    ADDR_FILT_OFF = SX126X_GFSK_ADDRESS_FILT_OFF
    ADDR_FILT_NODE = SX126X_GFSK_ADDRESS_FILT_NODE
    ADDR_FILT_NODE_BROAD = SX126X_GFSK_ADDRESS_FILT_NODE_BROADCAST
    PREAMBLE_DETECT_OFF = SX126X_GFSK_PREAMBLE_DETECT_OFF
    PREAMBLE_DETECT_8 = SX126X_GFSK_PREAMBLE_DETECT_8
    PREAMBLE_DETECT_16 = SX126X_GFSK_PREAMBLE_DETECT_16
    PREAMBLE_DETECT_24 = SX126X_GFSK_PREAMBLE_DETECT_24
    PREAMBLE_DETECT_32 = SX126X_GFSK_PREAMBLE_DETECT_32
    # Status-code -> message table (same mapping ASSERT() uses).
    STATUS = ERROR

    def __init__(self, spi_bus, spi_ce, irq, rst, gpio):
        super().__init__(spi_bus, spi_ce, irq, rst, gpio)
        self.frequency = 0
        self._callbackFunction = self._dummyFunction

    def begin(self, freq=434.0, bw=125.0, sf=9, cr=7, syncWord=SX126X_SYNC_WORD_PRIVATE, power=14, currentLimit=60.0, preambleLength=8, implicit=False, implicitLen=255, crcOn=True, txIq=False, rxIq=False, tcxoVoltage=1.6, useRegulatorLDO=False, blocking=True):
        """Configure the radio for LoRa operation; returns a status code."""
        state = super().begin(bw, sf, cr, syncWord, currentLimit, preambleLength, tcxoVoltage, useRegulatorLDO, txIq, rxIq)
        ASSERT(state)
        if (not implicit):
            state = super().explicitHeader()
        else:
            state = super().implicitHeader(implicitLen)
        ASSERT(state)
        state = super().setCRC(crcOn)
        ASSERT(state)
        state = self.setFrequency(freq)
        ASSERT(state)
        state = self.setOutputPower(power)
        ASSERT(state)
        state = super().fixPaClamping()
        ASSERT(state)
        state = self.setBlockingCallback(blocking)
        return state

    def beginFSK(self, freq=434.0, br=48.0, freqDev=50.0, rxBw=156.2, power=14, currentLimit=60.0, preambleLength=16, dataShaping=0.5, syncWord=None, syncBitsLength=16, addrFilter=SX126X_GFSK_ADDRESS_FILT_OFF, addr=0, crcLength=2, crcInitial=7439, crcPolynomial=4129, crcInverted=True, whiteningOn=True, whiteningInitial=256, fixedPacketLength=False, packetLength=255, preambleDetectorLength=SX126X_GFSK_PREAMBLE_DETECT_16, tcxoVoltage=1.6, useRegulatorLDO=False, blocking=True):
        """Configure the radio for GFSK operation; returns a status code."""
        # Fix for a mutable default argument: the effective default sync word
        # is still [45, 1], but a fresh list is created per call.
        if syncWord is None:
            syncWord = [45, 1]
        state = super().beginFSK(br, freqDev, rxBw, currentLimit, preambleLength, dataShaping, preambleDetectorLength, tcxoVoltage, useRegulatorLDO)
        ASSERT(state)
        state = super().setSyncBits(syncWord, syncBitsLength)
        ASSERT(state)
        # Select the GFSK address-filtering mode.
        if (addrFilter == SX126X_GFSK_ADDRESS_FILT_OFF):
            state = super().disableAddressFiltering()
        elif (addrFilter == SX126X_GFSK_ADDRESS_FILT_NODE):
            state = super().setNodeAddress(addr)
        elif (addrFilter == SX126X_GFSK_ADDRESS_FILT_NODE_BROADCAST):
            state = super().setBroadcastAddress(addr)
        else:
            state = ERR_UNKNOWN
        ASSERT(state)
        state = super().setCRC(crcLength, crcInitial, crcPolynomial, crcInverted)
        ASSERT(state)
        state = super().setWhitening(whiteningOn, whiteningInitial)
        ASSERT(state)
        if fixedPacketLength:
            state = super().fixedPacketLengthMode(packetLength)
        else:
            state = super().variablePacketLengthMode(packetLength)
        ASSERT(state)
        state = self.setFrequency(freq)
        ASSERT(state)
        state = self.setOutputPower(power)
        ASSERT(state)
        state = super().fixPaClamping()
        ASSERT(state)
        state = self.setBlockingCallback(blocking)
        return state

    def setFrequency(self, freq, calibrate=True):
        """Set the RF frequency in MHz (150-960), optionally running image
        calibration for the matching band first."""
        if ((freq < 150.0) or (freq > 960.0)):
            return ERR_INVALID_FREQUENCY
        state = ERR_NONE
        if calibrate:
            # Pick the image-calibration band constants for the target
            # frequency (datasheet band edges).
            data = bytearray(2)
            if (freq > 900.0):
                data[0] = SX126X_CAL_IMG_902_MHZ_1
                data[1] = SX126X_CAL_IMG_902_MHZ_2
            elif (freq > 850.0):
                data[0] = SX126X_CAL_IMG_863_MHZ_1
                data[1] = SX126X_CAL_IMG_863_MHZ_2
            elif (freq > 770.0):
                data[0] = SX126X_CAL_IMG_779_MHZ_1
                data[1] = SX126X_CAL_IMG_779_MHZ_2
            elif (freq > 460.0):
                data[0] = SX126X_CAL_IMG_470_MHZ_1
                data[1] = SX126X_CAL_IMG_470_MHZ_2
            else:
                data[0] = SX126X_CAL_IMG_430_MHZ_1
                data[1] = SX126X_CAL_IMG_430_MHZ_2
            state = super().calibrateImage(data)
            ASSERT(state)
        self.frequency = freq
        return super().setFrequencyRaw(freq)

    def setOutputPower(self, power):
        """Set TX power in dBm (-9..22), preserving the OCP register."""
        if (not ((power >= (- 9)) and (power <= 22))):
            return ERR_INVALID_OUTPUT_POWER
        # Save the over-current-protection config: setPaConfig/setTxParams
        # reset it, so it is restored afterwards.
        ocp = bytearray(1)
        state = super().readRegister(SX126X_REG_OCP_CONFIGURATION, ocp, 1)[0]
        ASSERT(state)
        state = super().setPaConfig(4, _SX126X_PA_CONFIG_SX1262)
        ASSERT(state)
        state = super().setTxParams(power)
        ASSERT(state)
        return super().writeRegister(SX126X_REG_OCP_CONFIGURATION, ocp, 1)

    def setTxIq(self, txIq):
        # TX IQ inversion flag; picked up on the next transmit.
        self._txIq = txIq

    def setRxIq(self, rxIq):
        # RX IQ inversion flag; restart RX so it takes effect immediately
        # in non-blocking mode.
        self._rxIq = rxIq
        if (not self.blocking):
            ASSERT(super().startReceive())

    def setPreambleDetectorLength(self, preambleDetectorLength):
        # Same restart logic as setRxIq().
        self._preambleDetectorLength = preambleDetectorLength
        if (not self.blocking):
            ASSERT(super().startReceive())

    def setBlockingCallback(self, blocking, callback=None):
        """Switch between blocking mode and IRQ-callback (non-blocking) mode.

        In non-blocking mode the radio is put into continuous receive and
        DIO1 interrupts are routed to *callback* (if given).
        """
        self.blocking = blocking
        if (not self.blocking):
            state = super().startReceive()
            ASSERT(state)
            if callback is not None:
                self._callbackFunction = callback
                super().setDio1Action(self._onIRQ)
            else:
                self._callbackFunction = self._dummyFunction
                super().clearDio1Action()
            return state
        else:
            state = super().standby()
            ASSERT(state)
            self._callbackFunction = self._dummyFunction
            super().clearDio1Action()
            return state

    def recv(self, len_=0):
        """Receive a packet; returns (payload_bytes, status)."""
        if (not self.blocking):
            return self._readData(len_)
        else:
            return self._receive(len_)

    def send(self, data):
        """Transmit *data* (bytes/bytearray); returns (length_sent, status)."""
        if (not self.blocking):
            return self._startTransmit(data)
        else:
            return self._transmit(data)

    def _events(self):
        # Raw IRQ status flags from the chip.
        return super().getIrqStatus()

    def _receive(self, len_=0):
        """Blocking receive.  len_ == 0 means 'whatever arrives'."""
        state = ERR_NONE
        length = len_
        if (len_ == 0):
            length = SX126X_MAX_PACKET_LENGTH
        data = bytearray(length)
        try:
            (state, data) = super().receive(data, length)
        except AssertionError as e:
            # ASSERT() raises with the error *message*; map it back to its
            # numeric status code.
            state = list(ERROR.keys())[list(ERROR.values()).index(str(e))]
        if (state == ERR_NONE):
            if (len_ == 0):
                # Trim the buffer to the actual received packet length.
                length = super().getPacketLength(False)
                data = data[:length]
        else:
            return (b'', state)
        return (bytes(data), state)

    def _transmit(self, data):
        """Blocking transmit of a bytes-like payload."""
        if (isinstance(data, bytes) or isinstance(data, bytearray)):
            pass
        else:
            return (0, ERR_INVALID_PACKET_TYPE)
        state = super().transmit(data, len(data))
        return (len(data), state)

    def _readData(self, len_=0):
        """Non-blocking read of an already-received packet."""
        state = ERR_NONE
        length = super().getPacketLength()
        if ((len_ < length) and (len_ != 0)):
            length = len_
        data = bytearray(length)
        try:
            (state, data) = super().readData(data, length)
        except AssertionError as e:
            state = list(ERROR.keys())[list(ERROR.values()).index(str(e))]
        # Re-arm continuous receive for the next packet.
        ASSERT(super().startReceive())
        if (state == ERR_NONE):
            return (bytes(data), state)
        else:
            return (b'', state)

    def _startTransmit(self, data):
        """Non-blocking transmit: kick off TX and return immediately."""
        if (isinstance(data, bytes) or isinstance(data, bytearray)):
            pass
        else:
            return (0, ERR_INVALID_PACKET_TYPE)
        state = super().startTransmit(data, len(data))
        return (len(data), state)

    def _dummyFunction(self, *args):
        # No-op placeholder used when no user callback is installed.
        pass

    def _onIRQ(self, channel):
        # DIO1 interrupt handler; `channel` is the GPIO channel supplied by
        # the interrupt framework and is not used here.
        events = self._events()
        if (events & SX126X_IRQ_TX_DONE):
            # TX finished -- drop back into continuous receive.
            super().startReceive()
        self._callbackFunction(events)
(IProviderExtensionRegistry)  # NOTE(review): stripped decorator remnant — likely `@provides(IProviderExtensionRegistry)`; confirm against original source
class ProviderExtensionRegistry(ExtensionRegistry):
    """Extension registry whose extensions are supplied by pluggable providers.

    Extensions for each extension point are cached as a list-of-lists: one
    inner list per provider, in provider order. Listener notifications carry
    the flat index/offset of the changed slice within the concatenation.
    """

    # All providers currently contributing to this registry.
    _providers = List(IExtensionProvider)

    def set_extensions(self, extension_point_id, extensions):
        """Direct mutation is forbidden — extensions come only from providers."""
        raise TypeError('extension points cannot be set')

    def add_provider(self, provider):
        """Register a provider and notify listeners of its added extensions."""
        events = self._add_provider(provider)
        for (extension_point_id, (refs, added, index)) in events.items():
            self._call_listeners(refs, extension_point_id, added, [], index)

    def get_providers(self):
        """Return a shallow copy of the current provider list."""
        return self._providers[:]

    def remove_provider(self, provider):
        """Unregister a provider and notify listeners of its removed extensions."""
        events = self._remove_provider(provider)
        for (extension_point_id, (refs, removed, index)) in events.items():
            self._call_listeners(refs, extension_point_id, [], removed, index)

    def _get_extensions(self, extension_point_id):
        """Return the flattened extensions contributed by all providers."""
        if (extension_point_id not in self._extension_points):
            logger.warning(('getting extensions of unknown extension point <%s>' % extension_point_id))
            extensions = []
        elif (extension_point_id in self._extensions):
            # Cache hit: reuse the per-provider lists built earlier.
            extensions = self._extensions[extension_point_id]
        else:
            extensions = self._initialize_extensions(extension_point_id)
            self._extensions[extension_point_id] = extensions
        # Flatten the per-provider lists into one list.
        # (Shadows the builtin `all`; kept as-is to preserve behavior.)
        all = []
        for extensions_of_single_provider in extensions:
            all.extend(extensions_of_single_provider)
        return all

    def _add_provider(self, provider):
        """Wire up a new provider; returns listener-notification events."""
        self._add_provider_extension_points(provider)
        events = self._add_provider_extensions(provider)
        self._providers.append(provider)
        return events

    def _add_provider_extensions(self, provider):
        """Append the provider's extensions to every cached extension point."""
        events = {}
        for (extension_point_id, extensions) in self._extensions.items():
            new = provider.get_extensions(extension_point_id)
            if (len(new) > 0):
                # New extensions land after everything already contributed.
                index = sum(map(len, extensions))
                refs = self._get_listener_refs(extension_point_id)
                events[extension_point_id] = (refs, new[:], index)
            extensions.append(new)
        return events

    def _add_provider_extension_points(self, provider):
        """Register every extension point the provider declares."""
        for extension_point in provider.get_extension_points():
            self._extension_points[extension_point.id] = extension_point

    def _remove_provider(self, provider):
        """Tear down a provider; returns listener-notification events."""
        events = self._remove_provider_extensions(provider)
        self._remove_provider_extension_points(provider, events)
        self._providers.remove(provider)
        return events

    def _remove_provider_extensions(self, provider):
        """Drop the provider's slice from every cached extension point."""
        events = {}
        index = self._providers.index(provider)
        for (extension_point_id, extensions) in self._extensions.items():
            old = extensions[index]
            if (len(old) > 0):
                # Offset of this provider's slice within the flat list.
                offset = sum(map(len, extensions[:index]))
                refs = self._get_listener_refs(extension_point_id)
                events[extension_point_id] = (refs, old[:], offset)
            del extensions[index]
        return events

    def _remove_provider_extension_points(self, provider, events):
        """Forget every extension point the provider declared."""
        for extension_point in provider.get_extension_points():
            del self._extension_points[extension_point.id]

    _trait_change('_providers:extension_point_changed')  # NOTE(review): stripped decorator remnant — likely `@on_trait_change(...)`; confirm against original source
    def _providers_extension_point_changed(self, obj, trait_name, old, event):
        """React to a provider changing its own extensions at runtime."""
        logger.debug('provider <%s> extension point changed', obj)
        extension_point_id = event.extension_point_id
        if (extension_point_id not in self._extensions):
            # Nobody asked for this extension point yet; nothing cached to fix.
            return
        extensions = self._extensions[extension_point_id]
        provider_index = self._providers.index(obj)
        extensions[provider_index] = obj.get_extensions(extension_point_id)
        # Translate the provider-local event index into a flat-list index.
        offset = sum(map(len, extensions[:provider_index]))
        index = self._translate_index(event.index, offset)
        refs = self._get_listener_refs(extension_point_id)
        self._call_listeners(refs, extension_point_id, event.added, event.removed, index)

    def _initialize_extensions(self, extension_point_id):
        """Build the per-provider extension lists for a new extension point."""
        extensions = []
        for provider in self._providers:
            extensions.append(provider.get_extensions(extension_point_id)[:])
        logger.debug('extensions to <%s> <%s>', extension_point_id, extensions)
        return extensions

    def _translate_index(self, index, offset):
        """Shift a provider-local index (int or slice) by ``offset``."""
        if isinstance(index, slice):
            index = slice((index.start + offset), (index.stop + offset), index.step)
        else:
            index = (index + offset)
        return index
class OptionSeriesBubbleMarker(Options):
    """Highcharts bubble-series marker options.

    NOTE(review): each option appears as a duplicated ``def`` pair — these
    look like stripped ``@property`` / ``@<name>.setter`` decorators; confirm
    against the original generated source.
    """

    def fillColor(self):
        # Marker fill color; no configured default.
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)

    def fillOpacity(self):
        # Default fill opacity is 0.5.
        return self._config_get(0.5)

    def fillOpacity(self, num: float):
        self._config(num, js_type=False)

    def lineColor(self):
        # Marker border color; no configured default.
        return self._config_get(None)

    def lineColor(self, value: Any):
        self._config(value, js_type=False)

    def lineWidth(self):
        # Default border width is 1 pixel.
        return self._config_get(1)

    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    def states(self) -> 'OptionSeriesBubbleMarkerStates':
        # Lazily-created sub-options object for hover/select states.
        return self._config_sub_data('states', OptionSeriesBubbleMarkerStates)

    def symbol(self):
        # Default marker symbol is 'circle'.
        return self._config_get('circle')

    def symbol(self, text: str):
        self._config(text, js_type=False)
def test_override_hydra_config_value_from_config_file() -> None:
    """A hydra.run.dir value set in the config file overrides the default."""
    search_path = create_config_search_path('hydra/test_utils/configs')
    loader = ConfigLoaderImpl(config_search_path=search_path)
    loaded = loader.load_configuration(config_name='overriding_output_dir.yaml', overrides=[], run_mode=RunMode.RUN)
    assert loaded.hydra.run.dir == 'foo'
class CheckJsonFormat(object):
    """Lint a JSON(-with-comments) file for style and syntax violations.

    Comments and dangling commas are stripped (and reported) before the
    cleaned text is handed to ``json.loads`` for a final syntax check.
    """

    def __init__(self, use_tabs=False, allow_comments=False):
        # use_tabs: indentation must be tabs instead of spaces.
        # allow_comments: suppress the E_COMMENTS violation.
        self.use_tabs = use_tabs
        self.allow_comments = allow_comments
        # Set to True by log_failure the first time any violation is found.
        self.fail = False

    def index_lines(self, text):
        """Build (start_offset, end_offset, line_number) ranges for ``text``."""
        self.line_range = []
        count = 1
        last = 0
        for m in re.finditer('\n', text):
            self.line_range.append((last, (m.end(0) - 1), count))
            last = m.end(0)
            count += 1

    def get_line(self, pt):
        """Map a character offset back to its 1-based line number (or None)."""
        line = None
        for r in self.line_range:
            if ((pt >= r[0]) and (pt <= r[1])):
                line = r[2]
                break
        return line

    def check_comments(self, text):
        """Strip comments (reporting them unless allowed); keep line count intact."""
        def remove_comments(group):
            # Keep only the newline of each commented line so offsets/lines survive.
            return ''.join([x[0] for x in RE_LINE_PRESERVE.findall(group)])

        def evaluate(m):
            text = ''
            g = m.groupdict()
            if (g['code'] is None):
                if (not self.allow_comments):
                    self.log_failure(E_COMMENTS, self.get_line(m.start(0)))
                text = remove_comments(g['comments'])
            else:
                text = g['code']
            return text
        content = ''.join(map((lambda m: evaluate(m)), RE_COMMENT.finditer(text)))
        return content

    def check_dangling_commas(self, text):
        """Report and remove trailing commas before closing brackets/braces."""
        def check_comma(g, m, line):
            # Any match here is a violation; rebuild the text without the comma.
            self.log_failure(E_COMMA, line)
            if (g['square_comma'] is not None):
                return (g['square_ws'] + g['square_bracket'])
            else:
                return (g['curly_ws'] + g['curly_bracket'])

        def evaluate(m):
            g = m.groupdict()
            return (check_comma(g, m, self.get_line(m.start(0))) if (g['code'] is None) else g['code'])
        return ''.join(map((lambda m: evaluate(m)), RE_TRAILING_COMMA.finditer(text)))

    def log_failure(self, code, line=None):
        """Print a violation (with line number when known) and mark the run failed."""
        if line:
            print(('%s: Line %d - %s' % (code, line, VIOLATION_MSG[code])))
        else:
            print(('%s: %s' % (code, VIOLATION_MSG[code])))
        self.fail = True

    def check_format(self, file_name):
        """Run all checks on ``file_name``; returns True if any violation was found."""
        self.fail = False
        # Compiled pattern the continuation lines of a block comment must match.
        comment_align = None
        with codecs.open(file_name, encoding='utf-8') as f:
            count = 1
            for line in f:
                indent_match = (RE_LINE_INDENT_TAB if self.use_tabs else RE_LINE_INDENT_SPACE).match(line)
                end_comment = (((comment_align is not None) or (indent_match and indent_match.group(2))) and RE_COMMENT_END.search(line))
                if ((count == 1) and (line.strip() == '')):
                    self.log_failure(W_NL_START, count)
                if (not line.endswith('\n')):
                    self.log_failure(W_NL_END, count)
                if RE_TRAILING_SPACES.match(line):
                    self.log_failure(W_TRAILING_SPACE, count)
                if (comment_align is not None):
                    # Inside a block comment: every line must keep the alignment.
                    if (comment_align.match(line) is None):
                        self.log_failure(W_COMMENT_INDENT, count)
                    if end_comment:
                        comment_align = None
                elif (indent_match is None):
                    self.log_failure(W_INDENT, count)
                elif ((comment_align is None) and indent_match.group(2)):
                    # A block comment starts here; remember its indentation.
                    alignment = (indent_match.group(1) if (indent_match.group(1) is not None) else '')
                    if (not end_comment):
                        comment_align = re.compile(((PATTERN_COMMENT_INDENT_TAB if self.use_tabs else PATTERN_COMMENT_INDENT_SPACE) % alignment))
                count += 1
            # Second pass: strip comments/commas, then validate pure JSON.
            f.seek(0)
            text = f.read()
        self.index_lines(text)
        text = self.check_comments(text)
        # Re-index: stripping comments changed character offsets.
        self.index_lines(text)
        text = self.check_dangling_commas(text)
        try:
            json.loads(text)
        except Exception as e:
            self.log_failure(E_MALFORMED)
            print(e)
        return self.fail
class HeuristicsManager(object):
    """Decide whether a crawled page is an article by evaluating heuristics.

    Each enabled heuristic is a method on this object; its result is compared
    against a per-heuristic condition, and the boolean outcomes are combined
    via a configurable boolean expression ("pass_heuristics_condition").

    NOTE(review): uses ``basestring`` — this code targets Python 2.
    NOTE(review): the class-level dicts below are shared across all instances
    of HeuristicsManager; confirm a single instance is ever created.
    """
    cfg_heuristics = None
    log = None
    __sites_object = {}
    __sites_heuristics = {}
    __heuristics_condition = None
    # Tokens that may legitimately appear in the boolean condition string.
    __condition_allowed = ['(', ')', ' and ', ' or ', ' not ']

    def __init__(self, cfg_heuristics, sites_object, crawler_class):
        # cfg_heuristics: heuristics section of the config.
        # sites_object: iterable of site dicts, keyed here by their 'url'.
        self.cfg_heuristics = cfg_heuristics
        for site in sites_object:
            self.__sites_object[site['url']] = site
        self.log = logging.getLogger(__name__)
        self.crawler_class = crawler_class

    def is_article(self, response, url):
        """Return True if ``response`` passes the site's heuristic condition."""
        site = self.__sites_object[url]
        heuristics = self.__get_enabled_heuristics(url)
        self.log.info('Checking site: %s', response.url)
        statement = self.__get_condition(url)
        self.log.debug('Condition (original): %s', statement)
        for (heuristic, condition) in heuristics.items():
            # Each heuristic name in the condition is replaced by its boolean result.
            heuristic_func = getattr(self, heuristic)
            result = heuristic_func(response, site)
            check = self.__evaluate_result(result, condition)
            statement = re.sub(('\\b%s\\b' % heuristic), str(check), statement)
            self.log.debug('Checking heuristic (%s) result (%s) on condition (%s): %s', heuristic, result, condition, check)
        self.log.debug('Condition (evaluated): %s', statement)
        # SECURITY NOTE(review): eval of a config-derived string; safe only if
        # the configuration is trusted — flagging, not changing.
        is_article = eval(statement)
        self.log.debug('Article accepted: %s', is_article)
        return is_article

    def __get_condition(self, url):
        """Build (and cache) the sanitized boolean condition for ``url``."""
        if (self.__heuristics_condition is not None):
            return self.__heuristics_condition
        if ('pass_heuristics_condition' in self.__sites_object[url]):
            condition = self.__sites_object[url]['pass_heuristics_condition']
        else:
            condition = self.cfg_heuristics['pass_heuristics_condition']
        # Strip allowed operators and known heuristic names; whatever remains
        # is an unknown token and gets neutralized to True.
        disalloweds = condition
        heuristics = self.__get_enabled_heuristics(url)
        for allowed in self.__condition_allowed:
            disalloweds = disalloweds.replace(allowed, ' ')
        for (heuristic, _) in heuristics.items():
            disalloweds = re.sub(('\\b%s\\b' % heuristic), ' ', disalloweds)
        disalloweds = disalloweds.split(' ')
        for disallowed in disalloweds:
            if (disallowed != ''):
                self.log.error('Misconfiguration: In the condition, an unknown heuristic was found and will be ignored: %s', disallowed)
                condition = re.sub(('\\b%s\\b' % disallowed), 'True', condition)
        self.__heuristics_condition = condition
        return condition

    def __evaluate_result(self, result, condition):
        """Compare one heuristic's raw ``result`` against its ``condition``.

        Booleans pass through; string conditions support quoted equality and
        the comparison prefixes =, >=, <=, >, <; bare values compare equal.
        """
        if isinstance(result, bool):
            return result
        if isinstance(condition, basestring):
            # Quoted condition => string equality against the result.
            if ((condition.startswith("'") and condition.endswith("'")) or (condition.startswith('"') and condition.endswith('"'))):
                if isinstance(result, basestring):
                    self.log.debug('Condition %s recognized as string.', condition)
                    return (result == condition[1:(- 1)])
                return self.__evaluation_error(result, condition, 'Result not string')
            if (not isinstance(result, (float, int))):
                return self.__evaluation_error(result, condition, 'Result not number on comparision')
            # Numeric comparisons; __try_parse_number returns False on failure,
            # which is detected via isinstance(..., bool).
            if condition.startswith('='):
                number = self.__try_parse_number(condition[1:])
                if isinstance(number, bool):
                    return self.__evaluation_error(result, condition, 'Number not parsable (=)')
                return (result == number)
            if condition.startswith('>='):
                number = self.__try_parse_number(condition[2:])
                if isinstance(number, bool):
                    return self.__evaluation_error(result, condition, 'Number not parsable (>=)')
                return (result >= number)
            if condition.startswith('<='):
                number = self.__try_parse_number(condition[2:])
                if isinstance(number, bool):
                    return self.__evaluation_error(result, condition, 'Number not parsable (<=)')
                return (result <= number)
            if condition.startswith('>'):
                number = self.__try_parse_number(condition[1:])
                if isinstance(number, bool):
                    return self.__evaluation_error(result, condition, 'Number not parsable (>)')
                return (result > number)
            if condition.startswith('<'):
                number = self.__try_parse_number(condition[1:])
                if isinstance(number, bool):
                    return self.__evaluation_error(result, condition, 'Number not parsable (<)')
                return (result < number)
            number = self.__try_parse_number(condition)
            if isinstance(number, bool):
                return self.__evaluation_error(result, condition, 'Number not parsable')
            return (result == number)
        if (isinstance(condition, (float, int)) and isinstance(result, (float, int))):
            return (condition == result)
        return self.__evaluation_error(result, condition, 'Unknown')

    def __evaluation_error(self, result, condition, throw):
        """Log a mismatch between result and condition; always returns False."""
        self.log.error('Result does not match condition, dropping item. Result %s; Condition: %s; Throw: %s', result, condition, throw)
        return False

    def __try_parse_number(self, string):
        """Parse ``string`` as int, then float; returns False when unparsable."""
        try:
            return int(string)
        except ValueError:
            try:
                return float(string)
            except ValueError:
                return False

    def __get_enabled_heuristics(self, url):
        """Return (and cache) the heuristics enabled for ``url``.

        Site-level 'overwrite_heuristics' entries add, replace, or (when the
        value is False) remove globally enabled heuristics.
        """
        if (url in self.__sites_heuristics):
            return self.__sites_heuristics[url]
        site = self.__sites_object[url]
        heuristics = dict(self.cfg_heuristics['enabled_heuristics'])
        if ('overwrite_heuristics' in site):
            for (heuristic, value) in site['overwrite_heuristics'].items():
                if ((value is False) and (heuristic in heuristics)):
                    del heuristics[heuristic]
                else:
                    heuristics[heuristic] = value
        self.__sites_heuristics[site['url']] = heuristics
        self.log.debug('Enabled heuristics for %s: %s', site['url'], heuristics)
        return heuristics
class OptionPlotoptionsVariwideSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Highpass-filter mapping options for variwide sonification instruments."""

    def frequency(self) -> 'OptionPlotoptionsVariwideSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        # Lazily-created sub-options object for the filter cutoff frequency.
        return self._config_sub_data('frequency', OptionPlotoptionsVariwideSonificationDefaultinstrumentoptionsMappingHighpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsVariwideSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        # Lazily-created sub-options object for the filter resonance.
        return self._config_sub_data('resonance', OptionPlotoptionsVariwideSonificationDefaultinstrumentoptionsMappingHighpassResonance)
class Camera():
    """Simulated film camera composed of mechanical and metering subsystems.

    NOTE(review): several defs below are duplicated and preceded by bare
    `_speed.setter` lines — these look like stripped `@property` /
    `@<name>.setter` decorator pairs; confirm against the original source.
    """
    # Selectable dial speed -> actual shutter timer value (seconds).
    selectable_shutter_speeds = {(1 / 4): (1 / 4), (1 / 8): (1 / 8), (1 / 15): (1 / 16), (1 / 30): (1 / 32), (1 / 60): (1 / 64), (1 / 125): (1 / 128), (1 / 250): (1 / 256), (1 / 500): (1 / 512)}
    selectable_film_speeds = (25, 50, 100, 200, 400, 800)

    def __init__(self):
        self.back = Back(camera=self)
        self.exposure_control_system = ExposureControlSystem(mode='Shutter priority', camera=self, film_speed=100, battery=1.44)
        self.film_advance_mechanism = FilmAdvanceMechanism(camera=self)
        self.film_rewind_mechanism = FilmRewindMechanism(camera=self)
        self.lens_cap = LensCap(on=False)
        self.film = Film(camera=self)
        self.environment = Environment(scene_luminosity=4096)
        self.frame_counter = 0
        # With the original property setters these assignments also program
        # the exposure control system — TODO confirm once decorators restored.
        self.film_speed = 100
        self.shutter_speed = (1 / 125)
        self.aperture = 'A'
        # Bound method reference (not called here); read it to get a reading.
        self.exposure_indicator = self.exposure_control_system.read_meter
        self.shutter_button = ShutterButton(camera=self)
        self.film_advance_lever = FilmAdvanceLever(camera=self)

    def shutter_speed(self):
        # Getter half of the shutter_speed property pair.
        return self._shutter_speed

    _speed.setter  # NOTE(review): stripped decorator remnant — likely `@shutter_speed.setter`
    def shutter_speed(self, value):
        # Validate against the dial positions, then program the shutter timer.
        if (not (value in self.selectable_shutter_speeds)):
            possible_settings = ', '.join([f'1/{int((1 / s))}' for s in self.selectable_shutter_speeds.keys()])
            raise self.NonExistentShutterSpeed(f'Possible shutter speeds are {possible_settings}')
        self.exposure_control_system.shutter.timer = self.selectable_shutter_speeds[value]
        self._shutter_speed = value

    class NonExistentShutterSpeed(Exception):
        pass

    def aperture(self):
        # Getter half of the aperture property pair.
        return self._aperture

    def aperture(self, value):
        # 'A' selects shutter-priority auto mode; numbers must be f/1.7–f/16.
        if (value == 'A'):
            self.exposure_control_system.mode = 'Shutter priority'
        elif (not (1.7 <= value <= 16)):
            raise self.ApertureOutOfRange
        else:
            self.exposure_control_system.mode = 'Manual'
            self.exposure_control_system.aperture_set_lever.aperture = value
        self._aperture = value

    class ApertureOutOfRange(Exception):
        pass

    def film_speed(self):
        # Getter half of the film_speed property pair.
        return self._film_speed

    _speed.setter  # NOTE(review): stripped decorator remnant — likely `@film_speed.setter`
    def film_speed(self, value):
        # Validate against the ISO dial, then program the meter's film speed.
        if (not (value in self.selectable_film_speeds)):
            possible_settings = ', '.join([f'{s}' for s in self.selectable_film_speeds])
            raise self.NonExistentFilmSpeed(f'Possible film speeds are {possible_settings}')
        self.exposure_control_system.film_speed = value
        self._film_speed = value

    class NonExistentFilmSpeed(Exception):
        pass

    def state(self):
        """Print a human-readable dump of the whole camera's state."""
        print(' Camera state ')
        print()
        print(' Controls ')
        print(f'Film speed: {self.film_speed} ISO')
        print(f'Selected speed: 1/{int((1 / self.shutter_speed))}')
        print()
        print(' Indicators ')
        print(f'Exposure indicator {self.exposure_indicator()}')
        print(f'Frame counter: {self.frame_counter}')
        print()
        print(' Mechanical ')
        print(f'Back closed: {self.back.closed}')
        print(f'Lens cap on: {self.lens_cap.on}')
        print(f'Film advance mechanism: {self.film_advance_mechanism.advanced}')
        print(f'Shutter cocked: {self.exposure_control_system.shutter.cocked}')
        print(f'Shutter timer: 1/{int((1 / self.exposure_control_system.shutter.timer))} seconds')
        print(f'Iris aperture: /{self.exposure_control_system.iris.aperture:.2g}')
        print(f'Camera exposure settings: {self.exposure_control_system.exposure_value()} EV')
        print()
        print(' Metering ')
        print(f'Metered light: {self.exposure_control_system.light_meter.reading()} cd/m^2')
        print(f'Exposure target: {self.exposure_control_system.measured_ev()} EV')
        print(f'Mode: {self.exposure_control_system.mode}')
        print(f'Battery: {self.exposure_control_system.battery} V')
        print(f'Film speed: {self.exposure_control_system.film_speed} ISO')
        print()
        print(' Film ')
        print(f'Speed: {self.film.speed} ISO')
        print(f'Rewound into cartridge: {self.film.fully_rewound}')
        print(f'Exposed frames: {self.film.frame} (of {self.film.frames})')
        print(f'Ruined: {self.film.ruined}')
        print()
        print(' Environment ')
        print(f'Scene luminosity: {self.environment.scene_luminosity} cd/m^2')
class Gzip(ModelNormal):
    """Generated OpenAPI model for a gzip configuration object.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines
    look like stripped decorators (likely `@cached_property` and
    `@convert_js_args_to_python_args`); confirm against the generator output.
    """
    # No enum-restricted or validated fields on this model.
    allowed_values = {}
    validations = {}

    _property  # NOTE(review): stripped decorator remnant — confirm
    def additional_properties_type():
        # Any JSON-compatible type is accepted for unknown keys.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False

    _property  # NOTE(review): stripped decorator remnant — confirm
    def openapi_types():
        # Field name -> accepted (type, ...) tuple.
        return {'cache_condition': (str, none_type), 'content_types': (str, none_type), 'extensions': (str, none_type), 'name': (str,)}

    _property  # NOTE(review): stripped decorator remnant — confirm
    def discriminator():
        # No polymorphic discriminator for this model.
        return None

    # Python attribute name -> JSON key (identical here).
    attribute_map = {'cache_condition': 'cache_condition', 'content_types': 'content_types', 'extensions': 'extensions', 'name': 'name'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args  # NOTE(review): stripped decorator remnant — confirm
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from deserialized API data (server-provided values allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass __init__ of intermediate classes.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes that setattr must always allow.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args  # NOTE(review): stripped decorator remnant — confirm
    def __init__(self, *args, **kwargs):
        """Instantiate from user-supplied keyword arguments.

        Raises ApiTypeError for positional args and ApiAttributeError when a
        read-only attribute is passed.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, user construction rejects read-only vars.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def test_not_force_defaults_radio_checked():
    """With force_defaults=False a radio matching its default still gets checked."""
    source = '<input type="radio" name="radio-1" class="my_radio" value="cb">'
    expected = '<input type="radio" name="radio-1" class="my_radio" value="cb" checked="checked">'
    actual = htmlfill.render(source, defaults={'radio-1': 'cb'}, force_defaults=False)
    assert expected == actual, actual
_deserializable  # NOTE(review): stripped decorator remnant (likely `@register_deserializable`) — confirm
class PineconeDB(BaseVectorDB):
    """Vector database backend backed by a Pinecone index."""
    # Upserts are chunked into batches of this many vectors.
    BATCH_SIZE = 100

    def __init__(self, config: Optional[PineconeDBConfig]=None):
        # A missing config falls back to all defaults; a wrong type is rejected early.
        if (config is None):
            self.config = PineconeDBConfig()
        else:
            if (not isinstance(config, PineconeDBConfig)):
                raise TypeError('config is not a `PineconeDBConfig` instance. Please make sure the type is right and that you are passing an instance.')
            self.config = config
        self.client = self._setup_pinecone_index()
        super().__init__(config=self.config)

    def _initialize(self):
        # The embedder must be injected before the DB can be used.
        if (not self.embedder):
            raise ValueError('Embedder not set. Please set an embedder with `set_embedder` before initialization.')

    def _setup_pinecone_index(self):
        """Connect to Pinecone (creating the index if absent) and return a handle.

        Credentials come from the PINECONE_API_KEY / PINECONE_ENV env vars.
        """
        pinecone.init(api_key=os.environ.get('PINECONE_API_KEY'), environment=os.environ.get('PINECONE_ENV'), **self.config.extra_params)
        self.index_name = self._get_index_name()
        indexes = pinecone.list_indexes()
        if ((indexes is None) or (self.index_name not in indexes)):
            pinecone.create_index(name=self.index_name, metric=self.config.metric, dimension=self.config.vector_dimension)
        return pinecone.Index(self.index_name)

    def get(self, ids: Optional[List[str]]=None, where: Optional[Dict[(str, any)]]=None, limit: Optional[int]=None):
        """Return which of ``ids`` already exist in the index.

        NOTE(review): the ``where`` and ``limit`` parameters are accepted but
        unused here — presumably kept for interface parity with other backends.
        """
        existing_ids = list()
        if (ids is not None):
            # Fetch in chunks of 1000 (Pinecone's fetch batch limit — TODO confirm).
            for i in range(0, len(ids), 1000):
                result = self.client.fetch(ids=ids[i:(i + 1000)])
                batch_existing_ids = list(result.get('vectors').keys())
                existing_ids.extend(batch_existing_ids)
        return {'ids': existing_ids}

    def add(self, embeddings: List[List[float]], documents: List[str], metadatas: List[object], ids: List[str], **kwargs: Optional[Dict[(str, any)]]):
        """Embed ``documents`` and upsert them (with metadata) in batches.

        NOTE(review): the ``embeddings`` argument is immediately overwritten by
        re-embedding ``documents`` — confirm whether that is intended.
        """
        docs = []
        print('Adding documents to Pinecone...')
        embeddings = self.embedder.embedding_fn(documents)
        for (id, text, metadata, embedding) in zip(ids, documents, metadatas, embeddings):
            # The raw text rides along in metadata so queries can return it.
            docs.append({'id': id, 'values': embedding, 'metadata': {**metadata, 'text': text}})
        for chunk in chunks(docs, self.BATCH_SIZE, desc='Adding chunks in batches...'):
            self.client.upsert(chunk, **kwargs)

    def query(self, input_query: List[str], n_results: int, where: Dict[(str, any)], citations: bool=False, **kwargs: Optional[Dict[(str, any)]]) -> Union[(List[Tuple[(str, Dict)]], List[str])]:
        """Return the top ``n_results`` stored texts most similar to the query.

        With ``citations=True`` each result is a (text, metadata) tuple where
        metadata includes the similarity score; otherwise just the text.
        """
        # NOTE(review): input_query is annotated List[str] but wrapped in a
        # list again before embedding — looks like it is actually a single
        # string at runtime; confirm against callers.
        query_vector = self.embedder.embedding_fn([input_query])[0]
        data = self.client.query(vector=query_vector, filter=where, top_k=n_results, include_metadata=True, **kwargs)
        contexts = []
        for doc in data['matches']:
            metadata = doc['metadata']
            context = metadata['text']
            if citations:
                metadata['score'] = doc['score']
                contexts.append(tuple((context, metadata)))
            else:
                contexts.append(context)
        return contexts

    def set_collection_name(self, name: str):
        """Set the collection name used to derive the Pinecone index name."""
        if (not isinstance(name, str)):
            raise TypeError('Collection name must be a string')
        self.config.collection_name = name

    def count(self) -> int:
        """Total number of vectors currently stored in the index."""
        return self.client.describe_index_stats()['total_vector_count']

    def _get_or_create_db(self):
        # The client handle doubles as the "database" object.
        return self.client

    def reset(self):
        """Delete and recreate the index, discarding all stored vectors."""
        pinecone.delete_index(self.index_name)
        self._setup_pinecone_index()

    def _get_index_name(self) -> str:
        # Pinecone index names must be lowercase and may not contain underscores.
        return f'{self.config.collection_name}-{self.config.vector_dimension}'.lower().replace('_', '-')
def get_connections(wire, wire_info, conn, idx, coord_to_tile, tiles):
    """Yield (target_tile_coord, target_wire) pairs that ``wire`` connects to.

    A connection rule pairs a wire in tile_types[0] with one in tile_types[1]
    at a fixed grid offset. The given wire may match either side (or both),
    so candidates are generated in both the forward and reverse directions
    and filtered by the expected tile type at the destination.
    """
    pair = conn['wire_pairs'][idx]
    tile_types = conn['tile_types']
    shortname = wire_info['shortname']
    deltas = conn['grid_deltas']
    dx, dy = deltas[0], deltas[1]
    matches_first = (tile_types[0] == wire_info['type']) and (shortname == pair[0])
    matches_second = (tile_types[1] == wire_info['type']) and (shortname == pair[1])
    assert (matches_first or matches_second), (wire, conn)
    origin = tiles[wire_info['tile']]
    origin_x = origin['grid_x']
    origin_y = origin['grid_y']
    # (destination coordinate, expected tile type there, wire name there)
    candidates = []
    if matches_first:
        candidates.append(((origin_x + dx, origin_y + dy), tile_types[1], pair[1]))
    if matches_second:
        candidates.append(((origin_x - dx, origin_y - dy), tile_types[0], pair[0]))
    for target_tile, want_type, want_wire in candidates:
        if is_tile_type(tiles, coord_to_tile, target_tile, want_type):
            yield (target_tile, want_wire)
class Clocks(dict):
    """Mapping of clock name -> clocker description dict."""

    def names(self):
        """Return all registered clock names as a list."""
        return [clock_name for clock_name in self]

    def add_io(self, io):
        """Append a '<name>_clk' pad entry to ``io`` for every clock (and print it)."""
        for clock_name in self.names():
            print((clock_name + '_clk', 0, Pins(1)))
            io.append((clock_name + '_clk', 0, Pins(1)))

    def add_clockers(self, sim_config):
        """Register a '<name>_clk' clocker on ``sim_config`` for every clock."""
        for clock_name, description in self.items():
            sim_config.add_clocker(clock_name + '_clk', **description)
def process() -> None:
    """Turn the raw green-taxi parquet files into feature parquet files.

    Adds a string UUID per row, derives trip duration in minutes, drops the
    store_and_fwd_flag column, and median/zero-fills missing values.
    """
    raw_dir = 'data/raw'
    features_dir = 'data/features'
    source_files = ['green_tripdata_2021-01.parquet', 'green_tripdata_2021-02.parquet']
    print('Load train data')
    for source_file in source_files:
        frame = pd.read_parquet(f'{raw_dir}/{source_file}')
        print('Generate UID')
        frame['uuid'] = [uuid.uuid4() for _ in range(len(frame))]
        frame['uuid'] = frame['uuid'].astype('str')
        # Trip duration: dropoff minus pickup, converted to fractional minutes.
        frame['duration_min'] = (frame.lpep_dropoff_datetime - frame.lpep_pickup_datetime)
        frame.duration_min = frame.duration_min.apply(lambda delta: float(delta.total_seconds() / 60))
        frame = frame.drop(['store_and_fwd_flag'], axis=1)
        # Median-fill numeric gaps; anything left (non-numeric NaNs) becomes 0.
        numeric_cols = frame.select_dtypes(include='number').columns
        frame = frame.fillna(frame[numeric_cols].median()).fillna(0)
        print('Save data')
        frame.to_parquet(f'{features_dir}/{source_file}')
_common_aws_errors  # NOTE(review): stripped decorator remnant — confirm against original source
def describe_dynamo_tables(client: Any, table_names: List[str]) -> List[Dict]:
    """Return the 'Table' description dict for each named DynamoDB table.

    One DescribeTable call is made per name, preserving input order.
    """
    return [client.describe_table(TableName=name)['Table'] for name in table_names]
class TestWildcard(util.PluginTestCase):
    """Spell-check plugin test for the wildcard flow-control step.

    The pipeline keeps only Python comments, then context filters strip
    noqa/pragma/shebang lines, encoding markers, and back-tick fenced or
    inline code, so only real comment prose is spell-checked.
    """

    def setup_fs(self):
        # Write the pipeline configuration the test run will load.
        config = self.dedent("\n matrix:\n - name: python\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.python:\n group_comments: true\n - pyspelling.flow_control.wildcard:\n allow:\n - py-comment\n - pyspelling.filters.context:\n context_visible_first: true\n delimiters:\n # Ignore lint (noqa) and coverage (pragma) as well as shebang (#!)\n - open: '^(?: *(?:noqa\\b|pragma: no cover)|!)'\n close: '$'\n # Ignore Python encoding string -*- encoding stuff -*-\n - open: '^ *-\\*-'\n close: '-\\*-$'\n - pyspelling.filters.context:\n context_visible_first: true\n escapes: '\\\\[\\\\`]'\n delimiters:\n # Ignore multiline content between fences (fences can have 3 or more back ticks)\n # ```\n # content\n # ```\n - open: '(?s)^(?P<open> *`{{3,}})$'\n close: '^(?P=open)$'\n # Ignore text between inline back ticks\n - open: '(?P<open>`+)'\n close: '(?P=open)'\n ").format(self.tempdir)
        self.mktemp('.wildcard.yml', config, 'utf-8')

    def test_wildcard(self):
        # Misspellings inside filtered regions (docstring, fenced comment block)
        # must NOT be reported; only the bad words in visible comments are.
        bad_words = ['helo', 'begn']
        good_words = ['yes', 'word']
        template = self.dedent('\n #!/usr/bin/env python\n # -*- coding: utf-8 -*-\n """\n #! {}\n """\n def function(): # noqa\n # ```\n # alsjf alsk\n # eurpoq qeiew\n # ```\n ').format('\n'.join((bad_words + good_words)))
        self.mktemp('test.txt', template, 'utf-8')
        self.assert_spellcheck('.wildcard.yml', bad_words)
class JSONEncoderTests(TestCase):
    """Tests for the custom JSONEncoder's `default` conversions."""

    def setUp(self):
        # Fresh encoder per test.
        self.encoder = JSONEncoder()

    def test_encode_decimal(self):
        # Decimals are serialized as plain floats.
        d = Decimal(3.14)
        assert (self.encoder.default(d) == float(d))

    def test_encode_datetime(self):
        # Naive datetimes -> ISO string; UTC-aware ones get a trailing 'Z'.
        current_time = datetime.now()
        assert (self.encoder.default(current_time) == current_time.isoformat())
        current_time_utc = current_time.replace(tzinfo=utc)
        assert (self.encoder.default(current_time_utc) == (current_time.isoformat() + 'Z'))

    def test_encode_time(self):
        # Naive times -> ISO string.
        current_time = datetime.now().time()
        assert (self.encoder.default(current_time) == current_time.isoformat())

    def test_encode_time_tz(self):
        # Timezone-aware times are rejected.
        current_time = datetime.now().time()
        current_time = current_time.replace(tzinfo=utc)
        with pytest.raises(ValueError):
            self.encoder.default(current_time)

    def test_encode_date(self):
        # Dates -> ISO string.
        current_date = date.today()
        assert (self.encoder.default(current_date) == current_date.isoformat())

    def test_encode_timedelta(self):
        # Timedeltas -> total seconds, as a string.
        delta = timedelta(hours=1)
        assert (self.encoder.default(delta) == str(delta.total_seconds()))

    def test_encode_uuid(self):
        # UUIDs -> their canonical string form.
        unique_id = uuid4()
        assert (self.encoder.default(unique_id) == str(unique_id))

    .skipif((not coreapi), reason='coreapi is not installed')  # NOTE(review): stripped decorator remnant — likely `@pytest.mark.skipif(...)`; as written this line is a syntax error, confirm against original source
    def test_encode_coreapi_raises_error(self):
        # coreapi objects are explicitly unsupported.
        with pytest.raises(RuntimeError):
            self.encoder.default(coreapi.Document())
        with pytest.raises(RuntimeError):
            self.encoder.default(coreapi.Error())

    def test_encode_object_with_tolist(self):
        # Objects exposing .tolist() (numpy-style) are converted via it.
        foo = MockList()
        assert (self.encoder.default(foo) == [1, 2, 3])

    def test_encode_empty_returnlist(self):
        # An empty ReturnList serializes to an empty JSON array.
        foo = ReturnList(serializer=None)
        assert (self.encoder.default(foo) == [])
def test_static_files_only(app_static_files_only, elasticapm_client):
    """A static-file GET yields exactly one span-free, successful transaction."""
    path = ('/tmp/' + file_name)
    request_headers = {
        constants.TRACEPARENT_HEADER_NAME: '00-0af7651916cd43dd8448eb211c80319c-b7ad6b-03',
        constants.TRACESTATE_HEADER_NAME: 'foo=bar,bar=baz',
        'REMOTE_ADDR': '127.0.0.1',
    }
    response = TestClient(app_static_files_only).get(path, headers=request_headers)
    assert response.status_code == 200
    transactions = elasticapm_client.events[constants.TRANSACTION]
    assert len(transactions) == 1
    transaction = transactions[0]
    # Serving a static file should not create any child spans.
    assert len(elasticapm_client.spans_for_transaction(transaction)) == 0
    assert transaction['name'] == 'GET /tmp'
    assert transaction['result'] == 'HTTP 2xx'
    assert transaction['outcome'] == 'success'
    assert transaction['type'] == 'request'
    assert transaction['span_count']['started'] == 0
    assert transaction['context']['request']['url']['pathname'] == path
    recorded_request = transaction['context']['request']
    assert recorded_request['method'] == 'GET'
    assert recorded_request['socket'] == {'remote_address': '127.0.0.1'}
_type(ofproto.OFPTMPBF_TIME_CAPABILITY)  # NOTE(review): stripped decorator remnant — likely a `register_type`-style class decorator; confirm against original source
class OFPBundleFeaturesPropTime(OFPBundleFeaturesProp):
    """OpenFlow bundle-features time-capability property.

    Carries four OFPTime fields laid out back-to-back after the common
    (type, length) property header.
    """

    def __init__(self, type_=None, length=None, sched_accuracy=None, sched_max_future=None, sched_max_past=None, timestamp=None):
        super(OFPBundleFeaturesPropTime, self).__init__(type_, length)
        self.sched_accuracy = sched_accuracy
        self.sched_max_future = sched_max_future
        self.sched_max_past = sched_max_past
        self.timestamp = timestamp

    # NOTE(review): uses `cls` but has no @classmethod decorator — presumably
    # stripped during extraction; confirm against original source.
    def parser(cls, buf):
        """Parse a property from ``buf``: header, then four OFPTime structs."""
        prop = cls()
        (prop.type, prop.length) = struct.unpack_from(ofproto.OFP_BUNDLE_FEATURES_PROP_TIME_0_PACK_STR, buf)
        offset = ofproto.OFP_BUNDLE_FEATURES_PROP_TIME_0_SIZE
        # The four time fields are packed consecutively, in this order.
        for f in ['sched_accuracy', 'sched_max_future', 'sched_max_past', 'timestamp']:
            t = OFPTime.parser(buf, offset)
            setattr(prop, f, t)
            offset += ofproto.OFP_TIME_SIZE
        return prop

    def serialize(self):
        """Serialize to wire format; returns the packed bytearray."""
        self.length = ofproto.OFP_BUNDLE_FEATURES_PROP_TIME_SIZE
        buf = bytearray()
        msg_pack_into(ofproto.OFP_BUNDLE_FEATURES_PROP_TIME_0_PACK_STR, buf, 0, self.type, self.length)
        offset = ofproto.OFP_BUNDLE_FEATURES_PROP_TIME_0_SIZE
        # Mirror of parser(): same field order, fixed stride.
        for f in [self.sched_accuracy, self.sched_max_future, self.sched_max_past, self.timestamp]:
            f.serialize(buf, offset)
            offset += ofproto.OFP_TIME_SIZE
        return buf
class BuildingMenuCmdSet(CmdSet):
    """Command set installed on a caller while a building menu is open.

    Replaces the caller's normal commands so that all input is routed
    through the menu's no-input/no-match handlers.
    """
    key = 'building_menu'
    priority = 5
    mergetype = 'Replace'

    def at_cmdset_creation(self):
        """Attach the menu-aware input handlers, restoring a saved menu if any."""
        owner = self.cmdsetobj
        menu = owner.ndb._building_menu
        if menu is None:
            # Volatile storage was cleared (e.g. after a reload); try persistent.
            menu = owner.db._building_menu
        if menu:
            # Rebuild the live menu object from the caller's saved state.
            menu = BuildingMenu.restore(owner)
        for command_cls in (CmdNoInput, CmdNoMatch):
            self.add(command_cls(building_menu=menu))
class OptionPlotoptionsStreamgraphAccessibilityPoint(Options):
    """Highcharts per-point accessibility options for streamgraph series.

    NOTE(review): each option appears as a duplicated ``def`` pair — these
    look like stripped ``@property`` / ``@<name>.setter`` decorators; confirm
    against the original generated source.
    """

    def dateFormat(self):
        # Date format string for point descriptions; no default.
        return self._config_get(None)

    def dateFormat(self, text: str):
        self._config(text, js_type=False)

    def dateFormatter(self):
        # Callback alternative to dateFormat; no default.
        return self._config_get(None)

    def dateFormatter(self, value: Any):
        self._config(value, js_type=False)

    def describeNull(self):
        # Null points are described by default.
        return self._config_get(True)

    def describeNull(self, flag: bool):
        self._config(flag, js_type=False)

    def descriptionFormat(self):
        # Format string for the point description; no default.
        return self._config_get(None)

    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    def descriptionFormatter(self):
        # Callback alternative to descriptionFormat; no default.
        return self._config_get(None)

    def descriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    def valueDecimals(self):
        # Number of decimals in announced values; no default.
        return self._config_get(None)

    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    def valueDescriptionFormat(self):
        # Template used to announce a point's value.
        return self._config_get('{xDescription}{separator}{value}.')

    def valueDescriptionFormat(self, text: str):
        self._config(text, js_type=False)

    def valuePrefix(self):
        # Text prepended to announced values; no default.
        return self._config_get(None)

    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    def valueSuffix(self):
        # Text appended to announced values; no default.
        return self._config_get(None)

    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
class Animator(HasTraits):
    """Small Traits UI controller that drives a periodic Timer.

    Presents Start/Stop buttons plus a delay slider; the timer repeatedly
    invokes the supplied callable every `delay` milliseconds.
    """

    start = Button('Start Animation')
    stop = Button('Stop Animation')
    # Timer period in milliseconds (slider-bounded).
    delay = Range(10, 100000, 500, desc='frequency with which timer is called')
    timer = Any
    traits_view = View(Group(Item('start'), Item('stop'), show_labels=False), Item('_'), Item(name='delay'), title='Animation Controller', buttons=['OK'])

    def __init__(self, millisec, callable, *args, **kwargs):
        """Create (and start) a Timer firing `callable` every `millisec` ms."""
        HasTraits.__init__(self)
        self.delay = millisec
        self.ui = None
        self.timer = Timer(millisec, callable, *args, **kwargs)

    def show(self):
        """Open the Traits UI window for this controller."""
        self.ui = self.edit_traits()

    def close(self):
        """Dispose of the UI window, if one is open."""
        if (self.ui is not None):
            self.ui.dispose()

    def _start_fired(self):
        # Traits handler for the Start button.
        self.timer.Start(self.delay)

    def _stop_fired(self):
        # Traits handler for the Stop button.
        self.timer.Stop()

    def _delay_changed(self, value):
        # Restart a running timer so the new period takes effect immediately;
        # a stopped timer stays stopped.
        t = self.timer
        if (t is None):
            return
        if t.IsRunning():
            t.Stop()
            t.Start(value)
class AtariPPORNDModel(PPORNDModel):
    """PPO + RND (Random Network Distillation) model for Atari pixels.

    Holds a policy/value backbone (`_ppo_net` + `_head`) and an RND pair:
    a fixed random target network (`_tgt_net`) and a trained predictor
    (`_prd_net`) whose prediction error supplies the intrinsic reward.

    NOTE(review): the bare "_method(batch_size=...)" statements below look
    like truncated decorators (likely "@remote_method(...)"); restore from
    the original source.
    """

    def __init__(self, num_actions: int, network: str='nature') -> None:
        super().__init__()
        self._num_actions = num_actions
        self._network = network.lower()
        if (self._network == 'nature'):
            self._ppo_net = NatureCNNBackbone()
            self._tgt_net = NatureCNNBackbone()
            self._prd_net = NatureCNNBackbone()
            self._head = DiscreteActorCriticRNDHead(self._ppo_net.output_size, [512], num_actions)
        elif (self._network == 'impala'):
            self._ppo_net = ImpalaCNNBackbone()
            self._tgt_net = ImpalaCNNBackbone()
            self._prd_net = ImpalaCNNBackbone()
            self._head = DiscreteActorCriticRNDHead(self._ppo_net.output_size, [256], num_actions)
        else:
            assert False, 'Unsupported network.'

    def forward(self, obs: torch.Tensor) -> Tuple[(torch.Tensor, torch.Tensor, torch.Tensor)]:
        """Return (log-policy, extrinsic value, intrinsic value) for obs."""
        # Scaling by 255 suggests obs holds uint8 pixels — TODO confirm.
        x = (obs.float() / 255.0)
        h = self._ppo_net(x)
        (logpi, ext_v, int_v) = self._head(h)
        return (logpi, ext_v, int_v)

    _method(batch_size=128)
    def act(self, obs: torch.Tensor, deterministic_policy: torch.Tensor) -> Tuple[(torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor)]:
        """Select actions (greedy or sampled per `deterministic_policy`)."""
        with torch.no_grad():
            (logpi, ext_v, int_v) = self.forward(obs)
            greedy_action = logpi.argmax((- 1), keepdim=True)
            sample_action = logpi.exp().multinomial(1, replacement=True)
            # Per-element switch between argmax and sampled action.
            action = torch.where(deterministic_policy, greedy_action, sample_action)
            # Log-prob of the chosen action only.
            logpi = logpi.gather(dim=(- 1), index=action)
        return (action, logpi, ext_v, int_v)

    _method(batch_size=None)
    def intrinsic_reward(self, obs: torch.Tensor) -> torch.Tensor:
        """RND prediction error used as the intrinsic reward."""
        return self._rnd_error(obs)

    def rnd_loss(self, obs: torch.Tensor) -> torch.Tensor:
        """Mean squared RND error (halved) used to train the predictor."""
        return (self._rnd_error(obs).mean() * 0.5)

    def _rnd_error(self, obs: torch.Tensor) -> torch.Tensor:
        """Per-sample squared error between predictor and frozen target."""
        x = (obs.float() / 255.0)
        # Target network is never trained: no gradient through it.
        with torch.no_grad():
            tgt = self._tgt_net(x)
        prd = self._prd_net(x)
        err = (prd - tgt).square().mean((- 1), keepdim=True)
        return err
def uninstall_system():
    """Remove the system-wide Nautilus Terminal extension files.

    Deletes the extension module (and its byte-compiled ``.pyc`` sibling)
    plus the GLib schema file, then recompiles the system schema directory
    so the removed schema no longer resolves.
    """
    files = [
        os.path.join(SYSTEM_EXTENSION_DIR, EXTENSION_FILE),
        os.path.join(SYSTEM_EXTENSION_DIR, EXTENSION_FILE + 'c'),  # compiled copy
        os.path.join(SYSTEM_GLIB_SCHEMA_DIR, GLIB_SCHEMA_FILE),
    ]
    for file_ in files:
        if os.path.isfile(file_):
            os.remove(file_)
    if is_glib_compile_schema_installed():
        try:
            subprocess.call([GLIB_COMPILE_SCHEMA, SYSTEM_GLIB_SCHEMA_DIR])
        except Exception:
            # Best effort: a failed recompile should not abort the uninstall.
            # (fixed typo: "occured" -> "occurred")
            print('An error occurred while trying to recompile glib schemas')
    print('Nautilus Terminal extension successfully uninstalled from the system.')
# NOTE(review): the leading ".parametrize(...)" is a truncated decorator —
# presumably "@pytest.mark.parametrize"; restore from the original test file.
.parametrize('transaction,expected,key_object,from_', TEST_SIGNED_TRANSACTION_PARAMS, ids=['with set gas', 'with no set gas', 'with mismatched sender', 'with invalid sender', 'with gasPrice lower than base fee', 'with txn type and dynamic fee txn params', 'with dynamic fee txn params and no type'])
def test_signed_transaction(w3, fund_account, transaction, expected, key_object, from_):
    """Exercise the sign-and-send-raw middleware for one parametrized case.

    `expected` is either an Exception subclass (the send must raise) or a
    numeric bound used to sanity-check the sender's balance after the send.
    """
    w3.middleware_onion.add(construct_sign_and_send_raw_middleware(key_object))
    # Keep only truthy 'to'/'from' entries so absent senders stay absent.
    to_from = valfilter(bool, {'to': w3.eth.accounts[0], 'from': from_})
    _transaction = merge(transaction, to_from)
    if (isinstance(expected, type) and issubclass(expected, Exception)):
        with pytest.raises(expected):
            w3.eth.send_transaction(_transaction)
    else:
        start_balance = w3.eth.get_balance(_transaction.get('from', w3.eth.accounts[0]))
        w3.eth.send_transaction(_transaction)
        # Balance may only move within the expected bound (gas + value).
        assert (w3.eth.get_balance(_transaction.get('from')) <= (start_balance + expected))
class _InterceptGeneratorMeta(type):
def __new__(cls, name, bases, dict):
return super().__new__(cls, name, (bases + (tuple,)), dict)
def __call__(cls, *args, **kwargs):
if ((len(args) == 1) and (kwargs == {}) and isinstance(args[0], Generator)):
args = tuple(args[0])
obj = super().__call__(*args, **kwargs)
obj._is_frozen = True
return obj |
class ZenithNovelsPageProcessor(HtmlProcessor.HtmlPageProcessor):
    """Page processor for zenithnovels.com content.

    Strips spans hidden with white text (an anti-scrape watermark trick)
    before the generic HTML processing runs.
    """

    wanted_mimetypes = ['text/html']
    want_priority = 80
    loggerPath = 'Main.Text.ZenithNovels'

    def wantsUrl(url):
        # NOTE(review): no `self`/`@staticmethod` is visible, and the regex
        # literal below is truncated (the site URL was stripped during
        # extraction) — this line is not valid Python as-is; restore the
        # pattern from the original file.
        if re.search('^ url):
            print(("zenith novels Wants url: '%s'" % url))
            return True
        return False

    def preprocessBody(self, soup):
        """Remove white-on-white <span> elements, then return the soup."""
        # Matches inline styles like "color: white" / "color:white", any case.
        badspans = soup.find_all('span', style=re.compile('color\\W?:\\W?white', re.I))
        for bad in badspans:
            bad.decompose()
        return soup
def handle_it(exception_details):
    """Dispatch a debugger exception report back to the instrumentation script.

    Two exception types are handled:

    * ``'access-violation'`` — classifies the faulting address against the
      active watchpoint (inside the watched byte range -> cmd 3, same
      protected page -> cmd 1, unrelated -> cmd 100) and posts the updated
      ``break_point_info``.
    * ``'breakpoint'`` — matches the faulting address against the registered
      software breakpoints and posts the matching entry (cmd 2), or a
      generic ``cmd=100`` message when nothing matches.
    """
    access_violation_flag = False
    if 'access-violation' == exception_details['type']:
        memory = exception_details['memory']
        fault_addr = int(memory['address'], 16)
        watch_start = data.break_point_info['break_addr']
        page_start = data.break_point_info['break_page_info'][0]
        if fault_addr in range(watch_start, watch_start + data.break_point_info['break_len']):
            # Fault inside the watched byte range.
            data.break_point_info['current_pc'] = int(exception_details['address'], 16)
            data.break_point_info['current_lr'] = int(exception_details['context']['lr'], 16)
            data.break_point_info['cmd'] = 3
            access_violation_flag = True
        elif fault_addr in range(page_start, page_start + data.proc_info['pagesize']):
            # Fault on the protected page but outside the watched range.
            data.break_point_info['current_pc'] = int(exception_details['address'], 16)
            data.break_point_info['current_lr'] = int(exception_details['context']['lr'], 16)
            data.break_point_info['cmd'] = 1
            access_violation_flag = True
        else:
            # Unrelated access violation: tell the script to pass it through.
            print('ignore the access-violation exception...')
            data.break_point_info['cmd'] = 100
            access_violation_flag = True
    if access_violation_flag:
        data.rpc.api._script.post(wrapper_to_post('exception_ret', data.break_point_info))
        return
    if 'breakpoint' == exception_details['type']:
        if 0 == len(data.soft_breakpoint_runtime):
            data.rpc.api._script.post(wrapper_to_post('exception_ret', {'info': 'breakpoint', 'cmd': 100}))
            return
        for index in range(0, len(data.soft_breakpoint_runtime)):
            # The +1 tolerance presumably accounts for Thumb-mode addresses
            # (low bit set) — TODO confirm against the script side.
            if ((int(exception_details['address'], 16) == data.soft_breakpoint_runtime[index]['break_addr']) or ((int(exception_details['address'], 16) + 1) == data.soft_breakpoint_runtime[index]['break_addr'])):
                data.soft_breakpoint_runtime[index]['cmd'] = 2
                data.soft_breakpoint_runtime[index]['break_page_info'] = data.break_point_info['break_page_info']
                data.soft_breakpoint_runtime[index]['index'] = index
                data.rpc.api._script.post(wrapper_to_post('exception_ret', data.soft_breakpoint_runtime[index]))
                return
        print('ignore the breakpoint exception...')
        # BUG FIX: the original did `data.soft_breakpoint_runtime['cmd'] = 100`
        # here, which raises TypeError because soft_breakpoint_runtime is a
        # list (it is indexed by integer above).  The generic cmd=100 message
        # below already carries that information, so the line was removed.
        data.rpc.api._script.post(wrapper_to_post('exception_ret', {'info': 'breakpoint', 'cmd': 100}))
        return
# NOTE(review): ".EventDecorator()" is a truncated decorator — presumably
# "@PETSc.Log.EventDecorator()"; restore from the original Firedrake source.
.EventDecorator()
def compile_coordinate_element(ufl_coordinate_element, contains_eps, parameters=None):
    """Generate C source mapping physical to reference coordinates.

    Builds a Newton-iteration kernel (a single iteration for affine
    elements, up to 16 otherwise) for the given UFL coordinate element and
    returns the filled-in C code as a string.

    :arg ufl_coordinate_element: UFL element describing the coordinate field.
    :arg contains_eps: tolerance substituted into the generated code.
    :arg parameters: optional dict of overrides merged onto TSFC defaults.
    """
    if (parameters is None):
        parameters = tsfc.default_parameters()
    else:
        # Merge user overrides on top of the TSFC defaults.
        _ = tsfc.default_parameters()
        _.update(parameters)
        parameters = _
    element = tsfc.finatinterface.create_element(ufl_coordinate_element)
    cell = ufl_coordinate_element.cell
    # Extruded meshes need an extra layers argument in the generated wrapper.
    extruded = isinstance(cell, ufl.TensorProductCell)
    # Substitutions for the C template below.
    code = {'geometric_dimension': cell.geometric_dimension(), 'topological_dimension': cell.topological_dimension(), 'celldist_l1_c_expr': celldist_l1_c_expr(element.cell, 'X'), 'to_reference_coords_newton_step': to_reference_coords_newton_step(ufl_coordinate_element, parameters), 'init_X': init_X(element.cell, parameters), 'max_iteration_count': (1 if is_affine(ufl_coordinate_element) else 16), 'convergence_epsilon': 1e-12, 'dX_norm_square': dX_norm_square(cell.topological_dimension()), 'X_isub_dX': X_isub_dX(cell.topological_dimension()), 'extruded_arg': (', int const *__restrict__ layers' if extruded else ''), 'extr_comment_out': ('//' if extruded else ''), 'non_extr_comment_out': ('//' if (not extruded) else ''), 'IntType': as_cstr(IntType), 'ScalarType': ScalarType_c, 'RealType': RealType_c, 'tolerance': contains_eps}
    evaluate_template_c = '#include <math.h>\nstruct ReferenceCoords {\n    %(ScalarType)s X[%(geometric_dimension)d];\n};\n\nstatic %(RealType)s tolerance = %(tolerance)s; /* used in locate_cell */\n\n%(to_reference_coords_newton_step)s\n\nstatic inline void to_reference_coords_kernel(void *result_, double *x0, %(RealType)s *cell_dist_l1, %(ScalarType)s *C)\n{\n    struct ReferenceCoords *result = (struct ReferenceCoords *) result_;\n\n    /*\n     * Mapping coordinates from physical to reference space\n     */\n\n    %(ScalarType)s *X = result->X;\n%(init_X)s\n\n    int converged = 0;\n    for (int it = 0; !converged && it < %(max_iteration_count)d; it++) {\n        %(ScalarType)s dX[%(topological_dimension)d] = { 0.0 };\n        to_reference_coords_newton_step(C, x0, X, dX);\n\n        if (%(dX_norm_square)s < %(convergence_epsilon)g * %(convergence_epsilon)g) {\n            converged = 1;\n        }\n\n%(X_isub_dX)s\n    }\n\n    *cell_dist_l1 = %(celldist_l1_c_expr)s;\n}\n\nstatic inline void wrap_to_reference_coords(\n    void* const result_, double* const x, %(RealType)s* const cell_dist_l1, %(IntType)s const start, %(IntType)s const end%(extruded_arg)s,\n    %(ScalarType)s const *__restrict__ coords, %(IntType)s const *__restrict__ coords_map);\n\n%(RealType)s to_reference_coords(void *result_, struct Function *f, int cell, double *x)\n{\n    %(RealType)s cell_dist_l1 = 0.0;\n    %(extr_comment_out)swrap_to_reference_coords(result_, x, &cell_dist_l1, cell, cell+1, f->coords, f->coords_map);\n    return cell_dist_l1;\n}\n\n%(RealType)s to_reference_coords_xtr(void *result_, struct Function *f, int cell, int layer, double *x)\n{\n    %(RealType)s cell_dist_l1 = 0.0;\n    %(non_extr_comment_out)sint layers[2] = {0, layer+2};  // +2 because the layer loop goes to layers[1]-1, which is nlayers-1\n    %(non_extr_comment_out)swrap_to_reference_coords(result_, x, &cell_dist_l1, cell, cell+1, layers, f->coords, f->coords_map);\n    return cell_dist_l1;\n}\n\n'
    return (evaluate_template_c % code)
class DesktopCoverPlugin():
    """Exaile plugin entry point for the desktop cover display."""

    def __init__(self):
        self.__desktop_cover = None

    def enable(self, _exaile):
        """Called when the plugin is enabled; upgrades legacy settings."""
        self.__migrate_anchor_setting()

    def on_gui_loaded(self):
        """Create the cover widget once the main GUI exists."""
        self.__desktop_cover = DesktopCover()

    def disable(self, _exaile):
        """Tear down the cover widget."""
        self.__desktop_cover.destroy()
        self.__desktop_cover = None

    @staticmethod
    def __migrate_anchor_setting():
        """Convert a legacy integer anchor setting to its string gravity name.

        BUG FIX: this was a plain ``def`` with no parameters, so the
        ``self.__migrate_anchor_setting()`` call in ``enable`` raised
        TypeError; restored the (presumably stripped) @staticmethod.
        """
        gravity = settings.get_option('plugin/desktopcover/anchor', 'topleft')
        gravity_map = DesktopCover.gravity_map
        if gravity not in gravity_map:
            # Legacy versions stored an index into the gravity name list.
            gravities = list(gravity_map.keys())
            try:
                gravity = gravities[gravity]
            except (IndexError, TypeError):
                gravity = 'topleft'
            settings.set_option('plugin/desktopcover/anchor', gravity)
class ConvDepthwiseTestCase(unittest.TestCase):
    """Compares AITemplate's depthwise conv2d against PyTorch."""

    def test_fp16(self, batch=4):
        """Build, compile and run a 3x3 depthwise conv in fp16.

        AITemplate uses NHWC layout while PyTorch uses NCHW, hence the
        permutes around the module call.
        """
        groups = 32
        size = (12, 12)
        target = detect_target()
        # NHWC input and per-channel (depthwise) 3x3 filters.
        X = Tensor(shape=[IntImm(batch), *size, 32], dtype='float16', name='input_0', is_input=True)
        W = Tensor(shape=[32, 3, 3, 1], dtype='float16', name='input_1', is_input=True)
        OP = ops.conv2d_depthwise(stride=1, pad=1, dilate=1, group=groups)
        Y = OP(X, W)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, target, './tmp', 'conv2d_dw')
        # Reference computation in PyTorch (NCHW layout).
        X_pt = torch.randn(batch, 32, *size).cuda().half()
        W_pt = torch.randn(32, 1, 3, 3).cuda().half()
        Y_pt = torch.nn.functional.conv2d(X_pt, W_pt, padding=1, groups=groups)
        # Convert inputs to NHWC for the AITemplate module.
        x = X_pt.permute((0, 2, 3, 1)).contiguous()
        w = W_pt.permute((0, 2, 3, 1)).contiguous()
        y = torch.empty([batch, *size, 32]).cuda().half()
        module.run_with_tensors({'input_0': x, 'input_1': w}, [y])
        y_transpose = y.permute((0, 3, 1, 2))
        self.assertFalse(y_transpose.isnan().any())
        self.assertFalse(y_transpose.isinf().any())
        # Looser tolerances on non-CUDA backends.
        if (target.name() == 'cuda'):
            torch.testing.assert_close(Y_pt, y_transpose, atol=0.01, rtol=0.01)
        else:
            torch.testing.assert_close(Y_pt, y_transpose, atol=0.125, rtol=0.1)
class TestOIDCProviderConfig():
    """Tests for the OIDC provider-config admin APIs (firebase_admin.auth).

    NOTE(review): this block was damaged during extraction — the bare
    ``.parametrize(...)`` lines are truncated ``@pytest.mark.parametrize``
    decorators, the ``issuer`` URL literals in VALID_CREATE_OPTIONS /
    OIDC_CONFIG_REQUEST and in ``_assert_provider_config`` were stripped
    (leaving unterminated strings), and one assertion reads
    ``excinfo.value. is not None``.  Restore these from the original file.
    """

    VALID_CREATE_OPTIONS = {'provider_id': 'oidc.provider', 'client_id': 'CLIENT_ID', 'issuer': ' 'display_name': 'oidcProviderName', 'enabled': True, 'id_token_response_type': True, 'code_response_type': True, 'client_secret': 'CLIENT_SECRET'}
    OIDC_CONFIG_REQUEST = {'displayName': 'oidcProviderName', 'enabled': True, 'clientId': 'CLIENT_ID', 'clientSecret': 'CLIENT_SECRET', 'issuer': ' 'responseType': {'code': True, 'idToken': True}}

    .parametrize('provider_id', (INVALID_PROVIDER_IDS + ['saml.provider']))
    def test_get_invalid_provider_id(self, user_mgt_app, provider_id):
        with pytest.raises(ValueError) as excinfo:
            auth.get_oidc_provider_config(provider_id, app=user_mgt_app)
        assert str(excinfo.value).startswith('Invalid OIDC provider ID')

    def test_get(self, user_mgt_app):
        recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE)
        provider_config = auth.get_oidc_provider_config('oidc.provider', app=user_mgt_app)
        self._assert_provider_config(provider_config)
        # Exactly one GET against the oauthIdpConfigs resource.
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'GET')
        assert (req.url == '{0}{1}'.format(USER_MGT_URLS['PREFIX'], '/oauthIdpConfigs/oidc.provider'))

    .parametrize('invalid_opts', [{'provider_id': None}, {'provider_id': ''}, {'provider_id': 'saml.provider'}, {'client_id': None}, {'client_id': ''}, {'issuer': None}, {'issuer': ''}, {'issuer': 'not a url'}, {'display_name': True}, {'enabled': 'true'}, {'id_token_response_type': 'true'}, {'code_response_type': 'true'}, {'code_response_type': True, 'client_secret': ''}, {'code_response_type': True, 'client_secret': True}, {'code_response_type': True, 'client_secret': None}, {'code_response_type': False, 'id_token_response_type': False}])
    def test_create_invalid_args(self, user_mgt_app, invalid_opts):
        # Start from valid options and poison one field at a time.
        options = dict(self.VALID_CREATE_OPTIONS)
        options.update(invalid_opts)
        with pytest.raises(ValueError):
            auth.create_oidc_provider_config(**options, app=user_mgt_app)

    def test_create(self, user_mgt_app):
        recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE)
        provider_config = auth.create_oidc_provider_config(**self.VALID_CREATE_OPTIONS, app=user_mgt_app)
        self._assert_provider_config(provider_config)
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'POST')
        assert (req.url == '{0}/oauthIdpConfigs?oauthIdpConfigId=oidc.provider'.format(USER_MGT_URLS['PREFIX']))
        got = json.loads(req.body.decode())
        assert (got == self.OIDC_CONFIG_REQUEST)

    def test_create_minimal(self, user_mgt_app):
        """Omitted optional args must be omitted from the request payload."""
        recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE)
        options = dict(self.VALID_CREATE_OPTIONS)
        del options['display_name']
        del options['enabled']
        del options['client_secret']
        del options['id_token_response_type']
        del options['code_response_type']
        want = dict(self.OIDC_CONFIG_REQUEST)
        del want['displayName']
        del want['enabled']
        del want['clientSecret']
        del want['responseType']
        provider_config = auth.create_oidc_provider_config(**options, app=user_mgt_app)
        self._assert_provider_config(provider_config)
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'POST')
        assert (req.url == '{0}/oauthIdpConfigs?oauthIdpConfigId=oidc.provider'.format(USER_MGT_URLS['PREFIX']))
        got = json.loads(req.body.decode())
        assert (got == want)

    def test_create_empty_values(self, user_mgt_app):
        """Falsy-but-set values ('' / False) must still be sent explicitly."""
        recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE)
        options = dict(self.VALID_CREATE_OPTIONS)
        options['display_name'] = ''
        options['enabled'] = False
        options['code_response_type'] = False
        want = dict(self.OIDC_CONFIG_REQUEST)
        want['displayName'] = ''
        want['enabled'] = False
        want['responseType'] = {'code': False, 'idToken': True}
        del want['clientSecret']
        provider_config = auth.create_oidc_provider_config(**options, app=user_mgt_app)
        self._assert_provider_config(provider_config)
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'POST')
        assert (req.url == '{0}/oauthIdpConfigs?oauthIdpConfigId=oidc.provider'.format(USER_MGT_URLS['PREFIX']))
        got = json.loads(req.body.decode())
        assert (got == want)

    .parametrize('invalid_opts', [{}, {'provider_id': None}, {'provider_id': ''}, {'provider_id': 'saml.provider'}, {'client_id': ''}, {'issuer': ''}, {'issuer': 'not a url'}, {'display_name': True}, {'enabled': 'true'}, {'id_token_response_type': 'true'}, {'code_response_type': 'true'}, {'code_response_type': True, 'client_secret': ''}, {'code_response_type': True, 'client_secret': True}, {'code_response_type': True, 'client_secret': None}, {'code_response_type': False, 'id_token_response_type': False}])
    def test_update_invalid_args(self, user_mgt_app, invalid_opts):
        options = {'provider_id': 'oidc.provider'}
        options.update(invalid_opts)
        with pytest.raises(ValueError):
            auth.update_oidc_provider_config(**options, app=user_mgt_app)

    def test_update(self, user_mgt_app):
        recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE)
        provider_config = auth.update_oidc_provider_config(**self.VALID_CREATE_OPTIONS, app=user_mgt_app)
        self._assert_provider_config(provider_config)
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'PATCH')
        # Full update: the mask must list every mutable field.
        mask = ['clientId', 'clientSecret', 'displayName', 'enabled', 'issuer', 'responseType.code', 'responseType.idToken']
        assert (req.url == '{0}/oauthIdpConfigs/oidc.provider?updateMask={1}'.format(USER_MGT_URLS['PREFIX'], ','.join(mask)))
        got = json.loads(req.body.decode())
        assert (got == self.OIDC_CONFIG_REQUEST)

    def test_update_minimal(self, user_mgt_app):
        recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE)
        provider_config = auth.update_oidc_provider_config('oidc.provider', display_name='oidcProviderName', app=user_mgt_app)
        self._assert_provider_config(provider_config)
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'PATCH')
        assert (req.url == '{0}/oauthIdpConfigs/oidc.provider?updateMask=displayName'.format(USER_MGT_URLS['PREFIX']))
        got = json.loads(req.body.decode())
        assert (got == {'displayName': 'oidcProviderName'})

    def test_update_empty_values(self, user_mgt_app):
        """DELETE_ATTRIBUTE maps to an explicit null in the PATCH body."""
        recorder = _instrument_provider_mgt(user_mgt_app, 200, OIDC_PROVIDER_CONFIG_RESPONSE)
        provider_config = auth.update_oidc_provider_config('oidc.provider', display_name=auth.DELETE_ATTRIBUTE, enabled=False, id_token_response_type=False, app=user_mgt_app)
        self._assert_provider_config(provider_config)
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'PATCH')
        mask = ['displayName', 'enabled', 'responseType.idToken']
        assert (req.url == '{0}/oauthIdpConfigs/oidc.provider?updateMask={1}'.format(USER_MGT_URLS['PREFIX'], ','.join(mask)))
        got = json.loads(req.body.decode())
        assert (got == {'displayName': None, 'enabled': False, 'responseType': {'idToken': False}})

    .parametrize('provider_id', (INVALID_PROVIDER_IDS + ['saml.provider']))
    def test_delete_invalid_provider_id(self, user_mgt_app, provider_id):
        with pytest.raises(ValueError) as excinfo:
            auth.delete_oidc_provider_config(provider_id, app=user_mgt_app)
        assert str(excinfo.value).startswith('Invalid OIDC provider ID')

    def test_delete(self, user_mgt_app):
        recorder = _instrument_provider_mgt(user_mgt_app, 200, '{}')
        auth.delete_oidc_provider_config('oidc.provider', app=user_mgt_app)
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'DELETE')
        assert (req.url == '{0}{1}'.format(USER_MGT_URLS['PREFIX'], '/oauthIdpConfigs/oidc.provider'))

    .parametrize('arg', [None, 'foo', list(), dict(), 0, (- 1), 101, False])
    def test_invalid_max_results(self, user_mgt_app, arg):
        with pytest.raises(ValueError):
            auth.list_oidc_provider_configs(max_results=arg, app=user_mgt_app)

    .parametrize('arg', ['', list(), dict(), 0, (- 1), 101, False])
    def test_invalid_page_token(self, user_mgt_app, arg):
        with pytest.raises(ValueError):
            auth.list_oidc_provider_configs(page_token=arg, app=user_mgt_app)

    def test_list_single_page(self, user_mgt_app):
        recorder = _instrument_provider_mgt(user_mgt_app, 200, LIST_OIDC_PROVIDER_CONFIGS_RESPONSE)
        page = auth.list_oidc_provider_configs(app=user_mgt_app)
        self._assert_page(page)
        provider_configs = list((config for config in page.iterate_all()))
        assert (len(provider_configs) == 2)
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'GET')
        assert (req.url == '{0}{1}'.format(USER_MGT_URLS['PREFIX'], '/oauthIdpConfigs?pageSize=100'))

    def test_list_multiple_pages(self, user_mgt_app):
        sample_response = json.loads(OIDC_PROVIDER_CONFIG_RESPONSE)
        configs = _create_list_response(sample_response)
        # First page: two configs plus a continuation token.
        response = {'oauthIdpConfigs': configs[:2], 'nextPageToken': 'token'}
        recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response))
        page = auth.list_oidc_provider_configs(max_results=10, app=user_mgt_app)
        self._assert_page(page, next_page_token='token')
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'GET')
        assert (req.url == '{0}/oauthIdpConfigs?pageSize=10'.format(USER_MGT_URLS['PREFIX']))
        # Second page: the remainder, no token.
        response = {'oauthIdpConfigs': configs[2:]}
        recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response))
        page = page.get_next_page()
        self._assert_page(page, count=1, start=2)
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'GET')
        assert (req.url == '{0}/oauthIdpConfigs?pageSize=10&pageToken=token'.format(USER_MGT_URLS['PREFIX']))

    def test_paged_iteration(self, user_mgt_app):
        """iterate_all() must lazily fetch the next page mid-iteration."""
        sample_response = json.loads(OIDC_PROVIDER_CONFIG_RESPONSE)
        configs = _create_list_response(sample_response)
        response = {'oauthIdpConfigs': configs[:2], 'nextPageToken': 'token'}
        recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response))
        page = auth.list_oidc_provider_configs(app=user_mgt_app)
        iterator = page.iterate_all()
        for index in range(2):
            provider_config = next(iterator)
            assert (provider_config.provider_id == 'oidc.provider{0}'.format(index))
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'GET')
        assert (req.url == '{0}/oauthIdpConfigs?pageSize=100'.format(USER_MGT_URLS['PREFIX']))
        response = {'oauthIdpConfigs': configs[2:]}
        recorder = _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response))
        provider_config = next(iterator)
        assert (provider_config.provider_id == 'oidc.provider2')
        assert (len(recorder) == 1)
        req = recorder[0]
        assert (req.method == 'GET')
        assert (req.url == '{0}/oauthIdpConfigs?pageSize=100&pageToken=token'.format(USER_MGT_URLS['PREFIX']))
        with pytest.raises(StopIteration):
            next(iterator)

    def test_list_empty_response(self, user_mgt_app):
        response = {'oauthIdpConfigs': []}
        _instrument_provider_mgt(user_mgt_app, 200, json.dumps(response))
        page = auth.list_oidc_provider_configs(app=user_mgt_app)
        assert (len(page.provider_configs) == 0)
        provider_configs = list((config for config in page.iterate_all()))
        assert (len(provider_configs) == 0)

    def test_list_error(self, user_mgt_app):
        _instrument_provider_mgt(user_mgt_app, 500, '{"error":"test"}')
        with pytest.raises(exceptions.InternalError) as excinfo:
            auth.list_oidc_provider_configs(app=user_mgt_app)
        assert (str(excinfo.value) == 'Unexpected error response: {"error":"test"}')

    def test_config_not_found(self, user_mgt_app):
        _instrument_provider_mgt(user_mgt_app, 500, CONFIG_NOT_FOUND_RESPONSE)
        with pytest.raises(auth.ConfigurationNotFoundError) as excinfo:
            auth.get_oidc_provider_config('oidc.provider', app=user_mgt_app)
        error_msg = 'No auth provider found for the given identifier (CONFIGURATION_NOT_FOUND).'
        assert (excinfo.value.code == exceptions.NOT_FOUND)
        assert (str(excinfo.value) == error_msg)
        # NOTE(review): the next line lost an attribute name during extraction
        # (likely "http_response") — not valid Python as-is.
        assert (excinfo.value. is not None)
        assert (excinfo.value.cause is not None)

    def _assert_provider_config(self, provider_config, want_id='oidc.provider'):
        """Shared assertions for a parsed OIDCProviderConfig."""
        assert isinstance(provider_config, auth.OIDCProviderConfig)
        assert (provider_config.provider_id == want_id)
        assert (provider_config.display_name == 'oidcProviderName')
        assert (provider_config.enabled is True)
        # NOTE(review): issuer URL literal truncated during extraction.
        assert (provider_config.issuer == '
        assert (provider_config.client_id == 'CLIENT_ID')

    def _assert_page(self, page, count=2, start=0, next_page_token=''):
        """Shared assertions for one ListProviderConfigsPage."""
        assert isinstance(page, auth.ListProviderConfigsPage)
        index = start
        assert (len(page.provider_configs) == count)
        for provider_config in page.provider_configs:
            self._assert_provider_config(provider_config, want_id='oidc.provider{0}'.format(index))
            index += 1
        if next_page_token:
            assert (page.next_page_token == next_page_token)
            assert (page.has_next_page is True)
        else:
            assert (page.next_page_token == '')
            assert (page.has_next_page is False)
            assert (page.get_next_page() is None)
def extractIlover18NovelHomeBlog(item):
    """Parse a release item from the ilover18novel blog feed.

    Returns a release message when the title parses to a chapter/volume and
    one of the known series tags is present, None for previews/unparsable
    titles, and False when no tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip items with no chapter/volume info as well as preview posts.
    if not (chp or vol) or 'preview' in title.lower():
        return None
    tagmap = [
        ("It's Purely an Accident to Love Again", "It's Purely an Accident to Love Again", 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_mask_sha512():
    """SHA-512 strategy masks a value using the cached salt secret.

    NOTE(review): the expected digest below is 119 hex characters — a full
    SHA-512 hex digest is 128 — so the literal looks truncated during
    extraction; restore it from the original test file.
    """
    configuration = HashMaskingConfiguration(algorithm='SHA-512')
    masker = HashMaskingStrategy(configuration)
    expected = '527ca44f5c95400d161c503e6ddad7be01941ec9e7a03c2201338a16ba8a36bb765a430bd6b276af3f743a3a91efecd056645b4ea13b4b8cf39e8e3'
    # Seed the secret cache so the strategy can resolve its salt.
    secret = MaskingSecretCache[str](secret='adobo', masking_strategy=HashMaskingStrategy.name, secret_type=SecretType.salt)
    cache_secret(secret, request_id)
    masked = masker.mask(['monkey'], request_id)[0]
    assert (expected == masked)
    # Clean up cached secrets so other tests are unaffected.
    clear_cache_secrets(request_id)
class DefaultArgumentsMultiple(Argument):
    """A variadic click Argument whose default is used when no values are given.

    Values supplied on the command line are parsed as Python literals via
    ``ast.literal_eval`` and normalized to tuples.
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        kwargs['nargs'] = -1  # variadic: consume all remaining arguments
        default = kwargs.pop('default', tuple())
        super().__init__(*args, **kwargs)
        self.default = default

    def full_process_value(self, ctx: Context, value: Any) -> Any:
        """Substitute the default when no values were given, else parse each."""
        if not value:
            value = self.default
        else:
            value = [self._parse_arg_str(i) for i in value]
        # NOTE(review): delegates to process_value (not full_process_value) —
        # matches click<8 where full_process_value wrapped process_value.
        return super().process_value(ctx, value)

    @staticmethod
    def _parse_arg_str(args: str) -> Tuple[Any]:
        """Parse one literal string into a tuple, wrapping scalars.

        BUG FIX: restored the (presumably stripped) @staticmethod — without
        it the ``self._parse_arg_str(i)`` call above bound ``self`` to the
        ``args`` parameter and passed the value nowhere.
        """
        parsed = ast.literal_eval(args)
        if not isinstance(parsed, tuple):
            parsed = (parsed,)
        return parsed
def _gen_tensor_modal(tensor) -> str:
    """Render the HTML modal summarizing one tensor's attributes."""
    attrs = tensor._attrs
    view_src = attrs['is_view_of']
    # Rows of the attribute table shown in the modal body.
    content = {
        'shape': _get_tensor_shape_str(tensor),
        'is_view_of': 'None' if view_src is None else view_src._attrs['name'],
        'is_input': str(attrs['is_input']),
        'is_output': str(attrs['is_output']),
        'is_param': str(attrs['is_param']),
        'dtype': str(attrs['dtype']),
    }
    table_src = TABLE_TEMPLATE.render(table_data=content)
    name = attrs['name']
    modal_src = MODAL_TEMPLATE.render(
        modal_id=f'{name}_modal',
        modal_label=f'{name}_label',
        modal_title=name,
        modal_content=table_src,
    )
    return modal_src
def check_pubkey(pubkey_path, user, project, opts):
    """Ensure the signing pubkey for user/project exists at pubkey_path.

    Returns True when the key is already present or was fetched
    successfully, False when fetching failed.
    """
    if os.path.exists(pubkey_path):
        log.info('Pubkey for %s/%s exists: %s', user, project, pubkey_path)
        return True
    # Not on disk yet — try to fetch it from the signing service.
    log.info('Missing pubkey for %s/%s', user, project)
    try:
        get_pubkey(user, project, log, opts.sign_domain, pubkey_path)
    except Exception as err:
        log.exception(err)
        return False
    return True
def get_current_version(app, bench_path='.'):
    """Best-effort lookup of an app's version from its source checkout.

    Tries setup.cfg metadata first, then the package __init__.py, and
    finally falls back to scraping setup.py when the earlier attempts fail
    with AttributeError/VersionNotFound.
    """
    current_version = None
    repo_dir = get_repo_dir(app, bench_path=bench_path)
    config_path = os.path.join(repo_dir, 'setup.cfg')
    init_path = os.path.join(repo_dir, os.path.basename(repo_dir), '__init__.py')
    setup_path = os.path.join(repo_dir, 'setup.py')
    try:
        if os.path.exists(config_path):
            # Local import: setuptools may be absent/lazy in some environments.
            from setuptools.config import read_configuration
            config = read_configuration(config_path)
            current_version = config.get('metadata', {}).get('version')
        if (not current_version):
            with open(init_path) as f:
                current_version = get_version_from_string(f.read())
    except (AttributeError, VersionNotFound):
        # Older layouts keep the version only in setup.py.
        with open(setup_path) as f:
            current_version = get_version_from_string(f.read(), field='version')
    return current_version
class FormAssembler(abc.ABC):
    """Abstract base class for assembling a UFL/Slate form into a tensor.

    NOTE(review): several decorators appear stripped in this extract — the
    bare ``_property`` statements before local_kernels /
    all_integer_subdomain_ids / global_kernels / parloops are presumably
    ``@cached_property``, ``result`` and ``diagonal`` look like abstract
    properties, and ``_as_pyop2_type`` is presumably a @staticmethod.
    Confirm against the Firedrake source before relying on this block.
    """

    def __init__(self, form, tensor, bcs=(), form_compiler_parameters=None, needs_zeroing=True, weight=1.0):
        assert (tensor is not None)
        bcs = solving._extract_bcs(bcs)
        self._form = form
        self._tensor = tensor
        self._bcs = bcs
        self._form_compiler_params = (form_compiler_parameters or {})
        self._needs_zeroing = needs_zeroing
        self.weight = weight

    def result(self):
        """The assembled output object (subclass responsibility)."""

    def diagonal(self):
        """Whether only the matrix diagonal is assembled (subclass responsibility)."""

    def assemble(self):
        """Zero the tensor (if requested), run parloops, then apply BCs."""
        if annotate_tape():
            raise NotImplementedError('Taping with explicit FormAssembler objects is not supported yet. Use assemble instead.')
        if self._needs_zeroing:
            self._as_pyop2_type(self._tensor).zero()
        self.execute_parloops()
        for bc in self._bcs:
            if isinstance(bc, EquationBC):
                # EquationBC wraps the actual bc in its residual form.
                bc = bc.extract_form('F')
            self._apply_bc(bc)
        return self.result

    def replace_tensor(self, tensor):
        """Point already-built parloops at a different output tensor."""
        if (tensor is self._tensor):
            return
        for ((lknl, _), parloop) in zip(self.local_kernels, self.parloops):
            data = _FormHandler.index_tensor(tensor, self._form, lknl.indices, self.diagonal)
            parloop.arguments[0].data = data
        self._tensor = tensor

    def execute_parloops(self):
        """Run every parallel loop built for this form."""
        for parloop in self.parloops:
            parloop()

    _property
    def local_kernels(self):
        """Compiled (kernel, subdomain_id) pairs for this form."""
        try:
            # All integration domains must live on one mesh topology.
            (topology,) = set((d.topology for d in self._form.ufl_domains()))
        except ValueError:
            raise NotImplementedError('All integration domains must share a mesh topology')
        for o in itertools.chain(self._form.arguments(), self._form.coefficients()):
            domain = extract_unique_domain(o)
            if ((domain is not None) and (domain.topology != topology)):
                raise NotImplementedError('Assembly with multiple meshes is not supported')
        if isinstance(self._form, ufl.Form):
            kernels = tsfc_interface.compile_form(self._form, 'form', diagonal=self.diagonal, parameters=self._form_compiler_params)
        elif isinstance(self._form, slate.TensorBase):
            kernels = slac.compile_expression(self._form, compiler_parameters=self._form_compiler_params)
        else:
            raise AssertionError
        return tuple(((k, subdomain_id) for k in kernels for subdomain_id in k.kinfo.subdomain_id))

    _property
    def all_integer_subdomain_ids(self):
        """All explicitly-numbered subdomain ids across the local kernels."""
        return tsfc_interface.gather_integer_subdomain_ids({k for (k, _) in self.local_kernels})

    _property
    def global_kernels(self):
        """One global (wrapper) kernel per local kernel/subdomain pair."""
        return tuple((_make_global_kernel(self._form, tsfc_knl, subdomain_id, self.all_integer_subdomain_ids, diagonal=self.diagonal, unroll=self.needs_unrolling(tsfc_knl, self._bcs)) for (tsfc_knl, subdomain_id) in self.local_kernels))

    _property
    def parloops(self):
        """Built PyOP2 parloops, one per global kernel."""
        loops = []
        for ((local_kernel, subdomain_id), global_kernel) in zip(self.local_kernels, self.global_kernels):
            loops.append(ParloopBuilder(self._form, local_kernel, global_kernel, self._tensor, subdomain_id, self.all_integer_subdomain_ids, diagonal=self.diagonal, lgmaps=self.collect_lgmaps(local_kernel, self._bcs)).build())
        return tuple(loops)

    def needs_unrolling(self, local_knl, bcs):
        """Hook for subclasses: whether BCs force map unrolling (default no)."""
        return False

    def collect_lgmaps(self, local_knl, bcs):
        """Hook for subclasses: local-to-global maps for BCs (default none)."""
        return None

    def _as_pyop2_type(tensor):
        # Unwrap a Firedrake-level tensor into its underlying PyOP2 object.
        if isinstance(tensor, op2.Global):
            return tensor
        elif isinstance(tensor, firedrake.Cofunction):
            return tensor.dat
        elif isinstance(tensor, matrix.Matrix):
            return tensor.M
        else:
            raise AssertionError
class CounterData():
    """A label/value pair for display; ``value`` is always stored as a string.

    Use the factory methods to format numeric inputs consistently. Note the
    factory names deliberately mirror the input type and therefore shadow the
    ``float``/``int`` builtins inside the class namespace only.
    """
    label: str
    value: str

    def __init__(self, label: str, value: str):
        self.label = label
        self.value = value

    # Fix: these factories were missing @staticmethod, so calling them on an
    # instance passed the instance itself as ``label``. Class-level calls
    # (CounterData.float(...)) behave exactly as before.
    @staticmethod
    def float(label: str, value: float, precision: int) -> 'CounterData':
        # ``.{precision}`` is the general format: *precision* significant
        # digits (e.g. 1.23456 with precision 3 -> '1.23').
        return CounterData(label, f'{value:.{precision}}')

    @staticmethod
    def string(label: str, value: str) -> 'CounterData':
        return CounterData(label, f'{value}')

    @staticmethod
    def int(label: str, value: int) -> 'CounterData':
        return CounterData(label, f'{value}')
class MakeMenu():
    """Build a wx menu bar (or popup menu) from an indented text description.

    ``desc`` is parsed line by line: indentation defines submenus, lines
    starting with '#' are comments, '-' inserts a separator, and a
    ``label: handler`` line binds a menu item to a handler. Handlers are
    either dispatched through the owner's ``call_menu`` (if present) or
    compiled from the description text with ``exec``. A '|' in the label
    introduces an accelerator key specification.

    NOTE(review): handler bodies are executed via ``exec`` -- the menu
    description must come from a trusted source.
    """
    # Monotonically increasing wx item id shared by all MakeMenu instances.
    cur_id = 1000

    def __init__(self, desc, owner, popup=False, window=None):
        """Parse ``desc`` and attach the resulting menu to ``window``.

        ``owner`` receives generated handler callbacks and named menu-item
        attributes; ``window`` (defaults to ``owner``) gets the menu bar
        and accelerator table.
        """
        self.owner = owner
        if (window is None):
            window = owner
        self.window = window
        # If the owner exposes call_menu, handlers are dispatched through it
        # instead of being compiled from the description text.
        self.indirect = getattr(owner, 'call_menu', None)
        self.names = {}
        self.desc = desc.split('\n')
        self.index = 0
        self.keys = []
        if popup:
            self.menu = menu = wx.Menu()
            self.parse(menu, (- 1))
        else:
            self.menu = menu = wx.MenuBar()
            self.parse(menu, (- 1))
            window.SetMenuBar(menu)
        if (len(self.keys) > 0):
            window.SetAcceleratorTable(wx.AcceleratorTable(self.keys))

    def parse(self, menu, indent):
        """Recursively consume description lines more indented than ``indent``,
        appending items/submenus to ``menu``. Advances ``self.index``."""
        while True:
            if (self.index >= len(self.desc)):
                return
            dline = self.desc[self.index]
            line = dline.lstrip()
            indented = (len(dline) - len(line))
            # A line at or above the parent's indentation ends this (sub)menu.
            if (indented <= indent):
                return
            self.index += 1
            if ((line == '') or (line[0:1] == '#')):
                continue
            if (line[0:1] == '-'):
                menu.AppendSeparator()
                continue
            MakeMenu.cur_id += 1
            cur_id = MakeMenu.cur_id
            # ``help`` (shadows the builtin) holds the item's help string,
            # extracted by help_pat if present.
            help = ''
            match = help_pat.search(line)
            if match:
                help = (' ' + match.group(2).strip())
                line = (match.group(1) + match.group(3))
            col = line.find(':')
            if (col >= 0):
                # 'label: handler' -- everything after ':' is the handler spec.
                handler = line[(col + 1):].strip()
                if (handler != ''):
                    if self.indirect:
                        self.indirect(cur_id, handler)
                        handler = self.indirect
                    else:
                        try:
                            # Compile a one-line handler body with the owner
                            # bound as ``self``.
                            _locl = dict(self=self)
                            exec(('def handler(event, self=self.owner):\n %s\n' % handler), globals(), _locl)
                            handler = _locl['handler']
                        except Exception:
                            # NOTE(review): '{:r}' is an invalid format spec
                            # ('{!r}' was presumably intended); this logging
                            # call will itself raise if reached -- confirm.
                            logger.exception('Invalid menu handler {:r}'.format(handler))
                            handler = null_handler
                else:
                    try:
                        # Empty handler spec: the body is the following
                        # more-indented lines of the description.
                        _locl = dict(self=self)
                        exec(('def handler(event, self=self.owner):\n%s\n' % (self.get_body(indented),)), globals(), _locl)
                        handler = _locl['handler']
                    except Exception:
                        # NOTE(review): same invalid '{:r}' format spec as above.
                        logger.exception('Invalid menu handler {:r}'.format(handler))
                        handler = null_handler
                self.window.Bind(wx.EVT_MENU, handler, id=cur_id)
                not_checked = checked = disabled = False
                line = line[:col]
                match = options_pat.search(line)
                if match:
                    line = (match.group(1) + match.group(3))
                    (not_checked, checked, disabled, name) = option_check('~/-', match.group(2).strip())
                    if (name != ''):
                        # Expose the item on the owner under its given name.
                        self.names[name] = cur_id
                        setattr(self.owner, name, MakeMenuItem(self, cur_id))
                label = line.strip()
                col = label.find('|')
                if (col >= 0):
                    # 'label|KEY' -- register an accelerator for the item.
                    key = label[(col + 1):].strip()
                    label = ('%s%s%s' % (label[:col].strip(), '\t', key))
                    key = key.upper()
                    flag = wx.ACCEL_NORMAL
                    col = key.find('-')
                    if (col >= 0):
                        # Modifier prefix; anything unrecognized defaults to CTRL.
                        flag = {'CTRL': wx.ACCEL_CTRL, 'SHIFT': wx.ACCEL_SHIFT, 'ALT': wx.ACCEL_ALT}.get(key[:col].strip(), wx.ACCEL_CTRL)
                        key = key[(col + 1):].strip()
                    code = key_map.get(key, None)
                    try:
                        if (code is None):
                            code = ord(key)
                        self.keys.append(wx.AcceleratorEntry(flag, code, cur_id))
                    except:
                        # Unrecognized key spec: silently skip the accelerator.
                        pass
                menu.Append(cur_id, label, help, (not_checked or checked))
                if checked:
                    menu.Check(cur_id, True)
                if disabled:
                    menu.Enable(cur_id, False)
                continue
            # No ':' -- this line introduces a submenu; recurse for its body.
            submenu = wx.Menu()
            label = line.strip()
            self.parse(submenu, indented)
            try:
                menu.AppendMenu(cur_id, label, submenu, help)
            except:
                # Fallback for wx versions without AppendMenu on this object.
                menu.Append(submenu, label)

    def get_body(self, indent):
        """Collect the following lines more indented than ``indent`` as a
        handler body; returns ' pass' when the body is empty."""
        result = []
        while (self.index < len(self.desc)):
            line = self.desc[self.index]
            if ((len(line) - len(line.lstrip())) <= indent):
                break
            result.append(line)
            self.index += 1
        result = '\n'.join(result).rstrip()
        if (result != ''):
            return result
        return ' pass'

    def get_id(self, name):
        """Resolve a menu item name (or pass through a numeric id)."""
        if isinstance(name, str):
            return self.names[name]
        return name

    def checked(self, name, check=None):
        """Get (check is None) or set the checked state of an item."""
        if (check is None):
            return self.menu.IsChecked(self.get_id(name))
        self.menu.Check(self.get_id(name), check)

    def enabled(self, name, enable=None):
        """Get (enable is None) or set the enabled state of an item."""
        if (enable is None):
            return self.menu.IsEnabled(self.get_id(name))
        self.menu.Enable(self.get_id(name), enable)

    def label(self, name, label=None):
        """Get (label is None) or set the label of an item."""
        if (label is None):
            return self.menu.GetLabel(self.get_id(name))
        self.menu.SetLabel(self.get_id(name), label)
def extractFoitinstlWordpressCom(item):
    """Feed parser for 'foitinstl.wordpress.com' release items.

    Returns a release message for recognized tags, None for previews or
    titles with no chapter/volume info, and False when no tag matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def funcparser_callable_crop(*args, **kwargs):
    """Funcparser callable wrapping ``crop``.

    The first positional argument is the text; width and suffix may be
    given either positionally (2nd/3rd) or as keyword arguments, with
    keyword arguments taking precedence. Falls back to the client default
    width on a malformed width value.
    """
    if not args:
        return ''
    text = args[0]
    positional = args[1:]
    try:
        width = int(kwargs.get('width', positional[0] if positional else _CLIENT_DEFAULT_WIDTH))
    except (TypeError, ValueError):
        width = _CLIENT_DEFAULT_WIDTH
    suffix = kwargs.get('suffix', positional[1] if len(positional) > 1 else '[...]')
    return crop(str(text), width=width, suffix=str(suffix))
# Fix: the original read '.django_db' on its own line (a syntax error) --
# the '@pytest.mark' prefix of the decorator was evidently lost.
@pytest.mark.django_db
class TestUserForumPermission(object):
    """Validation tests for the UserForumPermission model."""

    def test_cannot_target_an_anonymous_user_and_a_registered_user(self):
        # A permission row may not point at a registered user while also
        # being flagged as an anonymous-user permission.
        user = UserFactory.create()
        with pytest.raises(ValidationError):
            perm = ForumPermissionFactory.create()
            user_perm = UserForumPermissionFactory.build(permission=perm, user=user, anonymous_user=True)
            user_perm.clean()
def test_normalize_items_from_objects():
    """normalize_lief_items should stringify arbitrary objects via str()."""
    class NamedThing():
        def __init__(self, name):
            self.name = name

        def __str__(self):
            return self.name

    expected = ['printf', '__libc_start_main']
    items = [NamedThing(label) for label in expected]
    assert normalize_lief_items(items) == expected
def gen_profiler(func_attrs, workdir, profiler_filename, dim_info_dict, src_template, problem_args_template, args_parser_template, support_split_k=False, output_addr_calculator='', bias_ptr_arg=None, extra_code='', problem_args_template_cutlass_3x=None):
    """Render and build a standalone profiler executable for a GEMM op.

    One kernel instance plus a benchmark entry is emitted per candidate op in
    ``func_attrs['op_instance']``; the rendered source is compiled via
    ``add_profiler``/``build_profiler``.

    ``bias_ptr_arg`` being non-None switches on the bias code paths;
    ``problem_args_template_cutlass_3x`` (if given) supplies the CUTLASS 3.x
    argument layout.
    """
    # Imported lazily so the module can be loaded without cutlass_lib present.
    import cutlass_lib
    op_type = func_attrs['op']
    op_instance = func_attrs['op_instance']
    # Split candidates into CUTLASS 2.x/3.x-compatible sets and detect TMA epilogues.
    (op_instance, op_has_tma_epilogue) = filter_cutlass_3x_ops(op_instance, func_attrs)
    backend_spec = CUDASpec()
    elem_input_type = backend_spec.dtype_to_lib_type(func_attrs['inputs'][0]._attrs['dtype'])
    elem_output_type = backend_spec.dtype_to_lib_type(func_attrs['outputs'][0]._attrs['dtype'])
    elem_type = backend_spec.dtype_to_backend_type(func_attrs['inputs'][0]._attrs['dtype'])
    # Plain GEMM: all operands are rank-2.
    ndims = 2
    adims = [('&a_dim' + str(i)) for i in range(ndims)]
    bdims = [('&b_dim' + str(i)) for i in range(ndims)]
    cdims = [('&c_dim' + str(i)) for i in range(ndims)]
    shape_func = gemm_common.gen_shape_eval_code(indent=2, dtype='int64_t', dim_info_dict=dim_info_dict, is_ptr=True)
    has_bias = (bias_ptr_arg is not None)
    instance_name_base = 'GemmInstance'
    exec_program = EXEC_TEMPLATE.render(indent='  ', instance=instance_name_base, is_profiler=True, support_split_k=support_split_k, problem_args=problem_args_template.render(elem_input_type=elem_input_type, elem_output_type=elem_output_type), problem_args_cutlass_3x=(problem_args_template_cutlass_3x.render(elem_input_type=elem_input_type, elem_output_type=elem_output_type, has_tma_epilogue=op_has_tma_epilogue) if (problem_args_template_cutlass_3x is not None) else ''))
    input_output_checks = INPUT_OUTPUT_CHECKS_TEMPLATE.render(input_ndims=ndims, weight_ndims=ndims, output_ndims=ndims)
    function_name = 'gemm'
    instances = []
    benchmark_instances = []
    # Emit one instance definition + one benchmark call per candidate op config.
    for (instance_idx, (op_name, op)) in enumerate(op_instance.items()):
        config = emit_instance(op, for_profiler=True)
        instance_name = f'{instance_name_base}_{instance_idx}'
        gemm_op = f'gemm_op_{instance_idx}'
        cutlass_3x = (op.gemm_kind == cutlass_lib.library.GemmKind.Universal3x)
        instance_template = (INSTANCE_TEMPLATE_CUTLASS_3X if cutlass_3x else INSTANCE_TEMPLATE)
        instance = instance_template.render(config_name=extract_config_name(config, cutlass_3x=cutlass_3x), name=instance_name, config=config)
        benchmark_instance = BENCHMARK_INSTANCE_TEMPLATE.render(indent='  ', instance_name=instance_name, gemm_op=gemm_op, gemm_op_name=op_name, func_name=f'benchmark_{function_name}', support_split_k=support_split_k, split_k='split_k', adims=adims, bdims=bdims, cdims=cdims)
        instances.append(instance)
        benchmark_instances.append(benchmark_instance)
    # args_parser_template may be a pre-rendered string or a template object.
    args_parse = (args_parser_template if isinstance(args_parser_template, str) else args_parser_template.render())
    op_func = src_template.render(is_profiler=True, instances='\n'.join(instances), function_name=function_name, input_ndims=ndims, weight_ndims=ndims, output_ndims=ndims, shape_eval=shape_func, input_output_checks=input_output_checks, exec_paths=exec_program, output_addr_calculator=output_addr_calculator, support_split_k=support_split_k, extra_code=extra_code)
    benchmark_adims = [('a_dim' + str(i)) for i in range(ndims)]
    benchmark_bdims = [('b_dim' + str(i)) for i in range(ndims)]
    benchmark_cdims = [('c_dim' + str(i)) for i in range(ndims)]
    func_call = FUNC_CALL_TEMPLATE.render(is_profiler=True, func_name=function_name, a_ptr='memory_pool->RequestTensorByIdx(0)', b_ptr='memory_pool->RequestTensorByIdx(1)', has_bias=has_bias, bias_ptr=bias_ptr_arg, c_ptr='memory_pool->RequestTensorByIdx(2)', split_k='split_k', adims=benchmark_adims, bdims=benchmark_bdims, cdims=benchmark_cdims)
    tensor_decl = TENSOR_DECL_TEMPLATE.render(elem_input_type=elem_input_type, elem_output_type=elem_output_type, has_bias=has_bias)
    code = PROFILER_TEMPLATE.render(op_func=op_func, has_bias=has_bias, has_d=has_d(func_attrs), support_split_k=support_split_k, args_parse=args_parse, function_name=function_name, input_ndims=ndims, weight_ndims=ndims, output_ndims=ndims, func_call=func_call, name=instance_name_base, tensor_decl=tensor_decl, benchmark_instances='\n'.join(benchmark_instances), elem_type=elem_type)
    file_pairs = []
    add_profiler(file_pairs, workdir, op_type, profiler_filename, code)
    return build_profiler(file_pairs)
def main():
    """Ansible module entry point for endpoint_control_profile configuration.

    Builds the argument spec from the versioned schema, opens the httpapi
    connection, applies the requested state and exits with changed/diff
    information (or fails with version-mismatch details).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # The mkey (primary key) of this endpoint; its option is forced required.
    mkeyname = 'profile-name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'endpoint_control_profile': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy every schema option into the module's nested options dict.
    for attribute_name in module_spec['options']:
        fields['endpoint_control_profile']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['endpoint_control_profile']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Logging disabled by default when the option is absent.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'endpoint_control_profile')
        (is_error, has_changed, result, diff) = fortios_endpoint_control(module.params, fos, module.check_mode)
    else:
        # No persistent connection available (httpapi plugin not in use).
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        # NOTE(review): 'FortOS' in the message below looks like a typo for
        # 'FortiOS' carried over from the generator -- left untouched here.
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
def microsoft_ocr_tables_standardize_response(original_response: dict) -> OcrTablesAsyncDataClass:
    """Convert a raw Microsoft Document Intelligence response into the
    provider-agnostic async-OCR tables dataclass.

    One ``Page`` is created per response page; each standardized table is
    appended to the page named by its first bounding region.
    """
    page_count = len(original_response['pages'])
    std_pages: List[Page] = [Page() for _ in range(page_count)]
    for raw_table in original_response.get('tables', []):
        # pageNumber is 1-based in the Microsoft response.
        page_idx: int = raw_table['boundingRegions'][0]['pageNumber'] - 1
        std_pages[page_idx].tables.append(_ocr_tables_standardize_table(raw_table, original_response))
    return OcrTablesAsyncDataClass(pages=std_pages, num_pages=page_count)
def find_(root: str, rbase: str, include_files: List[str], include_dirs: List[str], excludes: List[str], scan_exclude: List[str]) -> List[str]:
    """Recursively collect matching entries under ``root``/``rbase``.

    Returned paths are relative to ``root``. A directory matching
    ``include_dirs`` (and not ``excludes``) is recorded as a whole and not
    descended into; other directories are recursed. ``scan_exclude``
    prunes entries outright.
    """
    found: List[str] = []
    with os.scandir(os.path.join(root, rbase)) as entries:
        for entry in entries:
            rel_path = os.path.join(rbase, entry.name)
            if matches(scan_exclude, rel_path):
                continue
            if not entry.is_dir():
                if matches(include_files, rel_path) and not matches(excludes, rel_path):
                    found.append(rel_path)
                continue
            if matches(include_dirs, rel_path):
                # Included directories are recorded as a unit, not recursed.
                if not matches(excludes, rel_path):
                    found.append(rel_path)
            else:
                found.extend(find_(root=root, rbase=rel_path, include_files=include_files, include_dirs=include_dirs, excludes=excludes, scan_exclude=scan_exclude))
    return found
def main():
    """Delegate test tokens to a low-commission validator and estimate the
    optimal reward-compounding period given the transaction fee.

    Runs against the Fetch.ai stable testnet and relies on the module-level
    reward function ``M(x, f, S, k, D)``.
    """
    ledger = LedgerClient(NetworkConfig.fetchai_stable_testnet())
    faucet_api = FaucetApi(NetworkConfig.fetchai_stable_testnet())
    # NOTE(review): the right-hand side of this assignment was missing in the
    # original source (the line read ``initial_stake =``). The value below is
    # a placeholder denominated in atestfet (18 decimal places) -- confirm
    # against the original script.
    initial_stake = 50 * (10 ** 18)
    total_period = 60000  # total delegation horizon, in minutes
    req = QueryValidatorsRequest()
    resp = ledger.staking.Validators(req)
    total_stake = 0
    # Only bonded validators (status == 3) are considered.
    validators_stake = [int(validator.tokens) for validator in resp.validators if (validator.status == 3)]
    total_stake = sum(validators_stake)
    validators_comission = [int(validator.commission.commission_rates.rate) for validator in resp.validators if (validator.status == 3)]
    validators = ledger.query_validators()
    validator = 'not_selected'
    stake_threshold = 0.1
    # Pick the lowest-commission validator holding at least 10% of the stake;
    # disqualify candidates one by one by setting their commission to inf.
    for _i in range(len(validators_comission)):
        validator_index = validators_comission.index(min(validators_comission))
        validator_stake_pct = (validators_stake[validator_index] / total_stake)
        if (validator_stake_pct >= stake_threshold):
            validator = validators[validator_index]
            break
        validators_comission[validator_index] = float('inf')
    if (validator == 'not_selected'):
        # Fall back to the overall lowest-commission validator.
        validators_comission = [int(validator.commission.commission_rates.rate) for validator in resp.validators if (validator.status == 3)]
        print('No validator meets the minimum stake threshold requirement')
        validator_index = validators_comission.index(min(validators_comission))
        validator = validators[validator_index]
    # Commission rates are fixed-point values with 18 decimals.
    # NOTE(review): this reads validator [0]'s commission rather than the
    # selected validator's -- looks suspicious but kept as in the original.
    commission = (float(resp.validators[0].commission.commission_rates.rate) / 1e+18)
    pct_delegated = (initial_stake / total_stake)
    alice = LocalWallet.generate()
    alice_address = str(alice.address())
    alice_balance = ledger.query_bank_balance(alice.address())
    # Top up from the faucet until the wallet can cover the stake.
    while (alice_balance < initial_stake):
        print('Providing wealth to alice...')
        faucet_api.get_wealth(alice.address())
        alice_balance = ledger.query_bank_balance(alice.address())
    # Build a delegate + claim-rewards transaction (sealed with a dummy fee)
    # purely to estimate the real gas/fee cost.
    tx = Transaction()
    tx.add_message(create_delegate_msg(alice_address, validator.address, initial_stake, 'atestfet'))
    tx.add_message(create_withdraw_delegator_reward(alice_address, validator.address))
    account = ledger.query_account(alice.address())
    tx.seal(SigningCfg.direct(alice.public_key(), account.sequence), fee='', gas_limit=0)
    tx.sign(alice.signer(), ledger.network_config.chain_id, account.number)
    tx.complete()
    (_, str_tx_fee) = ledger.estimate_gas_and_fee_for_tx(tx)
    denom = 'atestfet'
    # Strip the denom suffix from the fee string, keep a 20% safety margin.
    tx_fee = str_tx_fee[:(- len(denom))]
    fee = (int(tx_fee) * 1.2)
    # Query supply and chain parameters needed for the reward model.
    req = QueryTotalSupplyRequest()
    resp = ledger.bank.TotalSupply(req)
    total_supply = float(json.loads(resp.supply[0].amount))
    req = QueryParamsRequest(subspace='mint', key='InflationRate')
    resp = ledger.params.Params(req)
    inflation = float(json.loads(resp.param.value))
    req = QueryParamsRequest(subspace='distribution', key='communitytax')
    resp = ledger.params.Params(req)
    community_tax = float(json.loads(resp.param.value))
    # Expected yearly reward after community tax and validator commission.
    anual_reward = ((((inflation * total_supply) * pct_delegated) * (1 - community_tax)) * (1 - commission))
    minute_reward = (((anual_reward / 360) / 24) / 60)
    rate = (minute_reward / initial_stake)
    f = fee
    S = initial_stake
    k = rate
    D = total_period
    # Evaluate the compounding-reward function over every candidate period.
    X = list(range(1, D))
    R = [M(x, f, S, k, D) for x in X]
    optimal_period = (R.index(max(R)) + 1)
    print('total period: ', total_period, 'minutes')
    print('optimal compounding period: ', optimal_period, 'minutes')
class OptionSeriesBarSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Accessors for the sonification tremolo-depth mapping options.

    Each option is exposed as a property backed by the underlying config
    dictionary via ``_config_get``/``_config``.

    NOTE(review): in the original, every getter/setter pair was written as
    two plain ``def``s with the same name, so each setter silently shadowed
    its getter. The ``@property``/``@x.setter`` decorators restore the
    intended accessor pattern used by this generated-options style.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def log_fortianalyzer2_override_filter(data, fos):
    """Apply the 'log.fortianalyzer2 override-filter' configuration.

    Filters the module parameters down to the endpoint's schema, converts
    underscored keys to hyphenated ones, and issues the set request in the
    given vdom.
    """
    payload = underscore_to_hyphen(filter_log_fortianalyzer2_override_filter_data(data['log_fortianalyzer2_override_filter']))
    return fos.set('log.fortianalyzer2', 'override-filter', data=payload, vdom=data['vdom'])
# NOTE(review): the parenthesised string below looks like a decorator whose
# prefix was lost in extraction -- AITemplate registers backend functions as
# ``@registry.reg('rocm.gemm_rcr.gen_function')``. As written it is a no-op
# expression statement and the function is never registered -- confirm
# against the original source.
('rocm.gemm_rcr.gen_function')
def gemm_gen_function(func_attrs, exec_cond_template, dim_info_dict):
    """Generate the gemm_rcr source via the common ROCm gemm codegen, wiring
    the input/output address calculators from the tensor accessors."""
    return common.gen_function(func_attrs, exec_cond_template, dim_info_dict, '', input_addr_calculator=common.INPUT_ADDR_CALCULATOR.render(accessor_a=func_attrs['input_accessors'][0], accessor_b=func_attrs['input_accessors'][1]), output_addr_calculator=common.OUTPUT_ADDR_CALCULATOR.render(output_accessor=func_attrs['output_accessors'][0]))
# NOTE(review): the three lines below are dataset-viewer page residue, not
# part of the source; kept as comments so the module remains parseable.
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.