class Counter(BPHandler):
def __init__(self):
self.increment = {}
self.counts = {}
def register_handler(self, qemu, addr, func_name, increment=1):
self.increment[addr] = increment
self.counts[addr] = 0
        return Counter.get_value
    # NOTE: decorator reconstructed -- only '_handler' survived extraction;
    # '@bp_handler' is an assumption based on the BPHandler pattern above.
    @bp_handler
def get_value(self, qemu, addr):
self.counts[addr] += self.increment[addr]
        return (True, self.counts[addr])

def load_physics(pModule, path='.'):
reset_default_p()
sys.path.append(path)
p = imp.load_source(pModule, os.path.join(path, (pModule + '.py')))
sys.path.remove(path)
physics_object = Physics_base()
for (k, v) in p.__dict__.items():
if (k not in physics_excluded_keys):
physics_object.__dict__[k] = v
    return physics_object

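# Usage sketch for load_physics (hedged): the module name and path below are
# hypothetical, and reset_default_p / physics_excluded_keys / Physics_base are
# assumed to be defined alongside the function above.
#
#     physics = load_physics('my_physics', path='./configs')
#     print(physics.__dict__)  # module-level names copied onto the object
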
def blake2f(evm: Evm) -> None:
data = evm.message.data
ensure((len(data) == 213), InvalidParameter)
blake2b = Blake2b()
(rounds, h, m, t_0, t_1, f) = blake2b.get_blake2_parameters(data)
charge_gas(evm, (GAS_BLAKE2_PER_ROUND * rounds))
ensure((f in [0, 1]), InvalidParameter)
    evm.output = blake2b.compress(rounds, h, m, t_0, t_1, f)

class _CSTRUCT(ctypes.Structure):
def toObj(self):
obj = {}
for (k, t) in self._fields_:
if k[0].isupper():
v = getattr(self, k)
if isinstance(v, bytes):
v = v.decode()
obj[k] = v
        return dic2obj(obj)

@_register_parser
@_set_msg_type(ofproto.OFPT_METER_MOD)
class OFPMeterMod(MsgBase):
def __init__(self, datapath, command=ofproto.OFPMC_ADD, flags=ofproto.OFPMF_KBPS, meter_id=1, bands=None):
bands = (bands if bands else [])
super(OFPMeterMod, self).__init__(datapath)
self.command = command
self.flags = flags
self.meter_id = meter_id
self.bands = bands
    @classmethod
def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
msg = super(OFPMeterMod, cls).parser(datapath, version, msg_type, msg_len, xid, buf)
(msg.command, msg.flags, msg.meter_id) = struct.unpack_from(ofproto.OFP_METER_MOD_PACK_STR, buf, ofproto.OFP_HEADER_SIZE)
offset = ofproto.OFP_METER_MOD_SIZE
msg.bands = []
while (offset < msg.msg_len):
band = OFPMeterBandHeader.parser(buf, offset)
msg.bands.append(band)
offset += band.len
return msg
def _serialize_body(self):
msg_pack_into(ofproto.OFP_METER_MOD_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.command, self.flags, self.meter_id)
offset = ofproto.OFP_METER_MOD_SIZE
for b in self.bands:
b.serialize(self.buf, offset)
            offset += b.len

def download_youtube_video(url):
def get_video_id(url):
url_data = urlparse(url)
query = parse_qs(url_data.query)
video = query.get('v', [])
if video:
return video[0]
else:
            raise Exception('unrecognized url format.')
id = get_video_id(url)
name = f'{id}.wav'
if (not Path(name).exists()):
ydl_opts = {'format': 'bestaudio/best', 'postprocessors': [{'key': 'FFmpegExtractAudio', 'preferredcodec': 'wav', 'preferredquality': '192'}], 'outtmpl': id}
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
ydl.download([url])
sound = AudioSegment.from_wav(name)
sound = sound.set_channels(1).set_frame_rate(16000)
sound.export(name, format='wav')
    return name

@pytest.mark.parametrize('reorder', [False, True])
def test_changing_default_reorder_works(reorder):
old_reorder = parameters['reorder_meshes']
try:
parameters['reorder_meshes'] = reorder
m = UnitSquareMesh(1, 1)
m.init()
assert (m._did_reordering == reorder)
finally:
        parameters['reorder_meshes'] = old_reorder

def scale(coordinates, scalar=1):
try:
try:
arr = np.array(coordinates, dtype=float)
except TypeError:
arr = np.array(list(coordinates), dtype=float)
return (arr * scalar)
except NameError:
if isinstance(coordinates, tuple):
return [(coordinates[0] * scalar), (coordinates[1] * scalar)]
        return [((c[0] * scalar), (c[1] * scalar)) for c in coordinates]

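# Illustration of both branches of scale() above: when numpy is importable the
# first path vectorises the input; the NameError fallback handles plain 2-D
# tuples and lists of points. Inputs below are made up for demonstration.
#
#     scale((1.0, 2.0), scalar=3)        # array([3., 6.]) with numpy present
#     scale([(1, 2), (3, 4)], scalar=2)  # array([[2., 4.], [6., 8.]])
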
class Plugin(DigitalBitboxPlugin, QtPluginBase):
icon_paired = 'icons8-usb-connected-80.png'
icon_unpaired = 'icons8-usb-disconnected-80.png'
def create_handler(self, window: HandlerWindow) -> QtHandlerBase:
return DigitalBitbox_Handler(window)
def show_key(self, account: AbstractAccount, keyinstance_id: int) -> None:
if (not self.is_mobile_paired()):
return
keystore = cast(Hardware_KeyStore, account.get_keystore())
derivation_path = account.get_derivation_path(keyinstance_id)
assert (derivation_path is not None)
subpath = '/'.join((str(x) for x in derivation_path))
keypath = f'{keystore.derivation}/{subpath}'
xpub = self.get_client(keystore)._get_xpub(keypath)
verify_request_payload = {'type': 'p2pkh', 'echo': xpub['echo']}
        self.comserver_post_notification(verify_request_payload)

def plot_perf(data, filename, xkey, xlabel, xscale, ykey, ylabel, flops=None):
    # 'flops' (peak FLOP/s for the secondary axis) is referenced below but was
    # missing from the original signature; restored here as an assumption.
(fig, ax1) = plt.subplots()
color_table = {'exo': 'tab:orange', 'MKL': 'tab:blue', 'OpenBLAS': 'tab:green'}
for (series, points) in data.items():
ax1.plot(points[xkey], points[ykey], label=series, color=color_table.get(series, None), zorder=(100 if (series == 'exo') else 1))
ax1.set(xlabel=xlabel, ylabel=ylabel)
ax1.set_xscale(xscale)
ax1.set_ybound(lower=0, upper=flops)
ax1.grid()
ax1.legend()
    # Assumed GFLOP/s scaling: the divisor in the original ('.0') was garbled.
    ax1.yaxis.set_major_formatter((lambda x, _: f'{(x / 1e9):.2f}'))
if flops:
ax2 = ax1.twinx()
ax2.set_ylabel('$\\%$ of peak')
ax2.yaxis.set_major_formatter((lambda x, _: f'${x:.0%}$'.replace('%', '\\%')))
ax1.set_yticks(np.linspace(ax1.get_ybound()[0], ax1.get_ybound()[1], 7))
ax2.set_yticks(np.linspace(ax2.get_ybound()[0], ax2.get_ybound()[1], 7))
fig.tight_layout(pad=0)
plt.savefig(f'{filename}.png')
    plt.savefig(f'{filename}.pgf')

@plugin.route('/searchinit/<stypes>/<sstr>/<modify>/<otherargs>')
def searchinit(stypes, sstr, modify, otherargs):
if (not ('strlist' in comm.searchvalues.raw_dict())):
comm.searchvalues['strlist'] = []
sstr = comm.ensure_text(sstr)
sstr = sstr.strip()
liststypes = stypes.split(',')
if (str(plugin.get_setting('javbus')) != 'true'):
if ('jav' in liststypes):
liststypes.remove('jav')
dictotherargs = json.loads(otherargs)
if (not isinstance(dictotherargs, dict)):
dictotherargs = {}
if (sstr and (sstr != '0') and (modify == '0')):
comm.searchvalues['strlist'] = [e for e in comm.searchvalues['strlist'] if (comm.ensure_binary(e) != comm.ensure_binary(sstr))]
comm.searchvalues['strlist'].append(sstr)
comm.searchvalues.sync()
return stypesearch(liststypes, sstr, dictotherargs)
else:
if (modify == '1'):
if (sstr == '0'):
sstr = ''
newsstr = keyboard(text=sstr).strip()
if (not newsstr):
comm.searchvalues.sync()
return
comm.searchvalues['strlist'] = [e for e in comm.searchvalues['strlist'] if (e != sstr)]
comm.searchvalues['strlist'] = [e for e in comm.searchvalues['strlist'] if (e != newsstr)]
comm.searchvalues['strlist'].append(newsstr)
comm.searchvalues.sync()
return stypesearch(liststypes, newsstr, dictotherargs)
if (modify == '4'):
newsstr = selectstr(sstr)
if (not newsstr):
comm.searchvalues.sync()
return
newsstr = keyboard(text=newsstr).strip()
if (not newsstr):
comm.searchvalues.sync()
return
comm.searchvalues['strlist'] = [e for e in comm.searchvalues['strlist'] if (e != sstr)]
comm.searchvalues['strlist'] = [e for e in comm.searchvalues['strlist'] if (e != newsstr)]
comm.searchvalues['strlist'].append(newsstr)
comm.searchvalues.sync()
return stypesearch(liststypes, newsstr, dictotherargs)
if (modify == '2'):
comm.searchvalues['strlist'] = [e for e in comm.searchvalues['strlist'] if (e != sstr)]
xbmc.executebuiltin('Container.Refresh()')
if (modify == '3'):
dialog = xbmcgui.Dialog()
ret = dialog.yesno('', '')
if ret:
comm.searchvalues['strlist'] = []
items = []
items.append({'label': colorize_label('', color='00FFFF'), 'path': plugin.url_for('pantagsearch', otherargs=otherargs)})
items.append({'label': colorize_label('', color='00FF00'), 'path': plugin.url_for('searchinit', stypes=stypes, sstr='0', modify='1', otherargs=otherargs)})
for strvalue in comm.searchvalues['strlist'][::(- 1)]:
context_menu_items = []
listitem = ListItem(label=strvalue, label2=None, icon=None, thumbnail=None, path=plugin.url_for('searchinit', stypes=stypes, sstr=comm.ensure_binary(strvalue), modify='0', otherargs=otherargs))
context_menu_items.append((('' + colorize_label(comm.ensure_text(strvalue), color='0000FF')), (('RunPlugin(' + plugin.url_for('searchinit', stypes=stypes, sstr=comm.ensure_binary(strvalue), modify='1', otherargs=otherargs)) + ')')))
context_menu_items.append((('' + colorize_label(comm.ensure_text(strvalue), color='FF0000')), (('RunPlugin(' + plugin.url_for('searchinit', stypes=stypes, sstr=comm.ensure_binary(strvalue), modify='2', otherargs=otherargs)) + ')')))
if (len(context_menu_items) > 0):
listitem.add_context_menu_items(context_menu_items)
items.append(listitem)
if (len(comm.searchvalues['strlist']) > 0):
items.append({'label': colorize_label('', color='FF0000'), 'path': plugin.url_for('searchinit', stypes=stypes, sstr='0', modify='3', otherargs=otherargs)})
comm.searchvalues.sync()
comm.setViewCode = 'list'
        return items

def test_observation_monitoring():
env = build_dummy_maze_env()
env = MazeEnvMonitoringWrapper.wrap(env, observation_logging=True, action_logging=False, reward_logging=False)
env = LogStatsWrapper.wrap(env)
env.reset()
for ii in range(3):
obs = env.step(env.action_space.sample())[0]
observation_events = env.get_last_step_events(query=[ObservationEvents.observation_original, ObservationEvents.observation_processed])
assert (len(observation_events) == 4)
for event in observation_events:
assert issubclass(event.interface_class, ObservationEvents)
obs_name = event.attributes['name']
assert (obs_name in ['observation_0', 'observation_1'])
if (ii > 0):
                assert np.allclose(np.asarray(obs[obs_name]), np.asarray(event.attributes['value']))

def _getOutput(ps, patterns, process_key=''):
if (not isinstance(patterns, list)):
patterns = [patterns]
poller = select.poll()
poller.register(ps.stdout)
lines = []
match = False
while (not getRunKilled(process_key)):
if poller.poll(15.0):
line = ps.stdout.readline()
else:
continue
if (not line):
break
nline = line.rstrip()
try:
decoded_line = nline.decode('utf-8', errors='replace')
nline = decoded_line
except Exception:
pass
lines.append(nline)
for pattern in patterns:
if pattern.match(nline):
match = True
break
if match:
break
    return (lines, match)

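# Minimal driver for _getOutput (a sketch, not part of the original API): it
# assumes getRunKilled() is defined as above and watches a real subprocess.
# The command and pattern here are placeholders.
#
#     import re, subprocess
#     ps = subprocess.Popen(['ping', '-c', '4', 'localhost'], stdout=subprocess.PIPE)
#     lines, matched = _getOutput(ps, [re.compile('.*packet loss.*')])
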
def assert_eqv_proc(proc1, proc2, config_set=frozenset()):
assert isinstance(config_set, frozenset)
for key in config_set:
if (key not in _UF_Unv_key):
new_uf_by_eqv_key(key)
if (not config_set):
_UF_Strict.union(proc1, proc2)
_UF_Unv.union(proc1, proc2)
for (key, uf) in _UF_Unv_key.items():
if (key not in config_set):
            uf.union(proc1, proc2)

def _get_system_from_request_body(system_data: SystemSchema, db: Session=Depends(get_db)) -> SystemAuthContainer:
resp = SystemAuthContainer(original_data=system_data, system=None)
resource_dict = system_data.dict()
if resource_dict.get('fides_key'):
system = db.query(System).filter((System.fides_key == resource_dict.get('fides_key'))).first()
resp.system = system
    return resp

def extractProphetswordStarmvsCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('Lonely Loser', 'Lonely Loser, Ill Become Blonde Frivolous Gyarus Favourite', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False

class DummyValueNet(DummyBaseNet):
def __init__(self, obs_shapes: Dict[(str, Sequence[int])], non_lin: type(nn.Module)):
super().__init__(obs_shapes, non_lin)
self.perception_dict['value_head_net'] = DenseBlock(in_keys='hidden_out', in_shapes=self.perception_dict['hidden_out'].out_shapes(), out_keys='value_head_net', hidden_units=[5, 2], non_lin=non_lin)
self.perception_dict['value'] = LinearOutputBlock(in_keys='value_head_net', in_shapes=self.perception_dict['value_head_net'].out_shapes(), out_keys='value', output_units=1)
self.perception_net = InferenceBlock(in_keys=list(self.obs_shapes.keys()), out_keys='value', in_shapes=[self.obs_shapes[key] for key in self.obs_shapes.keys()], perception_blocks=self.perception_dict)
self.perception_net.apply(make_module_init_normc(1.0))
self.perception_dict['value'].apply(make_module_init_normc(0.01))
def forward(self, xx: Dict[(str, torch.Tensor)]) -> Dict[(str, torch.Tensor)]:
        return self.perception_net(xx)

class TestContextManagers():
def test_with_simple(self):
def contexttest():
c = dict(__enter__=(lambda : print('enter')), __exit__=(lambda : print('exit')))
with c:
print(42)
print('.')
return undefined
assert (evaljs((py2js(contexttest, 'f') + 'f()')) == 'enter\n42\nexit\n.')
def test_with_as1(self):
def contexttest():
c = dict(__enter__=(lambda : 7), __exit__=(lambda : print('exit')))
with c as item:
print(42)
print(item)
print(43)
print('.')
return undefined
assert (evaljs((py2js(contexttest, 'f') + 'f()')) == '42\n7\n43\nexit\n.')
def test_with_as2(self):
def contexttest():
c = dict(__enter__=(lambda : 7), __exit__=(lambda : print('exit')))
with c as c.item:
print(42)
print(c.item)
print(43)
print(c.item)
print('.')
return undefined
assert (evaljs((py2js(contexttest, 'f') + 'f()')) == '42\n7\n43\nexit\n7\n.')
def test_with_calculated_context(self):
def contexttest():
def get_ctx():
print('making')
return dict(__enter__=(lambda : 7), __exit__=(lambda : print('exit')))
with get_ctx() as item:
print(item)
print(42)
return undefined
assert (evaljs((py2js(contexttest, 'f') + 'f()')) == 'making\n7\n42\nexit')
def test_with_exception(self):
def contexttest(x):
c = dict(__enter__=(lambda : print('enter')), __exit__=(lambda et, ev, tb: print(et)))
try:
with c:
print(42)
if (x != 1):
raise AttributeError('fooerror')
print(43)
except Exception as e:
print(e.message)
print('.')
return undefined
assert (evaljs((py2js(contexttest, 'f') + 'f(1)')) == 'enter\n42\n43\nnull\n.')
s = 'enter\n42\nAttributeError\nAttributeError: fooerror\n.'
assert (evaljs((py2js(contexttest, 'f') + 'f(0)')) == s)
def test_with_return(self):
def contexttest():
c = dict(__enter__=(lambda : print('enter')), __exit__=(lambda et, ev, tb: print('exit')))
with c:
print(42)
return undefined
print(43)
print('.')
return undefined
        assert (evaljs((py2js(contexttest, 'f') + 'f(1)')) == 'enter\n42\nexit')

class OrgFreedesktopPortalRequestInterface(QtDBus.QDBusAbstractInterface):
Response = QtCore.Signal(QtDBus.QDBusMessage)
def __init__(self, path: str, connection: QtDBus.QDBusConnection, parent: QtCore.QObject) -> None:
        super().__init__('org.freedesktop.portal.Desktop', path, 'org.freedesktop.portal.Request', connection, parent)

def test_semver_parsing() -> None:
assert (SemVer.parse('1.7.2') == SemVer(major=1, minor=7, patch=2))
assert (SemVer.parse('1123.3213.8943') == SemVer(major=1123, minor=3213, patch=8943, prerelease=None))
assert (SemVer.parse('12.34.56-alpha01') == SemVer(major=12, minor=34, patch=56, prerelease=SemVer.Prerelease(('alpha01',))))
    assert (SemVer.parse('12.34.56-a.2.b+build') == SemVer(major=12, minor=34, patch=56, prerelease=SemVer.Prerelease(('a', 2, 'b'))))

class Block(object):
    @classmethod
    def Field(cls, **kw):
return gh.Boolean(id=gh.Int(required=True, description='ID'), resolver=cls.mutate, **kw)
def mutate(root, info, id):
ctx = info.context
require_login(ctx)
me = ctx.user.player
        # models.Player.objects.get() raises DoesNotExist instead of returning
        # None, which made the check below dead code; .first() fixes that.
        tgt = models.Player.objects.filter(id=id).first()
if (not tgt):
raise GraphQLError('')
me.blocks.add(tgt)
        return True

class OptionPlotoptionsTimelineSonificationTracksMappingRate(Options):
    @property
    def mapFunction(self):
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def within(self):
        return self._config_get(None)
    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)

class Component(with_metaclass(ComponentMeta, object)):
_IS_COMPONENT = True
_COUNT = 0
id = Attribute(doc='The string by which this component is identified.')
def __init__(self, *init_args, **property_values):
Component._COUNT += 1
self._id = (self.__class__.__name__ + str(Component._COUNT))
self._disposed = False
self.__handlers = {}
self.__pending_events = []
self.__anonymous_reactions = []
self.__initial_mutation = False
for name in self.__emitters__:
self.__handlers.setdefault(name, [])
for name in self.__properties__:
self.__handlers.setdefault(name, [])
with self:
self._comp_init_property_values(property_values)
self.init(*init_args)
self._comp_init_reactions()
def __repr__(self):
return ("<Component '%s' at 0x%x>" % (self._id, id(self)))
def _comp_init_property_values(self, property_values):
values = []
for name in self.__properties__:
prop = getattr(self.__class__, name)
setattr(self, (('_' + name) + '_value'), prop._default)
if (name not in property_values):
values.append((name, prop._default))
for (name, value) in list(property_values.items()):
if (name not in self.__properties__):
if (name in self.__attributes__):
raise AttributeError(('%s.%s is an attribute, not a property' % (self._id, name)))
elif (self._has_proxy is True):
raise AttributeError(('%s does not have property %s.' % (self._id, name)))
if callable(value):
self._comp_make_implicit_setter(name, value)
property_values.pop(name)
continue
if (name in self.__properties__):
values.append((name, value))
property_values.pop(name)
self._comp_apply_property_values(values)
def _comp_apply_property_values(self, values):
self.__initial_mutation = True
for (name, value) in values:
self._mutate(name, value)
for (name, value) in values:
setter_name = (('_set' if name.startswith('_') else 'set_') + name)
setter = getattr(self, setter_name, None)
if (setter is not None):
if (getattr(setter, 'is_autogenerated', None) is False):
setter(value)
self.__initial_mutation = False
def _comp_make_implicit_setter(self, prop_name, func):
setter_func = getattr(self, ('set_' + prop_name), None)
if (setter_func is None):
t = '%s does not have a set_%s() action for property %s.'
raise TypeError((t % (self._id, prop_name, prop_name)))
setter_reaction = (lambda : setter_func(func()))
reaction = Reaction(self, setter_reaction, 'auto', [])
self.__anonymous_reactions.append(reaction)
def _comp_init_reactions(self):
if (self.__pending_events is not None):
self.__pending_events.append(None)
loop.call_soon(self._comp_stop_capturing_events)
for name in self.__reactions__:
reaction = getattr(self, name)
if (reaction.get_mode() == 'auto'):
ev = Dict(source=self, type='', label='')
loop.add_reaction_event(reaction, ev)
for reaction in self.__anonymous_reactions:
if (reaction.get_mode() == 'auto'):
ev = Dict(source=self, type='', label='')
loop.add_reaction_event(reaction, ev)
def _comp_stop_capturing_events(self):
events = self.__pending_events
self.__pending_events = None
allow_reconnect = False
for ev in events:
if (ev is None):
allow_reconnect = True
continue
ev.allow_reconnect = allow_reconnect
self.emit(ev.type, ev)
def __enter__(self):
loop._activate_component(self)
loop.call_soon(self.__check_not_active)
return self
def __exit__(self, type, value, traceback):
loop._deactivate_component(self)
def __check_not_active(self):
active_components = loop.get_active_components()
if (self in active_components):
raise RuntimeError('It seems that the event loop is processing events while a Component is active. This has a high risk on race conditions.')
def init(self):
pass
def __del__(self):
if (not self._disposed):
self._dispose()
def dispose(self):
self._dispose()
def _dispose(self):
self._disposed = True
if (not this_is_js()):
logger.debug(('Disposing Component %r' % self))
for (name, reactions) in self.__handlers.items():
for i in range(len(reactions)):
reactions[i][1]._clear_component_refs(self)
while len(reactions):
reactions.pop()
for i in range(len(self.__reactions__)):
getattr(self, self.__reactions__[i]).dispose()
def _registered_reactions_hook(self):
used_event_types = []
for (key, reactions) in self.__handlers.items():
if (len(reactions) > 0):
used_event_types.append(key)
return used_event_types
def _register_reaction(self, event_type, reaction, force=False):
(type, _, label) = event_type.partition(':')
label = (label or reaction._name)
reactions = self.__handlers.get(type, None)
        if (reactions is None):
            reactions = []
            self.__handlers[type] = reactions
            # Warn only for event types not declared on this component; the
            # warning block belongs inside this branch.
            if force:
                pass
            elif type.startswith('mouse_'):
                t = 'The event "{}" has been renamed to "pointer{}".'
                logger.warning(t.format(type, type[5:]))
            else:
                msg = ('Event type "{type}" does not exist on component {id}. ' + 'Use "!{type}" or "!xx.yy.{type}" to suppress this warning.')
                msg = msg.replace('{type}', type).replace('{id}', self._id)
                logger.warning(msg)
comp1 = ((label + '-') + reaction._id)
for i in range(len(reactions)):
comp2 = ((reactions[i][0] + '-') + reactions[i][1]._id)
if (comp1 < comp2):
reactions.insert(i, (label, reaction))
break
elif (comp1 == comp2):
break
else:
reactions.append((label, reaction))
self._registered_reactions_hook()
def disconnect(self, type, reaction=None):
(type, _, label) = type.partition(':')
reactions = self.__handlers.get(type, ())
for i in range((len(reactions) - 1), (- 1), (- 1)):
entry = reactions[i]
if (not ((label and (label != entry[0])) or (reaction and (reaction is not entry[1])))):
reactions.pop(i)
self._registered_reactions_hook()
def emit(self, type, info=None):
info = ({} if (info is None) else info)
(type, _, label) = type.partition(':')
if len(label):
raise ValueError('The type given to emit() should not include a label.')
if (not isinstance(info, dict)):
raise TypeError(('Info object (for %r) must be a dict, not %r' % (type, info)))
ev = Dict(info)
ev.type = type
ev.source = self
if (self.__pending_events is not None):
self.__pending_events.append(ev)
else:
reactions = self.__handlers.get(ev.type, ())
for i in range(len(reactions)):
(label, reaction) = reactions[i]
if label.startswith('reconnect_'):
if (getattr(ev, 'allow_reconnect', True) is True):
index = int(label.split('_')[1])
reaction.reconnect(index)
else:
loop.add_reaction_event(reaction, ev)
return ev
def _mutate(self, prop_name, value, mutation='set', index=(- 1)):
if (not isinstance(prop_name, str)):
raise TypeError(("_mutate's first arg must be str, not %s" % prop_name.__class__))
if (prop_name not in self.__properties__):
cname = self.__class__.__name__
raise AttributeError(('%s object has no property %r' % (cname, prop_name)))
if (loop.can_mutate(self) is False):
raise AttributeError(('Trying to mutate property %s outside of an action or context.' % prop_name))
private_name = (('_' + prop_name) + '_value')
validator_name = (('_' + prop_name) + '_validate')
old = getattr(self, private_name)
if (mutation == 'set'):
value2 = getattr(self, validator_name)(value)
setattr(self, private_name, value2)
if this_is_js():
is_equal = (old == value2)
elif (hasattr(old, 'dtype') and hasattr(value2, 'dtype')):
import numpy as np
is_equal = np.array_equal(old, value2)
else:
is_equal = ((type(old) == type(value2)) and (old == value2))
if (self.__initial_mutation is True):
old = value2
is_equal = False
if (not is_equal):
self.emit(prop_name, dict(new_value=value2, old_value=old, mutation=mutation))
return True
else:
ev = Dict()
ev.objects = value
ev.mutation = mutation
ev.index = index
if isinstance(old, dict):
if (index != (- 1)):
raise IndexError('For in-place dict mutations, the index is not used, and must be -1.')
mutate_dict(old, ev)
else:
if (index < 0):
raise IndexError('For insert, remove, and replace mutations, the index must be >= 0.')
mutate_array(old, ev)
self.emit(prop_name, ev)
return True
def get_event_types(self):
types = list(self.__handlers)
types.sort()
return types
def get_event_handlers(self, type):
if (not type):
raise TypeError('get_event_handlers() missing "type" argument.')
(type, _, label) = type.partition(':')
if len(label):
raise ValueError('The type given to get_event_handlers() should not include a label.')
reactions = self.__handlers.get(type, ())
return [h[1] for h in reactions]
def reaction(self, *connection_strings):
mode = 'normal'
if ((not connection_strings) or ((len(connection_strings) == 1) and callable(connection_strings[0]))):
raise RuntimeError('Component.reaction() needs one or more connection strings.')
func = None
if callable(connection_strings[0]):
func = connection_strings[0]
connection_strings = connection_strings[1:]
elif callable(connection_strings[(- 1)]):
func = connection_strings[(- 1)]
connection_strings = connection_strings[:(- 1)]
for s in connection_strings:
if (not (isinstance(s, str) and (len(s) > 0))):
raise ValueError('Connection string must be nonempty string.')
def _react(func):
if (not callable(func)):
raise TypeError('Component.reaction() decorator requires a callable.')
if looks_like_method(func):
return ReactionDescriptor(func, mode, connection_strings, self)
else:
return Reaction(self, func, mode, connection_strings)
if (func is not None):
return _react(func)
else:
            return _react

def main():
sites = sorted(list(gen_sites()))
max_sites = len(sites)
f = open('params.jl', 'w')
f.write('module,loc,params\n')
routes_file = open('routes.txt', 'w')
print('\nmodule top(\n input [{N}:0] clkin1,\n input [{N}:0] clkin2,\n input [{N}:0] clkfb,\n input [{N}:0] dclk\n);\n\n (* KEEP, DONT_TOUCH *)\n LUT1 dummy();\n'.format(N=(max_sites - 1)))
for (i, (tile_name, tile_type, site)) in enumerate(sorted(gen_sites())):
params = {'site': site, 'active': (random.random() > 0.2), 'clkin1_conn': random.choice((('clkfbout_mult_BUFG_' + site), 'clkin1[{}]'.format(i), '')), 'clkin2_conn': random.choice((('clkfbout_mult_BUFG_' + site), 'clkin2[{}]'.format(i), '')), 'dclk_conn': random.choice(('0', 'dclk[{}]'.format(i))), 'dwe_conn': random.choice(('', '1', '0', ('dwe_' + site), ('den_' + site))), 'den_conn': random.choice(('', '1', '0', ('den_' + site))), 'daddr4_conn': random.choice(('0', ('dwe_' + site))), 'IS_RST_INVERTED': random.randint(0, 1), 'IS_PWRDWN_INVERTED': random.randint(0, 1), 'IS_CLKINSEL_INVERTED': random.randint(0, 1), 'CLKFBOUT_MULT': random.randint(2, 4), 'CLKOUT0_DIVIDE': random.randint(1, 128), 'CLKOUT1_DIVIDE': random.randint(1, 128), 'CLKOUT2_DIVIDE': random.randint(1, 128), 'CLKOUT3_DIVIDE': random.randint(1, 128), 'CLKOUT4_DIVIDE': random.randint(1, 128), 'CLKOUT5_DIVIDE': random.randint(1, 128), 'DIVCLK_DIVIDE': random.randint(1, 5), 'CLKOUT0_DUTY_CYCLE': '0.500', 'STARTUP_WAIT': verilog.quote(('TRUE' if random.randint(0, 1) else 'FALSE')), 'COMPENSATION': verilog.quote(random.choice(('ZHOLD', 'BUF_IN', 'EXTERNAL', 'INTERNAL'))), 'BANDWIDTH': verilog.quote(random.choice(('OPTIMIZED', 'HIGH', 'LOW')))}
if (verilog.unquote(params['COMPENSATION']) == 'ZHOLD'):
params['clkfbin_conn'] = random.choice(('', ('clkfbout_mult_BUFG_' + site)))
elif (verilog.unquote(params['COMPENSATION']) == 'INTERNAL'):
params['clkfbin_conn'] = random.choice(('', ('clkfbout_mult_' + site)))
else:
params['clkfbin_conn'] = random.choice(('', 'clkfb[{}]'.format(i), ('clkfbout_mult_BUFG_' + site)))
params['clkin1_route'] = random.choice(('{}_CLKIN1', '{}_FREQ_BB0', '{}_FREQ_BB1', '{}_FREQ_BB2', '{}_FREQ_BB3', '{}_PLLE2_CLK_IN1_INT')).format(tile_type)
params['clkin2_route'] = random.choice(('{}_CLKIN2', '{}_FREQ_BB0', '{}_FREQ_BB1', '{}_FREQ_BB2', '{}_FREQ_BB3', '{}_PLLE2_CLK_IN2_INT')).format(tile_type)
params['clkfbin_route'] = random.choice(('{}_CLKFBOUT2IN', '{}_UPPER_T_FREQ_BB0', '{}_UPPER_T_FREQ_BB1', '{}_UPPER_T_FREQ_BB2', '{}_UPPER_T_FREQ_BB3', '{}_UPPER_T_PLLE2_CLK_FB_INT')).format(tile_type.replace('_UPPER_T', ''))
f.write(('%s\n' % json.dumps(params)))
def make_ibuf_net(net):
p = net.find('[')
return ((net[:p] + '_IBUF') + net[p:])
if (params['clkin1_conn'] != ''):
net = make_ibuf_net(params['clkin1_conn'])
wire = '{}/{}'.format(tile_name, params['clkin1_route'])
routes_file.write('{} {}\n'.format(net, wire))
if (params['clkin2_conn'] != ''):
net = make_ibuf_net(params['clkin2_conn'])
wire = '{}/{}'.format(tile_name, params['clkin2_route'])
routes_file.write('{} {}\n'.format(net, wire))
if ((params['clkfbin_conn'] != '') and (params['clkfbin_conn'] != ('clkfbout_mult_BUFG_' + site))):
net = params['clkfbin_conn']
if (('[' in net) and (']' in net)):
net = make_ibuf_net(net)
wire = '{}/{}'.format(tile_name, params['clkfbin_route'])
routes_file.write('{} {}\n'.format(net, wire))
if (not params['active']):
continue
print('\n\n wire den_{site};\n wire dwe_{site};\n\n LUT1 den_lut_{site} (\n .O(den_{site})\n );\n\n LUT1 dwe_lut_{site} (\n .O(dwe_{site})\n );\n\n wire clkfbout_mult_{site};\n wire clkfbout_mult_BUFG_{site};\n wire clkout0_{site};\n wire clkout1_{site};\n wire clkout2_{site};\n wire clkout3_{site};\n wire clkout4_{site};\n wire clkout5_{site};\n (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n PLLE2_ADV #(\n .IS_RST_INVERTED({IS_RST_INVERTED}),\n .IS_PWRDWN_INVERTED({IS_PWRDWN_INVERTED}),\n .IS_CLKINSEL_INVERTED({IS_CLKINSEL_INVERTED}),\n .CLKOUT0_DIVIDE({CLKOUT0_DIVIDE}),\n .CLKOUT1_DIVIDE({CLKOUT1_DIVIDE}),\n .CLKOUT2_DIVIDE({CLKOUT2_DIVIDE}),\n .CLKOUT3_DIVIDE({CLKOUT3_DIVIDE}),\n .CLKOUT4_DIVIDE({CLKOUT4_DIVIDE}),\n .CLKOUT5_DIVIDE({CLKOUT5_DIVIDE}),\n .CLKFBOUT_MULT({CLKFBOUT_MULT}),\n .DIVCLK_DIVIDE({DIVCLK_DIVIDE}),\n .STARTUP_WAIT({STARTUP_WAIT}),\n .CLKOUT0_DUTY_CYCLE({CLKOUT0_DUTY_CYCLE}),\n .COMPENSATION({COMPENSATION}),\n .BANDWIDTH({BANDWIDTH}),\n .CLKIN1_PERIOD(10.0),\n .CLKIN2_PERIOD(10.0)\n ) pll_{site} (\n .CLKFBOUT(clkfbout_mult_{site}),\n .CLKOUT0(clkout0_{site}),\n .CLKOUT1(clkout1_{site}),\n .CLKOUT2(clkout2_{site}),\n .CLKOUT3(clkout3_{site}),\n .CLKOUT4(clkout4_{site}),\n .CLKOUT5(clkout5_{site}),\n .DRDY(),\n .LOCKED(),\n .DO(),\n .CLKFBIN({clkfbin_conn}),\n .CLKIN1({clkin1_conn}),\n .CLKIN2({clkin2_conn}),\n .CLKINSEL(),\n .DCLK({dclk_conn}),\n .DEN({den_conn}),\n .DWE({dwe_conn}),\n .PWRDWN(),\n .RST(),\n .DI(),\n .DADDR({{7{{ {daddr4_conn} }} }}));\n\n (* KEEP, DONT_TOUCH *)\n BUFG bufg_{site} (\n .I(clkfbout_mult_{site}),\n .O(clkfbout_mult_BUFG_{site})\n );\n\n (* KEEP, DONT_TOUCH *)\n FDRE reg_clkfbout_mult_{site} (\n .C(clkfbout_mult_{site})\n );\n\n (* KEEP, DONT_TOUCH *)\n FDRE reg_clkout0_{site} (\n .C(clkout0_{site})\n );\n\n (* KEEP, DONT_TOUCH *)\n FDRE reg_clkout1_{site} (\n .C(clkout1_{site})\n );\n\n (* KEEP, DONT_TOUCH *)\n FDRE reg_clkout2_{site} (\n .C(clkout2_{site})\n );\n\n (* KEEP, DONT_TOUCH *)\n FDRE reg_clkout3_{site} (\n .C(clkout3_{site})\n );\n\n (* KEEP, DONT_TOUCH *)\n FDRE reg_clkout4_{site} (\n .C(clkout4_{site})\n );\n\n (* KEEP, DONT_TOUCH *)\n FDRE reg_clkout5_{site} (\n .C(clkout5_{site})\n );\n '.format(**params))
print('endmodule')
    f.close()
    routes_file.close()

class NEDetector(VPNDetector):
def __init__(self, device):
self._device = device
self._ne_processes_cache = None
def _ne_processes(self):
if self._ne_processes_cache:
return self._ne_processes_cache
self._ne_processes_cache = []
self._ne_processes_cache += self._device.pgrep('neagent')
return self._ne_processes_cache
def detect(self):
if (current_os() != 'macos'):
return None
L.debug("Trying to determine if we're using a macOS network extension")
vpn_info = VPNInfo()
if (not self._ne_processes()):
L.debug('Not using a network extension')
return None
L.info('Detected a VPN network extension (unknown protocol)')
vpn_info.vpn_processes = self._ne_processes()
        return vpn_info

# Decorators reconstructed (only '_json' survived extraction); the
# dataclasses_json pattern of '@dataclass_json' over '@dataclass' is assumed.
@dataclass_json
@dataclass
class ValidationReport():
validation_result: ValidationResult
validator_name: str
message: str
details: Optional[Dict[(str, Any)]] = None
def __str__(self) -> str:
if self.details:
return f'''Validation Report: {self.validator_name}
Result: {self.validation_result.value}
Message: {self.message}
Details:
{json.dumps(self.details, sort_keys=True, indent=4)}'''
else:
return f'''Validation Report: {self.validator_name}
Result: {self.validation_result.value}
Message: {self.message}'''

class OptionPlotoptionsPyramidSonificationDefaultinstrumentoptionsMappingFrequency(Options):
    @property
    def mapFunction(self):
        return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    @property
    def mapTo(self):
        return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    @property
    def max(self):
        return self._config_get(None)
    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)
    @property
    def min(self):
        return self._config_get(None)
    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)
    @property
    def within(self):
        return self._config_get(None)
    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)

class TreeNodeUtilsTestCase(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_join_pks(self):
pks_str = join_pks(None)
self.assertEqual(pks_str, '')
pks_str = join_pks([])
self.assertEqual(pks_str, '')
pks_str = join_pks([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
self.assertEqual(pks_str, '0,1,2,3,4,5,6,7,8,9,10')
def test_split_pks(self):
pks_list = split_pks(None)
self.assertEqual(pks_list, [])
pks_list = split_pks('')
self.assertEqual(pks_list, [])
pks_list = split_pks('0,1,2,3,4,5,6,7,8,9,10')
        self.assertEqual(pks_list, [str(i) for i in range(11)])

class ExampleUndoWindow(WorkbenchWindow):
_exit_action = Instance(Action)
_file_menu = Instance(MenuManager)
_label_menu = Instance(MenuManager)
_undo_menu = Instance(MenuManager)
def __file_menu_default(self):
return MenuManager(self._exit_action, name='&File')
def __undo_menu_default(self):
undo_manager = self.workbench.undo_manager
undo_action = UndoAction(undo_manager=undo_manager)
redo_action = RedoAction(undo_manager=undo_manager)
return MenuManager(undo_action, redo_action, name='&Undo')
def __label_menu_default(self):
size_group = Group(CommandAction(command=LabelIncrementSizeCommand), CommandAction(command=LabelDecrementSizeCommand))
normal = CommandAction(id='normal', command=LabelNormalFontCommand, style='radio', checked=True)
bold = CommandAction(id='bold', command=LabelBoldFontCommand, style='radio')
italic = CommandAction(id='italic', command=LabelItalicFontCommand, style='radio')
style_group = Group(normal, bold, italic, id='style')
return MenuManager(size_group, style_group, name='&Label')
def __exit_action_default(self):
return Action(name='E&xit', on_perform=self.workbench.exit)
def _editor_manager_default(self):
return ExampleEditorManager()
def _menu_bar_manager_default(self):
return MenuBarManager(self._file_menu, self._label_menu, self._undo_menu, window=self)
def _tool_bar_manager_default(self):
return ToolBarManager(self._exit_action, show_tool_names=False)
def _active_editor_changed(self, old, new):
if (old is not None):
old.command_stack.undo_manager.active_stack = None
if (new is not None):
new.command_stack.undo_manager.active_stack = new.command_stack
for grp in self._label_menu.groups:
for itm in grp.items:
action = itm.action
if (new is not None):
action.enabled = True
action.command_stack = new.command_stack
action.data = new.obj
if (grp.id == 'style'):
action.checked = (action.data.style == action.id)
else:
                    action.enabled = False

def start_north_ocs_v2():
def _start_north_ocs_server_c(fledge_url, ocs_tenant, ocs_client_id, ocs_client_secret, ocs_namespace, ocs_token, taskname='NorthReadingsToOCS'):
        conn = http.client.HTTPConnection(fledge_url)  # assumed: the truncated original opened an HTTP connection to the Fledge host
data = {'name': taskname, 'plugin': '{}'.format('OMF'), 'type': 'north', 'schedule_type': 3, 'schedule_day': 0, 'schedule_time': 0, 'schedule_repeat': 30, 'schedule_enabled': 'true', 'config': {'PIServerEndpoint': {'value': 'OSIsoft Cloud Services'}, 'OCSTenantId': {'value': ocs_tenant}, 'OCSClientId': {'value': ocs_client_id}, 'OCSClientSecret': {'value': ocs_client_secret}, 'OCSNamespace': {'value': ocs_namespace}}}
conn.request('POST', '/fledge/scheduled/task', json.dumps(data))
r = conn.getresponse()
assert (200 == r.status)
retval = r.read().decode()
return retval
    return _start_north_ocs_server_c

class Solution():
def customSortString(self, S: str, T: str) -> str:
track = dict(((c, i) for (i, c) in enumerate(S)))
T = list(T)
t = sorted([c for c in T if (c in track)], key=(lambda x: track[x]))
indices = [i for (i, c) in enumerate(T) if (c in track)]
for (i, j) in enumerate(indices):
T[j] = t[i]
        return ''.join(T)

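# Worked example for customSortString (LeetCode 791): with S = 'cba' and
# T = 'abcd', the characters of T that occur in S are rewritten in S's order
# while 'd' keeps its slot, giving 'cbad'.
#
#     Solution().customSortString('cba', 'abcd')  # -> 'cbad'
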
class RollbarTestClient():
def __init__(self, rollbar_connection_config: ConnectionConfig):
self.rollbar_secrets = rollbar_connection_config.secrets
self.headers = {'Content-Type': 'application/json', 'X-Rollbar-Access-Token': self.rollbar_secrets['read_access_token']}
self.base_url = f"
def create_project(self) -> Response:
random_num = random.randint(0, 999)
body = {'name': f'ethyca_test_project_{random_num}'}
self.headers['X-Rollbar-Access-Token'] = self.rollbar_secrets['write_access_token']
project_response: Response = requests.post(url=f'{self.base_url}/projects', json=body, headers=self.headers)
return project_response
def delete_project(self, project_id) -> Response:
url = f'{self.base_url}/project/{project_id}'
self.headers['X-Rollbar-Access-Token'] = self.rollbar_secrets['write_access_token']
project_response: Response = requests.delete(url=url, headers=self.headers)
return project_response
def get_project_tokens(self, project_id: str) -> Response:
self.headers['X-Rollbar-Access-Token'] = self.rollbar_secrets['read_access_token']
project_response: Response = requests.get(url=f'{self.base_url}/project/{project_id}/access_tokens', headers=self.headers)
return project_response
def get_project(self, project_id: str) -> Response:
self.headers['X-Rollbar-Access-Token'] = self.rollbar_secrets['read_access_token']
project_response: Response = requests.get(url=f'{self.base_url}/project/{project_id}', headers=self.headers)
return project_response
def delete_project(self, project_id: str) -> Response:
self.headers['X-Rollbar-Access-Token'] = self.rollbar_secrets['write_access_token']
project_response: Response = requests.delete(url=f'{self.base_url}/project/{project_id}', headers=self.headers)
return project_response
def create_item(self, project_tokens: dict, email: str) -> Response:
self.headers['X-Rollbar-Access-Token'] = project_tokens['post_server_item']
body = {'data': {'environment': 'production', 'body': {'message': {'body': 'Request over threshold of 10 seconds', 'route': 'home#index', 'time_elapsed': 15.23}, 'level': 'error'}, 'person': {'id': f'{random.randint(0, 999)}', 'username': f'ethyca-erasure-username-{random.randint(0, 999)}', 'email': email}}}
item_response: Response = requests.post(url=f'{self.base_url}/item/', json=body, headers=self.headers)
return item_response
def get_item(self, project_tokens: dict) -> Response:
self.headers['X-Rollbar-Access-Token'] = project_tokens['read']
item_response: Response = requests.get(url=f'{self.base_url}/items', headers=self.headers)
        return item_response

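# Hedged usage sketch: rollbar_connection_config (with read/write tokens) is
# assumed to come from surrounding test fixtures; the response envelope shape
# ({'err': 0, 'result': {...}}) is Rollbar's standard wrapper, assumed here.
#
#     client = RollbarTestClient(rollbar_connection_config)
#     project = client.create_project().json()['result']
#     tokens = client.get_project_tokens(project['id']).json()['result']
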
class FirewallRuleTest(ForsetiTestCase):
def test_from_json(self):
json_dict = {'kind': 'compute#firewall', 'id': '8', 'creationTimestamp': '2017-05-01T22:08:53.399-07:00', 'name': 'default', 'description': '', 'network': 'network name', 'priority': 1000, 'sourceRanges': ['0.0.0.0/0'], 'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}], 'direction': 'INGRESS', 'selfLink': ' insert link here'}
json_string = json.dumps(json_dict)
rule = firewall_rule.FirewallRule.from_json(json_string)
self.assertTrue(rule.validate())
    @parameterized.expand([({'kind': 'compute#firewall', 'id': '8', 'creationTimestamp': '2017-05-01T22:08:53.399-07:00', 'name': 'default', 'description': '', 'network': 'network name', 'priority': 1000, 'sourceRanges': ['0.0.0.0/0'], 'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}], 'direction': 'EGRESS', 'selfLink': ' insert link here'}, firewall_rule.InvalidFirewallRuleError, 'Egress rule missing required field "destinationRanges".*'), ({'kind': 'compute#firewall', 'id': '8', 'creationTimestamp': '2017-05-01T22:08:53.399-07:00', 'description': '', 'network': 'network name', 'priority': 1000, 'sourceRanges': ['0.0.0.0/0'], 'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}], 'direction': 'INGRESS', 'selfLink': ' insert link here'}, firewall_rule.InvalidFirewallRuleError, 'Rule missing required field "name"'), ({'kind': 'compute#firewall', 'id': '8', 'creationTimestamp': '2017-05-01T22:08:53.399-07:00', 'description': '', 'network': 'network name', 'priority': 1000, 'sourceRanges': ['0.0.0.0/0'], 'direction': 'INGRESS', 'selfLink': ' insert link here'}, firewall_rule.InvalidFirewallRuleError, 'Must have allowed or denied rules'), ({'kind': 'compute#firewall', 'id': '8', 'creationTimestamp': '2017-05-01T22:08:53.399-07:00', 'name': 'default', 'description': '', 'network': 'network name', 'priority': (- 1), 'sourceRanges': ['0.0.0.0/0'], 'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}], 'direction': 'INGRESS', 'selfLink': ' insert link here'}, firewall_rule.InvalidFirewallRuleError, 'Rule "priority" out of range 0-65535')])
def test_from_json_error(self, json_dict, expected_error, regexp):
json_string = json.dumps(json_dict)
with self.assertRaisesRegex(expected_error, regexp):
rule = firewall_rule.FirewallRule.from_json(json_string)
def test_from_dict(self):
firewall_dict = {'name': 'default', 'network': 'network name', 'priority': 1000, 'sourceRanges': ['0.0.0.0/0'], 'allowed': ['*'], 'direction': 'INGRESS'}
firewall_dict_2 = {'name': 'default', 'network': 'network name', 'priority': 1000, 'sourceRanges': ['0.0.0.0/0'], 'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}], 'direction': 'INGRESS'}
rule = firewall_rule.FirewallRule.from_dict(firewall_dict)
        rule_2 = firewall_rule.FirewallRule.from_dict(firewall_dict_2)
self.assertTrue((rule_2 < rule))
    @parameterized.expand([({'kind': 'compute#firewall', 'id': '8', 'creationTimestamp': '2017-05-01T22:08:53.399-07:00', 'name': 'default', 'description': '', 'network': 'network name', 'priority': 1000, 'sourceRanges': ['0.0.0.0/0'], 'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}], 'direction': 'EGRESS', 'selfLink': ' insert link here'}, firewall_rule.InvalidFirewallRuleError, 'Egress rule missing required field "destinationRanges".*'), ({'kind': 'compute#firewall', 'id': '8', 'creationTimestamp': '2017-05-01T22:08:53.399-07:00', 'description': '', 'network': 'network name', 'priority': 1000, 'sourceRanges': ['0.0.0.0/0'], 'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}], 'direction': 'INGRESS', 'selfLink': ' insert link here'}, firewall_rule.InvalidFirewallRuleError, 'Rule missing required field "name"'), ({'kind': 'compute#firewall', 'id': '8', 'creationTimestamp': '2017-05-01T22:08:53.399-07:00', 'description': '', 'network': 'network name', 'priority': 1000, 'sourceRanges': ['0.0.0.0/0'], 'direction': 'INGRESS', 'selfLink': ' insert link here'}, firewall_rule.InvalidFirewallRuleError, 'Must have allowed or denied rules'), ({'kind': 'compute#firewall', 'id': '8', 'creationTimestamp': '2017-05-01T22:08:53.399-07:00', 'name': 'default', 'description': '', 'network': 'network name', 'priority': (- 1), 'sourceRanges': ['0.0.0.0/0'], 'allowed': [{'IPProtocol': 'tcp', 'ports': ['22']}], 'direction': 'INGRESS', 'selfLink': ' insert link here'}, firewall_rule.InvalidFirewallRuleError, 'Rule "priority" out of range 0-65535')])
def test_from_dict_error(self, firewall_dict, expected_error, regexp):
with self.assertRaisesRegex(expected_error, regexp):
rule = firewall_rule.FirewallRule.from_dict(firewall_dict, validate=True)
    @parameterized.expand([('192.0.0.1', '192.0.0.1/24', True), ('192.0.0.1', '192.0.0.0/16', True), ('192.0.0.1/24', '192.0.0.0/16', True), ('192.0.0.1/24', '192.0.0.1', False), ('192.0.1.1', '192.0.0.0/16', True), ('192.0.1.1', '192.0.0.1/24', False), ('192.0.0.1/32', '192.0.0.0/16', True), ('192.0.0.2/32', '0.0.0.0/0', True), ('5.5.5.5', '192.0.0.0/16', False)])
def test_ip_in_range(self, ip_addr, ip_range, expected):
self.assertEqual(expected, firewall_rule.ip_in_range(ip_addr, ip_range))
    @parameterized.expand([(['192.0.0.1'], ['192.0.0.1/24'], True), (['192.0.0.1'], ['192.0.0.0/16'], True), (['192.0.0.1/24'], ['192.0.0.0/16'], True), (['192.0.0.1/24'], ['192.0.0.1'], False)])
def test_ips_subset_of_ips(self, ips, ips_range, expected):
self.assertEqual(expected, firewall_rule.ips_in_list(ips, ips_range))
    @parameterized.expand([({'firewall_rule_name': ('n' * 63), 'firewall_rule_network': 'n2', 'firewall_rule_source_ranges': json.dumps([('1.1.1.%s' % i) for i in range(256)]), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}]), 'firewall_rule_source_service_accounts': json.dumps(['sa1', 'sa2'])},)])
def test_validate(self, rule_dict):
rule = firewall_rule.FirewallRule(**rule_dict)
rule.validate()
    @parameterized.expand([({'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_source_tags': None, 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_priority': 'NaN', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, 'Rule "priority" could not be converted to an integer: .*NaN.*'), ({'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_priority': '-1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, 'Rule "priority" out of range 0-65535: "-1".'), ({'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_priority': '', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, 'Rule "priority" out of range 0-65535: ""')])
def test_validate_priority_error(self, rule_dict, expected_regex):
rule = firewall_rule.FirewallRule(**rule_dict)
with self.assertRaisesRegex(firewall_rule.InvalidFirewallRuleError, expected_regex):
rule._validate_priority()
    @parameterized.expand([({'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, 'Ingress rule missing required field oneof "sourceRanges" or "sourceTags" or "sourceServiceAccounts"'), ({'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_destination_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, 'Ingress rules cannot include "destinationRanges"'), ({'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, 'Egress rule missing required field "destinationRanges"'), ({'firewall_rule_destination_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, 'Egress rules cannot include "sourceRanges", "sourceTags" or "sourceServiceAccounts"'), ({'firewall_rule_destination_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_source_tags': json.dumps(['t1']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, 'Egress rules cannot include "sourceRanges", "sourceTags" or "sourceServiceAccounts"')])
def test_validate_direction_error(self, rule_dict, expected_regex):
rule = firewall_rule.FirewallRule(**rule_dict)
with self.assertRaisesRegex(firewall_rule.InvalidFirewallRuleError, expected_regex):
rule._validate_direction()
    @parameterized.expand([({'firewall_rule_network': 'n2', 'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_denied': json.dumps([{'IPProtocol': 'tcp', 'ports': ['21-23']}])}, firewall_rule.InvalidFirewallRuleError, 'Rule missing required field "name"'), ({'firewall_rule_name': 'n1', 'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_denied': json.dumps([{'IPProtocol': 'tcp', 'ports': ['21-23']}])}, firewall_rule.InvalidFirewallRuleError, 'Rule missing required field "network"'), ({'firewall_rule_name': 'n1', 'firewall_rule_network': 'n2', 'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_denied': json.dumps([{'IPProtocol': 'tcp', 'ports': ['21-23']}, {}])}, firewall_rule.InvalidFirewallActionError, 'Action must have field IPProtocol')])
def test_validate_errors(self, rule_dict, expected_error, regexp):
rule = firewall_rule.FirewallRule(**rule_dict)
with self.assertRaisesRegex(expected_error, regexp):
rule.validate()
    @parameterized.expand([({'firewall_rule_network': 'n2', 'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_denied': json.dumps([{'IPProtocol': 'tcp', 'ports': ['21-23']}])}, firewall_rule.InvalidFirewallRuleError, 'Rule missing required field "name"'), ({'firewall_rule_name': 'n1', 'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_denied': json.dumps([{'IPProtocol': 'tcp', 'ports': ['21-23']}])}, firewall_rule.InvalidFirewallRuleError, 'Rule missing required field "network"'), ({'firewall_rule_name': 'n1', 'firewall_rule_network': 'n2', 'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_denied': json.dumps([{'IPProtocol': 'tcp', 'ports': ['21-23']}, {}])}, firewall_rule.InvalidFirewallActionError, 'Action must have field IPProtocol')])
def test_as_json_error(self, rule_dict, expected_error, regexp):
rule = firewall_rule.FirewallRule(**rule_dict)
with self.assertRaisesRegex(expected_error, regexp):
rule.as_json()
    @parameterized.expand([({'firewall_rule_name': 'n1', 'firewall_rule_network': 'n2', 'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_denied': json.dumps([{'IPProtocol': 'tcp', 'ports': ['21-23']}])}, {'denied': [{'IPProtocol': 'tcp', 'ports': ['21-23']}], 'direction': 'INGRESS', 'network': 'n2', 'name': 'n1', 'sourceRanges': ['1.1.1.1']})])
def test_as_json(self, rule_dict, expected):
rule = firewall_rule.FirewallRule(**rule_dict)
self.assertEqual(json.dumps(expected, sort_keys=True), rule.as_json())
    @parameterized.expand([({'firewall_rule_network': 'n1', 'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, firewall_rule.InvalidFirewallRuleError, 'Rule missing required field "name"'), ({'firewall_rule_name': 'n1', 'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, firewall_rule.InvalidFirewallRuleError, 'Rule missing required field "network"'), ({'firewall_rule_name': ('n' * 64), 'firewall_rule_network': 'n2', 'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, firewall_rule.InvalidFirewallRuleError, 'Rule name exceeds length limit of 63 chars'), ({'firewall_rule_name': ('n' * 63), 'firewall_rule_network': 'n2', 'firewall_rule_source_ranges': json.dumps([('1.1.1.%s' % i) for i in range(257)]), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, firewall_rule.InvalidFirewallRuleError, 'Rule entry "sourceRanges" must contain 256 or fewer values'), ({'firewall_rule_name': ('n' * 63), 'firewall_rule_network': 'n2', 'firewall_rule_destination_ranges': json.dumps([('1.1.1.%s' % i) for i in range(257)]), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, firewall_rule.InvalidFirewallRuleError, 'Rule entry "destinationRanges" must contain 256 or fewer values'), ({'firewall_rule_name': ('n' * 63), 'firewall_rule_network': 'n2', 'firewall_rule_source_ranges': json.dumps([('1.1.1.%s' % i) for i in range(256)]), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_source_tags': json.dumps([('t%s' % i) for i in range(257)]), 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, firewall_rule.InvalidFirewallRuleError, 'Rule entry "sourceTags" must contain 256 or fewer values'), ({'firewall_rule_name': ('n' * 63), 'firewall_rule_network': 'n2', 'firewall_rule_source_ranges': json.dumps([('1.1.1.%s' % i) for i in range(256)]), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_target_tags': json.dumps([('t%s' % i) for i in range(257)]), 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, firewall_rule.InvalidFirewallRuleError, 'Rule entry "targetTags" must contain 256 or fewer values'), ({'firewall_rule_name': ('n' * 63), 'firewall_rule_network': 'n2', 'firewall_rule_source_ranges': json.dumps([('1.1.1.%s' % i) for i in range(256)]), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_target_tags': json.dumps([('t%s' % i) for i in range(256)]), 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}]), 'firewall_rule_source_service_accounts': json.dumps(['sa1'])}, firewall_rule.InvalidFirewallRuleError, 'targetTags cannot be set when source/targetServiceAccounts are set')])
def test_validate_keys_error(self, rule_dict, expected_error, regexp):
rule = firewall_rule.FirewallRule(**rule_dict)
with self.assertRaisesRegex(expected_error, regexp):
rule.validate()
    @parameterized.expand([({'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, {'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['21-23']}])}, True), ({'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_denied': json.dumps([{'IPProtocol': 'tcp', 'ports': ['50-55']}])}, {'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_denied': json.dumps([{'IPProtocol': 'tcp', 'ports': ['40-60']}])}, True), ({'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['21-23']}])}, {'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, False), ({'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_action': 'deny', 'firewall_rule_denied': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, {'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, False), ({'firewall_rule_source_ranges': json.dumps(['10.0.0.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_source_ranges': json.dumps(['10.0.0.2']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, False), ({'firewall_rule_destination_ranges': json.dumps(['10.0.0.1', '10.0.0.2']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_destination_ranges': json.dumps(['10.0.0.0/24']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, True), ({'firewall_rule_destination_ranges': json.dumps(['10.0.0.1', '10.0.0.2']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_destination_ranges': json.dumps(['10.0.0.0/24']), 'firewall_rule_direction': 'egress', 'firewall_rule_allowed': json.dumps(['*'])}, True), ({'firewall_rule_destination_ranges': json.dumps(['10.0.0.1', '10.0.0.2']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_destination_ranges': json.dumps(['10.0.0.0/24']), 'firewall_rule_direction': 'egress', 'firewall_rule_allowed': json.dumps(['*'])}, True), ({'firewall_rule_destination_ranges': json.dumps(['10.0.0.1', '10.0.0.2']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_destination_ranges': json.dumps(['10.0.0.0/24']), 'firewall_rule_direction': 'egress', 'firewall_rule_allowed': json.dumps(['*'])}, True)])
def test_firewall_rule_lt(self, rule_1_dict, rule_2_dict, expected):
rule_1 = firewall_rule.FirewallRule(**rule_1_dict)
rule_2 = firewall_rule.FirewallRule(**rule_2_dict)
self.assertEqual(expected, (rule_1 < rule_2))
@parameterized.expand([({'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['21-23']}])}, {'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, True), ({'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, True), ({'firewall_rule_source_ranges': json.dumps(['10.0.0.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_source_ranges': json.dumps(['10.0.0.2']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, False), ({'firewall_rule_destination_ranges': json.dumps(['10.0.0.0/24']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_destination_ranges': json.dumps(['10.0.0.1', '10.0.0.2']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, True), ({'firewall_rule_destination_ranges': json.dumps(['10.0.0.0/24']), 'firewall_rule_direction': 'egress', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_destination_ranges': json.dumps(['10.0.0.1', '10.0.0.2']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, True), ({'firewall_rule_destination_ranges': json.dumps(['10.0.0.0/24']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_destination_ranges': json.dumps(['10.0.0.1', '10.0.0.2']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, True), ({'firewall_rule_destination_ranges': json.dumps(['10.0.0.0/24']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_destination_ranges': json.dumps(['10.0.0.1', '10.0.0.2']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, True), ({'firewall_rule_destination_ranges': json.dumps(['10.0.0.0/24']), 'firewall_rule_direction': 'egress', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_destination_ranges': json.dumps(['10.0.0.1', '10.0.0.2']), 'firewall_rule_allowed': json.dumps(['*']), 'firewall_rule_direction': 'egress'}, True)])
def test_firewall_rule_gt(self, rule_1_dict, rule_2_dict, expected):
rule_1 = firewall_rule.FirewallRule(**rule_1_dict)
rule_2 = firewall_rule.FirewallRule(**rule_2_dict)
self.assertEqual(expected, (rule_1 > rule_2))
@parameterized.expand([({'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, {'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, True), ({'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, False)])
def test_firewall_rule_eq(self, rule_1_dict, rule_2_dict, expected):
rule_1 = firewall_rule.FirewallRule(**rule_1_dict)
rule_2 = firewall_rule.FirewallRule(**rule_2_dict)
self.assertEqual(expected, (rule_1 == rule_2))
@parameterized.expand([({'firewall_rule_direction': 'INGRESS', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_direction': 'egress', 'firewall_rule_allowed': json.dumps(['*'])}, False), ({'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n2', 'firewall_rule_allowed': json.dumps(['*'])}, False), ({'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_source_tags': json.dumps(['t1', 't2']), 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_source_tags': json.dumps(['t3', 't2']), 'firewall_rule_allowed': json.dumps(['*'])}, False), ({'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_source_tags': json.dumps(['t1', 't2']), 'firewall_rule_target_tags': json.dumps(['t3', 't5']), 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_source_tags': json.dumps(['t1', 't2']), 'firewall_rule_target_tags': json.dumps(['t3', 't4']), 'firewall_rule_allowed': json.dumps(['*'])}, False), ({'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_source_tags': json.dumps(['t1', 't2']), 'firewall_rule_target_tags': json.dumps(['t3', 't4']), 'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_source_tags': json.dumps(['t1', 't2']), 'firewall_rule_target_tags': json.dumps(['t3', 't4']), 'firewall_rule_source_ranges': json.dumps(['10.0.0.0/24']), 'firewall_rule_allowed': json.dumps(['*'])}, False), ({'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_source_tags': json.dumps(['t1', 't2']), 'firewall_rule_target_tags': json.dumps(['t3', 't4']), 'firewall_rule_destination_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_source_tags': json.dumps(['t1', 't2']), 'firewall_rule_target_tags': json.dumps(['t3', 't4']), 'firewall_rule_destination_ranges': json.dumps(['10.0.0.0/24']), 'firewall_rule_allowed': json.dumps(['*'])}, False), ({'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, {'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['22']}])}, True), ({'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['10', '11', '12', '13']}])}, {'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps([{'IPProtocol': 'tcp', 'ports': ['10-13']}])}, True), ({'firewall_rule_source_ranges': json.dumps(['0.0.0.0/0']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, {'firewall_rule_source_ranges': json.dumps(['1.1.1.1']), 'firewall_rule_direction': 'INGRESS', 'firewall_rule_network': 'n1', 'firewall_rule_allowed': json.dumps(['*'])}, False)])
def test_firewall_rule_is_equivalent(self, rule_1_dict, rule_2_dict, expected):
rule_1 = firewall_rule.FirewallRule(**rule_1_dict)
rule_2 = firewall_rule.FirewallRule(**rule_2_dict)
self.assertEqual(expected, rule_1.is_equivalent(rule_2))
def test_load_firewall_rule(self):
expected_map = fake_firewall_rules.EXPECTED_FIREWALL_RULES_MAP
for loadable_firewall in fake_firewall_rules.EXPECTED_LOADABLE_FIREWALL_RULES:
rule = firewall_rule.FirewallRule(**loadable_firewall)
dict_rule = json.loads(rule.as_json())
expected_list = expected_map.get(rule.project_id)
expected = {}
for fw_rule in expected_list:
if (fw_rule['name'] == rule.name):
expected = fw_rule.copy()
for key in ['kind', 'id', 'creationTimestamp', 'description', 'selfLink']:
expected.pop(key)
unicode_expected = json.loads(json.dumps(expected))
unicode_expected['allowed'] = sorted(unicode_expected['allowed'], key=(lambda x: x['IPProtocol']))
dict_rule['allowed'] = sorted(dict_rule['allowed'], key=(lambda x: x['IPProtocol']))
self.maxDiff = None
self.assertDictEqual(unicode_expected, dict_rule) |
def create_mono_urmp(instrument_key, audio_files, target_dir, instruments_dict):
target_dir = (target_dir / instruments_dict[instrument_key])
if (not target_dir.exists()):
target_dir.mkdir()
cur_audio_files = [audio_file for audio_file in audio_files if (f'_{instrument_key}_' in audio_file.name)]
for audio_file in cur_audio_files:
copyfile(audio_file, (target_dir / audio_file.name)) |
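# Usage sketch (not from the original source): split URMP-style stems into
# per-instrument folders. The paths and the key-to-folder mapping below are
# illustrative, and the parent of the target directory is assumed to exist.
from pathlib import Path

audio_files = list(Path('urmp/stems').glob('*.wav'))
instruments_dict = {'vn': 'violin', 'fl': 'flute'}
for key in instruments_dict:
    create_mono_urmp(key, audio_files, Path('urmp/mono'), instruments_dict) |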
@click.command(short_help='Get account evaluation.')
@click.option('-a', '--account', metavar='ACCNO', help='Account number.')
@click.option('-d', '--include-delisted', is_flag=True, help='Include delisted.', default=True)
@click.option('-D', '--exclude-delisted', is_flag=True, help='Exclude delisted.')
@click.option('-e', '--for-each', is_flag=True, help='Show individual evaluation.', default=True)
@click.option('-E', '--as-summary', is_flag=True, help='Show summarized evaluation.')
@click.option('-p', '--port', metavar='PORT', help='Port number of grpc server (optional).')
_option()
def evaluation(account, include_delisted, exclude_delisted, for_each, as_summary, port):
if (account is None):
logger.info('Account not given. Using first account available.')
if exclude_delisted:
include_delisted = False
if as_summary:
for_each = False
lookup_type = '1'
elif for_each:
lookup_type = '2'
from koapy.backend.kiwoom_open_api_plus.core.KiwoomOpenApiPlusEntrypoint import KiwoomOpenApiPlusEntrypoint
with KiwoomOpenApiPlusEntrypoint(port=port) as context:
context.EnsureConnected()
if (account is None):
account = context.GetAccountList()[0]
(single, multi) = context.GetAccountEvaluationStatusAsSeriesAndDataFrame(account, include_delisted)
click.echo('[Account Evaluation Status] : [Summary]')  # English stand-in; the original caption was non-ASCII
click.echo(single.to_markdown(floatfmt='.2f'))
click.echo()
click.echo('[Account Evaluation Status] : [Per-Stock Details]')  # English stand-in
click.echo(multi.to_markdown())
click.echo()
(single, multi) = context.GetAccountEvaluationBalanceAsSeriesAndDataFrame(account, lookup_type)
click.echo('[Account Evaluation Balance] : [Summary]')  # English stand-in
click.echo(single.to_markdown(floatfmt='.2f'))
click.echo()
click.echo('[Account Evaluation Balance] : [Per-Stock Details]')  # English stand-in
click.echo(multi.to_markdown()) |
def fastq_parser(input_file):
with gzip.open(input_file, 'rt') as f:
while True:
name = f.readline().strip()[1:]
if (not name):
break
seq = f.readline().strip()
name2 = f.readline().strip()[1:]
qual = f.readline().strip()
(yield Fastq(name, seq, name2, qual)) |
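# Usage sketch (not from the original source): `Fastq` is assumed to be a
# simple record type such as this namedtuple; the file path is illustrative.
from collections import namedtuple

Fastq = namedtuple('Fastq', ['name', 'seq', 'name2', 'qual'])
for record in fastq_parser('reads.fastq.gz'):
    print(record.name, len(record.seq)) |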
class OFPGroupDescStats(StringifyMixin):
def __init__(self, type_=None, group_id=None, buckets=None, properties=None, length=None, bucket_array_len=None):
buckets = (buckets if buckets else [])
properties = (properties if properties else [])
super(OFPGroupDescStats, self).__init__()
self.length = length
self.type = type_
self.group_id = group_id
self.buckets = buckets
self.properties = properties
@classmethod
def parser(cls, buf, offset):
stats = cls()
(stats.length, stats.type, stats.group_id, stats.bucket_array_len) = struct.unpack_from(ofproto.OFP_GROUP_DESC_STATS_PACK_STR, buf, offset)
offset += ofproto.OFP_GROUP_DESC_STATS_SIZE
bucket_buf = buf[offset:(offset + stats.bucket_array_len)]
stats.buckets = []
while bucket_buf:
bucket = OFPBucket.parser(bucket_buf, 0)
stats.buckets.append(bucket)
bucket_buf = bucket_buf[bucket.len:]
offset += stats.bucket_array_len
rest = buf[offset:(offset + stats.length)]
while rest:
(p, rest) = OFPGroupProp.parse(rest)
stats.properties.append(p)
return stats |
class OptionSeriesErrorbarSonificationTracksMappingFrequency(Options):
@property
def mapFunction(self):
return self._config_get(None)
@mapFunction.setter
def mapFunction(self, value: Any):
self._config(value, js_type=False)
@property
def mapTo(self):
return self._config_get(None)
@mapTo.setter
def mapTo(self, text: str):
self._config(text, js_type=False)
@property
def max(self):
return self._config_get(None)
@max.setter
def max(self, num: float):
self._config(num, js_type=False)
@property
def min(self):
return self._config_get(None)
@min.setter
def min(self, num: float):
self._config(num, js_type=False)
@property
def within(self):
return self._config_get(None)
@within.setter
def within(self, value: Any):
self._config(value, js_type=False) |
@pytest.fixture()
def mongo_database():
connection_params = {'host': os.environ.get('MONGODB_HOST', 'localhost'), 'port': int(os.environ.get('MONGODB_PORT', 27017))}
if (pymongo.version_tuple < (3, 0)):
connection_params['safe'] = True
mongo = pymongo.MongoClient(**connection_params)
db = mongo.elasticapm_test
(yield db)
mongo.drop_database('elasticapm_test')
mongo.close() |
def validate_period_has_submissions(fiscal_year, fiscal_period):
if (not period_has_submissions(fiscal_year, fiscal_period)):
raise RuntimeError(f"Well congratulations. You've managed to choose a fiscal period with no submissions. Give 'select reporting_fiscal_year, reporting_fiscal_period, count(*) from submission_attributes group by reporting_fiscal_year, reporting_fiscal_period order by reporting_fiscal_year, reporting_fiscal_period;' a whirl and try again.") |
class FilterSchema(BaseModel):
name: str
display_name: str
model_unique_ids: List[Optional[str]] = []
def add_model_unique_id(self, model_unique_id: Optional[str]):
new_model_unique_ids = list({*self.model_unique_ids, model_unique_id})
self.model_unique_ids = new_model_unique_ids |
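# Usage sketch (not from the original source): add_model_unique_id de-duplicates
# through a set, so adding the same id twice stores it once. The id string below
# is illustrative.
f = FilterSchema(name='failed', display_name='Failed tests')
f.add_model_unique_id('model.jaffle.orders')
f.add_model_unique_id('model.jaffle.orders')
assert f.model_unique_ids == ['model.jaffle.orders'] |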
def test_dynamic_to_static_conversion() -> None:
array = np.random.rand(*NUMPY_SHAPE)
message1 = MyDynamicNumpyMessage(field1='hello', field2=array, field3=5)
message2 = MyNumpyMessage(__sample__=message1.__sample__, __original_message_type__=MyDynamicNumpyMessage)
assert (message2.field1 == 'hello')
assert (message2.field2 == array).all()
assert (message2.field3 == 5) |
class CssDivCircle(CssStyle.Style):
_attrs = {'border-radius': '50%', 'padding': '3%'}
def customize(self):
rgb = Colors.getHexToRgb(self.page.theme.greys[(- 1)])
rgb_color = Colors.getHexToRgb(self.page.theme.colors[(- 1)])
self.css({'box-shadow': ('0 0 %(size)spx rgba(%(r)s, %(g)s, %(b)s, %(opac)s)' % {'r': rgb[0], 'g': rgb[1], 'b': rgb[2], 'opac': 0.5, 'size': 5})})
self.hover.css({'box-shadow': ('0 0 %(size)spx rgba(%(r)s, %(g)s, %(b)s, %(opac)s)' % {'r': rgb_color[0], 'g': rgb_color[1], 'b': rgb_color[2], 'opac': 0.8, 'size': 5})}) |
class flow_mod(message):
subtypes = {}
version = 1
type = 14
def __init__(self, xid=None, match=None, cookie=None, _command=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, flags=None, actions=None):
if (xid != None):
self.xid = xid
else:
self.xid = None
if (match != None):
self.match = match
else:
self.match = ofp.match()
if (cookie != None):
self.cookie = cookie
else:
self.cookie = 0
if (_command != None):
self._command = _command
else:
self._command = 0
if (idle_timeout != None):
self.idle_timeout = idle_timeout
else:
self.idle_timeout = 0
if (hard_timeout != None):
self.hard_timeout = hard_timeout
else:
self.hard_timeout = 0
if (priority != None):
self.priority = priority
else:
self.priority = 0
if (buffer_id != None):
self.buffer_id = buffer_id
else:
self.buffer_id = 0
if (out_port != None):
self.out_port = out_port
else:
self.out_port = 0
if (flags != None):
self.flags = flags
else:
self.flags = 0
if (actions != None):
self.actions = actions
else:
self.actions = []
return
def pack(self):
packed = []
packed.append(struct.pack('!B', self.version))
packed.append(struct.pack('!B', self.type))
packed.append(struct.pack('!H', 0))
packed.append(struct.pack('!L', self.xid))
packed.append(self.match.pack())
packed.append(struct.pack('!Q', self.cookie))
packed.append(util.pack_fm_cmd(self._command))
packed.append(struct.pack('!H', self.idle_timeout))
packed.append(struct.pack('!H', self.hard_timeout))
packed.append(struct.pack('!H', self.priority))
packed.append(struct.pack('!L', self.buffer_id))
packed.append(util.pack_port_no(self.out_port))
packed.append(struct.pack('!H', self.flags))
packed.append(loxi.generic_util.pack_list(self.actions))
length = sum([len(x) for x in packed])
packed[2] = struct.pack('!H', length)
return ''.join(packed)
@staticmethod
def unpack(reader):
(subtype,) = reader.peek('!H', 56)
subclass = flow_mod.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = flow_mod()
_version = reader.read('!B')[0]
assert (_version == 1)
_type = reader.read('!B')[0]
assert (_type == 14)
_length = reader.read('!H')[0]
orig_reader = reader
reader = orig_reader.slice(_length, 4)
obj.xid = reader.read('!L')[0]
obj.match = ofp.match.unpack(reader)
obj.cookie = reader.read('!Q')[0]
obj._command = util.unpack_fm_cmd(reader)
obj.idle_timeout = reader.read('!H')[0]
obj.hard_timeout = reader.read('!H')[0]
obj.priority = reader.read('!H')[0]
obj.buffer_id = reader.read('!L')[0]
obj.out_port = util.unpack_port_no(reader)
obj.flags = reader.read('!H')[0]
obj.actions = loxi.generic_util.unpack_list(reader, ofp.action.action.unpack)
return obj
def __eq__(self, other):
if (type(self) != type(other)):
return False
if (self.xid != other.xid):
return False
if (self.match != other.match):
return False
if (self.cookie != other.cookie):
return False
if (self._command != other._command):
return False
if (self.idle_timeout != other.idle_timeout):
return False
if (self.hard_timeout != other.hard_timeout):
return False
if (self.priority != other.priority):
return False
if (self.buffer_id != other.buffer_id):
return False
if (self.out_port != other.out_port):
return False
if (self.flags != other.flags):
return False
if (self.actions != other.actions):
return False
return True
def pretty_print(self, q):
q.text('flow_mod {')
with q.group():
with q.indent(2):
q.breakable()
q.text('xid = ')
if (self.xid != None):
q.text(('%#x' % self.xid))
else:
q.text('None')
q.text(',')
q.breakable()
q.text('match = ')
q.pp(self.match)
q.text(',')
q.breakable()
q.text('cookie = ')
q.text(('%#x' % self.cookie))
q.text(',')
q.breakable()
q.text('idle_timeout = ')
q.text(('%#x' % self.idle_timeout))
q.text(',')
q.breakable()
q.text('hard_timeout = ')
q.text(('%#x' % self.hard_timeout))
q.text(',')
q.breakable()
q.text('priority = ')
q.text(('%#x' % self.priority))
q.text(',')
q.breakable()
q.text('buffer_id = ')
q.text(('%#x' % self.buffer_id))
q.text(',')
q.breakable()
q.text('out_port = ')
q.text(util.pretty_port(self.out_port))
q.text(',')
q.breakable()
q.text('flags = ')
value_name_map = {1: 'OFPFF_SEND_FLOW_REM', 2: 'OFPFF_CHECK_OVERLAP', 4: 'OFPFF_EMERG'}
q.text(util.pretty_flags(self.flags, value_name_map.values()))
q.text(',')
q.breakable()
q.text('actions = ')
q.pp(self.actions)
q.breakable()
q.text('}') |
@pytest.fixture()
def fsspec_test_folder(is_integration_test):
if is_integration_test:
empty_dataset = generic_upload_dataset_if_not_exists(client=FoundryRestClient(), name='fsspec_test_folder_v1', upload_folder=None, foundry_schema=None)
if empty_dataset[4]:
folder_setup(rid=empty_dataset[0])
(yield empty_dataset)
else:
(yield ('empty-rid', 'empty-path', None, 'empty-branch', False)) |
def test_param_message_event(elasticapm_client):
elasticapm_client.capture('Message', param_message={'message': 'test %s %d', 'params': ('x', 1)})
assert (len(elasticapm_client.events[ERROR]) == 1)
event = elasticapm_client.events[ERROR][0]
assert (event['log']['message'] == 'test x 1')
assert (event['log']['param_message'] == 'test %s %d') |
def test_instance_id():
with tempfile.TemporaryDirectory() as f:
os.environ['EFB_DATA_PATH'] = f
master_id = 'tests.mocks.master.MockMasterChannel#instance1'
slave_ids = ['tests.mocks.slave.MockSlaveChannel#instance1', 'tests.mocks.slave.MockSlaveChannel#instance2']
config = {'master_channel': master_id, 'slave_channels': slave_ids}
config = dump_and_load_config(config)
ehforwarderbot.__main__.init(config)
assert (coordinator.master.channel_id == master_id)
assert isinstance(coordinator.master, master.MockMasterChannel)
for i in slave_ids:
assert (i in coordinator.slaves)
assert isinstance(coordinator.slaves[i], slave.MockSlaveChannel) |
def upgrade():
op.create_table('users', sa.Column('id', GUID(), nullable=False), sa.Column('email', sa.String(length=256), nullable=False), sa.Column('username', sa.String(length=256), nullable=False), sa.Column('active', sa.Boolean(), nullable=False), sa.PrimaryKeyConstraint('id'))
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True) |
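# A minimal downgrade counterpart for the migration above, assuming the usual
# Alembic upgrade/downgrade pairing (not part of the original snippet).
def downgrade():
    op.drop_index(op.f('ix_users_username'), table_name='users')
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_table('users') |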
class Source(BaseObject):
def __init__(self, api=None, from_=None, rel=None, to=None, **kwargs):
self.api = api
self.from_ = from_
self.rel = rel
self.to = to
for (key, value) in kwargs.items():
setattr(self, key, value)
for key in self.to_dict():
if (getattr(self, key) is None):
try:
self._dirty_attributes.remove(key)
except KeyError:
continue |
def main(args=None):
args = parse_arguments().parse_args(args)
viewpointObj = Viewpoint()
fileList = []
chromosome_sizes = None
background_dict = None
fileHDF5Object = h5py.File(args.file, 'r')
fileType = fileHDF5Object.attrs['type']
if ((args.outputMode == 'geneName') and (args.outputModeName is None)):
log.error("Output mode is 'geneName'. Please specify a gene name via --outputModeName too!")
exit(1)
if (args.outputFileType == 'bigwig'):
if (fileType != 'interactions'):
log.error("Only file type 'interactions' supports bigwig. Exiting.")
exit(1)
if (args.range is None):
log.error("Bigwig files require the argument '--range upstream downstream'. Exiting.")
exit(1)
if (args.backgroundModelFile is not None):
if args.backgroundModelFile:
background_dict = viewpointObj.readBackgroundDataFile(args.backgroundModelFile, args.range, args.range[1], pMean=True)
else:
log.error('Please define a background file via --backgroundModelFile.')
exit(1)
if (args.chromosomeSizes is not None):
chromosome_sizes = OrderedDict()
with open(args.chromosomeSizes.name, 'r') as file:
file_ = True
while file_:
file_ = file.readline().strip()
if (file_ != ''):
line_split = file_.split('\t')
chromosome_sizes[line_split[0]] = int(line_split[1])
else:
log.error("Bigwig files require the argument '--chromosomeSizes'. Exiting.")
exit(1)
keys_file = list(fileHDF5Object.keys())
if ((fileType == 'interactions') or (fileType == 'significant')):
if (args.outputMode == 'all'):
for (i, sample) in enumerate(keys_file):
matrix_obj1 = fileHDF5Object[sample]
chromosomeList1 = sorted(list(matrix_obj1.keys()))
chromosomeList1.remove('genes')
for chromosome1 in chromosomeList1:
geneList1 = sorted(list(matrix_obj1[chromosome1].keys()))
for gene1 in geneList1:
fileList.append([[sample, chromosome1, gene1]])
else:
for (i, sample) in enumerate(keys_file):
matrix_obj1 = fileHDF5Object[sample]['genes']
chromosomeList1 = sorted(list(matrix_obj1.keys()))
gene_name = args.outputModeName
counter = 1
while (gene_name in chromosomeList1):
fileList.append([[sample, 'genes', gene_name]])
gene_name = ((args.outputModeName + '_') + str(counter))
counter += 1
elif (fileType == 'target'):
if (fileHDF5Object.attrs['combinationMode'] == 'dual'):
if (args.outputMode == 'all'):
for outer_matrix in keys_file:
inner_matrix_object = fileHDF5Object[outer_matrix]
keys_inner_matrices = list(inner_matrix_object.keys())
for inner_matrix in keys_inner_matrices:
inner_object = inner_matrix_object[inner_matrix]
gene_object = inner_object['genes']
keys_genes = list(gene_object.keys())
for gen in keys_genes:
fileList.append([outer_matrix, inner_matrix, 'genes', gen])
else:
for outer_matrix in keys_file:
inner_matrix_object = fileHDF5Object[outer_matrix]
keys_inner_matrices = list(inner_matrix_object.keys())
for inner_matrix in keys_inner_matrices:
inner_object = inner_matrix_object[inner_matrix]['genes']
keys_genes = list(inner_object.keys())
gene_name = args.outputModeName
counter = 1
while (gene_name in keys_genes):
fileList.append([outer_matrix, inner_matrix, 'genes', gene_name])
gene_name = ((args.outputModeName + '_') + str(counter))
counter += 1
elif (fileHDF5Object.attrs['combinationMode'] == 'single'):
if (args.outputMode == 'all'):
for outer_matrix in keys_file:
gene_object = fileHDF5Object[outer_matrix]['genes']
keys_genes = list(gene_object.keys())
for gen in keys_genes:
fileList.append([outer_matrix, 'genes', gen])
else:
for outer_matrix in keys_file:
keys_genes = list(fileHDF5Object[outer_matrix]['genes'].keys())
gene_name = args.outputModeName
counter = 1
while (gene_name in keys_genes):
fileList.append([outer_matrix, 'genes', gene_name])
gene_name = ((args.outputModeName + '_') + str(counter))
counter += 1
elif (fileType == 'aggregate'):
if (args.outputMode == 'all'):
for (i, combinationOfMatrix) in enumerate(keys_file):
keys_matrix_intern = list(fileHDF5Object[combinationOfMatrix].keys())
if (len(keys_matrix_intern) == 0):
continue
matrix1 = keys_matrix_intern[0]
matrix2 = keys_matrix_intern[1]
matrix_obj1 = fileHDF5Object[((combinationOfMatrix + '/') + matrix1)]
matrix_obj2 = fileHDF5Object[((combinationOfMatrix + '/') + matrix2)]
chromosomeList1 = sorted(list(matrix_obj1.keys()))
chromosomeList2 = sorted(list(matrix_obj2.keys()))
chromosomeList1.remove('genes')
chromosomeList2.remove('genes')
for (chromosome1, chromosome2) in zip(chromosomeList1, chromosomeList2):
geneList1 = sorted(list(matrix_obj1[chromosome1].keys()))
geneList2 = sorted(list(matrix_obj2[chromosome2].keys()))
for (gene1, gene2) in zip(geneList1, geneList2):
fileList.append([[combinationOfMatrix, matrix1, chromosome1, gene1], [combinationOfMatrix, matrix2, chromosome2, gene2]])
else:
for (i, combinationOfMatrix) in enumerate(keys_file):
keys_matrix_intern = list(fileHDF5Object[combinationOfMatrix].keys())
if (len(keys_matrix_intern) == 0):
continue
matrix1 = keys_matrix_intern[0]
matrix2 = keys_matrix_intern[1]
matrix_obj1 = fileHDF5Object[((combinationOfMatrix + '/') + matrix1)]['genes']
matrix_obj2 = fileHDF5Object[((combinationOfMatrix + '/') + matrix2)]['genes']
chromosomeList1 = sorted(list(matrix_obj1.keys()))
chromosomeList2 = sorted(list(matrix_obj2.keys()))
gene_name = args.outputModeName
counter = 1
while ((gene_name in chromosomeList1) and (gene_name in chromosomeList2)):
fileList.append([[combinationOfMatrix, matrix1, 'genes', gene_name], [combinationOfMatrix, matrix2, 'genes', gene_name]])
gene_name = ((args.outputModeName + '_') + str(counter))
counter += 1
elif (fileType == 'differential'):
if (args.outputMode == 'all'):
for outer_matrix in keys_file:
inner_matrix_object = fileHDF5Object[outer_matrix]
keys_inner_matrices = list(inner_matrix_object.keys())
for inner_matrix in keys_inner_matrices:
inner_object = inner_matrix_object[inner_matrix]
chromosomeList = sorted(list(inner_object.keys()))
chromosomeList.remove('genes')
for chromosome in chromosomeList:
geneList = sorted(list(inner_object[chromosome].keys()))
for gene in geneList:
fileList.append([outer_matrix, inner_matrix, chromosome, gene])
else:
for outer_matrix in keys_file:
inner_matrix_object = fileHDF5Object[outer_matrix]
keys_inner_matrices = list(inner_matrix_object.keys())
for inner_matrix in keys_inner_matrices:
inner_object = inner_matrix_object[inner_matrix]['genes']
chromosomeList = sorted(list(inner_object.keys()))
gene_name = args.outputModeName
counter = 1
while (gene_name in chromosomeList):
fileList.append([outer_matrix, inner_matrix, 'genes', gene_name])
gene_name = ((args.outputModeName + '_') + str(counter))
counter += 1
fileHDF5Object.close()
filesPerThread = (len(fileList) // args.threads)
all_data_collected = False
thread_data = ([None] * args.threads)
file_name_list = ([None] * args.threads)
queue = ([None] * args.threads)
process = ([None] * args.threads)
thread_done = ([False] * args.threads)
fail_flag = False
fail_message = ''
for i in range(args.threads):
if (i < (args.threads - 1)):
fileListPerThread = fileList[(i * filesPerThread):((i + 1) * filesPerThread)]
else:
fileListPerThread = fileList[(i * filesPerThread):]
queue[i] = Queue()
process[i] = Process(target=exportData, kwargs=dict(pFileList=fileListPerThread, pArgs=args, pViewpointObject=viewpointObj, pDecimalPlace=args.decimalPlaces, pChromosomeSizes=chromosome_sizes, pBackgroundData=background_dict, pFileType=fileType, pQueue=queue[i]))
process[i].start()
while (not all_data_collected):
for i in range(args.threads):
if ((queue[i] is not None) and (not queue[i].empty())):
return_content = queue[i].get()
if ('Fail:' in return_content):
fail_flag = True
fail_message = return_content[6:]
else:
(file_name_list[i], thread_data[i]) = return_content
queue[i] = None
process[i].join()
process[i].terminate()
process[i] = None
thread_done[i] = True
all_data_collected = True
for thread in thread_done:
if (not thread):
all_data_collected = False
time.sleep(1)
if fail_flag:
log.error(fail_message)
exit(1)
thread_data = [item for sublist in thread_data for item in sublist]
file_name_list = [item for sublist in file_name_list for item in sublist]
if (len(thread_data) == 0):
log.error('The file does not contain the requested data!')
exit(1)
if (args.outputFileType == 'txt'):
if (args.outputMode == 'geneName'):
basepath = os.path.dirname(args.outFileName)
for (i, file_content_string) in enumerate(thread_data):
with open(((basepath + '/') + file_name_list[i]), 'w') as file:
file.write(file_content_string)
else:
with tarfile.open(args.outFileName, 'w:gz') as tar:
if (args.oneTargetFile and (fileType == 'target')):
tar_info = tarfile.TarInfo(name='targets.tsv')
tar_info.mtime = time.time()
file_content_string_all = ''
for (i, file_content_string) in enumerate(thread_data):
file_content_string_all += file_content_string
file_content_string_all = file_content_string_all.encode('utf-8')
tar_info.size = len(file_content_string_all)
file = io.BytesIO(file_content_string_all)
tar.addfile(tarinfo=tar_info, fileobj=file)
else:
for (i, file_content_string) in enumerate(thread_data):
tar_info = tarfile.TarInfo(name=file_name_list[i])
tar_info.mtime = time.time()
file_content_string = file_content_string.encode('utf-8')
tar_info.size = len(file_content_string)
file = io.BytesIO(file_content_string)
tar.addfile(tarinfo=tar_info, fileobj=file)
elif (args.outputFileType == 'bigwig'):
if (args.outputMode == 'geneName'):
bigwig_folder = os.path.dirname(args.outFileName)
else:
bigwig_folder = mkdtemp(prefix='bigwig_folder')
for (i, file_content) in enumerate(thread_data):
for (j, file_list) in enumerate(file_content):
bw = pyBigWig.open(((bigwig_folder + '/') + file_name_list[i][j]), 'w')
bw.addHeader(file_list[0])
bw.addEntries(file_list[1], file_list[2], ends=file_list[3], values=file_list[4])
bw.close()
if (args.outputMode == 'all'):
if (not args.outFileName.endswith('.tar.gz')):
args.outFileName = (args.outFileName + '.tar.gz')
with tarfile.open(args.outFileName, 'w:gz') as tar_handle:
for (root, dirs, files) in os.walk(bigwig_folder):
for file in files:
tar_handle.add(os.path.join(root, file), arcname=file)
if os.path.exists(bigwig_folder):
try:
shutil.rmtree(bigwig_folder)
except OSError as e:
log.error(('Error: %s - %s.' % (e.filename, e.strerror))) |
def fortios_firewall_schedule(data, fos, check_mode):
fos.do_member_operation('firewall.schedule', 'onetime')
if data['firewall_schedule_onetime']:
resp = firewall_schedule_onetime(data, fos, check_mode)
else:
fos._module.fail_json(msg=('missing task body: %s' % 'firewall_schedule_onetime'))
if check_mode:
return resp
return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {}) |
class TrackTopic(MethodView):
decorators = [login_required, allows.requires(CanAccessForum(), on_fail=FlashAndRedirect(message=_('You are not allowed to access that forum'), level='warning', endpoint=(lambda *a, **k: current_category.url)))]
def post(self, topic_id, slug=None):
topic = Topic.query.filter_by(id=topic_id).first_or_404()
real(current_user).track_topic(topic)
real(current_user).save()
return redirect(topic.url) |
class Item():
def __init__(self, name, width, height, depth, weight):
self.name = name
self.width = width
self.height = height
self.depth = depth
self.weight = weight
self.rotation_type = 0
self.position = START_POSITION
self.number_of_decimals = DEFAULT_NUMBER_OF_DECIMALS
def format_numbers(self, number_of_decimals):
self.width = set_to_decimal(self.width, number_of_decimals)
self.height = set_to_decimal(self.height, number_of_decimals)
self.depth = set_to_decimal(self.depth, number_of_decimals)
self.weight = set_to_decimal(self.weight, number_of_decimals)
self.number_of_decimals = number_of_decimals
def string(self):
return ('%s(%sx%sx%s, weight: %s) pos(%s) rt(%s) vol(%s)' % (self.name, self.width, self.height, self.depth, self.weight, self.position, self.rotation_type, self.get_volume()))
def get_volume(self):
return set_to_decimal(((self.width * self.height) * self.depth), self.number_of_decimals)
def get_dimension(self):
if (self.rotation_type == RotationType.RT_WHD):
dimension = [self.width, self.height, self.depth]
elif (self.rotation_type == RotationType.RT_HWD):
dimension = [self.height, self.width, self.depth]
elif (self.rotation_type == RotationType.RT_HDW):
dimension = [self.height, self.depth, self.width]
elif (self.rotation_type == RotationType.RT_DHW):
dimension = [self.depth, self.height, self.width]
elif (self.rotation_type == RotationType.RT_DWH):
dimension = [self.depth, self.width, self.height]
elif (self.rotation_type == RotationType.RT_WDH):
dimension = [self.width, self.depth, self.height]
else:
dimension = []
return dimension |
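# Usage sketch (not from the original source): RotationType and the module
# constants used by Item (START_POSITION, DEFAULT_NUMBER_OF_DECIMALS,
# set_to_decimal) are assumed from the surrounding packing library.
item = Item('box-1', width=2, height=4, depth=3, weight=1)
item.format_numbers(2)
item.rotation_type = RotationType.RT_HWD
assert item.get_dimension() == [item.height, item.width, item.depth] |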
class UserCheckIn(db.Model):
id = db.Column(db.Integer, primary_key=True)
ticket_holder_id = db.Column(db.Integer, db.ForeignKey('ticket_holders.id', ondelete='CASCADE'))
ticket_holder = db.relationship('TicketHolder', backref='user_check_ins', foreign_keys=[ticket_holder_id])
session_id = db.Column(db.Integer, db.ForeignKey('sessions.id', ondelete='CASCADE'))
session = db.relationship('Session', backref='user_check_ins', foreign_keys=[session_id])
station_id = db.Column(db.Integer, db.ForeignKey('station.id', ondelete='CASCADE'))
station = db.relationship('Station', backref='user_check_ins', foreign_keys=[station_id])
track_name = db.Column(db.String, nullable=True)
session_name = db.Column(db.String, nullable=True)
speaker_name = db.Column(db.String, nullable=True)
check_in_out_at = db.Column(db.DateTime(timezone=True))
created_at = db.Column(db.DateTime(timezone=True), default=datetime.datetime.utcnow)  # pass the callable, not its result, so each row gets a fresh timestamp
updated_at = db.Column(db.DateTime(timezone=True))
is_deleted = db.Column(db.Boolean, default=False)
def __repr__(self):
return f'<User Check In {self.id}>' |
class AudioFolder(object):
def __init__(self, root, download=True, extension='wav', lib='librosa'):
self.root = os.path.expanduser(root)
self.data = []
self.audio_files = get_files(dir=self.root, extension=extension)
self.loader_function = getattr(loaders, lib)
def __getitem__(self, index):
return self.loader_function(self.audio_files[index])
def __len__(self):
return len(self.audio_files) |
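# Usage sketch (not from the original source): `get_files` and `loaders` are
# assumed helpers from the surrounding package; the directory is illustrative
# and 'librosa' selects loaders.librosa as the loading function.
dataset = AudioFolder('~/audio', extension='wav', lib='librosa')
print(len(dataset))
first_clip = dataset[0] |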
class Glyph():
def __init__(self):
self.name = None
self.width = None
self.height = None
self.unicodes = None
self.note = None
self.lib = None
self.image = None
self.guidelines = None
self.anchors = None
self.outline = []
def _writePointPenCommand(self, command, args, kwargs):
args = _listToString(args)
kwargs = _dictToString(kwargs)
if (args and kwargs):
return f'pointPen.{command}(*{args}, **{kwargs})'
elif len(args):
return f'pointPen.{command}(*{args})'
elif len(kwargs):
return f'pointPen.{command}(**{kwargs})'
else:
return ('pointPen.%s()' % command)
def beginPath(self, **kwargs):
self.outline.append(self._writePointPenCommand('beginPath', [], kwargs))
def endPath(self):
self.outline.append(self._writePointPenCommand('endPath', [], {}))
def addPoint(self, *args, **kwargs):
self.outline.append(self._writePointPenCommand('addPoint', args, kwargs))
def addComponent(self, *args, **kwargs):
self.outline.append(self._writePointPenCommand('addComponent', args, kwargs))
def drawPoints(self, pointPen):
if self.outline:
py = '\n'.join(self.outline)
exec(py, {'pointPen': pointPen})
def py(self):
text = []
if (self.name is not None):
text.append(('glyph.name = "%s"' % self.name))
if self.width:
text.append(('glyph.width = %r' % self.width))
if self.height:
text.append(('glyph.height = %r' % self.height))
if (self.unicodes is not None):
text.append(('glyph.unicodes = [%s]' % ', '.join([str(i) for i in self.unicodes])))
if (self.note is not None):
text.append(('glyph.note = "%s"' % self.note))
if (self.lib is not None):
text.append(('glyph.lib = %s' % _dictToString(self.lib)))
if (self.image is not None):
text.append(('glyph.image = %s' % _dictToString(self.image)))
if (self.guidelines is not None):
text.append(('glyph.guidelines = %s' % _listToString(self.guidelines)))
if (self.anchors is not None):
text.append(('glyph.anchors = %s' % _listToString(self.anchors)))
if self.outline:
text += self.outline
return '\n'.join(text) |
def spy(id: str, node, log_level: int=DEBUG, mapper: Callable=(lambda msg: msg)):
node_name = node.ns_name
color = node.color
effective_log_level = node.backend.log_level
def _spy(source):
def subscribe(observer, scheduler=None):
def on_next(value):
if ((node.log_level >= effective_log_level) and (log_level >= effective_log_level)):
print_info(node_name, color, id, trace_type='', value=str(mapper(value)), log_level=log_level)
observer.on_next(value)
return source.subscribe(on_next, observer.on_error, observer.on_completed, scheduler)
return rx.create(subscribe)
return _spy |
def generate_candidate_keywords(sentence_list, stop_word_pattern, minCharacters, maxWords):
phrase_list = []
for s in sentence_list:
tmp = re.sub(stop_word_pattern, '|', s.strip())
phrases = tmp.split('|')
for phrase in phrases:
phrase = phrase.strip().lower()
if ((phrase != '') and (len(phrase) >= minCharacters) and (len(phrase.split()) <= maxWords)):
phrase_list.append(phrase)
return phrase_list |
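# Usage sketch (not from the original source): a toy stop-word regex; RAKE-style
# pipelines build this pattern from a full stop-word list.
import re

stop_word_pattern = re.compile(r'\b(?:the|of|and|a|is)\b', re.IGNORECASE)
sentences = ['Compatibility of systems is the key criterion']
# expected: ['compatibility', 'systems', 'key criterion']
print(generate_candidate_keywords(sentences, stop_word_pattern, minCharacters=3, maxWords=3)) |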
def extractRealmOfChaos(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol or frag)) or ('preview' in item['title'].lower())):
return None
if ('Myriad of Shades' in item['tags']):
names = [tmp for tmp in item['tags'] if (tmp in ['Celest Ambrosia', 'Kiriko', 'Melanie Ambrosia', 'Shana Bonnet', 'Silvia', 'XCrossJ', 'Ghost'])]
postfix_out = ', '.join(names)
if postfix:
postfix_out += (' - ' + postfix)
return buildReleaseMessageWithType(item, 'Myriad of Shades', vol, chp, frag=frag, postfix=postfix_out, tl_type='oel')
return False |
class KiwoomOpenApiWBooleanReturnCodeError(KiwoomOpenApiWError):
OP_ERR_SUCCESS = 1
OP_ERR_FAILURE = 0
@classmethod
def check_code_or_raise(cls, code, message=None):
if (not code):
raise cls(message)
return code
@classmethod
def wrap_to_check_code_or_raise(cls, func, message=None):
@functools.wraps(func)
def wrapper(*args, **kwargs):
return cls.check_code_or_raise(func(*args, **kwargs), message)
return wrapper
@classmethod
def try_or_raise(cls, arg, message=None):
if isinstance(arg, (int, bool)):
return cls.check_code_or_raise(arg, message)
elif callable(arg):
return cls.wrap_to_check_code_or_raise(arg, message)
else:
raise TypeError(("Expected 'int', 'bool' or 'callable' but %s found" % type(arg)))
def __init__(self, code, message=None):
super().__init__(message)
self._code = code
self._message = message
def __str__(self):
if self._message:
return self._message
else:
return self.__repr__()
def __repr__(self):
return '{}({!r}, {!r})'.format(self.__class__.__name__, self._code, self._message)
@property
def code(self):
return self._code |
def test_inform_serialization():
msg = FipaMessage(message_id=1, dialogue_reference=(str(0), ''), target=0, performative=FipaMessage.Performative.INFORM, info={'foo': 'bar'})
msg.to = 'receiver'
envelope = Envelope(to=msg.to, sender='sender', message=msg)
envelope_bytes = envelope.encode()
actual_envelope = Envelope.decode(envelope_bytes)
expected_envelope = envelope
assert (expected_envelope.to == actual_envelope.to)
assert (expected_envelope.sender == actual_envelope.sender)
assert (expected_envelope.protocol_specification_id == actual_envelope.protocol_specification_id)
assert (expected_envelope.message != actual_envelope.message)
actual_msg = FipaMessage.serializer.decode(actual_envelope.message)
actual_msg.to = actual_envelope.to
actual_msg.sender = actual_envelope.sender
expected_msg = msg
assert (expected_msg == actual_msg) |
class MatchDict(dict):
def __init__(self, *args, **kwargs):
cb = kwargs.pop('add_callbacks', False)
self.update(*args, **kwargs)
self.__dict__ = self
if cb:
self._callbacks = {}
def __getattr__(self, name):
if (name not in self):
return None
def __setitem__(self, item, value):
try:
if (self[item] and (value != self[item])):
for cb in [x for x in self._callbacks[item] if (item in self._callbacks)]:
cb(value)
except KeyError:
pass
if isinstance(value, dict):
value = MatchDict(value)
super(MatchDict, self).__setitem__(item, value)
def add_callback(self, key, callback):
if (key in self._callbacks):
self._callbacks[key].append(callback)
else:
self._callbacks[key] = [callback]
def remove_callbacks(self, key):
self._callbacks.pop(key)
def remove_callback(self, key, callback):
if (key in self._callbacks):
while (callback in self._callbacks[key]):
self._callbacks[key].remove(callback)
def update(self, *args, **kwargs):
if args:
if (len(args) > 1):
raise TypeError(('update expected at most 1 arguments, got %d' % len(args)))
other = dict(args[0])
for key in other:
self[key] = other[key]
for key in kwargs:
self[key] = kwargs[key] |
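# Usage sketch (not from the original source): a callback fires when a truthy
# existing value is replaced by a different value; missing keys read as None
# via __getattr__.
scores = MatchDict({'home': 1}, add_callbacks=True)
scores.add_callback('home', lambda v: print('home is now', v))
scores['home'] = 2  # prints: home is now 2
print(scores.away) |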
def materialize_arg(arg: Dict[(str, Any)], device: str) -> Any:
def create_tensor(attr: Dict[(str, Any)]):
shape = attr['shape']
requires_grad = attr.get('requires_grad', True)
if (len(shape) > 0):
if (attr['dtype'] in pytorch_float_dtype_map):
return torch.rand(*shape, dtype=pytorch_dtype_map[attr['dtype']], requires_grad=requires_grad, device=torch.device(device))
elif (attr['dtype'] in pytorch_int_dtype_map):
return torch.randint((- 10), 10, tuple(shape), dtype=pytorch_dtype_map[attr['dtype']], requires_grad=requires_grad, device=torch.device(device))
elif (attr['dtype'] == 'bool'):
return (torch.rand(*shape, dtype=pytorch_dtype_map['float'], requires_grad=requires_grad, device=torch.device(device)) < 0.5)
else:
return torch.tensor(random.uniform((- 10.0), 10.0), dtype=pytorch_dtype_map[attr['dtype']], requires_grad=requires_grad, device=torch.device(device))
def create_float(attr: Dict[(str, Any)]):
if ('value' in attr):
return attr['value']
return random.uniform(attr['value_range'][0], attr['value_range'][1])
def create_int(attr: Dict[(str, Any)]):
if ('value' in attr):
return attr['value']
return random.randint(attr['value_range'][0], attr['value_range'][1])
def create_str(attr: Dict[(str, Any)]):
if ('value' in attr):
return attr['value']
return ''
def create_bool(attr: Dict[(str, Any)]):
return attr['value']
def create_none(attr: Dict[(str, Any)]):
return None
def create_device(attr: Dict[(str, Any)]):
return torch.device(attr['value'])
def create_genericlist(attr: List[Any]):
result = []
for item in attr['value']:
result.append(arg_factory[item['type']](item))
return result
def create_tuple(attr: List[Any]):
result = create_genericlist(attr)
return tuple(result)
arg_factory: Dict[(str, Callable)] = {'tensor': create_tensor, 'float': create_float, 'double': create_float, 'int': create_int, 'long': create_int, 'none': create_none, 'bool': create_bool, 'device': create_device, 'str': create_str, 'genericlist': create_genericlist, 'tuple': create_tuple}
return arg_factory[arg['type']](arg) |
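# Usage sketch (not from the original source): the spec schema mirrors what the
# factory functions above read ('type', 'shape', 'dtype', ...); the dtype maps
# are module-level lookups in the original.
spec = {'type': 'tensor', 'shape': [2, 3], 'dtype': 'float', 'requires_grad': False}
t = materialize_arg(spec, device='cpu')
print(t.shape) |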
def test_moving_sum_returns_correct_array_with_1d_data(data_1d):
moved_data = scared.signal_processing.moving_sum(data_1d, 10)
reference_data = []
for i in range(((data_1d.shape[(- 1)] - 10) + 1)):
reference_data.append(data_1d[i:(i + 10)].sum())
reference_data = np.array(reference_data)
assert np.array_equal(moved_data, reference_data) |
def savefig(func):
@functools.wraps(func)
def decorated(*args, **kwargs):
file_name = kwargs.pop('file_name', None)
file_path = kwargs.pop('file_path', None)
save_fig = kwargs.pop('save_fig', bool(file_name))
save_kwargs = kwargs.pop('save_kwargs', {})
save_kwargs.setdefault('bbox_inches', 'tight')
close = kwargs.pop('close', None)
func(*args, **kwargs)
if save_fig:
save_figure(file_name, file_path, close, **save_kwargs)
return decorated |
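# Usage sketch (not from the original source): `save_figure` is the helper the
# decorator delegates to; matplotlib here is only for illustration.
import matplotlib.pyplot as plt

@savefig
def plot_hist(values):
    plt.hist(values)

plot_hist([1, 2, 2, 3], file_name='hist.png', close=True) |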
def extractLectranslationsWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
class NodeType(HasPrivateTraits):
DOCUMENT = ImageResource('document')
CLOSED_FOLDER = ImageResource('closed_folder')
OPEN_FOLDER = ImageResource('open_folder')
node_manager = Instance('pyface.tree.node_manager.NodeManager')
image = Image(DOCUMENT)
closed_image = Image(CLOSED_FOLDER)
open_image = Image(OPEN_FOLDER)
actions = Any
default_action = Instance(Action)
new_actions = Any
def is_type_for(self, node):
raise NotImplementedError()
def allows_children(self, node):
return False
def get_actions(self, node):
return self.actions
def get_context_menu(self, node):
sat = Group(id='SystemActionsTop')
nsa = Group(id='NodeSpecificActions')
sab = Group(id='SystemActionsBottom')
new_actions = self.get_new_actions(node)
if ((new_actions is not None) and (len(new_actions) > 0)):
sat.append(MenuManager(*new_actions, name='New'))
actions = self.get_actions(node)
if ((actions is not None) and (len(actions) > 0)):
for item in actions:
nsa.append(item)
system_actions = self.node_manager.system_actions
if (len(system_actions) > 0):
for item in system_actions:
sab.append(item)
context_menu = MenuManager(sat, nsa, sab)
context_menu.dump()
return context_menu
def get_copy_value(self, node):
return node
def get_default_action(self, node):
return self.default_action
def get_new_actions(self, node):
return self.new_actions
def get_paste_value(self, node):
return node
def get_monitor(self, node):
return None
def has_children(self, node):
return False
def get_children(self, node):
raise NotImplementedError()
def get_drag_value(self, node):
return node
def can_drop(self, node, data):
return False
def drop(self, obj, data):
raise NotImplementedError()
def get_image(self, node, selected, expanded):
if self.allows_children(node):
if expanded:
order = ['open_image', 'closed_image', 'image']
default = self.OPEN_FOLDER
else:
order = ['closed_image', 'open_image', 'image']
default = self.CLOSED_FOLDER
else:
order = ['image', 'open_image', 'closed_image']
default = self.DOCUMENT
for name in order:
image = getattr(self, name)
if (image is not None):
break
else:
image = default
return image
def get_selection_value(self, node):
return node
def get_text(self, node):
return str(node)
def can_set_text(self, node, text):
return (len(text.strip()) > 0)
def set_text(self, node, text):
pass
def is_collapsible(self, node):
return True
def is_draggable(self, node):
return True
def can_rename(self, node):
return False
def is_editable(self, node):
return self.can_rename(node)
def is_expandable(self, node):
return True |
def extractCosysphereWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('Into the World of Medicine', 'Into the World of Medicine', 'translated'), ('MGSSGW', 'Major General Spoils his Soul-guiding Wife', 'translated'), ('Major General Spoils his Soul-guiding Wife', 'Major General Spoils his Soul-guiding Wife', 'translated')]
for (tagname, name, tl_type) in tagmap:
if (tagname in item['tags']):
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False |
def angular(args):
project_path = (args.path or os.getcwd())
sys.path.append(project_path)
report_path = utils.get_report_path(project_path, raise_error=False)
sys.path.append(report_path)
ui_setting_path = os.path.join(report_path, '..', 'ui_settings.py')
(auto_route, install_modules, view_folder, angular_app_path) = (False, False, 'views', None)
if os.path.exists(ui_setting_path):
settings = __import__('ui_settings')
install_modules = settings.INSTALL_MODULES
angular_app_path = settings.ANGULAR_APP_PATH
auto_route = settings.ANGULAR_AUTO_ROUTE
view_folder = settings.ANGULAR_VIEWS_PATH
mod = __import__(args.name, fromlist=['object'])
page = utils.get_page(mod)
app = page.outs.publish(server='angular', app_path=angular_app_path, module='MyModule', selector='mymodule', target_folder=view_folder, auto_route=auto_route)
if install_modules:
(server_path, app_name) = os.path.split(angular_app_path)
app._app_path = server_path
app.cli(app_name).npm(page.imports().requirements) |
@pytest.mark.django_db
def test_default(client, naics_test_data):
resp = client.get('/api/v2/references/naics/')
assert (resp.status_code == 200)
assert (len(resp.data['results']) == 3)
expected_data = [{'naics': '11', 'naics_description': 'Agriculture, Forestry, Fishing and Hunting', 'count': 3}, {'naics': '21', 'naics_description': 'Mining, Quarrying, and Oil and Gas Extraction', 'count': 0}, {'naics': '22', 'naics_description': 'Utilities', 'count': 0}]
assert (resp.data['results'] == expected_data) |
class TestGetDefaultStorageConfigs():
@pytest.fixture(scope='function')
def url(self) -> str:
return (V1_URL_PREFIX + STORAGE_DEFAULT)
def test_get_default_configs_not_authenticated(self, api_client: TestClient, url) -> None:
response = api_client.get(url)
assert (401 == response.status_code)
def test_get_default_configs_wrong_scope(self, api_client: TestClient, url, generate_auth_header) -> None:
auth_header = generate_auth_header([STORAGE_DELETE])
response = api_client.get(url, headers=auth_header)
assert (403 == response.status_code)
def test_get_default_configs(self, db, api_client: TestClient, url, generate_auth_header, storage_config_default: StorageConfig):
auth_header = generate_auth_header([STORAGE_READ])
response = api_client.get(url, headers=auth_header)
assert (200 == response.status_code)
expected_response = {'items': [{'name': storage_config_default.name, 'type': storage_config_default.type.value, 'details': {'auth_method': storage_config_default.details['auth_method'], 'naming': storage_config_default.details['naming'], 'bucket': 'test_bucket'}, 'key': storage_config_default.key, 'format': storage_config_default.format.value, 'is_default': True}], 'page': 1, 'pages': 1, 'size': PAGE_SIZE, 'total': 1}
response_body = json.loads(response.text)
assert (expected_response == response_body) |
def get_args():
parser = argparse.ArgumentParser(description='Debin to hack binaries. This script takes a stripped binary as input and outputs a binary with predicted debug information.')
parser.add_argument('--binary_with_symtab', dest='binary_with_symtab', type=str, default='', required=True, help='path of the binary (with symbol table, stripped by "strip -g") you want to analyze.')
parser.add_argument('--binary_without_symtab', dest='binary_without_symtab', type=str, default='', required=True, help='path of the binary (without symbol table, stripped by "strip -s") you want to analyze.')
parser.add_argument('--debug_info', dest='debug_info', type=str, default='', required=True, help='Path of the debugging info.')
parser.add_argument('--output', dest='output', type=str, default='', required=True, help='path of output binary.')
parser.add_argument('--bap', dest='bap', type=str, default='', help='path of cached BAP-IR file.')
parser.add_argument('--elf_modifier', dest='elf_modifier', type=str, default='', required=True, help='path of the library for modifying ELF binaries.')
parser.add_argument('-two_pass', dest='two_pass', action='store_true', default=False, help='whether to use two passes (variable classification and structured prediction). Setting it to false will invoke only structured prediction.')
parser.add_argument('--fp_model', dest='fp_model', type=str, default='', help='Path of the models for the first pass (variable classification).')
parser.add_argument('--n2p_url', dest='n2p_url', type=str, default='', required=True, help='URL of n2p server.')
parser.add_argument('--stat', dest='stat', type=str, default=None, help='Path of output statistics file.')
args = parser.parse_args()
return args |
def get_safety_checker(enable_azure_content_safety, enable_sensitive_topics, enable_salesforce_content_safety, enable_llamaguard_content_safety):
safety_checker = []
if enable_azure_content_safety:
safety_checker.append(AzureSaftyChecker())
if enable_sensitive_topics:
safety_checker.append(AuditNLGSensitiveTopics())
if enable_salesforce_content_safety:
safety_checker.append(SalesforceSafetyChecker())
if enable_llamaguard_content_safety:
safety_checker.append(LlamaGuardSafetyChecker())
return safety_checker |
def _create_js_class(PyClass, JSClass):
mc = MetaCollector(PyClass)
cname = PyClass.__name__
jscode = [mc.py2js(JSClass, cname)]
jscode[0] = jscode[0].replace('}\n', ('}\nvar $%s = %s.prototype;\n' % (cname, cname)), 1).replace(('%s.prototype.' % cname), ('$%s.' % cname))
for (name, val) in sorted(PyClass.__dict__.items()):
nameok = ((name in OK_MAGICS) or (not name.startswith('__')))
if (nameok and (not hasattr(JSClass, name))):
if callable(val):
jscode.append(mc.py2js(val, ((('$' + cname) + '.') + name)))
elif (name in OK_MAGICS):
jscode.append(((((('$' + cname) + '.') + name) + ' = ') + json.dumps(val)))
jscode = '\n'.join(jscode)
if (PyClass is Component):
code = '\n'
for (name, val) in sorted(Reaction.__dict__.items()):
if ((not name.startswith('__')) and callable(val)):
code += (mc.py2js(val, ('reaction.' + name), indent=1)[4:] + '\n')
jscode = jscode.replace('REACTION_METHODS_HOOK', code)
if (PyClass is Loop):
jscode = _clean_code_of_thread_stuff(jscode)
jscode = jscode.replace('new Dict()', '{}').replace('new Dict(', '_pyfunc_dict(')
mc.meta['std_functions'].add('dict')
return mc.attach_meta(jscode) |
class ExceptionHandlers():
@staticmethod
def functionality_not_configured_handler(request: Request, exc: FunctionalityNotConfigured) -> JSONResponse:
return JSONResponse(status_code=HTTP_500_INTERNAL_SERVER_ERROR, content={'message': str(exc)})
@classmethod
def get_handlers(cls) -> List[Callable[([Request, FunctionalityNotConfigured], JSONResponse)]]:
return [ExceptionHandlers.functionality_not_configured_handler] |
class OptionPlotoptionsBellcurveSonificationDefaultinstrumentoptionsActivewhen(Options):
@property
def crossingDown(self):
return self._config_get(None)
@crossingDown.setter
def crossingDown(self, num: float):
self._config(num, js_type=False)
@property
def crossingUp(self):
return self._config_get(None)
@crossingUp.setter
def crossingUp(self, num: float):
self._config(num, js_type=False)
@property
def max(self):
return self._config_get(None)
@max.setter
def max(self, num: float):
self._config(num, js_type=False)
@property
def min(self):
return self._config_get(None)
@min.setter
def min(self, num: float):
self._config(num, js_type=False)
@property
def prop(self):
return self._config_get(None)
@prop.setter
def prop(self, text: str):
self._config(text, js_type=False) |
class OptionSeriesPyramid3dSonificationContexttracksMappingVolume(Options):
@property
def mapFunction(self):
return self._config_get(None)
@mapFunction.setter
def mapFunction(self, value: Any):
self._config(value, js_type=False)
@property
def mapTo(self):
return self._config_get(None)
@mapTo.setter
def mapTo(self, text: str):
self._config(text, js_type=False)
@property
def max(self):
return self._config_get(None)
@max.setter
def max(self, num: float):
self._config(num, js_type=False)
@property
def min(self):
return self._config_get(None)
@min.setter
def min(self, num: float):
self._config(num, js_type=False)
@property
def within(self):
return self._config_get(None)
@within.setter
def within(self, value: Any):
self._config(value, js_type=False) |
@OFPPortDescProp.register_type(ofproto.OFPPDPT_OPTICAL)
class OFPPortDescPropOptical(OFPPortDescProp):
def __init__(self, type_=None, length=None, supported=None, tx_min_freq_lmda=None, tx_max_freq_lmda=None, tx_grid_freq_lmda=None, rx_min_freq_lmda=None, rx_max_freq_lmda=None, rx_grid_freq_lmda=None, tx_pwr_min=None, tx_pwr_max=None):
self.type = type_
self.length = length
self.supported = supported
self.tx_min_freq_lmda = tx_min_freq_lmda
self.tx_max_freq_lmda = tx_max_freq_lmda
self.tx_grid_freq_lmda = tx_grid_freq_lmda
self.rx_min_freq_lmda = rx_min_freq_lmda
self.rx_max_freq_lmda = rx_max_freq_lmda
self.rx_grid_freq_lmda = rx_grid_freq_lmda
self.tx_pwr_min = tx_pwr_min
self.tx_pwr_max = tx_pwr_max
@classmethod
def parser(cls, buf):
optical = cls()
(optical.type, optical.length, optical.supported, optical.tx_min_freq_lmda, optical.tx_max_freq_lmda, optical.tx_grid_freq_lmda, optical.rx_min_freq_lmda, optical.rx_max_freq_lmda, optical.rx_grid_freq_lmda, optical.tx_pwr_min, optical.tx_pwr_max) = struct.unpack_from(ofproto.OFP_PORT_DESC_PROP_OPTICAL_PACK_STR, buf, 0)
return optical |
@pytest.mark.gui()
@pytest.mark.parametrize('testcase', [t for t in testcases if (not t.skip)])
def test_normcap_ocr_testcases(monkeypatch, qtbot, testcase, run_normcap, select_region, test_signal):
monkeypatch.setattr(screengrab, 'capture', (lambda : [testcase.screenshot]))
monkeypatch.setattr(sys, 'exit', test_signal.on_event.emit)
tray = run_normcap(extra_cli_args=['--language', 'eng'])
with qtbot.waitSignal(test_signal.on_event) as blocker:
select_region(on=tray.windows[0], pos=testcase.coords)
assert (blocker.args == [0])
capture = tray.capture
assert capture
assert (capture.ocr_magic in testcase.expected_ocr_magics), (f'testcase.image_path.name={testcase.image_path.name!r}', f'capture.ocr_text={capture.ocr_text!r}', f'testcase.expected_ocr_text={testcase.expected_ocr_text!r}')
similarity = SequenceMatcher(None, capture.ocr_text, testcase.expected_ocr_text).ratio()
assert (similarity >= testcase.expected_similarity), (f'testcase.image_path.name={testcase.image_path.name!r}', f'capture.ocr_text={capture.ocr_text!r}', f'testcase.expected_ocr_text={testcase.expected_ocr_text!r}') |
@_FrrZebraMessageBody.register_type(FRR_ZEBRA_INTERFACE_VRF_UPDATE)
class ZebraInterfaceVrfUpdate(_ZebraMessageBody):
_HEADER_FMT = '!IH'
def __init__(self, ifindex, vrf_id):
super(ZebraInterfaceVrfUpdate, self).__init__()
self.ifindex = ifindex
self.vrf_id = vrf_id
@classmethod
def parse(cls, buf, version=_DEFAULT_FRR_VERSION):
(ifindex, vrf_id) = struct.unpack_from(cls._HEADER_FMT, buf)
return cls(ifindex, vrf_id)
def serialize(self, version=_DEFAULT_FRR_VERSION):
return struct.pack(self._HEADER_FMT, self.ifindex, self.vrf_id) |
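A round-trip sketch for the message body above, relying only on the '!IH' layout (4-byte ifindex, 2-byte vrf_id):

body = ZebraInterfaceVrfUpdate(ifindex=3, vrf_id=10)
buf = body.serialize()  # 6 bytes: big-endian unsigned int + unsigned short
parsed = ZebraInterfaceVrfUpdate.parse(buf)
assert (parsed.ifindex, parsed.vrf_id) == (3, 10)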
def add_subtitles(frame: Arr, display_text: str, full_text: str, speaker: int, location: str, row: int, border_h: int, picture_h: int, font_family: str='dejavu'):
frame_w = frame.shape[1]
text_h = int((0.33 * border_h))
offset = int((0.05 * border_h))
font_path = FONTS[font_family]
font = ImageFont.truetype(font_path, text_h)
text_w = font.getlength(full_text)
text_w = min(text_w, frame_w)
if (location == 'top'):
y = ((offset + (text_h * row)) + (offset * row))
elif (location == 'bottom'):
y = ((((border_h + picture_h) + offset) + (text_h * row)) + (offset * row))
else:
raise ValueError(location)
color = COLORS_RGB[speaker]
x = max(10, int(((frame_w // 2) - (text_w // 2))))
pos = (x, y)
img = Image.fromarray(frame)
draw = ImageDraw.Draw(img)
draw.text(pos, display_text, color, font=font)
return np.array(img) |
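A usage sketch on a synthetic frame; FONTS and COLORS_RGB are module globals of the surrounding code, so this only runs in that context, and the sizes here are illustrative.

import numpy as np

border_h, picture_h, frame_w = 60, 360, 640
# black frame with a subtitle border above and below the picture area
frame = np.zeros((2 * border_h + picture_h, frame_w, 3), dtype=np.uint8)
frame = add_subtitles(frame, display_text='Hello', full_text='Hello world',
                      speaker=0, location='top', row=0,
                      border_h=border_h, picture_h=picture_h)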
class TestModelEval(unittest.TestCase):
def setUp(self) -> None:
return
def test_model_graded(self):
kwargs = TEST_DATA['ModelGraded']
model_grader = ModelGraded()
result = model_grader.check(**kwargs)
grade = [r[0] for r in result]
assert (sum(grade) == 4), 'Expected exactly 4 of 5 grades to be correct.'
return
def test_similar_generation(self):
kwargs = TEST_DATA['SimilarGeneration']
sent_xfmer = SentenceTransformer('sentence-transformers/paraphrase-mpnet-base-v2')
similar_generation = SimilarGeneration(similarity_model=sent_xfmer, similarity_threshold=0.95)
result = similar_generation.check(**kwargs)
grade = [r[0] for r in result]
assert (sum(grade) == 1), 'Expected exactly 1 of 2 results to be correct.'
return
def test_valid_url(self):
kwargs = TEST_DATA['ValidURL']
url_check = ValidURL()
result = url_check.check(**kwargs)
grade = [r[0] for r in result]
assert (sum(grade) == 1), 'Expected exactly 1 of 2 results to be invalid.'
return
def test_toxicity(self):
kwargs = TEST_DATA['Toxicity']
toxicity_check = Toxicity(threshold=0.6)
result = toxicity_check.check(**kwargs)
grade = [r[0] for r in result]
assert (sum(grade) == 1), 'Expected exactly 1 of 2 results to be toxic.'
return |
class MOPAC(Calculator):
'''Semiempirical calculator interface for MOPAC.'''
conf_key = 'mopac'
MULT_STRS = {1: 'SINGLET', 2: 'DOUBLET', 3: 'TRIPLET', 4: 'QUARTET', 5: 'QUINTET', 6: 'SEXTET', 7: 'SEPTET', 8: 'OCTET'}
CALC_TYPES = {'energy': '1SCF', 'gradient': '1SCF GRADIENTS', 'hessian': 'DFORCE FORCE LET'}
METHODS = [m.lower() for m in 'AM1 PM3 PM6 PM6-DH2 PM6-D3 PM6-DH+ PM6-DH2 PM6-DH2X PM6-D3H4 PM6-D3H4X PM7 PM7-TS'.split()]
def __init__(self, method='PM7', **kwargs):
super().__init__(**kwargs)
self.method = method
assert (self.method.lower() in self.METHODS), f'Invalid method={self.method}! Supported methods are ({self.METHODS})'
self.uhf = ('UHF' if (self.mult != 1) else '')
_ = 'mopac'
self.inp_fn = f'{_}.mop'
self.out_fn = f'{_}.out'
self.aux_fn = f'{_}.aux'
self.to_keep = ('mop', 'out', 'arc', 'aux')
self.parser_funcs = {'energy': self.parse_energy, 'grad': self.parse_grad, 'hessian': self.parse_hessian}
self.base_cmd = self.get_cmd()
self.inp = textwrap.dedent('\n NOSYM {method} {mult} CHARGE={charge} {calc_type} {uhf} THREADS={pal} AUX(6,PRECISION=9) NOREOR\n\n \n {coord_str}\n ').strip()
self.log(f"Created MOPAC calculator using the '{self.method}' method.")
def prepare_coords(self, atoms, coords, opt=False):
coords = (coords.reshape((- 1), 3) * BOHR2ANG)
of = (1 if opt else 0)
coord_str = '\n'.join([f'{a} {c[0]: 10.08f} {of} {c[1]: 10.08f} {of} {c[2]: 10.08f} {of}' for (a, c) in zip(atoms, coords)])
return coord_str
def prepare_input(self, atoms, coords, calc_type, opt=False):
coord_str = self.prepare_coords(atoms, coords, opt)
inp = self.inp.format(method=self.method, charge=self.charge, mult=self.MULT_STRS[self.mult], uhf=self.uhf, calc_type=self.CALC_TYPES[calc_type], coord_str=coord_str, pal=self.pal)
return inp
def get_energy(self, atoms, coords, **prepare_kwargs):
calc_type = 'energy'
inp = self.prepare_input(atoms, coords, calc_type)
results = self.run(inp, calc='energy')
return results
def get_forces(self, atoms, coords, **prepare_kwargs):
calc_type = 'gradient'
inp = self.prepare_input(atoms, coords, calc_type, opt=True)
results = self.run(inp, calc='grad')
return results
def get_hessian(self, atoms, coords, **prepare_kwargs):
calc_type = 'hessian'
inp = self.prepare_input(atoms, coords, calc_type, opt=True)
results = self.run(inp, calc='hessian')
return results
def run_calculation(self, atoms, coords, **prepare_kwargs):
return self.get_energy(atoms, coords, **prepare_kwargs)
def read_aux(self, path):
with open((path / self.aux_fn)) as handle:
text = handle.read()
return text
def parse_energy(self, path):
return self.parse_energy_from_aux(self.read_aux(path))
@staticmethod
@file_or_str('.aux', method=False)
def parse_energy_from_aux(text):
energy_re = 'HEAT_OF_FORMATION:KCAL/MOL=([\\d\\-D+\\.]+)'
mobj = re.search(energy_re, text)
energy = (float(mobj[1].replace('D', 'E')) / AU2KCALPERMOL)
result = {'energy': energy}
return result
def parse_grad(self, path):
text = self.read_aux(path)
grad_re = 'GRADIENTS:KCAL/MOL/ANGSTROM\\[\\d+]=\\s+(.+)\\s+OVERLAP_MATRIX'
mobj = re.search(grad_re, text, re.DOTALL)
gradients = np.array(mobj[1].split(), dtype=float)
gradients = ((gradients / AU2KCALPERMOL) / BOHR2ANG)
forces = (- gradients)
result = {'forces': forces}
result.update(self.parse_energy(path))
return result
def parse_hessian(self, path):
return self.parse_hessian_from_aux(self.read_aux(path))
@staticmethod
@file_or_str('.aux', method=False)
def parse_hessian_from_aux(text):
mass_re = re.compile('ISOTOPIC_MASSES\\[(\\d+)\\]=\\s*(.+?)ROTAT_CONSTS', re.DOTALL)
mass_mobj = mass_re.search(text)
masses = np.array(mass_mobj[2].strip().split(), dtype=float)
M = np.diag(np.sqrt(np.repeat(masses, 3)))
coord_num = (masses.size * 3)
hess_re = ' # Lower half triangle only\\s+([\\s\\.\\-\\d]+)\\s+NORMAL_MODE'
tril_hess = re.search(hess_re, text)[1].strip().split()
tril_hess = np.array(tril_hess, dtype=float)
assert (tril_hess.size == sum(range((coord_num + 1))))
hessian_m = np.zeros((coord_num, coord_num))
tril_indices = np.tril_indices(coord_num)
hessian_m[tril_indices] = tril_hess
triu_indices = np.triu_indices(coord_num, k=1)
hessian_m[triu_indices] = hessian_m.T[triu_indices]
hessian = ((M @ hessian_m) @ M)
hessian *= 0.06423  # 1 mdyn/A ~= 0.06423 Hartree/Bohr^2; convert to atomic units
energy = MOPAC.parse_energy_from_aux(text)['energy']
result = {'energy': energy, 'hessian': hessian}
return result
def __str__(self):
return f'MOPAC({self.name})' |
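The lower-triangle reconstruction used in parse_hessian_from_aux, isolated as a minimal numpy sketch:

import numpy as np

n = 3
tril = np.arange(1.0, n * (n + 1) / 2 + 1)  # 6 packed lower-triangle values
h = np.zeros((n, n))
h[np.tril_indices(n)] = tril                # fill the lower triangle
iu = np.triu_indices(n, k=1)
h[iu] = h.T[iu]                             # mirror it upward
assert np.allclose(h, h.T)                  # h is now symmetric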
def create_or_update_parent_user() -> None:
with sync_session() as db_session:
if ((not CONFIG.security.parent_server_username) and (not CONFIG.security.parent_server_password)):
return
if ((CONFIG.security.parent_server_username and (not CONFIG.security.parent_server_password)) or (CONFIG.security.parent_server_password and (not CONFIG.security.parent_server_username))):
log.error('Both a parent_server_user and parent_server_password must be set to create a parent server user')
raise ValueError('Both a parent_server_user and parent_server_password must be set to create a parent server user')
user = (FidesUser.get_by(db_session, field='username', value=CONFIG.security.parent_server_username) if CONFIG.security.parent_server_username else None)
if (user and CONFIG.security.parent_server_password):
if (not user.credentials_valid(CONFIG.security.parent_server_password)):
log.debug('Updating Fides parent user credentials')
user.update_password(db_session, CONFIG.security.parent_server_password)
return
return
log.debug('Creating Fides parent user credentials')
user = FidesUser.create(db=db_session, data={'username': CONFIG.security.parent_server_username, 'password': CONFIG.security.parent_server_password})
FidesUserPermissions.create(db=db_session, data={'user_id': user.id, 'roles': [OWNER]}) |
class TaskModelFitting(sl.Task):
def __init__(self, *args, **kwargs):
super(TaskModelFitting, self).__init__(*args, **kwargs)
self.in_first = None
self.in_second = None
def actual_task_code(self, task_model_first, task_df_first):
return task_model_first
def out_first(self):
return sl.TargetInfo(self, ((_paths_folder + self.task_id) + '_output_path_1.txt'))
def run(self):
time.sleep(_sleep_duration)
task_model_first = read_model(self.in_first().open().read())
task_df_first = read_result(self.in_second().open().read())
model_first = self.actual_task_code(task_model_first, task_df_first)
path1 = write_model(((_models_folder + self.task_id) + '_output_model_1.model'), model_first)
write_output_path(((_paths_folder + self.task_id) + '_output_path_1.txt'), path1) |
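A hedged sciluigi wiring sketch for the task above; UpstreamModelTask and UpstreamResultTask are hypothetical tasks whose out_* methods return TargetInfos feeding in_first/in_second.

class ModelFittingWorkflow(sl.WorkflowTask):
    def workflow(self):
        first = self.new_task('first', UpstreamModelTask)     # hypothetical
        second = self.new_task('second', UpstreamResultTask)  # hypothetical
        fitting = self.new_task('fitting', TaskModelFitting)
        # connect upstream targets to the fitting task's inputs
        fitting.in_first = first.out_model
        fitting.in_second = second.out_result
        return fitting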
def amazon_financial_parser_formatter(pages: List[dict]) -> FinancialParserDataClass:
extracted_data = []
for page in pages:
if (page.get('JobStatus') == 'FAILED'):
raise ProviderException(page.get('StatusMessage', 'Amazon returned a job status: FAILED'))
for invoice in (page.get('ExpenseDocuments') or []):
summary = {}
currencies = {}
invoice_index = invoice['ExpenseIndex']
for field in invoice['SummaryFields']:
field_type = field['Type']['Text']
summary[field_type] = field['ValueDetection']['Text']
field_currency = field.get('Currency', {}).get('Code')
if (field_currency is not None):
if (field_currency not in currencies):
currencies[field_currency] = 1
else:
currencies[field_currency] += 1
page_number = field['PageNumber']
item_lines = []
for line_item_group in invoice['LineItemGroups']:
for fields in line_item_group['LineItems']:
parsed_items = {item['Type']['Text']: item['ValueDetection']['Text'] for item in fields['LineItemExpenseFields']}
item_lines.append(FinancialLineItem(amount_line=convert_string_to_number(parsed_items.get('PRICE'), float), description=parsed_items.get('ITEM'), quantity=convert_string_to_number(parsed_items.get('QUANTITY'), int), unit_price=convert_string_to_number(parsed_items.get('UNIT_PRICE'), float), product_code=parsed_items.get('PRODUCT_CODE')))
customer = FinancialCustomerInformation(name=(summary.get('RECEIVER_NAME') or summary.get('NAME')), id_reference=summary.get('ID_REFERENCE'), mailing_address=summary.get('RECEIVER_ADDRESS'), remittance_address=summary.get('ADDRESS'), phone=summary.get('RECEIVER_PHONE'), vat_number=summary.get('RECEIVER_VAT_NUMBER'), abn_number=summary.get('RECEIVER_ABN_NUMBER'), gst_number=summary.get('RECEIVER_GST_NUMBER'), pan_number=summary.get('RECEIVER_PAN_NUMBER'), customer_number=summary.get('CUSTOMER_NUMBER'), tax_id=summary.get('TAX_PAYER_ID'))
merchant = FinancialMerchantInformation(name=summary.get('VENDOR_NAME'), address=summary.get('VENDOR_ADDRESS'), phone=summary.get('VENDOR_PHONE'), vat_number=summary.get('VENDOR_VAT_NUMBER'), abn_number=summary.get('VENDOR_ABN_NUMBER'), gst_number=summary.get('VENDOR_GST_NUMBER'), pan_number=summary.get('VENDOR_PAN_NUMBER'), website=summary.get('VENDOR_URL'), city=summary.get('CITY'), country=summary.get('COUNTRY'), province=summary.get('STATE'), zip_code=summary.get('ZIP_CODE'))
payment = FinancialPaymentInformation(amount_due=convert_string_to_number(summary.get('AMOUNT_DUE'), float), amount_paid=convert_string_to_number(summary.get('AMOUNT_PAID'), float), total=convert_string_to_number(summary.get('TOTAL'), float), subtotal=convert_string_to_number(summary.get('SUB_TOTAL'), float), service_charge=convert_string_to_number(summary.get('SERVICE_CHARGE'), float), payment_terms=summary.get('PAYMENT_TERMS'), shipping_handling_charge=convert_string_to_number(summary.get('SHIPPING_HANDLING_CHARGE'), float), prior_balance=convert_string_to_number(summary.get('PRIOR_BALANCE'), float), gratuity=convert_string_to_number(summary.get('GRATUITY'), float), discount=convert_string_to_number(summary.get('DISCOUNT'), float), total_tax=convert_string_to_number(summary.get('TAX'), float))
financial_document_information = FinancialDocumentInformation(invoice_receipt_id=summary.get('INVOICE_RECEIPT_ID'), purchase_order=summary.get('PO_NUMBER'), invoice_date=summary.get('INVOICE_RECEIPT_DATE'), invoice_due_date=summary.get('DUE_DATE'), order_date=summary.get('ORDER_DATE'))
invoice_currency = None
if (len(currencies) == 1):
invoice_currency = list(currencies.keys())[0]
elif (len(currencies) > 1):
invoice_currency = max(currencies, key=currencies.get)
local = FinancialLocalInformation(currency=invoice_currency)
bank = FinancialBankInformation(account_number=summary.get('ACCOUNT_NUMBER'))
document_metadata = FinancialDocumentMetadata(document_index=invoice_index, document_page_number=page_number)
financial_document = FinancialParserObjectDataClass(customer_information=customer, merchant_information=merchant, payment_information=payment, financial_document_information=financial_document_information, local=local, bank=bank, item_lines=item_lines, document_metadata=document_metadata)
extracted_data.append(financial_document)
return FinancialParserDataClass(extracted_data=extracted_data) |
def mock_audit_events_that_modify_usergroup_info_are_collected_pass(self, cmd):
stdout = ['-w /etc/group -p wa -k identity', '-w /etc/passwd -p wa -k identity', '-w /etc/gshadow -p wa -k identity', '-w /etc/shadow -p wa -k identity', '-w /etc/security/opasswd -p wa -k identity']
stderr = ['']
returncode = 0
return SimpleNamespace(returncode=returncode, stderr=stderr, stdout=stdout) |
def test_get_llm_model_answer_with_system_prompt(config, mocker):
config.system_prompt = 'Custom system prompt'
mocked_get_answer = mocker.patch('embedchain.llm.jina.JinaLlm._get_answer', return_value='Test answer')
llm = JinaLlm(config)
answer = llm.get_llm_model_answer('Test query')
assert (answer == 'Test answer')
mocked_get_answer.assert_called_once_with('Test query', config) |
@using('pyscf')
def test_h2o2_relaxed_scan_symmetric():
steps = 3
run_dict = {'geom': {'type': 'redund', 'fn': 'lib:h2o2_hf_321g_opt.xyz'}, 'calc': {'type': 'pyscf', 'pal': 2, 'basis': '321g', 'verbose': 0}, 'scan': {'type': 'BOND', 'indices': [2, 3], 'steps': steps, 'step_size': 0.2, 'symmetric': True, 'opt': {'thresh': 'gau'}}}
results = run_from_dict(run_dict)
# a symmetric scan yields 2*steps + 1 geometries (both directions plus the start)
assert (len(results.scan_geoms) == ((2 * steps) + 1)) |
def test_incompatible_interface(newproject):
with newproject._path.joinpath('contracts/Foo.vy').open('w') as fp:
fp.write(CONTRACT)
with newproject._path.joinpath('interfaces/Bar.vy').open('w') as fp:
fp.write(INTERFACE)
with newproject._path.joinpath('interfaces/Baz.sol').open('w') as fp:
fp.write('pragma solidity ^0.4.0; interface X { function baz() external returns (bool); }')
newproject.load() |
def test_generate_all_of_a_layout_of_an_environment(create_test_data, store_local_session, create_pymel, create_maya_env):
data = create_test_data
gen = RepresentationGenerator()
gen.version = data['prop1_model_kisa_v003']
gen.generate_all()
gen.version = data['prop1_look_dev_kisa_v003']
gen.generate_all()
gen.version = data['building1_yapi_model_main_v003']
gen.generate_all()
gen.version = data['building1_yapi_look_dev_main_v003']
gen.generate_all()
gen.version = data['building1_layout_main_v003']
gen.generate_all()
gen = RepresentationGenerator(version=data['building2_yapi_model_main_v003'])
gen.generate_all()
gen.version = data['building2_yapi_look_dev_main_v003']
gen.generate_all()
gen.version = data['building2_layout_main_v003']
gen.generate_all()
gen.version = data['ext1_vegetation_main_v003']
gen.generate_all()
gen.version = data['ext1_layout_main_v003']
gen.generate_all()
r = Representation(version=data['ext1_layout_main_v003'])
v_gpu = r.find('GPU')
v_ass = r.find('ASS')
v_rs = r.find('RS')
assert (v_gpu is None)
assert (v_ass is None)
assert (v_rs is not None) |
class OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingHighpass(Options):
@property
def frequency(self) -> 'OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
return self._config_sub_data('frequency', OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
@property
def resonance(self) -> 'OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingHighpassResonance':
return self._config_sub_data('resonance', OptionSeriesAreasplineSonificationDefaultinstrumentoptionsMappingHighpassResonance) |
def _build_global_measures_url(measure_id=None, measure_ids=None, tags=None):
params = {'format': 'json'}
if (measure_id is not None):
params['measure'] = measure_id
if (measure_ids is not None):
params['measure'] = ','.join(measure_ids)
if (tags is not None):
params['tags'] = ','.join(tags)
return _build_api_url('measure', params) |
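Illustrative calls; the final URL shape depends on the _build_api_url helper, but the query parameters are assembled as shown (note that measure_ids overwrites measure_id when both are given).

_build_global_measures_url(measure_id='m1')           # params: format=json, measure=m1
_build_global_measures_url(measure_ids=['m1', 'm2'])  # measure=m1,m2
_build_global_measures_url(tags=['daily', 'core'])    # tags=daily,core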
def test_insert(db):
res = db.Standard.insert(foo='test1', bar='test2')
assert isinstance(res, int)
assert res.id
assert (res.foo == 'test1')
assert (res.bar == 'test2')
res = db.CustomType.insert(id='test1', foo='test2', bar='test3')
assert isinstance(res, str)
assert (res.id == 'test1')
assert (res.foo == 'test2')
assert (res.bar == 'test3')
res = db.CustomName.insert(foo='test1', bar='test2')
assert isinstance(res, str)
assert (not res.id)
assert (res.foo == 'test1')
assert (res.bar == 'test2')
res = db.CustomMulti.insert(foo='test1', bar='test2', baz='test3')
assert isinstance(res, tuple)
assert (not res.id)
assert (res.foo == 'test1')
assert (res.bar == 'test2')
assert (res.baz == 'test3') |
@register_deserializable
class ChromaDB(BaseVectorDB):
BATCH_SIZE = 100
def __init__(self, config: Optional[ChromaDbConfig]=None):
if config:
self.config = config
else:
self.config = ChromaDbConfig()
self.settings = Settings(anonymized_telemetry=False)
self.settings.allow_reset = (self.config.allow_reset if hasattr(self.config, 'allow_reset') else False)
if self.config.chroma_settings:
for (key, value) in self.config.chroma_settings.items():
if hasattr(self.settings, key):
setattr(self.settings, key, value)
if (self.config.host and self.config.port):
logging.info(f'Connecting to ChromaDB server: {self.config.host}:{self.config.port}')
self.settings.chroma_server_host = self.config.host
self.settings.chroma_server_http_port = self.config.port
self.settings.chroma_api_impl = 'chromadb.api.fastapi.FastAPI'
else:
if (self.config.dir is None):
self.config.dir = 'db'
self.settings.persist_directory = self.config.dir
self.settings.is_persistent = True
self.client = chromadb.Client(self.settings)
super().__init__(config=self.config)
def _initialize(self):
if (not self.embedder):
raise ValueError('Embedder not set. Please set an embedder with `_set_embedder()` function before initialization.')
self._get_or_create_collection(self.config.collection_name)
def _get_or_create_db(self):
return self.client
def _generate_where_clause(self, where: Dict[(str, any)]) -> Dict[(str, any)]:
if (len(where.keys()) <= 1):
return where
where_filters = []
for (k, v) in where.items():
if isinstance(v, str):
where_filters.append({k: v})
return {'$and': where_filters}
def _get_or_create_collection(self, name: str) -> Collection:
if ((not hasattr(self, 'embedder')) or (not self.embedder)):
raise ValueError('Cannot create a Chroma database collection without an embedder.')
self.collection = self.client.get_or_create_collection(name=name, embedding_function=self.embedder.embedding_fn)
return self.collection
def get(self, ids: Optional[List[str]]=None, where: Optional[Dict[(str, any)]]=None, limit: Optional[int]=None):
args = {}
if ids:
args['ids'] = ids
if where:
args['where'] = self._generate_where_clause(where)
if limit:
args['limit'] = limit
return self.collection.get(**args)
def add(self, embeddings: List[List[float]], documents: List[str], metadatas: List[object], ids: List[str], **kwargs: Optional[Dict[(str, Any)]]) -> Any:
size = len(documents)
if ((len(metadatas) != size) or (len(ids) != size)):
raise ValueError('Cannot add documents to chromadb with inconsistent sizes. Documents size: {}, Metadata size: {}, Ids size: {}'.format(len(documents), len(metadatas), len(ids)))
for i in tqdm(range(0, len(documents), self.BATCH_SIZE), desc='Inserting batches in chromadb'):
self.collection.add(documents=documents[i:(i + self.BATCH_SIZE)], metadatas=metadatas[i:(i + self.BATCH_SIZE)], ids=ids[i:(i + self.BATCH_SIZE)])
def _format_result(self, results: QueryResult) -> list[tuple[(Document, float)]]:
return [(Document(page_content=result[0], metadata=(result[1] or {})), result[2]) for result in zip(results['documents'][0], results['metadatas'][0], results['distances'][0])]
def query(self, input_query: str, n_results: int, where: Dict[(str, any)], citations: bool=False, **kwargs: Optional[Dict[(str, Any)]]) -> Union[(List[Tuple[(str, Dict)]], List[str])]:
try:
result = self.collection.query(query_texts=[input_query], n_results=n_results, where=self._generate_where_clause(where), **kwargs)
except InvalidDimensionException as e:
raise InvalidDimensionException((e.message() + '. This is commonly a side-effect when an embedding function, different from the one used to add the embeddings, is used to retrieve an embedding from the database.')) from None
results_formatted = self._format_result(result)
contexts = []
for result in results_formatted:
context = result[0].page_content
if citations:
metadata = result[0].metadata
metadata['score'] = result[1]
contexts.append((context, metadata))
else:
contexts.append(context)
return contexts
def set_collection_name(self, name: str):
if (not isinstance(name, str)):
raise TypeError('Collection name must be a string')
self.config.collection_name = name
self._get_or_create_collection(self.config.collection_name)
def count(self) -> int:
return self.collection.count()
def delete(self, where):
return self.collection.delete(where=self._generate_where_clause(where))
def reset(self):
try:
self.client.delete_collection(self.config.collection_name)
except ValueError:
raise ValueError('For safety reasons, resetting is disabled. Please enable it by setting `allow_reset=True` in your ChromaDbConfig') from None
self._get_or_create_collection(self.config.collection_name) |
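A sketch of the filter translation done by _generate_where_clause; since `self` is unused there, the unbound method illustrates it directly:

# single-key filters pass through unchanged; multi-key string filters are AND-combined
assert ChromaDB._generate_where_clause(None, {'app': 'x'}) == {'app': 'x'}
assert ChromaDB._generate_where_clause(None, {'app': 'x', 'user': 'y'}) == \
    {'$and': [{'app': 'x'}, {'user': 'y'}]}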
def url_to_destination_resource(url: str) -> str:
parts = urllib.parse.urlsplit(url)
hostname = (parts.hostname if parts.hostname else '')
if ('://[' in url):
hostname = ('[%s]' % hostname)
try:
port = parts.port
except ValueError:
port = None
default_port = default_ports.get(parts.scheme, None)
name = ('%s://%s' % (parts.scheme, hostname))
resource = hostname
if ((not port) and (parts.scheme in default_ports)):
port = default_ports[parts.scheme]
if port:
if (port != default_port):
name += (':%d' % port)
resource += (':%d' % port)
return resource |
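Expected behavior, assuming the module-level default_ports maps 'http' to 80 and 'https' to 443: the resource keeps an explicit non-default port and drops the default one.

assert url_to_destination_resource('https://example.com/path') == 'example.com'
assert url_to_destination_resource('https://example.com:8443/') == 'example.com:8443'
assert url_to_destination_resource('http://[2001:db8::1]:8080/') == '[2001:db8::1]:8080'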
def test_build_es_linux_aarch64_output():
def run_command(target_os, target_arch):
try:
output = process.run_subprocess_with_output(f'esrally build --revision=latest --target-arch {target_arch} --target-os {target_os} --quiet')
print(output)
elasticsearch = json.loads(''.join(output))['elasticsearch']
assert (f'{target_os}-{target_arch}' in elasticsearch)
except BaseException as e:
raise AssertionError(f'Failed to build Elasticsearch for [{target_os}, {target_arch}].') from e
run_command('linux', 'aarch64')
run_command('linux', 'x86_64') |
class GroupItem(QStandardItem):
ITEM_TYPE = (Qt.UserRole + 25)
def __init__(self, name, parent=None, has_remote_launched_nodes=False, is_group=False):
dname = name
if (dname.rfind('@') <= 0):
if is_group:
dname = (('{' + dname) + '}')
else:
dname = (dname + '/')
QStandardItem.__init__(self, dname)
self.parent_item = parent
self._name = name
self.setIcon(nm.settings().icon('state_off.png'))
self.descr_type = self.descr_name = self.descr = ''
self.descr_images = []
self._capcabilities = dict()
self._has_remote_launched_nodes = has_remote_launched_nodes
self._remote_launched_nodes_updated = False
self._re_cap_nodes = dict()
self._is_group = is_group
self._state = NodeItem.STATE_OFF
self.diagnostic_level = 0
self.is_system_group = (name == 'SYSTEM')
self._clearup_mark_delete = False
@property
def name(self):
return self._name
@name.setter
def name(self, new_name):
self._name = new_name
if self._is_group:
self.setText((('{' + self._name) + '}'))
else:
self.setText((self._name + '/'))
@property
def state(self):
return self._state
@property
def is_group(self):
return self._is_group
@property
def cfgs(self):
return self.get_configs()
def get_namespace(self):
name = self._name
if (type(self) == HostItem):
name = rospy.names.SEP
elif ((type(self) == GroupItem) and self._is_group):
name = namespace(self._name)
result = name
if (self.parent_item is not None):
result = (normns((self.parent_item.get_namespace() + rospy.names.SEP)) + normns((result + rospy.names.SEP)))
return normns(result)
def count_nodes(self):
result = 0
for i in range(self.rowCount()):
item = self.child(i)
if isinstance(item, GroupItem):
result += item.count_nodes()
elif isinstance(item, NodeItem):
result += 1
return result
def is_in_cap_group(self, nodename, config, ns, groupname):
try:
if self._re_cap_nodes[(config, ns, groupname)].match(nodename):
return True
except Exception:
pass
return False
def _create_cap_nodes_pattern(self, config, cap):
for (ns, groups) in cap.items():
for (groupname, descr) in groups.items():
try:
nodes = descr['nodes']
def_list = [(('\\A' + n.strip().replace('*', '.*')) + '\\Z') for n in nodes]
if def_list:
self._re_cap_nodes[(config, ns, groupname)] = re.compile('|'.join(def_list), re.I)
else:
self._re_cap_nodes[(config, ns, groupname)] = re.compile('\x08', re.I)  # backspace char: a pattern that never matches a node name
except Exception:
rospy.logwarn(('create_cap_nodes_pattern: %s' % traceback.format_exc(1)))
def add_capabilities(self, config, capabilities, masteruri):
self._capcabilities[config] = capabilities
self._create_cap_nodes_pattern(config, capabilities)
for (ns, groups) in capabilities.items():
for (group, descr) in groups.items():
group_changed = False
nodes = descr['nodes']
if nodes:
groupItem = self.get_group_item(roslib.names.ns_join(ns, group), nocreate=False)
groupItem.descr_name = group
if descr['type']:
groupItem.descr_type = descr['type']
if descr['description']:
groupItem.descr = descr['description']
if descr['images']:
groupItem.descr_images = list(descr['images'])
group_changed = self.move_nodes2group(groupItem, config, ns, group, self)
for node_name in nodes:
if (not re.search('\\*', node_name)):
items = groupItem.get_node_items_by_name(node_name)
if items:
for item in items:
item.add_config(config)
group_changed = True
else:
items = self.get_node_items_by_name(node_name)
if items:
groupItem.add_node(items[0].node_info, config)
elif config:
groupItem.add_node(NodeInfo(node_name, masteruri), config)
group_changed = True
if group_changed:
groupItem.update_displayed_config()
groupItem.updateIcon()
def move_nodes2group(self, group_item, config, ns, groupname, host_item):
self_changed = False
group_changed = False
for i in reversed(range(self.rowCount())):
item = self.child(i)
if isinstance(item, NodeItem):
if host_item.is_in_cap_group(item.name, config, ns, groupname):
row = self.takeRow(i)
group_item._add_row_sorted(row)
group_changed = True
self_changed = True
elif (isinstance(item, GroupItem) and (not item.is_group)):
group_changed = item.move_nodes2group(group_item, config, ns, groupname, host_item)
if self_changed:
self.update_displayed_config()
self.updateIcon()
return group_changed
def rem_capabilities(self, config):
try:
del self._capcabilities[config]
except Exception:
pass
def get_capability_groups(self, node_name):
result = dict()
try:
for (cfg, cap) in self._capcabilities.items():
for (ns, groups) in cap.items():
for (group, _) in groups.items():
if self.is_in_cap_group(node_name, cfg, ns, group):
if (cfg not in result):
result[cfg] = []
result[cfg].append(roslib.names.ns_join(ns, group))
except Exception:
pass
return result
def exists_capability_group(self, ns, group_name):
try:
if (type(self) == HostItem):
if (len(ns) > 1):
ns = ns.rstrip(rospy.names.SEP)
for (_cfg, cap) in self._capcabilities.items():
for (gns, groups) in cap.items():
for (group, _decription) in groups.items():
if ((ns == gns) and (group == group_name)):
return True
elif (self.parent_item is not None):
return self.parent_item.exists_capability_group(ns, group_name)
except Exception:
pass
return False
def clear_multiple_screens(self):
for i in range(self.rowCount()):
item = self.child(i)
if isinstance(item, GroupItem):
item.clear_multiple_screens()
elif isinstance(item, NodeItem):
item.has_multiple_screens = False
def get_node_items_by_name(self, node_name, recursive=True):
result = []
for i in range(self.rowCount()):
item = self.child(i)
if isinstance(item, GroupItem):
if recursive:
result[len(result):] = item.get_node_items_by_name(node_name)
elif (isinstance(item, NodeItem) and (item == node_name)):
return [item]
return result
def get_node_items_by_cfg(self, cfg):
result = []
for i in range(self.rowCount()):
item = self.child(i)
if isinstance(item, GroupItem):
result[len(result):] = item.get_node_items_by_cfg(cfg)
elif isinstance(item, NodeItem):
if (cfg in item.cfgs):
result.append(item)
return result
def get_node_items(self, recursive=True):
result = []
for i in range(self.rowCount()):
item = self.child(i)
if isinstance(item, GroupItem):
if recursive:
result[len(result):] = item.get_node_items()
elif isinstance(item, NodeItem):
result.append(item)
return result
def get_group_item(self, group_name, is_group=True, nocreate=False):
(lns, rns) = (group_name, '')
if nm.settings().group_nodes_by_namespace:
(lns, rns) = lnamespace(group_name)
if ((lns == rospy.names.SEP) and (type(self) == HostItem)):
(lns, rns) = lnamespace(rns)
if (lns == rospy.names.SEP):
return self
for i in range(self.rowCount()):
item = self.child(i)
if isinstance(item, GroupItem):
if ((item == lns) and (not item._clearup_mark_delete)):
if rns:
return item.get_group_item(rns, is_group, nocreate)
return item
elif ((item > lns) and (not nocreate)):
items = []
newItem = GroupItem(lns, self, is_group=(is_group and (not rns)))
items.append(newItem)
cfgitem = CellItem(group_name, newItem)
items.append(cfgitem)
self.insertRow(i, items)
if rns:
return newItem.get_group_item(rns, is_group, nocreate)
return newItem
if nocreate:
return None
items = []
newItem = GroupItem(lns, self, is_group=(is_group and (not rns)))
items.append(newItem)
cfgitem = CellItem(group_name, newItem)
items.append(cfgitem)
self.appendRow(items)
if rns:
return newItem.get_group_item(rns, is_group, nocreate)
return newItem
def add_node(self, node, cfg=None):
groups = self.get_capability_groups(node.name)
if groups:
for (_, group_list) in groups.items():
for group_name in group_list:
groupItem = self.get_group_item(group_name, is_group=True)
groupItem.add_node(node, cfg)
else:
group_item = self
if (type(group_item) == HostItem):
group_item = self.get_group_item(namespace(node.name), is_group=False)
new_item_row = NodeItem.newNodeRow(node.name, node.masteruri)
group_item._add_row_sorted(new_item_row)
new_item_row[0].set_node_info(node)
if (cfg or (cfg == '')):
new_item_row[0].add_config(cfg)
group_item.updateIcon()
def _add_row_sorted(self, row):
for i in range(self.rowCount()):
item = self.child(i)
if (item > row[0].name):
self.insertRow(i, row)
row[0].parent_item = self
return
self.appendRow(row)
row[0].parent_item = self
def clearup(self, fixed_node_names=None):
self._clearup(fixed_node_names)
self._mark_groups_to_delete()
self._remove_marked_groups()
def _clearup(self, fixed_node_names=None):
removed = False
for i in reversed(range(self.rowCount())):
item = self.child(i)
if isinstance(item, NodeItem):
if (fixed_node_names is not None):
if (item.name not in fixed_node_names):
item.set_node_info(NodeInfo(item.name, item.node_info.masteruri))
if (not (item.has_configs() or item.is_running() or item.published or item.subscribed or item.services)):
removed = True
self._remove_row(i)
else:
removed = (item._clearup(fixed_node_names) or removed)
if ((self.rowCount() == 0) and (self.parent_item is not None)):
self.parent_item._remove_group(self.name)
elif removed:
self.updateIcon()
return removed
def _mark_groups_to_delete(self):
for i in range(self.rowCount()):
item = self.child(i)
if isinstance(item, NodeItem):
if (not isinstance(self, HostItem)):
if (self.parent_item is not None):
if self.is_group:
if (not self.exists_capability_group(self.parent_item.get_namespace(), self.name)):
self._clearup_mark_delete = True
elif (self.rowCount() == 1):
self._clearup_mark_delete = True
else:
item._mark_groups_to_delete()
if (self.rowCount() == 1):
self._clearup_mark_delete = item._clearup_mark_delete
def _remove_marked_groups(self):
rows2add = []
for i in reversed(range(self.rowCount())):
item = self.child(i)
if isinstance(item, GroupItem):
if item._clearup_mark_delete:
rows = self._take_node_rows(item)
if rows:
rows2add = (rows2add + rows)
self._remove_row(i)
else:
item._remove_marked_groups()
for row in rows2add:
self._add_row_sorted(row)
self.updateIcon()
def _take_node_rows(self, group):
result = []
for i in reversed(range(group.rowCount())):
item = group.child(i)
if isinstance(item, NodeItem):
result.append(group.takeRow(i))
else:
result = (result + item._take_node_rows(item))
return result
def _remove_group(self, name):
for i in range(self.rowCount()):
item = self.child(i)
if ((type(item) == GroupItem) and (item == name) and (item.rowCount() == 0)):
self._remove_row(i)
return
def _remove_row(self, index):
item = self.child(index)
item.parent_item = None
try:
cellitem = self.child(index, 1)
cellitem.parent_item = None
cellitem.item = None
except Exception as e:
rospy.logdebug_throttle(10, utf8(e))
self.removeRow(index)
def reset_remote_launched_nodes(self):
self._remote_launched_nodes_updated = False
def remote_launched_nodes_updated(self):
if self._has_remote_launched_nodes:
return self._remote_launched_nodes_updated
return True
def update_running_state(self, nodes, create_nodes=True):
updated_nodes = []
if isinstance(nodes, dict):
for (name, node) in nodes.items():
items = self.get_node_items_by_name(name)
if items:
for item in items:
run_changed = item.set_node_info(node)
if run_changed:
updated_nodes.append(node)
elif create_nodes:
self.add_node(node)
updated_nodes.append(node)
if self._has_remote_launched_nodes:
self._remote_launched_nodes_updated = True
self.clearup(list(nodes.keys()))
elif isinstance(nodes, list):
self.clearup(nodes)
return updated_nodes
def get_nodes_running(self):
result = []
for i in range(self.rowCount()):
item = self.child(i)
if isinstance(item, GroupItem):
result[len(result):] = item.get_nodes_running()
elif (isinstance(item, NodeItem) and (item.node_info.pid is not None)):
result.append(item.name)
return result
def set_duplicate_nodes(self, running_nodes, is_sync_running=False):
ignore = ['/master_sync', '/master_discovery', '/node_manager', '/node_manager_daemon']
for i in range(self.rowCount()):
item = self.child(i)
if isinstance(item, GroupItem):
item.set_duplicate_nodes(running_nodes, is_sync_running)
elif isinstance(item, NodeItem):
if is_sync_running:
item.is_ghost = ((item.node_info.uri is None) and ((item.name in running_nodes) and (running_nodes[item.name] == item.node_info.masteruri)))
item.has_running = ((item.node_info.uri is None) and (item.name not in ignore) and ((item.name in running_nodes) and (running_nodes[item.name] != item.node_info.masteruri)))
else:
if item.is_ghost:
item.is_ghost = False
item.has_running = ((item.node_info.uri is None) and (item.name not in ignore) and (item.name in running_nodes))
def updateIcon(self):
if isinstance(self, HostItem):
return
has_running = False
has_off = False
has_duplicate = False
has_ghosts = False
self.diagnostic_level = 0
for i in range(self.rowCount()):
item = self.child(i)
if isinstance(item, (GroupItem, NodeItem)):
if (item.state == NodeItem.STATE_WARNING):
self.setIcon(nm.settings().icon('crystal_clear_warning.png'))
self._state = NodeItem.STATE_WARNING
if (self.parent_item is not None):
self.parent_item.updateIcon()
return
elif (item.state == NodeItem.STATE_OFF):
has_off = True
elif (item.state == NodeItem.STATE_RUN):
has_running = True
elif (item.state == NodeItem.STATE_GHOST):
has_ghosts = True
elif (item.state == NodeItem.STATE_DUPLICATE):
has_duplicate = True
elif (item.state == NodeItem.STATE_PARTS):
has_running = True
has_off = True
if ((item.state == NodeItem.STATE_RUN) or isinstance(item, GroupItem)):
if (item.diagnostic_level > self.diagnostic_level):
self.diagnostic_level = item.diagnostic_level
if (self.diagnostic_level > 0):
self.setIcon(NodeItem._diagnostic_level2icon(self.diagnostic_level))
elif has_duplicate:
self._state = NodeItem.STATE_DUPLICATE
self.setIcon(nm.settings().icon('imacadam_stop.png'))
elif has_ghosts:
self._state = NodeItem.STATE_GHOST
self.setIcon(nm.settings().icon('state_ghost.png'))
elif (has_running and has_off):
self._state = NodeItem.STATE_PARTS
self.setIcon(nm.settings().icon('state_part.png'))
elif (not has_running):
self._state = NodeItem.STATE_OFF
self.setIcon(nm.settings().icon('state_off.png'))
elif ((not has_off) and has_running):
self._state = NodeItem.STATE_RUN
self.setIcon(nm.settings().icon('state_run.png'))
if (self.parent_item is not None):
self.parent_item.updateIcon()
def _create_html_list(self, title, items):
result = ''
if items:
result += ('<b><u>%s</u></b>' % title)
if (len(items) > 1):
result += (' <span style="color:gray;">[%d]</span>' % len(items))
result += '<ul><span></span><br>'
for i in items:
result += ('<a href="node://%s">%s</a><br>' % (i, i))
result += '</ul>'
return result
def update_tooltip(self):
tooltip = self.generate_description(False)
self.setToolTip((tooltip if tooltip else self.name))
return tooltip
def generate_description(self, extended=True):
tooltip = ''
if (self.descr_type or self.descr_name or self.descr):
tooltip += ('<h4>%s</h4><dl>' % self.descr_name)
if self.descr_type:
tooltip += ('<dt>Type: %s</dt></dl>' % self.descr_type)
if extended:
try:
from docutils import examples
if self.descr:
tooltip += '<b><u>Detailed description:</u></b>'
tooltip += examples.html_body(utf8(self.descr))
except Exception:
rospy.logwarn('Error while generate description for a tooltip: %s', traceback.format_exc(1))
tooltip += '<br>'
nodes = []
for j in range(self.rowCount()):
nodes.append(self.child(j).name)
if nodes:
tooltip += self._create_html_list('Nodes:', nodes)
return ('<div>%s</div>' % tooltip)
def update_description(self, descr_type, descr_name, descr):
self.descr_type = descr_type
self.descr_name = descr_name
self.descr = descr
def update_displayed_config(self):
if (self.parent_item is not None):
cfgs = []
for j in range(self.rowCount()):
if self.child(j).cfgs:
cfgs[len(cfgs):] = self.child(j).cfgs
if cfgs:
cfgs = list(set(cfgs))
cfg_col = self.parent_item.child(self.row(), NodeItem.COL_CFG)
if ((cfg_col is not None) and isinstance(cfg_col, QStandardItem)):
cfg_col.setText((('[%d]' % len(cfgs)) if (len(cfgs) > 1) else ''))
has_launches = NodeItem.has_launch_cfgs(cfgs)
has_defaults = NodeItem.has_default_cfgs(cfgs)
if (has_launches and has_defaults):
cfg_col.setIcon(nm.settings().icon('crystal_clear_launch_file_def_cfg.png'))
elif has_launches:
cfg_col.setIcon(nm.settings().icon('crystal_clear_launch_file.png'))
elif has_defaults:
cfg_col.setIcon(nm.settings().icon('default_cfg.png'))
else:
cfg_col.setIcon(QIcon())
def get_configs(self):
cfgs = []
for j in range(self.rowCount()):
if isinstance(self.child(j), GroupItem):
cfgs[len(cfgs):] = self.child(j).get_configs()
elif self.child(j).cfgs:
cfgs[len(cfgs):] = self.child(j).cfgs
return set(cfgs)
def get_count_mscreens(self):
result = 0
for j in range(self.rowCount()):
if isinstance(self.child(j), GroupItem):
result += self.child(j).get_count_mscreens()
elif self.child(j).has_multiple_screens:
result += 1
return result
def type(self):
return GroupItem.ITEM_TYPE
def __eq__(self, item):
if isstring(item):
return (self.name.lower() == item.lower())
elif ((item is not None) and (type(item) == GroupItem)):
return (self.name.lower() == item.name.lower())
return False
def __ne__(self, item):
return (not (self == item))
def __gt__(self, item):
if isstring(item):
if self.is_system_group:
if (self.name.lower() != item.lower()):
return True
elif (item.lower() == 'system'):
return False
return (self.name.lower() > item.lower())
elif ((item is not None) and (type(item) == GroupItem)):
if item.is_system_group:
if (self.name.lower() != item.name.lower()):
return True
elif self.is_system_group:
return False
return (self.name.lower() > item.name.lower())
return False |
def test_remove_deployment_returns(network):
network.connect('mainnet')
address = '0x0bc529c00c6401aef6d220be8c6ea1667f6ad93e'
Contract.from_explorer(address)
(build_json, sources) = _get_deployment(address)
assert ((build_json, sources) != (None, None))
assert ((build_json, sources) == Contract.remove_deployment(address)) |
def play(audio: bytes, notebook: bool=False, use_ffmpeg: bool=True) -> None:
if notebook:
from IPython.display import Audio, display
display(Audio(audio, rate=44100, autoplay=True))
elif use_ffmpeg:
if (not is_installed('ffplay')):
message = "ffplay from ffmpeg not found, necessary to play audio. On mac you can install it with 'brew install ffmpeg'. On linux and windows you can install it from https://ffmpeg.org/"
raise ValueError(message)
args = ['ffplay', '-autoexit', '-', '-nodisp']
proc = subprocess.Popen(args=args, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = proc.communicate(input=audio)
proc.poll()
else:
try:
import io
import sounddevice as sd
import soundfile as sf
except ModuleNotFoundError:
message = '`pip install sounddevice soundfile` required when `use_ffmpeg=False` '
raise ValueError(message)
sd.play(*sf.read(io.BytesIO(audio)))
sd.wait() |
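Usage sketch; 'speech.wav' is a placeholder file, and ffplay must be on PATH for the default code path.

with open('speech.wav', 'rb') as fh:
    play(fh.read())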