code stringlengths 281 23.7M |
|---|
_test
def test_zmq_sender_node() -> None:
    """End-to-end check: samples produced by MySource arrive over the ZMQ sink."""
    class MyZMQSenderGraph(Graph):
        MY_SOURCE: MySource
        MY_SINK: ZMQSenderNode
        config: ZMQSenderConfig

        def setup(self) -> None:
            self.MY_SOURCE.configure(MySourceConfig())
            self.MY_SINK.configure(self.config)

        def connections(self) -> Connections:
            return ((self.MY_SOURCE.TOPIC, self.MY_SINK.topic),)

    out_path = get_test_filename()
    sender_graph = MyZMQSenderGraph()
    addr = f'{ZMQ_ADDR}:{get_free_port()}'
    sender_graph.configure(ZMQSenderConfig(write_addr=addr, zmq_topic=ZMQ_TOPIC))
    runner = LocalRunner(module=sender_graph)
    # Receive in a separate process so the runner and the subscriber overlap.
    receiver = Process(target=recv_samples_from_zmq, args=(addr, ZMQ_TOPIC, out_path))
    receiver.start()
    runner.run()
    receiver.join()
    with open(out_path, 'br') as f:
        received = f.read()
    sent = set(sender_graph.MY_SOURCE.samples)
    assert sent == set(received.strip(DATA_DELIMITER).split(DATA_DELIMITER))
def test_cache_set_with_autoexpire(cache: FidesopsRedis) -> None:
    """set_with_autoexpire stores the value and applies the configured default TTL."""
    key, value = 'a_key', 'a_value'
    default_ttl = CONFIG.redis.default_ttl_seconds
    cache.set_with_autoexpire(key, value)
    # Allow a couple of seconds of slack for the time between set and ttl().
    assert cache.ttl(key) in range(default_ttl - 2, default_ttl + 1)
    assert cache.get(key) == value
def extractShootingstarscansXyz(item):
    """Map a feed item to a release message via its tags.

    Returns None for non-chapter/preview items, False when no tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    known_tags = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tag, series_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Channel(ChannelBase):
    def paint(self, deviceContext):
        """Draw this channel's curve and control-point handles onto *deviceContext* (a wx DC)."""
        dc = deviceContext
        table = self.control.table
        # Only control points that mark this channel as active contribute.
        relevant_control_points = [x for x in table.control_points if (self.name in x.active_channels)]
        dc.BeginDrawing()
        # Line segments between consecutive control points, in the channel colour.
        dc.SetPen(wx.Pen(self.rgb_color, 1))
        dc.SetBrush(wx.Brush((255, 255, 255), wx.SOLID))
        for k in range((len(relevant_control_points) - 1)):
            cur_point = relevant_control_points[k]
            next_point = relevant_control_points[(1 + k)]
            dc.DrawLine(self.get_pos_index(cur_point.pos), self.get_value_index(cur_point.color), self.get_pos_index(next_point.pos), self.get_value_index(next_point.color))
        # Square handles in black, centred on each control point.
        dc.SetPen(wx.Pen('BLACK', 1))
        dc.SetBrush(wx.Brush((255, 255, 255), wx.SOLID))
        for control_point in relevant_control_points:
            x = self.get_pos_index(control_point.pos)
            y = self.get_value_index(control_point.color)
            radius = 6  # handle edge length in pixels; halved to centre the square
            dc.DrawRectangle((x - (radius / 2.0)), (y - (radius / 2.0)), radius, radius)
        # NOTE(review): fixed rectangle at (100, 80) looks like leftover debug
        # output — confirm intent before removing.
        dc.DrawRectangle(100, 80, 6, 6)
        dc.EndDrawing()
def test_get_parent_fw(backend_db, common_db):
    """Root firmware lookup resolves one or many parents for nested objects."""
    fw, parent_fo, child_fo = create_fw_with_parent_and_child()
    second_fw = create_test_firmware()
    second_fw.uid = 'test_fw2'
    # child_fo is now included by both firmwares.
    add_included_file(child_fo, second_fw, second_fw, ['/some/path'])
    backend_db.insert_multiple_objects(fw, second_fw, parent_fo, child_fo)
    assert common_db.get_parent_fw(child_fo.uid) == {fw.uid, second_fw.uid}
    uid_list = [fw.uid, parent_fo.uid, child_fo.uid]
    expected = {parent_fo.uid: {fw.uid}, child_fo.uid: {fw.uid, second_fw.uid}}
    assert common_db.get_parent_fw_for_uid_list(uid_list) == expected
class LazyWapitiBinaryWrapper():
    """Resolves the wapiti binary lazily, installing it on first use if needed."""
    download_manager: DownloadManager
    install_url: Optional[str] = None
    _binary_path: Optional[str] = None

    def get_binary_path(self) -> str:
        """Return the wapiti binary path, installing and caching it on demand."""
        if not self.install_url:
            return DEFAULT_WAPITI_PATH
        if not self._binary_path:
            installed = install_wapiti_and_get_path_or_none(self.install_url, download_manager=self.download_manager)
            # Fall back to the default path when installation fails.
            self._binary_path = installed or DEFAULT_WAPITI_PATH
        return self._binary_path
def test_aliases_with_default_value_and_field_added():
    """Reading old records with a new schema resolves a renamed field via its
    alias and fills a newly added field from its default.

    (Fixes the original's dangling single-quote docstring, which was a syntax
    error.)
    """
    schema = {'type': 'record', 'name': 'test_aliases_with_default_value', 'fields': [{'name': 'test', 'type': 'int'}]}
    new_schema = {'type': 'record', 'name': 'test_aliases_with_default_value', 'fields': [{'name': 'newtest', 'type': 'int', 'default': 0, 'aliases': ['test']}, {'name': 'test2', 'type': 'int', 'default': 100}]}
    records = [{'test': 1}]
    new_records = roundtrip(schema, records, new_schema)
    assert new_records == [{'newtest': 1, 'test2': 100}]
def test_residue_missing_atoms():
    """A residue with missing atoms raises unless allow_bad_res is set, in
    which case it is ignored and reported via getIgnoredResidues()."""
    # Context manager fixes the original's leaked file handle.
    with open(just_one_ALA_missing, 'r') as f:
        pdb_string = f.read()
    with pytest.raises(RuntimeError):
        chorizo = LinkedRDKitChorizo(pdb_string)
    chorizo = LinkedRDKitChorizo(pdb_string, allow_bad_res=True)
    assert (len(chorizo.residues) == 1)
    assert (len(chorizo.getIgnoredResidues()) == 1)
    expected_removed_residues = ['A:ALA:1']
    assert (list(chorizo.getIgnoredResidues().keys()) == expected_removed_residues)
    # No repairs should be suggested for a residue that is simply ignored.
    expected_suggested_mutations = {}
    assert (chorizo.suggested_mutations == expected_suggested_mutations)
    # NOTE(review): this expected value is built but never asserted against
    # anything in the original — confirm whether an assertion is missing.
    expected_residue_data = {'A:ALA:1': ChorizoResidue('A:ALA:1', 'ATOM 1 N ALA A 1 6.061 2.529 -3.691 1.00 0.00 N \nATOM 2 CA ALA A 1 5.518 2.870 -2.403 1.00 0.00 C \nATOM 3 C ALA A 1 4.995 1.645 -1.690 1.00 0.00 C \nATOM 4 O ALA A 1 5.294 0.515 -2.156 1.00 0.00 O \n', None, None)}
def get_imports(fips_dir, proj_dir):
    """Return the 'imports' section of proj_dir/fips.yml as a dict.

    Optional per-dependency keys (branch, cond, group) are defaulted to None
    in place; a dependency without a 'git' URL is reported via log.error.
    Returns {} when the project dir is invalid or declares no imports.

    :param fips_dir: absolute path of the fips directory (kept for call-site
        compatibility; not used here)
    :param proj_dir: absolute path of the project directory
    """
    # (Removed the original's unused proj_name lookup.)
    imports = {}
    if util.is_valid_project_dir(proj_dir):
        dic = util.load_fips_yml(proj_dir)
        if 'imports' in dic:
            imports = dic['imports']
    if imports:
        # isinstance (rather than `type(...) is dict`) also accepts dict
        # subclasses such as the ordered mappings some YAML loaders produce.
        if isinstance(imports, dict):
            for dep, attrs in imports.items():
                attrs.setdefault('branch', None)
                attrs.setdefault('cond', None)
                if 'git' not in attrs:
                    log.error("no git URL in import '{}' in '{}/fips.yml'!\n".format(dep, proj_dir))
                attrs.setdefault('group', None)
        else:
            log.error("imports in '{}/fips.yml' must be a dictionary!".format(proj_dir))
    return imports
class OptionPlotoptionsVennPointEvents(Options):
    """Point-event callback options for venn series.

    Each option is a read/write property: the getter returns the configured
    value (default None), the setter stores a plain (non-JS) value.
    NOTE(review): the original had duplicate getter/setter method pairs with
    the same name — stripped @property/@<name>.setter decorators, under which
    the setter silently shadowed the getter. Restored here.
    """

    @property
    def click(self):
        return self._config_get(None)

    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drag(self):
        return self._config_get(None)

    @drag.setter
    def drag(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dragStart(self):
        return self._config_get(None)

    @dragStart.setter
    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drop(self):
        return self._config_get(None)

    @drop.setter
    def drop(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOut(self):
        return self._config_get(None)

    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOver(self):
        return self._config_get(None)

    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    @property
    def remove(self):
        return self._config_get(None)

    @remove.setter
    def remove(self, value: Any):
        self._config(value, js_type=False)

    @property
    def select(self):
        return self._config_get(None)

    @select.setter
    def select(self, value: Any):
        self._config(value, js_type=False)

    @property
    def unselect(self):
        return self._config_get(None)

    @unselect.setter
    def unselect(self, value: Any):
        self._config(value, js_type=False)

    @property
    def update(self):
        return self._config_get(None)

    @update.setter
    def update(self, value: Any):
        self._config(value, js_type=False)
class CodeView(ModelView):
    """ModelView exposing a CodeModel's source through a CodeEditor item."""
    model = Instance(CodeModel)
    show_line_numbers = Bool(True)
    style = Enum('simple', 'readonly')

    def default_traits_view(self):
        """Build the view lazily so current trait values are honoured."""
        editor = CodeEditor(show_line_numbers=self.show_line_numbers)
        return View(Item('model.code', editor=editor, style=self.style))
def test_gauss_tet1():
    """A degree-1 polynomial must be integrated identically by quadrature
    orders 1, 2 and 3 on the tetrahedron.

    (De-duplicates the original's three copy-pasted order/integrate blocks.)
    """
    print('1st Order Polynomial')
    print('Tetrahedron')
    integrals = []
    for order in (1, 2, 3):
        gaussTetrahedron.setOrder(order)
        value = dot(f1(gaussTetrahedron.points), gaussTetrahedron.weights)
        print(value)
        integrals.append(value)
    npt.assert_almost_equal(integrals[0], integrals[1])
    npt.assert_almost_equal(integrals[1], integrals[2])
class desc_stats_reply(stats_reply):
    """OFPST_DESC stats reply carrying the switch's textual descriptions.

    Wire layout: version/type/length header, xid, stats_type, flags, 4 pad
    bytes, then NUL-padded fixed-width strings (256/256/256/32/256 bytes).
    NOTE(review): pack() joins str fragments and unpack() strips str NUL
    padding — Python 2 era generated code; confirm target interpreter.
    """
    version = 6  # protocol version constant for this message class
    type = 19  # message type: stats reply
    stats_type = 0  # OFPST_DESC
    def __init__(self, xid=None, flags=None, mfr_desc=None, hw_desc=None, sw_desc=None, serial_num=None, dp_desc=None):
        """Store the given fields; unset strings become '', flags 0, xid stays None."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (mfr_desc != None):
            self.mfr_desc = mfr_desc
        else:
            self.mfr_desc = ''
        if (hw_desc != None):
            self.hw_desc = hw_desc
        else:
            self.hw_desc = ''
        if (sw_desc != None):
            self.sw_desc = sw_desc
        else:
            self.sw_desc = ''
        if (serial_num != None):
            self.serial_num = serial_num
        else:
            self.serial_num = ''
        if (dp_desc != None):
            self.dp_desc = dp_desc
        else:
            self.dp_desc = ''
        return
    def pack(self):
        """Serialize to wire format; the length field (fragment 2) is patched
        after the total length is known."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # 4-byte pad after the stats header
        packed.append(struct.pack('!256s', self.mfr_desc))
        packed.append(struct.pack('!256s', self.hw_desc))
        packed.append(struct.pack('!256s', self.sw_desc))
        packed.append(struct.pack('!32s', self.serial_num))
        packed.append(struct.pack('!256s', self.dp_desc))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        """Parse a desc_stats_reply from *reader*, asserting header constants.

        NOTE(review): first parameter is the reader, not self — presumably a
        stripped @staticmethod decorator; confirm against the generator.
        """
        obj = desc_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 0)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        obj.mfr_desc = reader.read('!256s')[0].rstrip('\x00')
        obj.hw_desc = reader.read('!256s')[0].rstrip('\x00')
        obj.sw_desc = reader.read('!256s')[0].rstrip('\x00')
        obj.serial_num = reader.read('!32s')[0].rstrip('\x00')
        obj.dp_desc = reader.read('!256s')[0].rstrip('\x00')
        return obj
    def __eq__(self, other):
        """Field-by-field equality with an exact type match."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.mfr_desc != other.mfr_desc):
            return False
        if (self.hw_desc != other.hw_desc):
            return False
        if (self.sw_desc != other.sw_desc):
            return False
        if (self.serial_num != other.serial_num):
            return False
        if (self.dp_desc != other.dp_desc):
            return False
        return True
    def pretty_print(self, q):
        """Render a readable dump of all fields onto pretty-printer *q*."""
        q.text('desc_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('mfr_desc = ')
                q.pp(self.mfr_desc)
                q.text(',')
                q.breakable()
                q.text('hw_desc = ')
                q.pp(self.hw_desc)
                q.text(',')
                q.breakable()
                q.text('sw_desc = ')
                q.pp(self.sw_desc)
                q.text(',')
                q.breakable()
                q.text('serial_num = ')
                q.pp(self.serial_num)
                q.text(',')
                q.breakable()
                q.text('dp_desc = ')
                q.pp(self.dp_desc)
                q.breakable()
                q.text('}')
def is_on_battery(config):
    """Best-effort battery detection: sysfs first, then two upower fallbacks.

    Returns True when the machine appears to run on battery power; assumes
    battery when every detection method fails.

    Fixes: the original used bare `except:` (which also swallowed
    KeyboardInterrupt/SystemExit) and bare `raise` with no active exception
    as control flow; both replaced with explicit, narrow forms.
    """
    try:
        for path in glob.glob(config.get('GENERAL', 'Sysfs_Power_Path', fallback=DEFAULT_SYSFS_POWER_PATH)):
            with open(path) as f:
                # sysfs 'online' is 1 on AC power, 0 on battery.
                return (not bool(int(f.read())))
        # No sysfs path matched: fall through to the upower methods.
        raise RuntimeError('no sysfs power path matched')
    except Exception:
        warning('No valid Sysfs_Power_Path found! Trying upower method #1')
    try:
        out = subprocess.check_output(('upower', '-i', '/org/freedesktop/UPower/devices/line_power_AC'))
        res = re.search(b'online:\\s+(yes|no)', out).group(1).decode().strip()
        if (res == 'yes'):
            return False
        elif (res == 'no'):
            return True
        raise RuntimeError('unexpected upower line_power_AC output')
    except Exception:
        warning('Trying upower method #2')
    try:
        out = subprocess.check_output(('upower', '-i', '/org/freedesktop/UPower/devices/battery_BAT0'))
        res = re.search(b'state:\\s+(.+)', out).group(1).decode().strip()
        if (res == 'discharging'):
            return True
        elif (res in ('fully-charged', 'charging', 'pending-charge')):
            return False
    except Exception:
        pass
    warning('No valid power detection methods found. Assuming that the system is running on battery power.')
    return True
_grad()
def test_two_controlnets_eject_top_down(unet: SD1UNet) -> None:
    """Ejecting stacked controlnet adapters top-down restores the parent chain."""
    initial_parent = unet.parent
    first = SD1ControlnetAdapter(unet, name='cn1').inject()
    second = SD1ControlnetAdapter(unet, name='cn2').inject()
    # Removing the outer adapter re-links the inner one to the original parent.
    first.eject()
    assert second.parent == initial_parent
    assert unet.parent == second
    # Removing the remaining adapter leaves no controlnets in the tree.
    second.eject()
    assert unet.parent == initial_parent
    assert len(list(unet.walk(Controlnet))) == 0
def extractQuikernovelBlogspotCom(item):
    """Map a feed item to a release message, matching by title when the item
    is untagged and by tag otherwise.

    Returns None for non-chapter/preview items, False when nothing matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    if not item['tags']:
        known_titles = (
            ('My boss husband, please let me go!', 'My boss husband, please let me go!', 'translated'),
            ('Step In Dangerous Love', 'Step In Dangerous Love', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        )
        for fragment, series_name, tl_type in known_titles:
            if fragment.lower() in title.lower():
                return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    known_tags = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tag, series_name, tl_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Move(Core):
    """File-move operations layered on Core.

    NOTE(review): the original contained bodiless duplicate signatures before
    each implementation (stripped @typing.overload stubs), which is a syntax
    error as written; only the runtime implementations are kept here.
    """

    def move_file(self, file_id: str = None, to_parent_file_id: str = 'root', new_name: str = None, drive_id: str = None, to_drive_id: str = None, body: MoveFileRequest = None, **kwargs) -> MoveFileResponse:
        """Move one file; accepts either keyword fields or a prebuilt request *body*."""
        if body is None:
            body = MoveFileRequest(file_id=file_id, drive_id=drive_id, to_drive_id=to_drive_id, to_parent_file_id=to_parent_file_id, new_name=new_name, **kwargs)
        return self._core_move_file(body)

    def batch_move_files(self, file_id_list: List[str] = None, to_parent_file_id: str = 'root', drive_id: str = None, body: BatchMoveFilesRequest = None, **kwargs) -> List[BatchSubResponse[MoveFileResponse]]:
        """Move several files in one batch; accepts fields or a prebuilt *body*."""
        if body is None:
            body = BatchMoveFilesRequest(drive_id=drive_id, file_id_list=file_id_list, to_parent_file_id=to_parent_file_id, **kwargs)
        # Materialize the generator so callers get a concrete list of sub-responses.
        return list(self._core_batch_move_files(body))
def q_rd1(m0, m1, m2, o0, o1, o2, p0, p1, p2, n0, n1, n2):
    """Cosine of the angle between vectors (m - o) and (n - p) in 3-D,
    computed in arbitrary precision with mpmath.

    Equivalent to dot(m - o, n - p) / (|m - o| * |n - p|); the exact
    operation order is preserved (generated code, bit-reproducible).
    """
    # Promote all scalar inputs to mpmath floats.
    m0 = mpmath.mpf(m0)
    m1 = mpmath.mpf(m1)
    m2 = mpmath.mpf(m2)
    o0 = mpmath.mpf(o0)
    o1 = mpmath.mpf(o1)
    o2 = mpmath.mpf(o2)
    p0 = mpmath.mpf(p0)
    p1 = mpmath.mpf(p1)
    p2 = mpmath.mpf(p2)
    n0 = mpmath.mpf(n0)
    n1 = mpmath.mpf(n1)
    n2 = mpmath.mpf(n2)
    # Components of the two difference vectors.
    x0 = (m0 - o0)
    x1 = (n0 - p0)
    x2 = (m1 - o1)
    x3 = (m2 - o2)
    x4 = (n1 - p1)
    x5 = (n2 - p2)
    # Reciprocal of the product of the two vector norms.
    x6 = (1 / (mpmath.sqrt((((x0 ** 2) + (x2 ** 2)) + (x3 ** 2))) * mpmath.sqrt((((x1 ** 2) + (x4 ** 2)) + (x5 ** 2)))))
    # Normalized dot product, term by term.
    return ((((x0 * x1) * x6) + ((x2 * x4) * x6)) + ((x3 * x5) * x6))
class DeathHandler(THBEventHandler):
    """Role-mode death resolution: decides game end and applies kill rewards/penalties."""
    interested = ['action_apply', 'action_after']
    game: 'THBattleRole'
    def handle(self, evt_type: str, act) -> Any:
        """On death apply: check win conditions; on death after: reward/punish the killer."""
        if ((evt_type == 'action_apply') and isinstance(act, PlayerDeath)):
            g = self.game
            T = THBRoleRole
            pl = g.players.player
            tgt = act.target
            # The dying target counts as dead even before its flag is set.
            dead = (lambda p: (p.dead or (p is tgt)))
            survivors = [p for p in g.players if (not dead(p))]
            if (len(survivors) == 1):
                pl.reveal([g.roles[p] for p in pl])
                # A lone surviving CURTAIN wins outright.
                if (g.roles[survivors[0].player] == T.CURTAIN):
                    raise GameEnded([survivors[0].player])
            # Count deaths per role to evaluate faction elimination.
            deads: Dict[(THBRoleRole, int)] = defaultdict(int)
            for p in g.players:
                if dead(p):
                    deads[g.roles[p.player].get()] += 1
            def winner(*roles: THBRoleRole):
                # Reveal everyone and end the game with all players holding *roles*.
                pl.reveal([g.roles[p] for p in pl])
                raise GameEnded([p for p in pl if (g.roles[p].get() in roles)])
            def has_no(i: THBRoleRole):
                # True when every player configured with role *i* is dead.
                return (deads[i] == g.roles_config.count(i))
            if deads[T.BOSS]:
                # Boss death: in double-curtain mode attackers always win;
                # otherwise curtain wins only if all attackers are dead too.
                if g.double_curtain:
                    winner(T.ATTACKER)
                elif has_no(T.ATTACKER):
                    winner(T.CURTAIN)
                else:
                    winner(T.ATTACKER)
            if (has_no(T.ATTACKER) and has_no(T.CURTAIN)):
                winner(T.BOSS, T.ACCOMPLICE)
            if all([g.is_dropped(ch.player) for ch in survivors]):
                # Everyone remaining has disconnected: end with no winners.
                pl.reveal([g.roles[p] for p in pl])
                raise GameEnded([])
        elif ((evt_type == 'action_after') and isinstance(act, PlayerDeath)):
            T = THBRoleRole
            g = self.game
            tgt = act.target
            src = act.source
            if (not src):
                return act
            if (g.roles[tgt.player] == T.ATTACKER):
                # Killing an attacker rewards the killer with 3 cards.
                g.process_action(DrawCards(src, 3))
            elif (g.roles[tgt.player] == T.ACCOMPLICE):
                if (g.roles[src.player] == T.BOSS):
                    # Boss killing an accomplice loses all cards and equipment.
                    pl = g.players.player
                    pl.exclude(src.player).reveal(list(src.cards))
                    cards: List[Card] = []
                    cards.extend(src.cards)
                    cards.extend(src.showncards)
                    cards.extend(src.equips)
                    # Only drop when there is something to drop.
                    (cards and g.process_action(DropCards(src, src, cards)))
        return act
def dm_to_sd(dm):
    """Convert an NMEA 'ddmm.mmmm' degrees-minutes string to decimal degrees.

    Returns 0.0 for empty/'0' input or when the string does not parse.
    """
    misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG_MORE, f'In dm_to_sd, {dm}')
    if (not dm) or (dm == '0'):
        misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG_DEV, 'Not dm :-(')
        return 0.0
    try:
        # Degrees are everything before the final 'mm.mmmm' minutes block.
        d, m = re.match(r'^(\d+)(\d\d\.\d+)$', dm).groups()
        misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG_DEV, f'dm is now sd: {d} and {m}')
        return float(d) + (float(m) / 60)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
        misc.addLog(rpieGlobals.LOG_LEVEL_DEBUG_DEV, 'Tried to convert dm to sd but FAILED :-(')
        return 0.0
def main():
    """Train an RBM on MNIST digit '2' images, plot the training error and
    save 5x5 reconstruction grids for the first and last iterations."""
    mnist = fetch_mldata('MNIST original')
    X = (mnist.data / 255.0)
    y = mnist.target
    X = X[(y == 2)]
    # Random 500-image subsample keeps training fast.
    idx = np.random.choice(range(X.shape[0]), size=500, replace=False)
    X = X[idx]
    rbm = RBM(n_hidden=50, n_iterations=200, batch_size=25, learning_rate=0.001)
    rbm.fit(X)
    (training,) = plt.plot(range(len(rbm.training_errors)), rbm.training_errors, label='Training Error')
    plt.legend(handles=[training])
    plt.title('Error Plot')
    plt.ylabel('Error')
    plt.xlabel('Iterations')
    plt.show()
    gen_imgs = rbm.training_reconstructions
    # De-duplicated: the first/last-iteration grids used identical plot code.
    _save_reconstruction_grid(gen_imgs[0], 'Restricted Boltzmann Machine - First Iteration', 'rbm_first.png')
    _save_reconstruction_grid(gen_imgs[-1], 'Restricted Boltzmann Machine - Last Iteration', 'rbm_last.png')

def _save_reconstruction_grid(images, title, filename):
    """Save a 5x5 grid of 28x28 grayscale reconstructions to *filename*."""
    fig, axs = plt.subplots(5, 5)
    plt.suptitle(title)
    cnt = 0
    for i in range(5):
        for j in range(5):
            axs[(i, j)].imshow(images[cnt].reshape((28, 28)), cmap='gray')
            axs[(i, j)].axis('off')
            cnt += 1
    fig.savefig(filename)
    plt.close()
def parse_json_to_df(path: str) -> pd.DataFrame:
    """Stream records from *path* into a DataFrame.

    Nested columns ('related', 'categories', 'salesRank') are stringified for
    a uniform dtype; progress is logged every 10,000 rows and the frame is
    lowercased via lowercase_df before returning.
    """
    df_dict = {}
    # enumerate replaces the original's hand-rolled counter.
    for i, record in enumerate(parse(path)):
        df_dict[i] = record
        if (i + 1) % 10000 == 0:
            logger.info('Rows processed: {:,}'.format(i + 1))
    df = pd.DataFrame.from_dict(df_dict, orient='index')
    # These columns hold nested lists/dicts; cast to str.
    for col in ('related', 'categories', 'salesRank'):
        df[col] = df[col].astype(str)
    df = lowercase_df(df)
    return df
class AttributeForm(forms.ModelForm):
    """Admin form for editing a single Attribute (or Nick) on an entity.

    Mirrors the underlying Attribute's db_* columns into flat form fields on
    load and copies cleaned data back (un-pickling the value) on save.
    """
    attr_key = forms.CharField(label='Attribute Name', required=False, help_text='The main identifier of the Attribute. For Nicks, this is the pattern-matching string.')
    attr_category = forms.CharField(label='Category', help_text="Categorization. Unset (default) gives a category of `None`, which is is what is searched with e.g. `obj.db.attrname`. For 'nick'-type attributes, this is usually 'inputline' or 'channel'.", required=False, max_length=128)
    attr_value = PickledFormField(label='Value', help_text="Value to pickle/save. Db-objects are serialized as a list containing `__packed_dbobj__` (they can't easily be added from here). Nicks store their pattern-replacement here.", required=False)
    attr_type = forms.ChoiceField(label='Type', choices=[(None, '-'), ('nick', 'nick')], help_text="Unset for regular Attributes, 'nick' for Nick-replacement usage.", required=False)
    attr_lockstring = forms.CharField(label='Locks', required=False, help_text='Lock string on the form locktype:lockdef;lockfunc:lockdef;...', widget=forms.Textarea(attrs={'rows': 1, 'cols': 8}))
    class Meta():
        # Field order as displayed in the admin.
        fields = ('attr_key', 'attr_value', 'attr_category', 'attr_lockstring', 'attr_type')
    def __init__(self, *args, **kwargs):
        """Seed form-field initials (and mirror attrs onto the instance) from
        the bound Attribute, when one exists."""
        super().__init__(*args, **kwargs)
        attr_key = None
        attr_category = None
        attr_value = None
        attr_type = None
        attr_lockstring = None
        if hasattr(self.instance, 'attribute'):
            attr_key = self.instance.attribute.db_key
            attr_category = self.instance.attribute.db_category
            attr_value = self.instance.attribute.db_value
            attr_type = self.instance.attribute.db_attrtype
            attr_lockstring = self.instance.attribute.db_lock_storage
        self.fields['attr_key'].initial = attr_key
        self.fields['attr_category'].initial = attr_category
        self.fields['attr_type'].initial = attr_type
        self.fields['attr_value'].initial = attr_value
        self.fields['attr_lockstring'].initial = attr_lockstring
        self.instance.attr_key = attr_key
        self.instance.attr_category = attr_category
        self.instance.attr_value = attr_value
        if isinstance(attr_value, (set, _SaverSet)):
            # Sets cannot be edited through this field; show it read-only and
            # keep a deserialized copy for display.
            self.fields['attr_value'].disabled = True
            self.instance.deserialized_value = from_pickle(attr_value)
        self.instance.attr_type = attr_type
        self.instance.attr_lockstring = attr_lockstring
    def save(self, commit=True):
        """Copy cleaned form data onto the instance and return it.

        NOTE(review): the instance is returned un-saved regardless of
        *commit* — presumably persistence happens in the admin's save_model;
        confirm.
        """
        instance = self.instance
        instance.attr_key = (self.cleaned_data['attr_key'] or 'no_name_entered_for_attribute')
        instance.attr_category = (self.cleaned_data['attr_category'] or None)
        instance.attr_value = self.cleaned_data['attr_value']
        # Stored values are pickled; deserialize before assignment.
        instance.attr_value = from_pickle(instance.attr_value)
        instance.attr_type = (self.cleaned_data['attr_type'] or None)
        instance.attr_lockstring = self.cleaned_data['attr_lockstring']
        return instance
    def clean_attr_value(self):
        """Preserve the original value for types this form cannot edit
        (sets and datetimes); otherwise accept the submitted data."""
        data = self.cleaned_data['attr_value']
        initial = self.instance.attr_value
        if isinstance(initial, (set, _SaverSet, datetime)):
            return initial
        return data
def test_nested_subexpressions_to_be_eliminated_1():
    """CSE must factor the repeated subexpressions into temporaries: expr1
    (used 3x) becomes c1, expr2 (used 4x, itself containing expr1) becomes c0
    and is rewritten in terms of c1."""
    cfg = ControlFlowGraph()
    # Seven assignments sharing expr1 and expr2 as multiplication operands.
    cfg.add_node((node := BasicBlock(0, instructions=[Assignment(Variable('a'), BinaryOperation(OperationType.multiply, [expr1.copy(), Constant(2)])), Assignment(Variable('b'), BinaryOperation(OperationType.multiply, [expr1.copy(), Constant(3)])), Assignment(Variable('c'), BinaryOperation(OperationType.multiply, [expr1.copy(), Constant(4)])), Assignment(Variable('d'), BinaryOperation(OperationType.multiply, [expr2.copy(), Constant(2)])), Assignment(Variable('e'), BinaryOperation(OperationType.multiply, [expr2.copy(), Constant(3)])), Assignment(Variable('f'), BinaryOperation(OperationType.multiply, [expr2.copy(), Constant(4)])), Assignment(Variable('g'), BinaryOperation(OperationType.multiply, [expr2.copy(), Constant(5)]))])))
    _run_cse(cfg)
    (c0, c1) = (Variable('c0', ssa_label=0), Variable('c1', ssa_label=0))
    # Expected: c1 = expr1 defined first, then c0 defined via c1, with all
    # original assignments now referencing the temporaries.
    assert (node.instructions == [Assignment(c1.copy(), expr1), Assignment(Variable('a'), BinaryOperation(OperationType.multiply, [c1.copy(), Constant(2)])), Assignment(Variable('b'), BinaryOperation(OperationType.multiply, [c1.copy(), Constant(3)])), Assignment(Variable('c'), BinaryOperation(OperationType.multiply, [c1.copy(), Constant(4)])), Assignment(c0.copy(), BinaryOperation(OperationType.minus, [c1.copy(), BinaryOperation(OperationType.multiply, [Variable('y', ssa_label=2), Constant(2)])])), Assignment(Variable('d'), BinaryOperation(OperationType.multiply, [c0.copy(), Constant(2)])), Assignment(Variable('e'), BinaryOperation(OperationType.multiply, [c0.copy(), Constant(3)])), Assignment(Variable('f'), BinaryOperation(OperationType.multiply, [c0.copy(), Constant(4)])), Assignment(Variable('g'), BinaryOperation(OperationType.multiply, [c0.copy(), Constant(5)]))])
class OptionPlotoptionsErrorbarSonificationDefaultspeechoptionsPointgrouping(Options):
    """Point-grouping options for errorbar sonification speech tracks.

    Each option is a read/write property: the getter returns the configured
    value (or the shown default), the setter stores a plain (non-JS) value.
    NOTE(review): the original had duplicate getter/setter method pairs with
    the same name — stripped @property/@<name>.setter decorators, under which
    the setter silently shadowed the getter. Restored here.
    """

    @property
    def algorithm(self):
        return self._config_get('last')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class MyCoefficients(NCLS.Coefficients):
    def attachModels(self, modelList):
        """Wire this coefficient object to the first transport model in *modelList*.

        Allocates velocity arrays shaped like the model's ('dH', 0, 0)
        quadrature fields.
        """
        self.model = modelList[0]
        # Velocity storage at element ('q') and exterior-boundary ('ebqe')
        # quadrature points, double precision.
        self.q_v = np.zeros(self.model.q[('dH', 0, 0)].shape, 'd')
        self.ebqe_v = np.zeros(self.model.ebqe[('dH', 0, 0)].shape, 'd')
        # NOTE(review): the redistancing model is aliased to the transport
        # model itself — presumably no separate rd model is attached here;
        # confirm against the full setup.
        self.rdModel = self.model
        self.ebqe_rd_u = self.rdModel.ebqe[('u', 0)]
class ChatRoom(BotPlugin):
    """Errbot core plugin: joins configured chatrooms and exposes room commands.

    NOTE(review): the bare `(split_args_with=ShlexArgParser())` lines below
    are stripped decorators — presumably @botcmd(split_args_with=...); as
    written they are syntax errors and need restoring from upstream errbot.
    """
    # True once the initial room-join pass has run; prevents re-joining on reconnect.
    connected = False
    def callback_connect(self):
        """On first connect, join every room listed in CHATROOM_PRESENCE."""
        self.log.info('Connecting bot chatrooms')
        if (not self.connected):
            self.connected = True
            for room in self.bot_config.CHATROOM_PRESENCE:
                self.log.debug('Try to join room %s', repr(room))
                try:
                    self._join_room(room)
                except Exception:
                    # One failed room must not prevent joining the others.
                    self.log.exception(f'Joining room {repr(room)} failed')
    def _join_room(self, room):
        """Join *room*; a (room, password) tuple supplies a room password."""
        username = self.bot_config.CHATROOM_FN
        password = None
        if isinstance(room, (tuple, list)):
            (room, password) = room
            self.log.info('Joining room %s with username %s and pass ***.', room, username)
        else:
            self.log.info('Joining room %s with username %s.', room, username)
        self.query_room(room).join(username=self.bot_config.CHATROOM_FN, password=password)
    def deactivate(self):
        """Reset the connected flag so rooms are rejoined on reactivation."""
        self.connected = False
        super().deactivate()
    (split_args_with=ShlexArgParser())
    def room_create(self, message, args):
        """Create the chatroom named in args[0]."""
        if (len(args) < 1):
            return 'Please tell me which chatroom to create.'
        room = self.query_room(args[0])
        room.create()
        return f'Created the room {room}.'
    (split_args_with=ShlexArgParser())
    def room_join(self, message, args):
        """Join the room in args[0], optionally with a password in args[1]."""
        arglen = len(args)
        if (arglen < 1):
            return 'Please tell me which chatroom to join.'
        args[0].strip()
        (room_name, password) = ((args[0], None) if (arglen == 1) else (args[0], args[1]))
        room = self.query_room(room_name)
        if (room is None):
            return f'Cannot find room {room_name}.'
        room.join(username=self.bot_config.CHATROOM_FN, password=password)
        return f'Joined the room {room_name}.'
    (split_args_with=ShlexArgParser())
    def room_leave(self, message, args):
        """Leave the chatroom named in args[0]."""
        if (len(args) < 1):
            return 'Please tell me which chatroom to leave.'
        self.query_room(args[0]).leave()
        return f'Left the room {args[0]}.'
    (split_args_with=ShlexArgParser())
    def room_destroy(self, message, args):
        """Destroy the chatroom named in args[0]."""
        if (len(args) < 1):
            return 'Please tell me which chatroom to destroy.'
        self.query_room(args[0]).destroy()
        return f'Destroyed the room {args[0]}.'
    (split_args_with=ShlexArgParser())
    def room_invite(self, message, args):
        """Invite args[1:] into the room named in args[0]."""
        if (len(args) < 2):
            return 'Please tell me which person(s) to invite into which room.'
        self.query_room(args[0]).invite(*args[1:])
        return f"Invited {', '.join(args[1:])} into the room {args[0]}."
    def room_list(self, message, args):
        """List the rooms the bot currently occupies."""
        rooms = [str(room) for room in self.rooms()]
        if len(rooms):
            rooms_str = '\n\t'.join(rooms)
            return f'''I'm currently in these rooms:
{rooms_str}'''
        else:
            return "I'm not currently in any rooms."
    (split_args_with=ShlexArgParser())
    def room_occupants(self, message, args):
        """Yield the occupant list for each room given in args."""
        if (len(args) < 1):
            (yield 'Please supply a room to list the occupants of.')
            return
        for room in args:
            try:
                occupants = [o.person for o in self.query_room(room).occupants]
                occupants_str = '\n\t'.join(map(str, occupants))
                (yield f'''Occupants in {room}:
{occupants_str}.''')
            except RoomNotJoinedError as e:
                (yield f'Cannot list occupants in {room}: {e}.')
    (split_args_with=ShlexArgParser())
    def room_topic(self, message, args):
        """Get (one arg) or set (two args) a room's topic."""
        arglen = len(args)
        if (arglen < 1):
            return 'Please tell me which chatroom you want to know the topic of.'
        if (arglen == 1):
            try:
                topic = self.query_room(args[0]).topic
            except RoomNotJoinedError as e:
                return f'Cannot get the topic for {args[0]}: {e}.'
            if (topic is None):
                return f'No topic is set for {args[0]}.'
            else:
                return f'Topic for {args[0]}: {topic}.'
        else:
            try:
                self.query_room(args[0]).topic = args[1]
            except RoomNotJoinedError as e:
                return f'Cannot set the topic for {args[0]}: {e}.'
            return f'Topic for {args[0]} set.'
    def callback_message(self, msg):
        """Relay direct messages to rooms (CHATROOM_RELAY) and room messages
        back to users (REVERSE_CHATROOM_RELAY), per configuration."""
        try:
            if msg.is_direct:
                username = msg.frm.person
                if (username in self.bot_config.CHATROOM_RELAY):
                    self.log.debug('Message to relay from %s.', username)
                    body = msg.body
                    rooms = self.bot_config.CHATROOM_RELAY[username]
                    for roomstr in rooms:
                        self.send(self.query_room(roomstr), body)
            elif msg.is_group:
                fr = msg.frm
                chat_room = str(fr.room)
                if (chat_room in self.bot_config.REVERSE_CHATROOM_RELAY):
                    users_to_relay_to = self.bot_config.REVERSE_CHATROOM_RELAY[chat_room]
                    self.log.debug('Message to relay to %s.', users_to_relay_to)
                    body = f'[{fr.person}] {msg.body}'
                    for user in users_to_relay_to:
                        self.send(user, body)
        except Exception as e:
            # Relay failures must never break normal message processing.
            self.log.exception(f'crashed in callback_message {e}')
class TestModel():
    """Behavioural checks for the Model base class."""

    def test_enum_in_model(self):
        # Enum members must serialize to their string value.
        class C(Model):
            v: E
        instance = C(v=E.a)
        assert isinstance(instance.model_dump()['v'], str)
        assert instance.model_dump_json() == '{"v":"a"}'

    def test_timestamps_in_model(self):
        # Both plain and fractional-second ISO timestamps must validate.
        class C(Model):
            v: datetime
        C(v='2019-01-01T12:00:00Z')
        C(v='2019-01-01T12:00:00.1234Z')

    def test_unknown_attribute_ignored(self):
        # An unknown constructor kwarg warns, is not stored, and is absent
        # from the serialized output.
        class Data(Model):
            i: int
        with pytest.warns(UnknownModelAttributeWarning):
            data = Data(i=1, u=2)
        with pytest.raises(AttributeError):
            assert data.u
        assert 'u' not in data.model_dump_json()
(name='reset')
_context
_flag
_analytics
def db_reset(ctx: click.Context, yes: bool) -> None:
    """Reset the Fides database, asking for confirmation unless --yes was given."""
    config = ctx.obj['CONFIG']
    if yes:
        confirmation = 'y'
    else:
        echo_red('This will drop all data from the Fides database and reload the default taxonomy!')
        confirmation = input('Are you sure [y/n]? ')
    if confirmation.lower() != 'y':
        print('Aborting!')
        return
    handle_cli_response(_api.db_action(server_url=config.cli.server_url, headers=config.user.auth_header, action='reset'))
def delete_topic(topic):
    """Delete a Kafka topic via kafka-topics.sh, streaming the tool's output.

    Raises CalledProcessError when the tool exits non-zero.
    """
    cmd = '{}/bin/kafka-topics.sh --zookeeper {} --delete --topic {}'.format(KAFKA_PATH, ZK_KAFKA_HOST, topic).split()
    with Popen(cmd, stdout=PIPE, bufsize=1, universal_newlines=True) as p:
        for line in p.stdout:
            print(line, end='')
    # BUG FIX: the return-code check used to sit inside the `with` block,
    # where returncode is still None (Popen.__exit__ is what waits on the
    # process), so it raised CalledProcessError(None, ...) even on success.
    if p.returncode != 0:
        raise CalledProcessError(p.returncode, p.args)
class OptionPlotoptionsVariwideSonificationDefaultspeechoptions(Options):
    """Default speech-track options for variwide series sonification.

    Simple options are read/write properties (getter returns the configured
    value or the shown default; setter stores a plain, non-JS value); nested
    option groups are read-only properties returning sub-option objects.
    NOTE(review): the original had duplicate getter/setter method pairs with
    the same name — stripped @property/@<name>.setter decorators, under which
    the setter silently shadowed the getter. Restored here.
    """

    @property
    def activeWhen(self) -> 'OptionPlotoptionsVariwideSonificationDefaultspeechoptionsActivewhen':
        return self._config_sub_data('activeWhen', OptionPlotoptionsVariwideSonificationDefaultspeechoptionsActivewhen)

    @property
    def language(self):
        return self._config_get('en-US')

    @language.setter
    def language(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionPlotoptionsVariwideSonificationDefaultspeechoptionsMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsVariwideSonificationDefaultspeechoptionsMapping)

    @property
    def pointGrouping(self) -> 'OptionPlotoptionsVariwideSonificationDefaultspeechoptionsPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsVariwideSonificationDefaultspeechoptionsPointgrouping)

    @property
    def preferredVoice(self):
        return self._config_get(None)

    @preferredVoice.setter
    def preferredVoice(self, text: str):
        self._config(text, js_type=False)

    @property
    def showPlayMarker(self):
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        return self._config_get('speech')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
def make_plugin_bumper(plugin_dir: Path, new_version: Version) -> PythonPackageVersionBumper:
    """Build a version bumper for the plugin package living in *plugin_dir*."""
    # Package directory uses underscores where the plugin dir uses dashes.
    package_dir = plugin_dir / plugin_dir.name.replace('-', '_')
    return PythonPackageVersionBumper(
        ROOT_DIR,
        package_dir,
        new_version,
        files_to_pattern={},
        specifier_set_patterns=[YAML_DEPENDENCY_SPECIFIER_SET_PATTERN, JSON_DEPENDENCY_SPECIFIER_SET_PATTERN],
        package_name=plugin_dir.name,
    )
def test():
    """Check the learner's solution builds and saves a Spanish DocBin."""
    checks = (
        ('spacy.blank("es")', 'Estas creando el pipeline vacio en espanol?'),
        ('DocBin(docs=docs)', 'Creaste el objeto DocBin correctamente?'),
        ('doc_bin.to_disk(', 'Utilizaste el metodo to_disk?'),
        ('train.spacy', 'Nombraste el archivo correctamente?'),
    )
    for snippet, hint in checks:
        assert snippet in __solution__, hint
    __msg__.good('Bien hecho! Antes de entrenar un modelo con los datos, siempre debes revisar dos veces que tu matcher no identifique ningun falso positivo. Pero ese proceso aun es mas rapido que hacer *todo* manualmente.')
def vertices_frame(size, frame_thickness):
    """Relative-move vertex list tracing a rectangular frame.

    Returns nine (dx, dy) steps: the outer rectangle outline followed by the
    inner cutout of the given thickness.
    """
    width, height = size[0], size[1]
    t = frame_thickness
    outer = [
        (width - t - 1, 0),
        (0, -height + 1),
        (-width + 1, 0),
        (0, height - 1),
        (t - 1, 0),
    ]
    inner = [
        (0, -(height - t - 1)),
        (width - 2 * t - 1, 0),
        (0, height - 2 * t - 1),
        (-(width - 2 * t - 2), 0),
    ]
    return outer + inner
def knowledge_list(api_address: str, space_name: str, page: int, page_size: int, doc_id: int, show_content: bool, out_format: str):
    """Dispatch a knowledge-listing request at the right granularity.

    No space given -> list spaces; no doc id -> list the space's documents;
    otherwise list the document's chunks.
    """
    viewer = _KnowledgeVisualizer(api_address, out_format)
    if not space_name:
        viewer.list_spaces()
        return
    if not doc_id:
        viewer.list_documents(space_name, page, page_size)
        return
    viewer.list_chunks(space_name, doc_id, page, page_size, show_content)
def test_cli_enums():
    """Enum-typed arguments should be parsed from their member names."""
    cli = Radicli()
    ran = False

    class FoodEnum(Enum):
        pizza = ''
        pasta = ''
        burger = ''

    class DrinkEnum(Enum):
        soda = ''
        juice = ''
        beer = ''

    # NOTE(review): the source had a bare keyword-argument tuple here — a
    # SyntaxError that also never registered the command. Restored the
    # radicli registration decorator.
    @cli.command('test', a=Arg('--a'), b=Arg('--b'))
    def test(a: FoodEnum, b: DrinkEnum):
        assert a == FoodEnum.burger
        assert b == DrinkEnum.beer
        nonlocal ran
        ran = True

    cli.run(['', 'test', '--a', 'burger', '--b', 'beer'])
    assert ran
class TFEstimator(Estimator):
    """Estimator that trains a TensorFlow Keras model on a Flink table.

    The model, loss and optimizer are bundled into a ``SimpleTFModelFactory``
    which is pickled into the TF cluster configuration; :meth:`fit` returns a
    :class:`TFModel` bound to that configuration.

    NOTE(review): the helper methods below had lost their ``@staticmethod`` /
    ``@classmethod`` decorators in the source — e.g. ``self._get_column_names(schema)``
    would have raised a TypeError. Restored.
    """

    def __init__(self, statement_set: StatementSet, model: tf.keras.Model, loss: Union[(tf.keras.losses.Loss, str)], optimizer: Union[(tf.keras.optimizers.Optimizer, str)], worker_num: int, feature_cols: List[str], label_col: str, max_epochs: int=1, batch_size: Optional[int]=32, cluster_config_properties: Optional[Mapping[(str, str)]]=None):
        self.max_epochs = max_epochs
        self.optimizer = optimizer
        self.loss = loss
        self.model = model
        self.batch_size = batch_size
        self.label_col = label_col
        self.feature_cols = feature_cols
        self.worker_num = worker_num
        self._statement_set = statement_set
        # Extra key/value pairs forwarded verbatim to the TF cluster config.
        self.cluster_config_properties = cluster_config_properties if cluster_config_properties is not None else {}

    def fit(self, *inputs: Table) -> 'TFModel':
        """Configure a TF training cluster for the single input table.

        Raises:
            ValueError: if anything other than exactly one table is given.
        """
        if len(inputs) != 1:
            raise ValueError('Only one input table is allowed.')
        self._verify_input_table(inputs[0])
        input_table = inputs[0]
        tf_cluster_config_builder = TFClusterConfig.new_builder()
        predict_col_data_type = input_table.get_schema().get_field_data_type(self.label_col)
        tf_cluster_config_builder.set_worker_count(self.worker_num).set_node_entry(tf_multi_worker_entry).set_property(INPUT_COL_NAMES, self._get_column_names(input_table.get_schema())).set_property(FEATURE_COLS, ','.join(self.feature_cols)).set_property(LABEL_COL, self.label_col).set_property(BATCH_SIZE, str(self.batch_size)).set_property(INPUT_TYPES, self._get_input_type(input_table.get_schema())).set_property(MAX_EPOCHS, str(self.max_epochs))
        for (k, v) in self.cluster_config_properties.items():
            tf_cluster_config_builder.set_property(k, v)
        tf_model_factory = SimpleTFModelFactory(model=self.model, loss=self.loss, optimizer=self.optimizer)
        # The factory travels as a base64 string property of the cluster config.
        tf_cluster_config_builder.set_property(MODEL_FACTORY_BASE64, self._pickle_model_factory(tf_model_factory))
        return TFModel(tf_cluster_config_builder=tf_cluster_config_builder, predict_col_data_type=predict_col_data_type, statement_set=self._statement_set, input_table=inputs[0])

    def save(self, path: str) -> None:
        raise Exception('TFEstimator does not support save and load')

    @classmethod
    def load(cls, env: StreamExecutionEnvironment, path: str) -> 'TFEstimator':
        raise Exception('TFEstimator does not support save and load')

    def get_param_map(self) -> Dict[('Param[Any]', Any)]:
        return {}

    @staticmethod
    def _pickle_model_factory(model_factory: TFModelFactory) -> str:
        """Serialize the model factory into a base64 text blob."""
        return base64.encodebytes(pickle.dumps(model_factory)).decode('utf-8')

    @staticmethod
    def _get_input_type(schema: TableSchema) -> str:
        """Map each Flink column type to its dl-on-flink wire type name."""
        data_types = schema.get_field_data_types()
        dl_on_flink_types = []
        for data_type in data_types:
            data_type = type(data_type)
            if data_type not in FLINK_TYPE_TO_DL_ON_FLINK_TYPE:
                raise TypeError(f'Unsupported type of column {data_type}')
            dl_on_flink_types.append(FLINK_TYPE_TO_DL_ON_FLINK_TYPE[data_type])
        return ','.join(dl_on_flink_types)

    @staticmethod
    def _get_column_names(schema: TableSchema):
        """Comma-join the schema's field names."""
        return ','.join(schema.get_field_names())

    def _verify_input_table(self, table: Table):
        # NOTE(review): assert-based validation disappears under `python -O`;
        # kept as-is to preserve behavior.
        for feature_col in self.feature_cols:
            assert feature_col in table.get_schema().get_field_names()
        assert self.label_col in table.get_schema().get_field_names()
def main(argv):
    """Search Libgen.Fiction from the command line and print the top hits.

    NOTE(review): `argv` is accepted but the parser reads `sys.argv`
    directly, matching the original behavior.
    """
    import argparse
    client = LibgenFictionClient()
    parser = argparse.ArgumentParser(description='Use Libgen.Fiction from the command line')
    parser.add_argument('--query', '-q', help='Search query')
    parser.add_argument('--title', '-t', help='Title to search for')
    parser.add_argument('--author', '-a', help='Author to search for')
    parser.add_argument('--series', '-s', help='Series')
    parser.add_argument('--language', '-l', help='Language')
    parser.add_argument('--format', '-f', help='Ebook format (epub, mobi, azw, azw3, fb2, pdf, rtf, txt)')
    # With no CLI arguments at all, show help instead of an empty search.
    args = parser.parse_args(args=(None if sys.argv[1:] else ['--help']))
    query, criteria = '', ''
    if args.query:
        query = args.query
    elif args.title:
        query, criteria = args.title, 'title'
    elif args.author:
        query, criteria = args.author, 'authors'
    elif args.series:
        query, criteria = args.series, 'series'
    print(query + ' ' + criteria)
    if not query:
        sys.exit()
    search_results = client.search(query, criteria, args.language, args.format)
    for result in search_results.results[:5]:
        print(result.title + ' by ' + result.authors)
        print('Detail', client.get_detail_url(result.md5))
        print('Download', client.get_download_url(result.md5))
def convert_size_string_to_bytes(size: str) -> (float | int):
    """Convert a human-readable size such as '1.5 GB' into a byte count.

    The unit is looked up case-insensitively in SIZE_UNITS; the unit's index
    is the power of 1024 applied. Index 0 (plain bytes) returns the parsed
    amount unchanged.
    """
    lowered_units = [unit.lower() for unit in SIZE_UNITS]
    # NOTE(review): the replace below looks like a no-op; it may originally
    # have normalized a non-breaking space lost in transcoding — verify.
    pieces = size.strip().replace(' ', ' ').split(' ')
    quantity = float(pieces[0])
    exponent = lowered_units.index(pieces[1].lower())
    if not exponent:
        return quantity
    return int(quantity * (1024 ** exponent))
class Components():
    """FluentUI component factory bound to a report page.

    Registers the fabric-ui JS/CSS assets on the page and exposes shortcut
    factories (select/button/toggle/check/icon).

    NOTE(review): ``buttons`` and ``lists`` had lost their ``@property``
    decorators in the source — ``self.lists.select`` in ``__init__`` would
    otherwise read an attribute of a bound method object. Restored.
    """

    def __init__(self, page):
        self.page = page
        if self.page.ext_packages is None:
            self.page.ext_packages = {}
        self.page.ext_packages.update(PkgImports.FLUENTUI)
        self.page.imports.reload()
        # Route icon resolution through the office-ui-fabric-core set.
        Defaults_css.ICON_FAMILY = 'office-ui-fabric-core'
        Defaults_css.ICON_MAPPINGS[Defaults_css.ICON_FAMILY] = PkgImports.ICON_MAPPINGS
        self.page.jsImports.add('fabric-ui')
        self.page.cssImport.add('fabric-ui/components')
        self.select = self.lists.select
        self.button = self.buttons.button
        self.toggle = self.buttons.toggle
        self.check = self.buttons.check
        self.icon = self.page.ui.images.icon

    def label(self, text=None, width=(100, 'px'), height=(None, 'px'), html_code=None, tooltip='', options=None, profile=None):
        """Create an ms-Label component; options may flag it disabled/required."""
        component = self.page.web.std.tags.label(text, width, height, html_code, tooltip, options, profile)
        component.add_style(['ms-Label'], clear_first=True)
        if (options is not None) and options.get('disabled'):
            component.attr['class'].add('is-disabled')
        if (options is not None) and options.get('required'):
            component.attr['class'].add('is-required')
        return component

    def link(self, text='', url='', icon=None, align='left', tooltip=None, helper=None, height=(None, 'px'), decoration=False, html_code=None, options=None, profile=None):
        """Create an ms-Link styled hyperlink component."""
        component = self.page.web.std.link(text, url, icon, align, tooltip, helper, height, decoration, html_code, options, profile)
        component.add_style(['ms-Link'], clear_first=True)
        return component

    def loading(self, text='Loading...', width=(None, '%'), height=(None, '%'), options=None, profile=None):
        """Create a spinner; options={'large': True} switches to the large variant."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        html_but = HtmlFtwForms.Spinner(self.page, text, None, (options or {}), profile, {'width': width, 'height': height})
        if (options is not None) and options.get('large'):
            html_but.attr['class'].add('ms-Spinner--large')
        return html_but

    @property
    def buttons(self):
        """Grouped button component factories."""
        return groups.FtwCompBtns.Components(self)

    @property
    def lists(self):
        """Grouped list component factories."""
        return groups.FtwCompLists.Components(self)
def upload_folder_to_s3(local_folder, s3_folder):
    """Upload every file under ``local_folder`` to the given S3 URI.

    Files are uploaded concurrently (10 worker threads); files that already
    exist remotely are counted and reported as skipped.
    """
    client = boto3.client('s3', aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
    bucket, s3_key = parse_s3_uri(s3_folder)
    # Parallel argument lists for ThreadPoolExecutor.map.
    clients, buckets, local_paths, s3_paths = [], [], [], []
    for root, _dirs, files in os.walk(local_folder):
        for filename in files:
            src = os.path.join(root, filename)
            clients.append(client)
            buckets.append(bucket)
            local_paths.append(src)
            # Mirror the local layout under the destination key prefix.
            s3_paths.append(os.path.join(s3_key, os.path.relpath(src, local_folder)))
    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as pool:
        already_exist_list = list(tqdm(pool.map(upload_single_file, clients, buckets, local_paths, s3_paths), total=len(s3_paths), desc=f"{colorstr('aws:')} Uploading dataset to S3"))
    num_already_exist = np.sum(np.array(already_exist_list))
    if num_already_exist > 0:
        LOGGER.warning(f"{colorstr('aws:')} Skipped {num_already_exist} items since they already exists.")
def parse_srclib_spec(spec):
    """Parse a srclib spec of the form ``[number:]name[/subdir]@ref``.

    Returns:
        tuple: (name, ref, number, subdir) — number and subdir are None when
        not present in the spec.

    Raises:
        MetaDataException: if the spec is not a string, has no '@ref', or has
        more than one '@'.
    """
    if not isinstance(spec, str):
        raise MetaDataException(_("can not parse scrlib spec (not a string): '{}'").format(spec))
    # NOTE(review): the '@' separator had been lost in the source
    # (split('') always raises ValueError); restored here and in the
    # matching error message.
    tokens = spec.split('@')
    if len(tokens) > 2:
        raise MetaDataException(_("could not parse srclib spec (too many '@' signs): '{}'").format(spec))
    elif len(tokens) < 2:
        raise MetaDataException(_("could not parse srclib spec (no ref specified): '{}'").format(spec))
    name, ref = tokens
    number = None
    subdir = None
    # Optional leading "number:" prefix.
    if ':' in name:
        number, name = name.split(':', 1)
    # Optional trailing "/subdir" suffix.
    if '/' in name:
        name, subdir = name.split('/', 1)
    return (name, ref, number, subdir)
def test_observation_normalization_configs():
    """Run the normalization pipeline for each manual-config variant."""
    base_config = {'default_strategy': 'maze.normalization_strategies.MeanZeroStdOneObservationNormalizationStrategy', 'default_strategy_config': {'clip_range': (None, None), 'axis': 0}, 'default_statistics': None, 'statistics_dump': 'statistics.pkl', 'exclude': None, 'manual_config': None}
    manual_variants = [
        None,
        {'observation': {'clip_range': (0, 1)}},
        {'observation': {'strategy': 'maze.normalization_strategies.MeanZeroStdOneObservationNormalizationStrategy', 'statistics': {'mean': [0, 0, 0, 0], 'std': [1, 1, 1, 1]}}},
    ]
    for manual_config in manual_variants:
        run_observation_normalization_pipeline(dict(base_config, manual_config=manual_config))
class TestSecureRandomShardingStageService(IsolatedAsyncioTestCase):
    """Unit tests for SecureRandomShardStageService.

    NOTE(review): the source had three defects restored here: the
    ``@patch`` decorator on ``setUp`` had degraded to a bare string
    expression (so the mock was never injected); ``status_update_ts`` in
    ``_create_pc_instance`` had lost its literal (a SyntaxError); and the
    ``cmd_args_list`` comprehension was missing its f-string prefix.
    """

    @patch('fbpcp.service.storage.StorageService')
    def setUp(self, mock_storage_svc: StorageService) -> None:
        self.mock_storage_svc = mock_storage_svc
        self.mock_mpc_svc = MagicMock(spec=MPCService)
        self.mock_mpc_svc.onedocker_svc = MagicMock()
        # Canned storage reads: each entry simulates one dataset's size stats.
        self.magic_mocks_read = []
        self.magic_mocks_read.append(MagicMock(return_value=json.dumps({'union_file_size': 1894, 'partner_input_size': 196, 'publisher_input_size': 1793})))
        self.magic_mocks_read.append(MagicMock(return_value=json.dumps({'union_file_size': 5569966, 'partner_input_size': 1057038, 'publisher_input_size': 4569271})))
        self.magic_mocks_read.append(MagicMock(return_value=json.dumps({'union_file_size': 1894, 'partner_input_size': 196, 'publisher_input_size': 1698})))
        self.magic_mocks_read.append(MagicMock(return_value=json.dumps({'union_file_size': 386240, 'partner_input_size': 115872, 'publisher_input_size': 270538})))
        onedocker_binary_config_map = defaultdict((lambda : OneDockerBinaryConfig(tmp_directory='/test_tmp_directory/', binary_version='latest', repository_path='test_path/')))
        self.stage_svc = SecureRandomShardStageService(self.mock_storage_svc, onedocker_binary_config_map, self.mock_mpc_svc)
        self.container_permission_id = 'test-container-permission'

    async def test_run_async_with_udp(self) -> None:
        """run_async should start containers once per dataset with the expected args."""
        containers = [ContainerInstance(instance_id='test_container_id', status=ContainerInstanceStatus.STARTED)]
        self.mock_mpc_svc.start_containers.return_value = containers
        private_computation_instance = self._create_pc_instance()
        num_containers = private_computation_instance.infra_config.num_pid_containers
        binary_name = 'data_processing/secure_random_sharder'
        test_server_ips = [f'192.0.2.{i}' for i in range(num_containers)]
        # NOTE(review): restored the missing f-string prefix.
        cmd_args_list = [f'cmd_{i}' for i in range(num_containers)]
        self.mock_mpc_svc.convert_cmd_args_list.return_value = (binary_name, cmd_args_list)
        for magic_mock in self.magic_mocks_read:
            self.mock_mpc_svc.start_containers.reset_mock()
            self.mock_storage_svc.read = magic_mock
            (await self.stage_svc.run_async(private_computation_instance, NullCertificateProvider(), NullCertificateProvider(), '', '', test_server_ips))
            self.mock_mpc_svc.start_containers.assert_called_once_with(cmd_args_list=cmd_args_list, onedocker_svc=self.mock_mpc_svc.onedocker_svc, binary_version='latest', binary_name=binary_name, timeout=None, env_vars={'ONEDOCKER_REPOSITORY_PATH': 'test_path/'}, wait_for_containers_to_start_up=True, existing_containers=None, env_vars_list=None, opa_workflow_path=None, permission=ContainerPermissionConfig(self.container_permission_id))
            self.assertEqual(containers, private_computation_instance.infra_config.instances[(- 1)].containers)
            self.assertEqual('SECURE_RANDOM_RESHARDER', private_computation_instance.infra_config.instances[(- 1)].stage_name)

    async def test_get_game_args_with_secure_random_sharding(self) -> None:
        """The generated game args should reflect the per-file shard counts."""
        private_computation_instance = self._create_pc_instance()
        test_shards_per_file = [([1] * private_computation_instance.infra_config.num_pid_containers), ([23] * private_computation_instance.infra_config.num_pid_containers), ([1] * private_computation_instance.infra_config.num_pid_containers), ([1] * private_computation_instance.infra_config.num_pid_containers)]
        for i in range(len(self.magic_mocks_read)):
            self.mock_storage_svc.read = self.magic_mocks_read[i]
            test_game_args = [{'input_filename': f'{private_computation_instance.data_processing_output_path}_combine_{j}', 'output_base_path': f'{private_computation_instance.secure_random_sharder_output_base_path}', 'file_start_index': sum(test_shards_per_file[i][0:j]), 'num_output_files': test_shards_per_file[i][j], 'use_tls': False, 'ca_cert_path': '', 'server_cert_path': '', 'private_key_path': ''} for j in range(private_computation_instance.infra_config.num_pid_containers)]
            self.assertEqual(test_game_args, (await self.stage_svc._get_secure_random_sharder_args(private_computation_instance, '', '')))

    async def test_get_union_stats(self) -> None:
        """Union/intersection sizes should be derived from the stored stats."""
        private_computation_instance = self._create_pc_instance()
        test_union_sizes = [([1894] * private_computation_instance.infra_config.num_pid_containers), ([5569966] * private_computation_instance.infra_config.num_pid_containers), ([1894] * private_computation_instance.infra_config.num_pid_containers), ([386240] * private_computation_instance.infra_config.num_pid_containers)]
        test_intersection_sizes = [([95] * private_computation_instance.infra_config.num_pid_containers), ([56343] * private_computation_instance.infra_config.num_pid_containers), ([0] * private_computation_instance.infra_config.num_pid_containers), ([170] * private_computation_instance.infra_config.num_pid_containers)]
        for i in range(len(self.magic_mocks_read)):
            self.mock_storage_svc.read = self.magic_mocks_read[i]
            (union_sizes, intersection_sizes) = (await self.stage_svc.get_union_stats(private_computation_instance))
            self.assertEqual(test_union_sizes[i], union_sizes)
            self.assertEqual(test_intersection_sizes[i], intersection_sizes)

    async def test_get_dynamic_shards_num(self) -> None:
        """Shard counts should scale with the union/intersection sizes."""
        private_computation_instance = self._create_pc_instance()
        test_shards_per_file = [([1] * private_computation_instance.infra_config.num_pid_containers), ([23] * private_computation_instance.infra_config.num_pid_containers), ([1] * private_computation_instance.infra_config.num_pid_containers), ([1] * private_computation_instance.infra_config.num_pid_containers)]
        for i in range(len(self.magic_mocks_read)):
            self.mock_storage_svc.read = self.magic_mocks_read[i]
            (union_sizes, intersection_sizes) = (await self.stage_svc.get_union_stats(private_computation_instance))
            shards_per_file = self.stage_svc.get_dynamic_shards_num(union_sizes, intersection_sizes)
            self.assertEqual(test_shards_per_file[i], shards_per_file)

    async def test_setup_udp_lift_stages(self) -> None:
        """Lift/UDP container counts should follow the dynamic shard plan."""
        test_num_lift_containers = [1, 2, 1, 1]
        test_num_udp_containers = [2, 46, 2, 2]
        for i in range(len(self.magic_mocks_read)):
            private_computation_instance = self._create_pc_instance()
            self.mock_storage_svc.read = self.magic_mocks_read[i]
            (union_sizes, intersection_sizes) = (await self.stage_svc.get_union_stats(private_computation_instance))
            shards_per_file = self.stage_svc.get_dynamic_shards_num(union_sizes, intersection_sizes)
            self.stage_svc.setup_udp_lift_stages(private_computation_instance, union_sizes, intersection_sizes, shards_per_file)
            self.assertEqual(test_num_lift_containers[i], private_computation_instance.infra_config.num_lift_containers)
            self.assertEqual(test_num_udp_containers[i], private_computation_instance.infra_config.num_udp_containers)

    async def test_tls_env_vars(self) -> None:
        """With PCF_TLS enabled, every container should get the TLS env vars."""
        self.mock_mpc_svc.start_containers.return_value = [ContainerInstance(instance_id='test_container_id', status=ContainerInstanceStatus.STARTED)]
        private_computation_instance = self._create_pc_instance(pcs_features={PCSFeature.PCF_TLS})
        binary_name = 'data_processing/secure_random_sharder'
        test_server_ips = [f'192.0.2.{i}' for i in range(private_computation_instance.infra_config.num_pid_containers)]
        self.mock_mpc_svc.convert_cmd_args_list.return_value = (binary_name, ['cmd_1', 'cmd_2'])
        test_server_hostnames = [f'node{i}.test.com' for i in range(private_computation_instance.infra_config.num_pid_containers)]
        expected_server_certificate = 'test_server_cert'
        expected_ca_certificate = 'test_ca_cert'
        expected_server_key_resource_id = 'test_key'
        expected_server_key_region = 'test_region'
        expected_server_key_install_path = 'test/path'
        expected_server_certificate_path = '/test/server_certificate_path'
        expected_ca_certificate_path = '/test/server_certificate_path'
        for magic_mock in self.magic_mocks_read:
            self.mock_mpc_svc.start_containers.reset_mock()
            self.mock_storage_svc.read = magic_mock
            (await self.stage_svc.run_async(private_computation_instance, self._get_mock_certificate_provider(expected_server_certificate), self._get_mock_certificate_provider(expected_ca_certificate), expected_server_certificate_path, expected_ca_certificate_path, test_server_ips, test_server_hostnames, StaticPrivateKeyReferenceProvider(expected_server_key_resource_id, expected_server_key_region, expected_server_key_install_path)))
            self.mock_mpc_svc.start_containers.assert_called_once()
            call_kwargs = self.mock_mpc_svc.start_containers.call_args[1]
            self.assertEqual(TLS_OPA_WORKFLOW_PATH, call_kwargs['opa_workflow_path'])
            call_env_args_list = call_kwargs['env_vars_list']
            self.assertTrue(call_env_args_list)
            for (i, call_env_args) in enumerate(call_env_args_list):
                self.assertTrue(('ONEDOCKER_REPOSITORY_PATH' in call_env_args))
                self.assertEqual('test_path/', call_env_args['ONEDOCKER_REPOSITORY_PATH'])
                self.assertTrue((SERVER_CERTIFICATE_ENV_VAR in call_env_args))
                self.assertEqual(expected_server_certificate, call_env_args[SERVER_CERTIFICATE_ENV_VAR])
                self.assertTrue((SERVER_PRIVATE_KEY_REF_ENV_VAR in call_env_args))
                self.assertEqual(expected_server_key_resource_id, call_env_args[SERVER_PRIVATE_KEY_REF_ENV_VAR])
                self.assertTrue((SERVER_PRIVATE_KEY_REGION_ENV_VAR in call_env_args))
                self.assertEqual(expected_server_key_region, call_env_args[SERVER_PRIVATE_KEY_REGION_ENV_VAR])
                self.assertTrue((SERVER_PRIVATE_KEY_PATH_ENV_VAR in call_env_args))
                self.assertEqual(expected_server_key_install_path, call_env_args[SERVER_PRIVATE_KEY_PATH_ENV_VAR])
                self.assertTrue((CA_CERTIFICATE_ENV_VAR in call_env_args))
                self.assertEqual(expected_ca_certificate, call_env_args[CA_CERTIFICATE_ENV_VAR])
                self.assertTrue((SERVER_CERTIFICATE_PATH_ENV_VAR in call_env_args))
                self.assertEqual(expected_server_certificate_path, call_env_args[SERVER_CERTIFICATE_PATH_ENV_VAR])
                self.assertTrue((CA_CERTIFICATE_PATH_ENV_VAR in call_env_args))
                self.assertEqual(expected_ca_certificate_path, call_env_args[CA_CERTIFICATE_PATH_ENV_VAR])
                self.assertTrue((SERVER_IP_ADDRESS_ENV_VAR in call_env_args))
                self.assertEqual(test_server_ips[i], call_env_args[SERVER_IP_ADDRESS_ENV_VAR])
                self.assertTrue((SERVER_HOSTNAME_ENV_VAR in call_env_args))
                self.assertEqual(test_server_hostnames[i], call_env_args[SERVER_HOSTNAME_ENV_VAR])

    def _create_pc_instance(self, pcs_features: Optional[Set[PCSFeature]]=None) -> PrivateComputationInstance:
        """Build a partner-side PC instance fixture for the UDP lift flow."""
        infra_config: InfraConfig = InfraConfig(
            instance_id='test_instance_123',
            role=PrivateComputationRole.PARTNER,
            _stage_flow_cls_name='PrivateComputationPCF2LiftUDPStageFlow',
            status=PrivateComputationInstanceStatus.SECURE_RANDOM_SHARDER_STARTED,
            # NOTE(review): the original literal was lost in the source
            # (`status_update_ts=,`); any fixed epoch works for these tests.
            status_update_ts=1600000000,
            instances=[],
            game_type=PrivateComputationGameType.LIFT,
            num_pid_containers=2,
            num_mpc_containers=4,
            num_files_per_mpc_container=NUM_NEW_SHARDS_PER_FILE,
            status_updates=[],
            log_cost_bucket='test_log_cost_bucket',
            pcs_features=(pcs_features if pcs_features else set()),
            container_permission_id=self.container_permission_id,
        )
        common: CommonProductConfig = CommonProductConfig(input_path='456', output_dir='789')
        product_config: ProductConfig = LiftConfig(common=common)
        return PrivateComputationInstance(infra_config=infra_config, product_config=product_config)

    def _get_mock_certificate_provider(self, certificate: str) -> MagicMock:
        """Return a provider mock whose get_certificate yields the given string."""
        certificate_provider = MagicMock()
        certificate_provider.get_certificate.return_value = certificate
        return certificate_provider
def test_that_different_length_is_ok_as_long_as_observation_time_exists(ert_config, storage, prior_ensemble):
    """Responses of varying length should still update when the observation time exists."""
    sample_prior(prior_ensemble, range(prior_ensemble.ensemble_size))
    response_times = [
        [datetime(2014, 9, 9)],
        [datetime(2014, 9, 9)],
        [datetime(2014, 9, 9), datetime(2017, 9, 9)],
        [datetime(2014, 9, 9)],
        [datetime(2014, 9, 9), datetime(1988, 9, 9)],
    ]
    create_responses(ert_config.user_config_file, prior_ensemble, response_times)
    posterior = storage.create_ensemble(prior_ensemble.experiment_id, ensemble_size=ert_config.model_config.num_realizations, iteration=1, name='new_ensemble', prior_ensemble=prior_ensemble)
    smoother_update(prior_ensemble, posterior, 'an id', UpdateConfiguration.global_update_step(list(ert_config.observations.keys()), ert_config.ensemble_config.parameters))
def run_module():
    """Ansible module entry point: manage a Pacemaker cluster or node
    property via the `pcs` CLI.

    Reads current properties with `pcs property show`, compares them to the
    requested state and issues `pcs property set`/`unset` only on a diff,
    honoring check mode. Exits via module.exit_json / fail_json.
    """
    module = AnsibleModule(argument_spec=dict(state=dict(default='present', choices=['present', 'absent']), name=dict(required=True), node=dict(required=False), value=dict(required=False), cib_file=dict(required=False)), supports_check_mode=True)
    state = module.params['state']
    name = module.params['name']
    value = module.params['value']
    node = module.params['node']
    cib_file = module.params['cib_file']
    result = {}
    # `pcs` must be installed; everything below shells out to it.
    if (find_executable('pcs') is None):
        module.fail_json(msg="'pcs' executable not found. Install 'pcs'.")
    if ((state == 'present') and (value is None)):
        module.fail_json(msg="To set property 'value' must be specified.")
    # When a CIB file is given, every pcs call operates on it via `-f`.
    module.params['cib_file_param'] = ''
    if ((cib_file is not None) and os.path.isfile(cib_file)):
        module.params['cib_file_param'] = ('-f ' + cib_file)
    (rc, out, err) = module.run_command(('pcs %(cib_file_param)s property show' % module.params))
    # Parse the `pcs property show` output into
    # {'cluster': {name: value}, 'node': {node: {name: value}}}.
    properties = {}
    if (rc == 0):
        property_type = None
        properties['cluster'] = {}
        properties['node'] = {}
        # Section headers switch the parse mode; the trailing empty line is dropped.
        for row in out.split('\n')[0:(- 1)]:
            if (row == 'Cluster Properties:'):
                property_type = 'cluster'
            elif (row == 'Node Attributes:'):
                property_type = 'node'
            else:
                tmp = row.lstrip().split(':')
                if (property_type == 'cluster'):
                    properties['cluster'][tmp[0]] = tmp[1].lstrip()
                elif (property_type == 'node'):
                    # Node rows look like "nodename: attr1=v1 attr2=v2 ...".
                    properties['node'][tmp[0]] = {}
                    match_node_properties = re.compile('(\\w+=\\w+)\\s*')
                    matched_properties = match_node_properties.findall(':'.join(tmp[1:]))
                    for prop in matched_properties:
                        properties['node'][tmp[0]][prop.split('=')[0]] = prop.split('=')[1]
    else:
        module.fail_json(msg='Failed to load properties from cluster. Is cluster running?')
    result['detected_properties'] = properties
    if (state == 'present'):
        # Set the property only when missing or holding a different value.
        cmd_set = ''
        result['changed'] = True
        if ((node is None) and ((name not in properties['cluster']) or (properties['cluster'][name] != value))):
            cmd_set = ('pcs %(cib_file_param)s property set %(name)s=%(value)s' % module.params)
        elif ((node is not None) and ((node not in properties['node']) or (name not in properties['node'][node]) or (properties['node'][node][name] != value))):
            cmd_set = ('pcs %(cib_file_param)s property set --node %(node)s %(name)s=%(value)s' % module.params)
        else:
            result['changed'] = False
        if ((not module.check_mode) and result['changed']):
            (rc, out, err) = module.run_command(cmd_set)
            if (rc == 0):
                module.exit_json(**result)
            else:
                module.fail_json(msg=(("Failed to set property with cmd : '" + cmd_set) + "'"), output=out, error=err)
    elif (state == 'absent'):
        # Unset the property only when it currently exists.
        result['changed'] = True
        cmd_unset = ''
        if ((node is None) and (name in properties['cluster'])):
            cmd_unset = ('pcs %(cib_file_param)s property unset %(name)s' % module.params)
        elif ((node is not None) and (node in properties['node']) and (name in properties['node'][node])):
            cmd_unset = ('pcs %(cib_file_param)s property unset --node %(node)s %(name)s' % module.params)
        else:
            result['changed'] = False
        if ((not module.check_mode) and result['changed']):
            (rc, out, err) = module.run_command(cmd_unset)
            if (rc == 0):
                module.exit_json(**result)
            else:
                module.fail_json(msg=(("Failed to unset property with cmd: '" + cmd_unset) + "'"), output=out, error=err)
    module.exit_json(**result)
def test_gridproperty_deprecated_init(testpath):
    """The deprecated GridProperty constructors should warn but still work."""
    with pytest.warns(DeprecationWarning, match='Default initialization'):
        default_gp = GridProperty()
    assert (default_gp.ncol, default_gp.nrow, default_gp.nlay) == (4, 3, 5)
    with pytest.warns(DeprecationWarning, match='from file name'):
        GridProperty(pathlib.Path(testpath) / '3dgrids/bri/b_poro.roff', fformat='roff')
class DictlistDataframeConverter(GenericOutputAdapter):
    """Converts between the dict-list API output format and Dataframe objects,
    using the model's API schema to group and order features."""

    def __init__(self, config_json):
        GenericOutputAdapter.__init__(self, config_json=config_json)

    def dictlist2dataframe(self, dl, model_id, api_name):
        """Adapt a dict-list payload into a Dataframe by writing it to a
        temporary CSV and reading it back."""
        tmp_dir = tempfile.mkdtemp(prefix='ersilia-')
        df_file = os.path.join(tmp_dir, 'data.csv')
        self.adapt(dl, df_file, model_id, api_name)
        df = Dataframe()
        df.from_csv(df_file)
        return df

    def __nan_to_none(self, x):
        # NOTE(review): np.isnan raises on non-numeric input — presumably
        # values here are always numeric; verify against the schema.
        if np.isnan(x):
            return None
        return x

    def dataframe2dictlist(self, df, model_id, api_name):
        """Convert a Dataframe back into the dict-list output format.

        Feature columns are grouped by the schema output key (columns named
        "<key><FEATURE_MERGE_PATTERN><feature>" are split apart), reordered
        to match the schema's 'meta' ordering when present, and each row is
        emitted as {'input': ..., 'output': {key: value(s)}}.
        """
        schema = ApiSchema(model_id=model_id, config_json=self.config_json).get_output_by_api(api_name=api_name)
        the_keys = [k for (k, _) in schema.items()]
        # With a single output key, every column belongs to it and no
        # column-name splitting is needed.
        if (len(the_keys) == 1):
            the_key = the_keys[0]
        else:
            the_key = None
        result = []
        features = df.features
        # Map output key -> column indexes / column names, preserving order.
        grouped_features_idxs = collections.defaultdict(list)
        grouped_features = collections.defaultdict(list)
        for (i, f) in enumerate(features):
            if (the_key is None):
                splitted = f.split(FEATURE_MERGE_PATTERN)
                if (len(splitted) == 2):
                    (g, f) = f.split(FEATURE_MERGE_PATTERN)
                else:
                    g = f
            else:
                g = the_key
            grouped_features_idxs[g] += [i]
            grouped_features[g] += [f]
        # Reorder each group's columns to the schema's 'meta' ordering.
        for (k, v) in grouped_features.items():
            ords = dict(((k_, i_) for (i_, k_) in enumerate(v)))
            if (schema[k]['meta'] is not None):
                ord_idxs = [ords[v_] for v_ in schema[k]['meta']]
                grouped_features[k] = [v[idx] for idx in ord_idxs]
                w = grouped_features_idxs[k]
                grouped_features_idxs[k] = [w[idx] for idx in ord_idxs]
        for r in df.iterrows():
            output = {}
            for (k, idxs) in grouped_features_idxs.items():
                v = [self.__nan_to_none(x) for x in r['values'][idxs].tolist()]
                # Single-value groups are unwrapped from their list.
                if (len(v) == 1):
                    v = v[0]
                output[k] = v
            res = {'input': {'key': r['key'], 'input': r['input'], 'text': None}, 'output': output}
            result += [res]
        return result
class DslMeta(type):
    """Metaclass that maintains registries of DSL types and their subclasses.

    Classes without a `_type_shortcut` attribute are ignored. A class whose
    `name` is None acts as the abstract base of a DSL type: its shortcut is
    registered in `_types` and it gets a fresh `_classes` registry. Named
    subclasses register themselves in that shared `_classes` dict.
    """

    # Maps DSL type name -> shortcut used to construct instances of that type.
    _types = {}

    def __init__(cls, name, bases, attrs):
        super().__init__(name, bases, attrs)
        if (not hasattr(cls, '_type_shortcut')):
            # Not part of the DSL hierarchy; nothing to register.
            return
        if (cls.name is None):
            # Abstract base class: register its shortcut and start a registry
            # for its concrete subclasses (unless inherited already).
            cls._types[cls._type_name] = cls._type_shortcut
            if (not hasattr(cls, '_classes')):
                cls._classes = {}
        elif (cls.name not in cls._classes):
            # Concrete subclass: register it under its declared name.
            cls._classes[cls.name] = cls

    def get_dsl_type(cls, name):
        """Return the shortcut registered for DSL type `name`.

        Raises UnknownDslObject for unregistered names.
        """
        try:
            return cls._types[name]
        except KeyError:
            raise UnknownDslObject(f'DSL type {name} does not exist.')
class OptionSeriesOrganizationSonificationContexttracksMappingTremoloDepth(Options):
    """Tremolo-depth mapping options for sonification context tracks.

    NOTE(review): each getter/setter pair had lost its @property /
    @<name>.setter decorators in the source (the second def silently
    shadowed the first); restored per the Options accessor pattern used
    throughout this module.
    """

    @property
    def mapFunction(self):
        """Custom mapping function (default None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the tremolo depth is mapped to (default None)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range (default None)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range (default None)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope within which min/max are computed (default None)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class WaveQuery(commons.BaseRequest):
    """Salesforce Wave (Analytics) query request: POSTs the given query body
    to the Wave query service endpoint."""

    def __init__(self, session_id, instance_url, query, **kwargs):
        super(WaveQuery, self).__init__(session_id, instance_url, **kwargs)
        # NOTE(review): the attribute name was lost in the source
        # ("self. = 'POST'"); 'http_method' matches the BaseRequest
        # convention — confirm against sibling request classes.
        self.http_method = 'POST'
        self.request_body = query
        self.service = (WAVE_QUERY_SERVICE % self.api_version)
class PanTool(DragTool):
    """Drag tool that pans a plot by shifting its x/y mapper ranges.

    Dragging translates the visible data window opposite to the mouse delta,
    clamped to each mapper's domain limits (or to the union of the data
    sources' extents when ``restrict_to_data`` is set).
    """

    # Cursor shown while panning.
    drag_pointer = Pointer('hand')
    # Multiplier applied to the raw mouse delta.
    speed = Float(1.0)
    # Modifier key that, when held at drag start, constrains panning to one axis.
    constrain_key = Enum(None, 'shift', 'control', 'alt')
    # Whether panning is currently constrained to `constrain_direction`.
    constrain = Bool(False)
    constrain_direction = Enum(None, 'x', 'y')
    # Clamp panning to the extents of the mappers' data sources.
    restrict_to_data = Bool(False)
    # Screen position recorded at the previous drag event.
    _original_xy = Tuple
    _original_data = Tuple
    # True when `constrain` was enabled automatically via `constrain_key`.
    _auto_constrain = Bool(False)
    draw_mode = 'none'
    visible = False

    def drag_start(self, event):
        self._start_pan(event)

    def dragging(self, event):
        """Shift the mapper ranges by the mouse delta since the last event."""
        plot = self.component
        # When auto-constraining, lock to the dominant initial drag direction.
        if self._auto_constrain and (self.constrain_direction is None):
            if abs(event.x - self._original_xy[0]) > abs(event.y - self._original_xy[1]):
                self.constrain_direction = 'x'
            else:
                self.constrain_direction = 'y'
        for (direction, bound_name, ndx) in [('x', 'width', 0), ('y', 'height', 1)]:
            if (not self.constrain) or (self.constrain_direction == direction):
                mapper = getattr(plot, (direction + '_mapper'))
                # Renamed from `range` to avoid shadowing the builtin.
                mapper_range = mapper.range
                (domain_min, domain_max) = mapper.domain_limits
                eventpos = getattr(event, direction)
                origpos = self._original_xy[ndx]
                (screenlow, screenhigh) = mapper.screen_bounds
                screendelta = self.speed * (eventpos - origpos)
                newlow = mapper.map_data(screenlow - screendelta)
                newhigh = mapper.map_data(screenhigh - screendelta)
                if domain_min is None:
                    if self.restrict_to_data:
                        domain_min = min([source.get_data().min() for source in mapper_range.sources])
                    else:
                        domain_min = (- inf)
                if domain_max is None:
                    if self.restrict_to_data:
                        domain_max = max([source.get_data().max() for source in mapper_range.sources])
                    else:
                        domain_max = inf
                # Window already spans the whole domain: nothing to pan here.
                if (newlow <= domain_min) and (newhigh >= domain_max):
                    continue
                # Clamp at a domain edge while preserving the window width.
                if newlow <= domain_min:
                    delta = newhigh - newlow
                    newlow = domain_min
                    newhigh = min(domain_max, domain_min + delta)
                elif newhigh >= domain_max:
                    delta = newhigh - newlow
                    newhigh = domain_max
                    newlow = max(domain_min, domain_max - delta)
                mapper_range.set_bounds(newlow, newhigh)
        event.handled = True
        self._original_xy = (event.x, event.y)
        plot.request_redraw()

    def drag_cancel(self, event):
        pass

    def drag_end(self, event):
        return self._end_pan(event)

    def _start_pan(self, event, capture_mouse=True):
        """Record the drag origin, apply key constraints and grab the mouse."""
        self._original_xy = (event.x, event.y)
        if self.constrain_key is not None:
            if getattr(event, (self.constrain_key + '_down')):
                self.constrain = True
                self._auto_constrain = True
                self.constrain_direction = None
        self.event_state = 'panning'
        if capture_mouse:
            event.window.set_pointer(self.drag_pointer)
            event.window.set_mouse_owner(self, event.net_transform())
        event.handled = True

    def _end_pan(self, event):
        """Restore cursor and mouse ownership; reset auto-constraint state."""
        if self._auto_constrain:
            self.constrain = False
            self.constrain_direction = None
        self.event_state = 'normal'
        event.window.set_pointer('arrow')
        if event.window.mouse_owner == self:
            event.window.set_mouse_owner(None)
        event.handled = True
def extractRaintranslationsWordpressCom(item):
    """Build a release message for raintranslations.wordpress.com feed items.

    Returns None for previews or items without a volume/chapter, a release
    message for recognized tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tag_to_release = {
        'Half Dragon Slave Life': ('Half Dragon Slave Life', 'translated'),
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tag_to_release.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestSimpleTrackRepository():
    """Tests for loader.SimpleTrackRepository path resolution.

    NOTE(review): the original ``@mock.patch`` decorators had been reduced to
    bare ``('os.path.exists')`` string expressions (no-ops), which left the
    injected mock parameters unbound; the decorators are restored here. With
    stacked ``mock.patch`` decorators, the bottom-most patch supplies the
    first argument after ``self``.
    """

    # Imported at class scope so the restored decorators below resolve even
    # if the module-level imports do not already bind ``mock``.
    from unittest import mock

    @mock.patch('os.path.exists')
    @mock.patch('os.path.isdir')
    def test_track_from_directory(self, is_dir, path_exists):
        """A directory containing track.json resolves to that directory."""
        is_dir.return_value = True
        path_exists.return_value = True
        repo = loader.SimpleTrackRepository('/path/to/track/unit-test')
        assert (repo.track_name == 'unit-test')
        assert (repo.track_names == ['unit-test'])
        assert (repo.track_dir('unit-test') == '/path/to/track/unit-test')
        assert (repo.track_file('unit-test') == '/path/to/track/unit-test/track.json')

    @mock.patch('os.path.exists')
    @mock.patch('os.path.isdir')
    @mock.patch('os.path.isfile')
    def test_track_from_file(self, is_file, is_dir, path_exists):
        """A .json file resolves to its parent directory and its own name."""
        is_file.return_value = True
        is_dir.return_value = False
        path_exists.return_value = True
        repo = loader.SimpleTrackRepository('/path/to/track/unit-test/my-track.json')
        assert (repo.track_name == 'my-track')
        assert (repo.track_names == ['my-track'])
        assert (repo.track_dir('my-track') == '/path/to/track/unit-test')
        assert (repo.track_file('my-track') == '/path/to/track/unit-test/my-track.json')

    @mock.patch('os.path.exists')
    @mock.patch('os.path.isdir')
    @mock.patch('os.path.isfile')
    def test_track_from_named_pipe(self, is_file, is_dir, path_exists):
        """A path that is neither file nor directory is rejected."""
        is_file.return_value = False
        is_dir.return_value = False
        path_exists.return_value = True
        with pytest.raises(exceptions.SystemSetupError) as exc:
            loader.SimpleTrackRepository('a named pipe cannot point to a track')
        assert (exc.value.args[0] == 'a named pipe cannot point to a track is neither a file nor a directory')

    @mock.patch('os.path.exists')
    def test_track_from_non_existing_path(self, path_exists):
        """A non-existent path is rejected with a clear error."""
        path_exists.return_value = False
        with pytest.raises(exceptions.SystemSetupError) as exc:
            loader.SimpleTrackRepository('/path/does/not/exist')
        assert (exc.value.args[0] == 'Track path /path/does/not/exist does not exist')

    @mock.patch('os.path.isdir')
    @mock.patch('os.path.exists')
    def test_track_from_directory_without_track(self, path_exists, is_dir):
        """A directory without track.json is rejected."""
        # First exists() call (the directory) succeeds, the second
        # (track.json inside it) fails.
        path_exists.side_effect = [True, False]
        is_dir.return_value = True
        with pytest.raises(exceptions.SystemSetupError) as exc:
            loader.SimpleTrackRepository('/path/to/not/a/track')
        assert (exc.value.args[0] == 'Could not find track.json in /path/to/not/a/track')

    @mock.patch('os.path.exists')
    @mock.patch('os.path.isdir')
    @mock.patch('os.path.isfile')
    def test_track_from_file_but_not_json(self, is_file, is_dir, path_exists):
        """A non-JSON file is rejected."""
        is_file.return_value = True
        is_dir.return_value = False
        path_exists.return_value = True
        with pytest.raises(exceptions.SystemSetupError) as exc:
            loader.SimpleTrackRepository('/path/to/track/unit-test/my-track.xml')
        assert (exc.value.args[0] == '/path/to/track/unit-test/my-track.xml has to be a JSON file')
class OptionPlotoptionsPolygonMarkerStatesSelect(Options):
    """Marker options for the 'select' state of polygon series points.

    NOTE(review): each getter/setter pair was defined as two plain methods
    with the same name, so the setter silently shadowed the getter; the
    ``@property`` / ``@<name>.setter`` decorators are restored here.
    """

    @property
    def enabled(self):
        """Whether the select state is enabled (default: True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def fillColor(self):
        """Fill colour of the selected marker (default: '#cccccc')."""
        return self._config_get('#cccccc')

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        """Outline colour of the selected marker (default: '#000000')."""
        return self._config_get('#000000')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Outline width of the selected marker in pixels (default: 2)."""
        return self._config_get(2)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        """Marker radius; None defers to the theme default."""
        return self._config_get(None)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)
class HeapAuthenticationStrategy(AuthenticationStrategy):
    """OAuth-style authentication for Heap SaaS requests: exchanges a
    username/password pair for a bearer token and attaches it."""

    name = 'heap'
    configuration_model = HeapAuthenticationConfiguration

    def __init__(self, configuration: HeapAuthenticationConfiguration):
        # Placeholder strings resolved against connection secrets later.
        self.username = configuration.username
        self.password = configuration.password

    def add_authentication(self, request: PreparedRequest, connection_config: ConnectionConfig) -> PreparedRequest:
        """Fetch an access token from the Heap auth endpoint and set the
        ``Authorization: Bearer`` header on *request*.

        Raises FidesopsException when the token request fails.
        """
        secrets = cast(Dict, connection_config.secrets)
        domain: Optional[str] = secrets.get('domain')
        username: Optional[str] = assign_placeholders(self.username, secrets)
        # Fixed annotation: was Optional[int]; passwords are strings.
        password: Optional[str] = assign_placeholders(self.password, secrets)
        # NOTE(review): the URL literal was garbled in the original source;
        # reconstructed from the Heap auth-token endpoint layout — confirm
        # against the upstream connector definition.
        response = post(url=f'https://{domain}/api/public/v0/auth_token', auth=(username, password))
        if response.ok:
            json_response = response.json()
            token = json_response.get('access_token')
        else:
            raise FidesopsException(f'Unable to get token {response.json()}')
        request.headers['Authorization'] = f'Bearer {token}'
        return request
def production_processor(df) -> list:
    """Convert the raw generation dataframe into a list of per-row snapshot
    dicts with datetime, gas, coal, oil and wind values."""
    datapoints = []
    for _, row in df.iterrows():
        snapshot = {
            'datetime': add_default_tz(parser.parse(row['TimeStamp'], dayfirst=True)),
            'gas': row['Gas_MW'],
            'coal': row['Coal_MW'],
            'oil': row['Distillate_MW'] + row['Diesel_MW'],
            'wind': row['Wind_MW'],
        }
        # Clamp small negative wind readings up to zero; values at or below
        # -20 are left untouched.
        if snapshot['wind'] > -20:
            snapshot['wind'] = max(snapshot['wind'], 0)
        datapoints.append(snapshot)
    return datapoints
def _add_measure_url(options, entity_type):
    """Attach measure URL templates to *options* for the given entity type.

    Every entity type gets the definition template; PCNs get no measure
    template, practices link to the all-CCGs view, and every other type
    links to its own "all <type>s" view.
    """
    options['measureDefinitionUrlTemplate'] = _url_template('measure_definition')
    if entity_type == 'pcn':
        return
    view = ('measure_for_all_ccgs' if entity_type == 'practice'
            else 'measure_for_all_{}s'.format(entity_type))
    options['measureUrlTemplate'] = _url_template(view)
class AsyncDummyAgent(AgentBase):
    """Minimal asynchronous agent: every task it "creates" immediately
    reports SUCCEEDED and deletes as a no-op."""

    def __init__(self):
        # Register as the handler for the 'async_dummy' task type.
        super().__init__(task_type='async_dummy', asynchronous=True)

    async def async_create(self, context: grpc.ServicerContext, output_prefix: str, task_template: TaskTemplate, inputs: typing.Optional[LiteralMap]=None) -> CreateTaskResponse:
        # Resource metadata is the JSON-serialized Metadata dataclass
        # carrying the module-level dummy_id.
        return CreateTaskResponse(resource_meta=json.dumps(asdict(Metadata(job_id=dummy_id))).encode('utf-8'))

    async def async_get(self, context: grpc.ServicerContext, resource_meta: bytes) -> GetTaskResponse:
        # Always reports the task as finished successfully.
        return GetTaskResponse(resource=Resource(state=SUCCEEDED))

    async def async_delete(self, context: grpc.ServicerContext, resource_meta: bytes) -> DeleteTaskResponse:
        # Deletion is a no-op for the dummy agent.
        return DeleteTaskResponse()
def extractLuciaelysWordpressCom(item):
    """Map a luciaelys.wordpress.com feed item to a release message.

    Returns None for previews or posts without volume/chapter info, a
    release message for a recognized tag, or False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (chp or vol) or 'preview' in title.lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'),
                                   ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag,
                                               postfix=postfix, tl_type=tl_type)
    return False
class PDFsTab(QWidget):
    """Tab listing PDF notes that are not currently in the queue, with a
    search bar that filters the list as the user types."""

    def __init__(self, parent):
        # Plain attribute assignment only before QWidget.__init__; no Qt
        # calls happen until the base class is initialized.
        self.parent = parent
        QWidget.__init__(self)
        self.setup_ui()

    def setup_ui(self):
        """Build the vertical layout: header label, search bar, note list."""
        self.vbox_right = QVBoxLayout()
        r_lbl = QLabel('PDF notes, not in Queue')
        r_lbl.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self.vbox_right.addWidget(r_lbl)
        self.search_bar_right = QLineEdit()
        self.search_bar_right.setPlaceholderText('Type to search')
        # Live filtering: every keystroke re-runs the search.
        self.search_bar_right.textChanged.connect(self.search_enter)
        self.vbox_right.addWidget(self.search_bar_right)
        self.t_view_right = NoteList(self)
        self.vbox_right.addWidget(self.t_view_right)
        self.vbox_right.setAlignment(Qt.AlignmentFlag.AlignHCenter)
        self.setLayout(self.vbox_right)

    def refresh(self):
        """Reset the search box and reload the full unqueued-PDF list.

        Clearing the search bar also fires textChanged -> search_enter,
        which refills the list for empty input.
        """
        self.search_bar_right.clear()
        self.fill_list(get_pdf_notes_not_in_queue())

    def fill_list(self, db_list):
        """Populate the list widget from a sequence of note records."""
        self.t_view_right.fill(db_list)

    def search_enter(self):
        """Filter the list by the current search text; blank input shows
        the full unqueued list."""
        inp = self.search_bar_right.text()
        if ((inp is None) or (len(inp.strip()) == 0)):
            self.fill_list(get_pdf_notes_not_in_queue())
            return
        res = find_unqueued_pdf_notes(inp)
        self.t_view_right.clear()
        self.fill_list(res)
class NonlinearADR_Decay_DiracIC(LinearADR_Decay_DiracIC):
    """Analytical solution for advection-diffusion with a nonlinear decay
    term r(u) = c * u**d and a Dirac initial condition."""

    def __init__(self, n=1.0, b=numpy.array((1.0, 0.0, 0.0)), a=0.01, c=1.0, d=2.0, tStart=0.0, u0=0.1, x0=numpy.array((0.0, 0.0, 0.0))):
        # d is the nonlinear decay exponent; the remaining parameters are
        # forwarded unchanged to the linear base class.
        LinearADR_Decay_DiracIC.__init__(self, n, b, a, c, tStart, u0, x0)
        self.d_ = d

    def uOfXT(self, x, T):
        """Evaluate the solution at point x and time T."""
        t = (T + self.tStart)
        # NOTE(review): deliberately calls LinearAD_DiracIC.uOfXT (the
        # grandparent's decay-free solution) and then applies the nonlinear
        # decay factor below — presumably intentional; confirm against the
        # base-class hierarchy.
        u1 = LinearAD_DiracIC.uOfXT(self, x, T)
        if (u1 > 0.0):
            return (u1 * exp(old_div((- (((2.0 * self.c_) * t) * pow(u1, (self.d_ - 1.0)))), (self.d_ + 1.0))))
        else:
            return u1

    def rOfUXT(self, u, x, T):
        # Nonlinear reaction term r = c * u**d.
        return (self.c_ * (u ** self.d_))

    def drOfUXT(self, u, x, T):
        # Derivative dr/du = d * c * u**(d-1).
        return ((self.d_ * self.c_) * (u ** (self.d_ - 1)))
class FormPage(AbstractEmailForm):
    """Wagtail form page with a sub-header, rich-text intro and a
    thank-you message shown after submission."""

    # Short text displayed under the page title.
    sub_header = models.TextField(default='')
    # Rich text shown above the form.
    intro = RichTextField(blank=True)
    # Rich text shown after a successful submission.
    thank_you_text = RichTextField(blank=True)
    # Admin panels: submissions listing, content fields, inline form fields
    # and the email notification settings.
    content_panels = (AbstractEmailForm.content_panels + [FormSubmissionsPanel(), FieldPanel('sub_header'), FieldPanel('intro', classname='full'), InlinePanel('form_fields', label='Form fields'), FieldPanel('thank_you_text', classname='full'), MultiFieldPanel([FieldRowPanel([FieldPanel('from_address', classname='col6'), FieldPanel('to_address', classname='col6')]), FieldPanel('subject')], 'Email')])
class OefSearchDialogues(Dialogues, ABC):
    """Dialogue container for OEF-search conversations."""

    # Both end states of an OEF-search dialogue are terminal.
    END_STATES = frozenset({OefSearchDialogue.EndState.SUCCESSFUL, OefSearchDialogue.EndState.FAILED})
    # Dialogues that reach a terminal state are not kept in memory.
    _keep_terminal_state_dialogues = False

    def __init__(self, self_address: Address, role_from_first_message: Callable[([Message, Address], Dialogue.Role)], dialogue_class: Type[OefSearchDialogue]=OefSearchDialogue) -> None:
        """Initialize the container with the OEF-search message and dialogue
        classes and the caller-supplied role resolver."""
        Dialogues.__init__(self, self_address=self_address, end_states=cast(FrozenSet[Dialogue.EndState], self.END_STATES), message_class=OefSearchMessage, dialogue_class=dialogue_class, role_from_first_message=role_from_first_message)
class Transonic():
    """Per-module manager that replaces Python functions/classes with their
    backend-compiled (e.g. Pythran) equivalents when available, optionally
    transpiling and compiling at import time."""

    def __init__(self, use_transonified=True, frame=None, reuse=True, backend=None):
        """Locate (or build) the backend module for the calling module.

        frame defaults to the caller's frame; reuse returns a cached
        instance's state when this module was already processed.
        """
        if (frame is None):
            frame = get_frame(1)
        self.module_name = module_name = get_module_name(frame)
        if (backend is None):
            backend = get_backend_name_module(module_name)
        if isinstance(backend, str):
            backend = backends[backend]
        self.backend = backend
        modules = modules_backends[backend.name]
        self._compile_at_import_at_creation = has_to_compile_at_import()
        if (reuse and (module_name in modules)):
            # An instance for this module already exists: copy its state
            # wholesale and stop.
            ts = modules[module_name]
            for (key, value) in ts.__dict__.items():
                self.__dict__[key] = value
            return
        self.is_transpiling = is_transpiling
        self.has_to_replace = has_to_replace
        if is_transpiling:
            # Transpilation run: just collect definitions, do not replace.
            self.functions = {}
            self.classes = {}
            self.signatures_func = {}
            self.is_transpiled = False
            self.is_compiled = False
            return
        self.is_compiling = False
        if ((not use_transonified) or (not has_to_replace)):
            # Replacement disabled: run the pure-Python versions.
            self.is_transpiled = False
            self.is_compiled = False
            return
        # Derive the dotted name of the backend module
        # (e.g. pkg.__pythran__.mod for pkg.mod).
        if ('.' in module_name):
            (package, module_short_name) = module_name.rsplit('.', 1)
            module_backend_name = (package + '.')
        else:
            module_short_name = module_name
            module_backend_name = ''
        module_backend_name += (f'__{backend.name}__.' + module_short_name)
        self.path_mod = path_mod = Path(_get_pathfile_from_frame(frame))
        suffix = '.py'
        self.path_backend = path_backend = ((path_mod.parent / f'__{backend.name}__') / (module_short_name + suffix))
        path_ext = None
        if (has_to_compile_at_import() and path_mod.exists()):
            if mpi.has_to_build(path_backend, path_mod):
                # The backend file is missing or older than the source:
                # regenerate it by running transonic (rank 0 only under MPI).
                if path_backend.exists():
                    time_backend = mpi.modification_date(path_backend)
                else:
                    time_backend = 0
                returncode = None
                if (mpi.rank == 0):
                    print(f'Running transonic on file {path_mod}... ', end='')
                    os.environ['TRANSONIC_NO_MPI'] = '1'
                    returncode = subprocess.call([sys.executable, '-m', 'transonic.run', '-nc', str(path_mod)])
                    del os.environ['TRANSONIC_NO_MPI']
                returncode = mpi.bcast(returncode)
                if (returncode != 0):
                    raise RuntimeError(f'transonic does not manage to produce the {backend.name_capitalized} file for {path_mod}')
                if (mpi.rank == 0):
                    print('Done!')
                path_ext = path_backend.with_name(backend.name_ext_from_path_backend(path_backend))
                time_backend_after = mpi.modification_date(path_backend)
                # If the regenerated file is unchanged, touch the files so
                # their timestamps mark them as up to date.
                if ((time_backend_after == time_backend) and (mpi.rank == 0)):
                    if (not has_to_build(path_ext, path_backend)):
                        path_backend.touch()
                        if path_ext.exists():
                            path_ext.touch()
                    else:
                        path_backend.touch()
        path_ext = (path_ext or path_backend.with_name(backend.name_ext_from_path_backend(path_backend)))
        self.path_extension = path_ext
        if (has_to_compile_at_import() and path_mod.exists() and (not self.path_extension.exists())):
            # No compiled extension yet: launch the backend compiler
            # asynchronously.
            if (mpi.rank == 0):
                print(f'Launching {backend.name_capitalized} to compile a new extension...')
            (self.is_compiling, self.process) = backend.compile_extension(path_backend, name_ext_file=self.path_extension.name)
            self.is_compiled = (not self.is_compiling)
        self.is_transpiled = True
        if ((not path_ext.exists()) and (not self.is_compiling)):
            # Fall back to an extension named with the plain backend suffix.
            path_ext_alt = path_backend.with_suffix(backend.suffix_extension)
            if path_ext_alt.exists():
                self.path_extension = path_ext = path_ext_alt
        self.reload_module_backend(module_backend_name)
        if (not self.is_transpiled):
            logger.warning(f'Module {path_mod} has not been compiled for Transonic-{backend.name_capitalized}')
        else:
            self.is_compiled = backend.check_if_compiled(self.module_backend)
            if self.is_compiled:
                module = inspect.getmodule(frame)
                # module can be None (e.g. when executed via runpy).
                if (module is not None):
                    if (backend.name == 'pythran'):
                        module.__pythran__ = self.module_backend.__pythran__
                    module.__transonic__ = self.module_backend.__transonic__
            if hasattr(self.module_backend, 'arguments_blocks'):
                self.arguments_blocks = getattr(self.module_backend, 'arguments_blocks')
        modules[module_name] = self

    def reload_module_backend(self, module_backend_name=None):
        """(Re)import the backend module, preferring the compiled extension
        over the transpiled .py file; mark as untranspiled if neither exists."""
        if (module_backend_name is None):
            module_backend_name = self.module_backend.__name__
        if (self.path_extension.exists() and (not self.is_compiling)):
            self.module_backend = import_from_path(self.path_extension, module_backend_name)
        elif self.path_backend.exists():
            self.module_backend = import_from_path(self.path_backend, module_backend_name)
        else:
            self.is_transpiled = False
            self.is_compiled = False

    def transonic_def(self, func):
        """Return the backend replacement for *func*, or *func* itself when
        replacement is disabled or unavailable."""
        if is_method(func):
            return self.transonic_def_method(func)
        if (is_transpiling or (not has_to_replace) or (not self.is_transpiled)):
            return func
        if (not hasattr(self.module_backend, func.__name__)):
            self.reload_module_backend()
        try:
            func_tmp = getattr(self.module_backend, func.__name__)
        except AttributeError:
            # Stale backend file: keep the pure-Python function.
            logger.warning(f'''{self.backend.name_capitalized} file does not seem to be up-to-date:
{self.module_backend}
func: {func.__name__}''')
            func_tmp = func
        if self.is_compiling:
            # Wrap so each call checks whether compilation has finished.
            return functools.wraps(func)(CheckCompiling(self, func_tmp))
        return func_tmp

    def transonic_def_method(self, func):
        """Return a placeholder for a method, resolved later by
        transonic_class, or *func* when replacement is disabled."""
        if (is_transpiling or (not has_to_replace) or (not self.is_transpiled)):
            return func
        return TransonicTemporaryMethod(func)

    def boost(self, **kwargs):
        """Decorator entry point; kwargs are currently unused here."""
        return self._boost_decor

    def _boost_decor(self, obj):
        # Dispatch on whether a class or a plain function is decorated.
        if isinstance(obj, type):
            return self.transonic_class(obj)
        else:
            return self.transonic_def(obj)

    def transonic_class(self, cls: type):
        """Replace the temporary method placeholders on *cls* with their
        backend implementations."""
        if is_transpiling:
            return cls
        jit_methods = {key: value for (key, value) in cls.__dict__.items() if isinstance(value, TransonicTemporaryJITMethod)}
        if jit_methods:
            cls = jit_class(cls, jit_methods, self.backend)
        if ((not has_to_replace) or (not self.is_transpiled)):
            return cls
        cls_name = cls.__name__
        for (key, value) in cls.__dict__.items():
            if (not isinstance(value, TransonicTemporaryMethod)):
                continue
            func = value.func
            func_name = func.__name__
            # Backend modules expose per-method functions and the source of
            # a forwarding method under these mangled names.
            name_backend_func = f'__for_method__{cls_name}__{func_name}'
            name_var_code_new_method = f'__code_new_method__{cls_name}__{func_name}'
            if (not hasattr(self.module_backend, name_backend_func)):
                self.reload_module_backend()
            try:
                backend_func = getattr(self.module_backend, name_backend_func)
                code_new_method = getattr(self.module_backend, name_var_code_new_method)
            except AttributeError:
                raise RuntimeError(f'{self.backend.name_capitalized} file does not seem to be up-to-date.')
            else:
                # Build the forwarding method in a namespace exposing the
                # backend function, then install it on the class.
                namespace = {'backend_func': backend_func}
                exec(code_new_method, namespace)
                setattr(cls, key, functools.wraps(func)(namespace['new_method']))
        return cls

    def use_block(self, name):
        """Execute the compiled block *name* with arguments pulled from the
        caller's local variables."""
        if (not self.is_transpiled):
            raise ValueError('`use_block` has to be used protected by `if ts.is_transpiled`')
        if (self.is_compiling and (not self.process.is_alive(raise_if_error=True))):
            # Compilation just finished: switch to the fresh extension.
            self.is_compiling = False
            time.sleep(0.1)
            self.module_backend = import_from_path(self.path_extension, self.module_backend.__name__)
            assert self.backend.check_if_compiled(self.module_backend)
            self.is_compiled = True
        func = getattr(self.module_backend, name)
        argument_names = self.arguments_blocks[name]
        locals_caller = get_frame(1).f_locals
        arguments = [locals_caller[name] for name in argument_names]
        return func(*arguments)
# NOTE(review): the registration line was missing its '@', so the converter
# factory's result was discarded and the function never registered; restored
# as a decorator here.
@_converter(torch.ops.aten.sign.default)
def aten_unary_ops_sign(target: Target, args: Tuple[Argument, ...], kwargs: Dict[str, Argument], name: str) -> ConverterOutput:
    """Convert ``aten.sign`` to the elementwise SIGN op.

    Raises RuntimeError when the first argument is not an AITTensor.
    """
    input_val = args[0]
    if not isinstance(input_val, AITTensor):
        raise RuntimeError(f'Unexpected input for {name}: {input_val}')
    return elementwise(FuncEnum.SIGN)(input_val)
class MHeadingText(HasTraits):
    """Mixin implementing a heading-text widget whose displayed string
    tracks the ``text`` trait."""

    # Heading level (1 is the most prominent).
    level = Int(1)
    # The text displayed by the control.
    text = Str('Default')

    def __init__(self, parent=None, **traits):
        """Create the heading, emitting deprecation warnings for the removed
        'image' trait and the legacy 'create' parameter."""
        if ('image' in traits):
            warnings.warn("background images are no-longer supported for Wx and the 'image' trait will be removed in a future Pyface update", DeprecationWarning, stacklevel=2)
        # 'create' must be popped before HasTraits.__init__ sees it.
        create = traits.pop('create', None)
        super().__init__(parent=parent, **traits)
        if create:
            self.create()
            warnings.warn('automatic widget creation is deprecated and will be removed in a future Pyface version, code should not pass the create parameter and should instead call create() explicitly', DeprecationWarning, stacklevel=2)
        elif (create is not None):
            warnings.warn('setting create=False is no longer required', DeprecationWarning, stacklevel=2)

    def _initialize_control(self):
        # Push the initial trait value into the freshly-created toolkit control.
        super()._initialize_control()
        self._set_control_text(self.text)

    def _add_event_listeners(self):
        super()._add_event_listeners()
        # Keep the control text in sync with the trait, on the UI thread.
        self.observe(self._text_updated, 'text', dispatch='ui')

    def _remove_event_listeners(self):
        self.observe(self._text_updated, 'text', dispatch='ui', remove=True)
        super()._remove_event_listeners()

    def _set_control_text(self, text):
        """Toolkit hook: set the control's text (implemented by subclasses)."""
        raise NotImplementedError()

    def _get_control_text(self):
        """Toolkit hook: get the control's text (implemented by subclasses)."""
        raise NotImplementedError()

    def _text_updated(self, event):
        # Ignore trait changes that arrive before/after the control exists.
        if (self.control is not None):
            self._set_control_text(self.text)
class OptionSeriesBubbleSonificationTracksMapping(Options):
    """Mapping options for bubble-series sonification tracks.

    NOTE(review): these accessors were plain methods — the ``text`` setter
    shadowed its getter — so the ``@property`` / setter decorators are
    restored here.
    """

    @property
    def frequency(self) -> 'OptionSeriesBubbleSonificationTracksMappingFrequency':
        """Frequency mapping sub-options."""
        return self._config_sub_data('frequency', OptionSeriesBubbleSonificationTracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionSeriesBubbleSonificationTracksMappingGapbetweennotes':
        """Gap-between-notes mapping sub-options."""
        return self._config_sub_data('gapBetweenNotes', OptionSeriesBubbleSonificationTracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionSeriesBubbleSonificationTracksMappingHighpass':
        """High-pass filter mapping sub-options."""
        return self._config_sub_data('highpass', OptionSeriesBubbleSonificationTracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionSeriesBubbleSonificationTracksMappingLowpass':
        """Low-pass filter mapping sub-options."""
        return self._config_sub_data('lowpass', OptionSeriesBubbleSonificationTracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionSeriesBubbleSonificationTracksMappingNoteduration':
        """Note-duration mapping sub-options."""
        return self._config_sub_data('noteDuration', OptionSeriesBubbleSonificationTracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionSeriesBubbleSonificationTracksMappingPan':
        """Stereo-pan mapping sub-options."""
        return self._config_sub_data('pan', OptionSeriesBubbleSonificationTracksMappingPan)

    @property
    def pitch(self) -> 'OptionSeriesBubbleSonificationTracksMappingPitch':
        """Pitch mapping sub-options."""
        return self._config_sub_data('pitch', OptionSeriesBubbleSonificationTracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionSeriesBubbleSonificationTracksMappingPlaydelay':
        """Play-delay mapping sub-options."""
        return self._config_sub_data('playDelay', OptionSeriesBubbleSonificationTracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionSeriesBubbleSonificationTracksMappingRate':
        """Rate mapping sub-options."""
        return self._config_sub_data('rate', OptionSeriesBubbleSonificationTracksMappingRate)

    @property
    def text(self):
        """Text announced for this track; None by default."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionSeriesBubbleSonificationTracksMappingTime':
        """Time mapping sub-options."""
        return self._config_sub_data('time', OptionSeriesBubbleSonificationTracksMappingTime)

    @property
    def tremolo(self) -> 'OptionSeriesBubbleSonificationTracksMappingTremolo':
        """Tremolo mapping sub-options."""
        return self._config_sub_data('tremolo', OptionSeriesBubbleSonificationTracksMappingTremolo)

    @property
    def volume(self) -> 'OptionSeriesBubbleSonificationTracksMappingVolume':
        """Volume mapping sub-options."""
        return self._config_sub_data('volume', OptionSeriesBubbleSonificationTracksMappingVolume)
class OptionPlotoptionsBulletSonificationTracksMappingGapbetweennotes(Options):
    """Gap-between-notes mapping options for bullet-series sonification.

    NOTE(review): each getter/setter pair was defined as two plain methods
    with the same name (setter shadowing getter); the ``@property`` /
    ``@<name>.setter`` decorators are restored here.
    """

    @property
    def mapFunction(self):
        """Mapping function applied to the data values; None by default."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property mapped to the gap; None by default."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value; None by default."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value; None by default."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope within which min/max are computed; None by default."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsPictorialSonificationPointgrouping(Options):
    """Point-grouping options for pictorial-series sonification.

    NOTE(review): each getter/setter pair was defined as two plain methods
    with the same name (setter shadowing getter); the ``@property`` /
    ``@<name>.setter`` decorators are restored here.
    """

    @property
    def algorithm(self):
        """Grouping algorithm (default: 'minmax')."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether point grouping is enabled (default: True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """Timespan of each group in milliseconds (default: 15)."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """Point property the grouping operates on (default: 'y')."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
from unittest import mock  # binds 'mock' for the restored decorators below


# NOTE(review): the patch decorators had been reduced to bare string
# expressions (no-ops), leaving LOG/session unbound; restored here. The
# bottom-most patch supplies the first parameter.
@mock.patch('foremast.utils.awslambda.boto3.Session')
@mock.patch('foremast.utils.awslambda.LOG')
def test_add_lambda_permission_failure(LOG, session):
    """add_lambda_permissions should log (not raise) on a ClientError."""
    client = session.return_value.client.return_value
    client.add_permission.side_effect = boto3.exceptions.botocore.exceptions.ClientError(ERROR_RESPONSE, 'operation_name')
    add_lambda_permissions()
    (args, _) = LOG.info.call_args
    assert args[0].startswith('Did not add')
class ESENT_CATALOG_DATA_DEFINITION_ENTRY(Structure):
    """ESE catalog data-definition entry whose layout depends on the catalog
    type stored in the record header."""

    # Fixed header common to every entry.
    fixed = (('FatherDataPageID', '<L=0'), ('Type', '<H=0'), ('Identifier', '<L=0'))
    # Variable parts, selected by the catalog type.
    column_stuff = (('ColumnType', '<L=0'), ('SpaceUsage', '<L=0'), ('ColumnFlags', '<L=0'), ('CodePage', '<L=0'))
    other = (('FatherDataPageNumber', '<L=0'),)
    table_stuff = (('SpaceUsage', '<L=0'),)
    index_stuff = (('SpaceUsage', '<L=0'), ('IndexFlags', '<L=0'), ('Locale', '<L=0'))
    lv_stuff = (('SpaceUsage', '<L=0'),)
    common = (('Trailing', ':'),)

    def __init__(self, data):
        """Select the structure layout from the catalog type at offset 4
        (little-endian uint16), then parse *data*."""
        dataType = unpack('<H', data[4:][:2])[0]
        self.structure = self.fixed
        if (dataType == CATALOG_TYPE_TABLE):
            self.structure += (self.other + self.table_stuff)
        elif (dataType == CATALOG_TYPE_COLUMN):
            self.structure += self.column_stuff
        elif (dataType == CATALOG_TYPE_INDEX):
            self.structure += (self.other + self.index_stuff)
        elif (dataType == CATALOG_TYPE_LONG_VALUE):
            self.structure += (self.other + self.lv_stuff)
        elif (dataType == CATALOG_TYPE_CALLBACK):
            raise Exception('CallBack types not supported!')
        else:
            # Unknown type: parse with an empty layout first so the record
            # can still be consumed, then fall through to add Trailing.
            # Fixed: this first Structure.__init__ had been dedented out of
            # the else branch, causing a redundant unconditional double
            # parse for every entry type.
            self.structure = ()
            Structure.__init__(self, data)
        self.structure += self.common
        Structure.__init__(self, data)
def extractWwwKaitoranslationWebId(item):
    """Map a kaitoranslation.web.id feed item to a release message.

    Returns None for previews or posts without volume/chapter info, a
    release message for the first recognized tag, or False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    not_a_chapter = not (chp or vol) or 'preview' in item['title'].lower()
    if not_a_chapter:
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    matches = [(name, tl_type) for tagname, name, tl_type in tagmap
               if tagname in item['tags']]
    if matches:
        name, tl_type = matches[0]
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag,
                                           postfix=postfix, tl_type=tl_type)
    return False
def after(action, *args, is_async=False, **kwargs):
    """Decorator factory that executes *action* after the decorated responder
    (or after every decorable responder of a resource class)."""
    def _after(responder_or_resource):
        if not isinstance(responder_or_resource, type):
            # Plain responder function: wrap it directly.
            return _wrap_with_after(responder_or_resource, action, args, kwargs, is_async)
        # Resource class: wrap each decorable responder method in place and
        # return the (mutated) class.
        resource = responder_or_resource
        for responder_name, responder in getmembers(resource, callable):
            if not _DECORABLE_METHOD_NAME.match(responder_name):
                continue
            wrapped = _wrap_with_after(responder, action, args, kwargs, is_async)
            setattr(resource, responder_name, wrapped)
        return resource
    return _after
import pytest  # binds 'pytest' for the restored parametrize decorator


# NOTE(review): the decorator line had lost its '@pytest.mark' prefix,
# leaving a bare '.parametrize(...)' syntax error; restored here.
@pytest.mark.parametrize('wave, J, mode', [('db1', 1, 'zero'), ('db1', 3, 'zero'), ('db3', 1, 'symmetric'), ('db3', 2, 'reflect'), ('db2', 3, 'periodization'), ('db2', 3, 'periodic'), ('db4', 2, 'zero'), ('db3', 3, 'symmetric'), ('bior2.4', 2, 'periodization'), ('bior2.4', 2, 'periodization')])
def test_equal(wave, J, mode):
    """Forward DWT must match pywt.wavedec and the inverse must round-trip."""
    x = torch.randn(5, 4, 64).to(dev)
    dwt = DWT1DForward(J=J, wave=wave, mode=mode).to(dev)
    (yl, yh) = dwt(x)
    coeffs = pywt.wavedec(x.cpu().numpy(), wave, level=J, mode=mode)
    # Approximation coefficients, then detail bands finest-last in yh vs
    # coarsest-first in pywt's list.
    np.testing.assert_array_almost_equal(yl.cpu(), coeffs[0], decimal=PREC_FLT)
    for j in range(J):
        np.testing.assert_array_almost_equal(coeffs[(J - j)], yh[j].cpu(), decimal=PREC_FLT)
    iwt = DWT1DInverse(wave=wave, mode=mode).to(dev)
    x2 = iwt((yl, yh))
    np.testing.assert_array_almost_equal(x.cpu(), x2.detach().cpu(), decimal=PREC_FLT)
def _add_deployment(contract: Any, alias: Optional[str]=None) -> None:
    """Record a deployed contract (address, alias, sources, build data) in
    the per-chain deployments table.

    No-op when the active network has no 'chainid' (e.g. a local dev chain).
    """
    if ('chainid' not in CONFIG.active_network):
        return
    address = _resolve_address(contract.address)
    # One table per chain; address and alias are both unique keys.
    name = f"chain{CONFIG.active_network['chainid']}"
    cur.execute(f"CREATE TABLE IF NOT EXISTS {name} (address UNIQUE, alias UNIQUE, paths, {', '.join(DEPLOYMENT_KEYS)})")
    all_sources = {}
    for (key, path) in contract._build.get('allSourcePaths', {}).items():
        source = contract._sources.get(path)
        if (source is None):
            # Source not cached on the contract: read it from disk.
            source = Path(path).read_text()
        # Deduplicate sources by content hash in the shared 'sources' table.
        hash_ = sha1(source.encode()).hexdigest()
        cur.insert('sources', hash_, source)
        all_sources[key] = [hash_, path]
    values = [contract._build.get(i) for i in DEPLOYMENT_KEYS]
    cur.insert(name, address, alias, all_sources, *values)
def test_operations_inside_outside_polygon_shortforms(tmpdir):
    """Regression-test the surface add/div/set/eli shortforms restricted to
    inside/outside a polygon, against known mean values."""
    zurf = xtgeo.surface_from_file(SURF1)
    poly = xtgeo.polygons_from_file(POLY1)
    # Add a constant inside the polygon.
    surf = zurf.copy()
    surf.add_inside(poly, 200)
    assert (surf.values.mean() == pytest.approx(1759.06, abs=0.01))
    # Add a constant outside the polygon.
    surf = zurf.copy()
    surf.add_outside(poly, 200)
    assert (surf.values.mean() == pytest.approx(1838.24, abs=0.01))
    # Add another surface inside the polygon.
    surf = zurf.copy()
    surf2 = zurf.copy()
    surf.add_inside(poly, surf2)
    assert (surf.values.mean() == pytest.approx(2206.2, abs=0.01))
    # Divide by zero inside the polygon; result must still be writable in
    # both binary and ASCII Irap formats.
    surf = zurf.copy()
    surf.div_inside(poly, 0.0)
    surf.to_file(join(tmpdir, 'div2.gri'))
    surf.to_file(join(tmpdir, 'div2.fgr'), fformat='irap_ascii')
    # Set a constant inside the polygon.
    surf = zurf.copy()
    surf.set_inside(poly, 700)
    assert (surf.values.mean() == pytest.approx(1402.52, abs=0.01))
    # Eliminate (mask) values inside the polygon.
    surf = zurf.copy()
    surf.eli_inside(poly)
    assert (surf.values.mean() == pytest.approx(1706.52, abs=0.01))
def validate_user_form(hashid, host):
    """Resolve the form for *hashid* and check that *host* may submit to it.

    Pins the form to *host* on first submission. Raises SubmitFormError for
    unknown hashids, disabled forms and mismatched hosts.
    """
    form = Form.get_with_hashid(hashid)
    if not form:
        raise SubmitFormError(errors.bad_hashid_error(hashid))
    assign_ajax(form, request_wants_json())
    if form.disabled:
        raise SubmitFormError(errors.disabled_error())
    if not form.host:
        # First submission ever: remember which host this form lives on.
        form.host = host
        DB.session.add(form)
        DB.session.commit()
        return form
    if form.sitewide:
        # Sitewide forms accept any host under the registered one.
        host_ok = remove_www(host).startswith(remove_www(form.host))
    else:
        host_ok = form.host.rstrip('/') == host.rstrip('/')
    if not host_ok:
        raise SubmitFormError(errors.mismatched_host_error(host, form))
    return form
def make_multiplexer_and_dialogues() -> Tuple[(Multiplexer, OefSearchDialogues, Crypto, SOEFConnection)]:
    """Build a SOEF connection wired into a Multiplexer, plus the dialogue
    container and crypto identity used to drive it in tests."""
    crypto = make_crypto(DEFAULT_LEDGER)
    identity = Identity('identity', address=crypto.address, public_key=crypto.public_key)
    skill_id = 'some/skill:0.1.0'
    oef_search_dialogues = OefSearchDialogues(skill_id)
    # Test configuration pointing at the public s-oef endpoint; the api_key
    # here is the well-known test key.
    configuration = ConnectionConfig(api_key='TwiCIriSl0mLahw17pyqoA', soef_addr='s-oef.fetch.ai', soef_port=443, restricted_to_protocols={OefSearchMessage.protocol_specification_id, OefSearchMessage.protocol_id}, connection_id=SOEFConnection.connection_id)
    soef_connection = SOEFConnection(configuration=configuration, data_dir=MagicMock(), identity=identity)
    multiplexer = Multiplexer([soef_connection])
    return (multiplexer, oef_search_dialogues, crypto, soef_connection)
def show_stats_inside_rms():
    """Print per-facies and overall mean/stddev for selected grid properties.

    Intended to run inside RMS, where the global ``project`` handle exists.
    """
    prj = project
    gridmodel = 'Reek'
    faciesname = 'Facies'
    propnames = ['Poro', 'Perm']
    facies = xtgeo.gridproperty_from_roxar(prj, gridmodel, faciesname)
    print('Facies codes are: {}'.format(facies.codes))
    for propname in propnames:
        prop = xtgeo.gridproperty_from_roxar(prj, gridmodel, propname)
        print('Working with {}'.format(prop.name))
        for (key, fname) in facies.codes.items():
            # Mask the property values to the cells of this facies code.
            avg = prop.values[(facies.values == key)].mean()
            std = prop.values[(facies.values == key)].std()
            print('For property {} in facies {}, avg is {:10.3f} and stddev is {:9.3f}'.format(propname, fname, avg, std))
        # Unmasked statistics over all facies.
        avg = prop.values.mean()
        std = prop.values.std()
        print('For property {} in ALL facies, avg is {:10.3f} and stddev is {:9.3f}'.format(propname, avg, std))
def run(fips_dir, proj_dir, args):
    """Build (or clean) a target in the current fips project.

    args layout: [target] [config] ['--' build-tool-args...]; missing target
    and config fall back to the project settings.
    """
    if not util.is_valid_project_dir(proj_dir):
        log.error('must be run in a project directory')
    # Everything after a literal '--' is passed straight to the build tool.
    build_tool_args = None
    if '--' in args:
        sep = args.index('--')
        args, build_tool_args = args[:sep], args[(sep + 1):]
    tgt_name = args[0] if len(args) > 0 else None
    cfg_name = args[1] if len(args) > 1 else None
    if not cfg_name:
        cfg_name = settings.get(proj_dir, 'config')
    if not tgt_name:
        tgt_name = settings.get(proj_dir, 'target')
    if tgt_name == 'clean':
        project.make_clean(fips_dir, proj_dir, cfg_name)
    else:
        project.build(fips_dir, proj_dir, cfg_name, tgt_name, build_tool_args)
def get_data_after_after_leadership(dst: bool) -> list[dict[(str, int)]]:
    """Read the trailing post-leadership fields from the current stream.

    Field widths differ by variant: dst reads lengths 4 and 12; otherwise
    lengths 4, 5 and 7 are read, in that order.
    """
    data: list[dict[(str, int)]] = [next_int_len(4)]
    if dst:
        data.append(next_int_len(12))
    else:
        data.append(next_int_len(5))
        data.append(next_int_len(7))
    return data
class MeasureCalculation(object):
def __init__(self, measure, start_date=None, end_date=None, verbose=False):
    """Prepare a calculation for *measure* over [start_date, end_date]."""
    self.verbose = verbose
    # Directory of this module, used to locate bundled query templates.
    self.fpath = os.path.dirname(__file__)
    self.measure = measure
    self.start_date = start_date
    self.end_date = end_date
def table_name(self, org_type):
    """Return the results table name for this measure at *org_type* level."""
    template = '{}_data_{}'
    return template.format(org_type, self.measure.id)
def check_definition(self):
    """Validate the measure definition by dry-running the practice query."""
    self.calculate_practice_ratios(dry_run=True)
def calculate(self, bigquery_only=False):
    """Run the full calculation: practices first, then each aggregate
    organisation level, then the global deciles."""
    self.calculate_practices(bigquery_only=bigquery_only)
    # Aggregate levels, in the same order as before.
    for org_type in ('pcn', 'ccg', 'stp', 'regtm'):
        self.calculate_orgs(org_type, bigquery_only=bigquery_only)
    self.calculate_global(bigquery_only=bigquery_only)
def calculate_practices(self, bigquery_only=False):
    """Compute practice-level ratios, percentiles, global centiles and
    (for cost-based measures) cost savings; optionally persist locally."""
    self.calculate_practice_ratios()
    self.add_practice_percent_rank()
    self.calculate_global_centiles_for_practices()
    if self.measure.is_cost_based:
        self.calculate_cost_savings_for_practices()
    if (not bigquery_only):
        self.write_practice_ratios_to_database()
def calculate_practice_ratios(self, dry_run=False):
    """Build and run the practice-ratios query for this measure.

    Assembles SQL fragments that alias each numerator/denominator column
    with a num_/denom_ prefix, then fills the 'practice_ratios' query
    template with them.
    """
    m = self.measure
    numerator_aliases = ''
    denominator_aliases = ''
    aliased_numerators = ''
    aliased_denominators = ''
    for col in self._get_col_aliases('denominator'):
        denominator_aliases += (', denom.%s AS denom_%s' % (col, col))
        aliased_denominators += (', denom_%s' % col)
    for col in self._get_col_aliases('numerator'):
        numerator_aliases += (', num.%s AS num_%s' % (col, col))
        aliased_numerators += (', num_%s' % col)
    context = {'numerator_from': m.numerator_from, 'numerator_where': m.numerator_where, 'numerator_columns': self._columns_for_select('numerator'), 'denominator_columns': self._columns_for_select('denominator'), 'denominator_from': m.denominator_from, 'denominator_where': m.denominator_where, 'numerator_aliases': numerator_aliases, 'denominator_aliases': denominator_aliases, 'aliased_denominators': aliased_denominators, 'aliased_numerators': aliased_numerators, 'start_date': self.start_date, 'end_date': self.end_date}
    self.insert_rows_from_query('practice_ratios', self.table_name('practice'), context, dry_run=dry_run)
def add_practice_percent_rank(self):
    """Annotate the practice table with each practice's percentile rank."""
    self.insert_rows_from_query('practice_percent_rank', self.table_name('practice'), {})
def calculate_global_centiles_for_practices(self):
    """Compute global decile rows from the practice-level data.

    Sums every num_/denom_ column, and for percentage cost-based measures
    additionally derives per-unit costs for numerator and denominator.
    """
    extra_fields = []
    for col in self._get_col_aliases('numerator'):
        extra_fields.append(('num_' + col))
    for col in self._get_col_aliases('denominator'):
        extra_fields.append(('denom_' + col))
    extra_select_sql = ''
    for f in extra_fields:
        extra_select_sql += (', SUM(%s) as %s' % (f, f))
    if (self.measure.is_cost_based and self.measure.is_percentage):
        extra_select_sql += ', (SUM(denom_cost) - SUM(num_cost)) / (SUM(denom_quantity)- SUM(num_quantity)) AS cost_per_denom,SUM(num_cost) / SUM(num_quantity) as cost_per_num'
    context = {'extra_select_sql': extra_select_sql}
    self.insert_rows_from_query('global_deciles_practices', self.table_name('global'), context)
def calculate_cost_savings_for_practices(self):
    """Append cost-savings columns to the practice table, picking the query
    variant that matches the measure type."""
    query_id = ('practice_percentage_measure_cost_savings'
                if self.measure.is_percentage
                else 'practice_list_size_measure_cost_savings')
    self.insert_rows_from_query(query_id, self.table_name('practice'), {})
def write_practice_ratios_to_database(self):
f = tempfile.TemporaryFile(mode='r+')
writer = csv.DictWriter(f, fieldnames=MEASURE_FIELDNAMES)
for datum in self.get_rows_as_dicts(self.table_name('practice')):
datum['measure_id'] = self.measure.id
if self.measure.is_cost_based:
datum['cost_savings'] = json.dumps(convertSavingsToDict(datum))
datum['percentile'] = normalisePercentile(datum['percentile'])
datum = {fn: datum[fn] for fn in MEASURE_FIELDNAMES if (fn in datum)}
writer.writerow(datum)
copy_str = 'COPY frontend_measurevalue(%s) FROM STDIN '
copy_str += 'WITH (FORMAT CSV)'
self.log((copy_str % ', '.join(MEASURE_FIELDNAMES)))
f.seek(0)
with connection.cursor() as cursor:
cursor.copy_expert((copy_str % ', '.join(MEASURE_FIELDNAMES)), f)
f.close()
def calculate_orgs(self, org_type, bigquery_only=False):
self.calculate_org_ratios(org_type)
self.add_org_percent_rank(org_type)
self.calculate_global_centiles_for_orgs(org_type)
if self.measure.is_cost_based:
self.calculate_cost_savings_for_orgs(org_type)
if (not bigquery_only):
self.write_org_ratios_to_database(org_type)
def calculate_org_ratios(self, org_type):
numerator_aliases = denominator_aliases = ''
for col in self._get_col_aliases('denominator'):
denominator_aliases += (', SUM(denom_%s) AS denom_%s' % (col, col))
for col in self._get_col_aliases('numerator'):
numerator_aliases += (', SUM(num_%s) AS num_%s' % (col, col))
context = {'denominator_aliases': denominator_aliases, 'numerator_aliases': numerator_aliases}
self.insert_rows_from_query('{}_ratios'.format(org_type), self.table_name(org_type), context)
def add_org_percent_rank(self, org_type):
self.insert_rows_from_query('{}_percent_rank'.format(org_type), self.table_name(org_type), {})
def calculate_global_centiles_for_orgs(self, org_type):
extra_fields = []
for col in self._get_col_aliases('numerator'):
extra_fields.append(('num_' + col))
for col in self._get_col_aliases('denominator'):
extra_fields.append(('denom_' + col))
extra_select_sql = ''
for f in extra_fields:
extra_select_sql += (', global_deciles.%s as %s' % (f, f))
if (self.measure.is_cost_based and self.measure.is_percentage):
extra_select_sql += ', global_deciles.cost_per_denom AS cost_per_denom, global_deciles.cost_per_num AS cost_per_num'
context = {'extra_select_sql': extra_select_sql}
self.insert_rows_from_query('global_deciles_{}s'.format(org_type), self.table_name('global'), context)
def calculate_cost_savings_for_orgs(self, org_type):
if self.measure.is_percentage:
query_id = '{}_percentage_measure_cost_savings'.format(org_type)
else:
query_id = '{}_list_size_measure_cost_savings'.format(org_type)
self.insert_rows_from_query(query_id, self.table_name(org_type), {})
def write_org_ratios_to_database(self, org_type):
for datum in self.get_rows_as_dicts(self.table_name(org_type)):
datum['measure_id'] = self.measure.id
if self.measure.is_cost_based:
datum['cost_savings'] = convertSavingsToDict(datum)
datum['percentile'] = normalisePercentile(datum['percentile'])
datum = {fn: datum[fn] for fn in MEASURE_FIELDNAMES if (fn in datum)}
MeasureValue.objects.create(**datum)
def calculate_global(self, bigquery_only=False):
if self.measure.is_cost_based:
self.calculate_global_cost_savings()
if (not bigquery_only):
self.write_global_centiles_to_database()
def calculate_global_cost_savings(self):
self.insert_rows_from_query('global_cost_savings', self.table_name('global'), {})
def write_global_centiles_to_database(self):
self.log(('Writing global centiles from %s to database' % self.table_name('global')))
for d in self.get_rows_as_dicts(self.table_name('global')):
regtm_cost_savings = {}
stp_cost_savings = {}
ccg_cost_savings = {}
pcn_cost_savings = {}
practice_cost_savings = {}
d['measure_id'] = self.measure.id
new_d = {}
for (attr, value) in d.items():
new_d[attr.replace('global_', '')] = value
d = new_d
(mg, _) = MeasureGlobal.objects.get_or_create(measure_id=self.measure.id, month=d['month'])
if self.measure.is_cost_based:
practice_cost_savings = convertSavingsToDict(d, prefix='practice')
pcn_cost_savings = convertSavingsToDict(d, prefix='pcn')
ccg_cost_savings = convertSavingsToDict(d, prefix='ccg')
stp_cost_savings = convertSavingsToDict(d, prefix='stp')
regtm_cost_savings = convertSavingsToDict(d, prefix='regtm')
mg.cost_savings = {'regional_team': regtm_cost_savings, 'stp': stp_cost_savings, 'ccg': ccg_cost_savings, 'pcn': pcn_cost_savings, 'practice': practice_cost_savings}
practice_deciles = convertDecilesToDict(d, prefix='practice')
pcn_deciles = convertDecilesToDict(d, prefix='pcn')
ccg_deciles = convertDecilesToDict(d, prefix='ccg')
stp_deciles = convertDecilesToDict(d, prefix='stp')
regtm_deciles = convertDecilesToDict(d, prefix='regtm')
mg.percentiles = {'regional_team': regtm_deciles, 'stp': stp_deciles, 'ccg': ccg_deciles, 'pcn': pcn_deciles, 'practice': practice_deciles}
for (attr, value) in d.items():
setattr(mg, attr, value)
mg.save()
def insert_rows_from_query(self, query_id, table_name, ctx, dry_run=False):
query_path = os.path.join(self.fpath, 'measure_sql', (query_id + '.sql'))
ctx['measure_id'] = self.measure.id
with open(query_path) as f:
sql = f.read()
self.get_table(table_name).insert_rows_from_query(sql, substitutions=ctx, dry_run=dry_run)
def get_rows_as_dicts(self, table_name):
return self.get_table(table_name).get_rows_as_dicts()
def get_table(self, table_name):
client = Client('measures')
return client.get_table(table_name)
def log(self, message):
if self.verbose:
logger.warning(message)
else:
logger.info(message)
def _get_col_aliases(self, num_or_denom):
assert (num_or_denom in ['numerator', 'denominator'])
cols = []
cols = self._columns_for_select(num_or_denom)
aliases = re.findall('AS ([a-z0-9_]+)', cols)
return [x for x in aliases if (x not in num_or_denom)]
def _columns_for_select(self, num_or_denom):
assert (num_or_denom in ['numerator', 'denominator'])
fieldname = ('%s_columns' % num_or_denom)
val = getattr(self.measure, fieldname)
if (self.measure.is_cost_based and self.measure.is_percentage):
val += '\n , SUM(items) AS items, SUM(actual_cost) AS cost, SUM(quantity) AS quantity '
return val |
class TestItem(object):
    """Integration test for in-game item usage over the lobby protocol."""

    def testEuropean(self):
        # Spin up a server plus four event-tapped clients so emitted
        # server events can be asserted on later.
        env = Environ()
        t = EventTap()
        s = env.server_core()
        (a, b, c, d) = [env.client_core() for _ in range(4)]
        t.tap(a, b, c, d)
        a.auth.login('Alice')
        b.auth.login('Bob')
        c.auth.login('Cirno')
        d.auth.login('Daiyousei')
        wait()
        a.room.create('Boom', 'THBattleDummy4', {})
        wait()
        gid = a.game.gid_of(t[a.events.game_joined])
        b.room.join(gid)
        c.room.join(gid)
        d.room.join(gid)
        # Unknown SKU is rejected outright.
        a.room.use_item('whatever-blah-blah')
        wait()
        assert (t.take(a.events.server_error) == 'invalid_item_sku')
        # Known item, but not usable in this game mode.
        a.room.use_item('imperial-role:attacker')
        wait()
        assert (t.take(a.events.server_error) == 'incorrect_game_mode')
        # Zero stock on the backend -> item_not_found.
        s.backend.items[a.auth.pid] = {'european': 0}
        a.room.use_item('european')
        wait()
        assert (t.take(a.events.server_error) == 'item_not_found')
        # With stock available the item is accepted.
        s.backend.items[a.auth.pid] = {'european': 1}
        a.room.use_item('european')
        wait()
        assert (t.take(a.events.server_info) == 'use_item_success')
        # A second player using 'european' in the same game conflicts.
        s.backend.items[b.auth.pid] = {'european': 1}
        b.room.use_item('european')
        wait()
        assert (t.take(b.events.server_error) == 'european_conflict')
        a.room.get_ready()
        b.room.get_ready()
        c.room.get_ready()
        d.room.get_ready()
        wait()
        # After everyone readies up, A's stock was consumed while B's
        # conflicted usage consumed nothing.
        assert (s.backend.items[a.auth.pid]['european'] == 0)
        assert (s.backend.items[b.auth.pid]['european'] == 1)
def test_writer_schema_always_read():
    """Round-trip records through fastavro where a named type (`UUID`)
    is declared inside the first union branch and referenced by name
    from the second; reading the file back must not raise.

    FIX: the previous docstring had been truncated to a lone quote
    character, which made this function a syntax error.
    """
    schema = {'type': 'record', 'name': 'Outer', 'fields': [{'name': 'item', 'type': [{'type': 'record', 'name': 'Inner1', 'fields': [{'name': 'id', 'type': {'type': 'record', 'name': 'UUID', 'fields': [{'name': 'id', 'type': 'string'}]}, 'default': {'id': ''}}, {'name': 'description', 'type': 'string'}, {'name': 'size', 'type': 'int'}]}, {'type': 'record', 'name': 'Inner2', 'fields': [{'name': 'id', 'type': 'UUID', 'default': {'id': ''}}, {'name': 'name', 'type': 'string'}, {'name': 'age', 'type': 'long'}]}]}]}
    records = [{'item': {'description': 'test', 'size': 1}}, {'item': {'id': {'id': '#1'}, 'name': 'foobar', 'age': 12}}]
    file = BytesIO()
    fastavro.writer(file, fastavro.parse_schema(schema), records)
    file.seek(0)
    fastavro.reader(file)
# NOTE(review): the next line appears to be a truncated decorator (the
# decorator name, e.g. `@router.get(`, was lost in extraction); restore
# it from the original source before use.
(CONNECTIONS, dependencies=[Security(verify_oauth_client, scopes=[CONNECTION_READ])], response_model=Page[ConnectionConfigurationResponse])
def get_connections(*, db: Session=Depends(deps.get_db), params: Params=Depends(), search: Optional[str]=None, disabled: Optional[bool]=None, test_status: Optional[TestStatus]=None, system_type: Optional[SystemType]=None, orphaned_from_system: Optional[bool]=None, connection_type: Optional[List[str]]=Query(default=None)) -> AbstractPage[ConnectionConfig]:
    """Return a paginated list of connection configurations.

    All supplied filters are ANDed together; within `connection_type`,
    recognised ConnectionType values and unrecognised strings (treated
    as SaaS sub-types) are ORed. Results are ordered by name.
    """
    logger.info("Finding connection configurations with pagination params {} and search query: '{}'.", params, (search if search else ''))
    query = ConnectionConfig.query(db)
    # Free-text search over key, name and description (LIKE-escaped).
    if search:
        query = query.filter(or_(ConnectionConfig.key.ilike(f'%{escape_like(search)}%'), ConnectionConfig.name.ilike(f'%{escape_like(search)}%'), ConnectionConfig.description.ilike(f'%{escape_like(search)}%')))
    if connection_type:
        # Split the requested types into enum-backed connection types
        # and raw strings matched against the SaaS config's 'type'.
        connection_types = []
        saas_connection_types = []
        for ct in connection_type:
            ct = ct.lower()
            try:
                conn_type = ConnectionType(ct)
                connection_types.append(conn_type)
            except ValueError:
                saas_connection_types.append(ct)
        query = query.filter(or_(ConnectionConfig.connection_type.in_(connection_types), ConnectionConfig.saas_config['type'].astext.in_(saas_connection_types)))
    if (disabled is not None):
        query = query.filter((ConnectionConfig.disabled == disabled))
    if test_status:
        # TestStatus maps onto the boolean last_test_succeeded column.
        query = query.filter(ConnectionConfig.last_test_succeeded.is_(test_status.str_to_bool()))
    if (orphaned_from_system is not None):
        # "Orphaned" means not linked to any system.
        if orphaned_from_system:
            query = query.filter(ConnectionConfig.system_id.is_(null()))
        else:
            query = query.filter(ConnectionConfig.system_id.is_not(null()))
    if system_type:
        # saas/manual map to single connection types; 'database' means
        # everything that is neither saas nor manual.
        if (system_type == SystemType.saas):
            query = query.filter((ConnectionConfig.connection_type == ConnectionType.saas))
        elif (system_type == SystemType.manual):
            query = query.filter((ConnectionConfig.connection_type == ConnectionType.manual))
        elif (system_type == SystemType.database):
            query = query.filter(ConnectionConfig.connection_type.notin_([ConnectionType.saas, ConnectionType.manual]))
    return paginate(query.order_by(ConnectionConfig.name.asc()), params=params)
class OptionSeriesScatter3dSonificationContexttracksPointgrouping(Options):
    """Accessor wrapper around the point-grouping sonification options.

    NOTE(review): every accessor below is defined twice (a getter form
    followed by a setter form). The original source almost certainly
    decorated these with @property / @<name>.setter, stripped in this
    copy -- as written each second `def` silently replaces the first.
    """

    def algorithm(self):
        # Falls back to 'minmax' (presumably the option's default).
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Falls back to True.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Falls back to 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Falls back to 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): truncated decorator -- presumably
# `@pytest.mark.parametrize(...)`; restore from the original source.
.parametrize('name,kwargs', [('glorot_uniform_init.v1', {}), ('zero_init.v1', {}), ('uniform_init.v1', {'lo': (- 0.5), 'hi': 0.5}), ('normal_init.v1', {'mean': 0.1})])
def test_initializer_from_config(name, kwargs):
    """Each registered initializer should resolve from a config dict
    and be callable with (ops, shape)."""
    # NOTE(review): the '' key looks like a stripped registry marker
    # (e.g. '@initializers') -- verify against the original source.
    cfg = {'test': {'': name, **kwargs}}
    func = registry.resolve(cfg)['test']
    func(NumpyOps(), (1, 2, 3, 4))
def get_git_commit(characters=8, filename=None):
    """Return the first `characters` hex chars of the latest commit hash.

    If `filename` names an existing file, the hash is that of the last
    commit touching the file. Returns None when git produces no output;
    raises subprocess.CalledProcessError on a non-zero exit (check=True).
    """
    # Because the command is run without a shell, the literal quotes in
    # --pretty=format:"%H" reach git untouched and show up in stdout as
    # `"<hash>"`; the slice below starts at index 1 to skip the opening
    # quote.
    cmd = 'git log -n 1 --pretty=format:"%H"'
    cmd_args = cmd.split(' ')
    if (filename and os.path.isfile(filename)):
        cmd_args.append(filename)
    shell = subprocess.run(cmd_args, stdout=subprocess.PIPE, check=True)
    if shell.stdout:
        return shell.stdout[1:(characters + 1)].decode()
    return None
def test_is_method_of():
    """is_method_of must be true only for methods of the given object,
    not for other attribute kinds (assigned callables, plain values)."""
    class Sample():
        def method(self):
            pass
        assigned_function = len
        member = 1234
        string_member = 'abcd'
    instance = Sample()
    # Accessing via the instance or via the class should not change
    # the verdicts.
    for owner in (instance, Sample):
        assert is_method_of(owner.method, instance)
        for attr in ('assigned_function', 'member', 'string_member'):
            assert not is_method_of(getattr(owner, attr), instance)
class TestTimerStop():
    """Timer.stop() behaviour for running and non-running timers."""

    # NOTE(review): truncated decorator -- presumably
    # `@pytest.mark.parametrize(...)`; restore from the original source.
    .parametrize('state', [State.ENDED, State.STOPPED])
    def test_not_stop_when_timer_is_not_running(self, bus, state):
        # A timer already ended/stopped reports False from stop().
        timer = Timer(bus)
        timer.state = state
        assert (not timer.stop())

    def test_stops_when_timer_is_running(self, bus, mocker):
        # Stopping a running timer returns True, leaves it not running,
        # and notifies TIMER_STOP subscribers with a zeroed payload.
        timer = Timer(bus)
        subscriber = mocker.Mock()
        bus.connect(Events.TIMER_STOP, subscriber, weak=False)
        timer.start(60)
        result = timer.stop()
        assert (result is True)
        assert (timer.is_running() is False)
        subscriber.assert_called_once_with(Events.TIMER_STOP, payload=TimerPayload(time_left=0, duration=0))
# NOTE(review): truncated decorator -- presumably `@pytest.mark.unit`.
.unit
class TestLogLevel():
    """Log levels emitted by the _log_exception/_log_warning helpers."""

    # NOTE(review): likely a stripped `@pytest.fixture` -- the tests
    # below receive `error_message` as an injected argument.
    def error_message(self):
        return 'Error message'

    # NOTE(review): truncated decorator -- presumably
    # `@pytest.mark.parametrize(...)`.
    .parametrize('dev_mode', [True, False])
    def test_logger_exception(self, dev_mode, error_message, loguru_caplog):
        # Exceptions log at ERROR regardless of dev mode.
        _log_exception(ValueError(error_message), dev_mode)
        assert ('ERROR' in loguru_caplog.text)
        assert (error_message in loguru_caplog.text)

    def test_logger_warning_dev_mode(self, error_message, loguru_caplog):
        # In dev mode warnings stay at WARNING level.
        _log_warning(ValueError(error_message), True)
        assert ('WARNING' in loguru_caplog.text)
        assert (error_message in loguru_caplog.text)

    def test_logger_warning_prod_mode(self, error_message, loguru_caplog):
        # In prod mode the same warning is escalated to ERROR.
        _log_warning(ValueError(error_message), False)
        assert ('ERROR' in loguru_caplog.text)
        assert (error_message in loguru_caplog.text)
class GPComment(HasMentions, HasReactions, Document):
    """Comment on a Gameplan document (discussion or task).

    Mixes in mention notifications (HasMentions, scanning `content`)
    and reaction handling (HasReactions).
    """
    # Linked notifications are unlinked rather than deleted with the
    # comment.
    on_delete_set_null = ['GP Notification']
    # Field HasMentions scans for @-mentions.
    mentions_field = 'content'

    def before_insert(self):
        """Reject new comments on closed discussions."""
        if (self.reference_doctype not in ['GP Discussion']):
            return
        reference_doc = frappe.get_doc(self.reference_doctype, self.reference_name)
        if reference_doc.meta.has_field('closed_at'):
            if reference_doc.closed_at:
                frappe.throw('Cannot add comment to a closed discussion')

    def after_insert(self):
        """Update last-post metadata and counters on the parent doc."""
        if (self.reference_doctype not in ['GP Discussion', 'GP Task']):
            return
        reference_doc = frappe.get_doc(self.reference_doctype, self.reference_name)
        if reference_doc.meta.has_field('last_post_at'):
            reference_doc.set('last_post_at', frappe.utils.now())
        if reference_doc.meta.has_field('last_post_by'):
            reference_doc.set('last_post_by', frappe.session.user)
        if reference_doc.meta.has_field('comments_count'):
            reference_doc.set('comments_count', (reference_doc.comments_count + 1))
        if (reference_doc.doctype == 'GP Discussion'):
            # Commenting also counts as participating in / visiting
            # the discussion.
            reference_doc.update_participants_count()
            reference_doc.track_visit()
        reference_doc.save(ignore_permissions=True)

    def on_trash(self):
        """Decrement the parent document's comment counter."""
        if (self.reference_doctype not in ['GP Discussion', 'GP Task']):
            return
        reference_doc = frappe.get_doc(self.reference_doctype, self.reference_name)
        if reference_doc.meta.has_field('comments_count'):
            # db_set writes directly, bypassing the document lifecycle.
            reference_doc.db_set('comments_count', (reference_doc.comments_count - 1))

    def validate(self):
        """Normalise content and de-duplicate reactions."""
        self.content = remove_empty_trailing_paragraphs(self.content)
        self.de_duplicate_reactions()

    def on_update(self):
        """Refresh the search index and fan out notifications."""
        self.update_discussion_index()
        self.notify_mentions()
        self.notify_reactions()

    def update_discussion_index(self):
        """Add/remove this comment in the search index; `deleted_at`
        marks a soft-deleted comment to be removed."""
        if (self.reference_doctype in ['GP Discussion', 'GP Task']):
            search = GameplanSearch()
            if self.deleted_at:
                search.remove_doc(self)
            else:
                search.index_doc(self)
# NOTE(review): truncated decorator -- presumably a dredd-hooks
# registration such as `@hooks.before('Discount Codes > ...')`.
('Discount Codes > Discount Code Detail > Discount Code Detail')
def discount_code_get_detail(transaction):
    """Seed the test DB with an event and one ticket discount code so
    the 'Discount Code Detail' API transaction has data to fetch."""
    with stash['app'].app_context():
        event = EventFactoryBasic()
        db.session.add(event)
        db.session.commit()
        # Relies on the event created above receiving id 1.
        discount_code = DiscountCodeTicketFactory(event_id=1)
        db.session.add(discount_code)
        db.session.commit()
def check_package_name(manifest: Manifest, warnings: Dict[str, str]) -> Dict[str, str]:
    """Validate the manifest's `name` field.

    Returns `warnings` extended with a 'name' entry when the field is
    missing/empty or does not match PACKAGE_NAME_REGEX; otherwise
    returns `warnings` unchanged.
    """
    name = manifest.get('name')
    if not name:
        return assoc(warnings, 'name', WARNINGS['name_missing'])
    if not re.match(PACKAGE_NAME_REGEX, name):
        return assoc(warnings, 'name', WARNINGS['name_invalid'])
    return warnings
class CloudBillingClient(object):
    """Client for the Google Cloud Billing API.

    Wraps CloudBillingRepositoryClient with rate limiting configured
    from `global_configs`.
    """

    def __init__(self, global_configs, **kwargs):
        # Rate-limiter settings come from this API's global config entry.
        (max_calls, quota_period) = api_helpers.get_ratelimiter_config(global_configs, API_NAME)
        # Discovery-doc caching defaults to off unless configured.
        cache_discovery = (global_configs['cache_discovery'] if ('cache_discovery' in global_configs) else False)
        self.repository = CloudBillingRepositoryClient(quota_max_calls=max_calls, quota_period=quota_period, use_rate_limiter=kwargs.get('use_rate_limiter', True), cache_discovery=cache_discovery, cache=global_configs.get('cache'))

    def get_billing_info(self, project_id):
        """Return billing info for `project_id`.

        Returns {} when the project has none (HTTP 404); raises
        ApiExecutionError for any other API failure.
        """
        try:
            name = self.repository.projects.get_name(project_id)
            results = self.repository.projects.get_billing_info(name)
            LOGGER.debug('Getting the billing information for a project, project_id = %s, results = %s', project_id, results)
            return results
        except (errors.HttpError, HttpLib2Error) as e:
            # A 404 just means no billing info; treat it as empty
            # rather than an error.
            if (isinstance(e, errors.HttpError) and (e.resp.status == 404)):
                LOGGER.warning(e)
                return {}
            api_exception = api_errors.ApiExecutionError('billing_info', e, 'project_id', project_id)
            LOGGER.exception(api_exception)
            raise api_exception

    def get_billing_accounts(self, master_account_id=None):
        """List billing accounts, optionally restricted to subaccounts
        of `master_account_id`. Raises ApiExecutionError on failure.
        """
        filters = ''
        if master_account_id:
            filters = 'master_billing_account={}'.format(self.repository.billing_accounts.get_name(master_account_id))
        try:
            paged_results = self.repository.billing_accounts.list(filter=filters)
            flattened_results = api_helpers.flatten_list_results(paged_results, 'billingAccounts')
            LOGGER.debug('Getting billing_accounts, master_account_id = %s, flattened_results = %s', master_account_id, flattened_results)
            return flattened_results
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError('billing_accounts', e, 'filter', filters)
            LOGGER.exception(api_exception)
            raise api_exception

    def get_billing_acct_iam_policies(self, account_id):
        """Return the IAM policy of billing account `account_id`.

        Raises ApiExecutionError on API failure.
        """
        name = self.repository.billing_accounts.get_name(account_id)
        try:
            results = self.repository.billing_accounts.get_iam_policy(name, include_body=False)
            LOGGER.debug('Getting IAM policies for a given billing account, account_id = %s, results = %s', account_id, results)
            return results
        except (errors.HttpError, HttpLib2Error) as e:
            api_exception = api_errors.ApiExecutionError(account_id, e)
            LOGGER.exception(api_exception)
            raise api_exception
# NOTE(review): truncated decorator -- presumably
# `@pytest.mark.parametrize(...)`; restore from the original source.
.parametrize('route, expected, num', (('/c/foo/arg/baz', {'bar': 'arg'}, 1), ('/c/foo/bar/other', {'foo': 'foo'}, 2), ('/c/foo/42-7/baz', {'bar': '42-7'}, 1), ('/upload/youtube/auth/token', {'service': 'youtube'}, 4), ('/x/y/o.o/w', {'y': 'y'}, 7)))
def test_params_in_non_taken_branches(param_router, route, expected, num):
    """Router lookups must report only the params captured on the
    branch that actually matched, not from sibling branches."""
    (resource, __, params, __) = param_router.find(route)
    assert (resource.resource_id == num)
    assert (params == expected)
class Uniform(Distribution):
    """Continuous uniform distribution on the interval [lower, upper].

    Either bound may be None, defaulting to the unit interval
    (lower=0.0, upper=1.0). Wraps scipy.stats.uniform and exposes
    moments, pdf/cdf evaluation, sampling and recurrence coefficients
    for orthogonal polynomial construction.
    """

    def __init__(self, lower, upper):
        # Default to the unit interval when bounds are not supplied.
        self.lower = 0.0 if lower is None else lower
        self.upper = 1.0 if upper is None else upper
        # scipy parameterises uniform by (loc, scale) = (lower, width).
        self.parent = uniform(loc=self.lower, scale=(self.upper - self.lower))
        self.bounds = np.array([self.lower, self.upper])
        (self.mean, self.variance, self.skewness, self.kurtosis) = self.parent.stats(moments='mvsk')
        # Jacobi parameters (0, 0): Legendre polynomials, orthogonal
        # w.r.t. the uniform weight.
        self.shape_parameter_A = 0.0
        self.shape_parameter_B = 0.0
        self.x_range_for_pdf = np.linspace(self.lower, self.upper, RECURRENCE_PDF_SAMPLES)

    def get_description(self):
        """Return a textual description of the distribution."""
        return ('is a uniform distribution over the support '
                + str(self.lower) + ' to ' + str(self.upper) + '.')

    def get_cdf(self, points=None):
        """Evaluate the CDF at `points`; raises ValueError if omitted."""
        if points is None:
            # FIX: message previously read "Please digit an input for
            # getCDF method" (garbled English, wrong method name).
            raise ValueError('Please provide an input for the get_cdf method')
        return self.parent.cdf(points)

    def get_pdf(self, points=None):
        """Evaluate the PDF at `points`; raises ValueError if omitted."""
        if points is None:
            raise ValueError('Please provide an input for the get_pdf method')
        return self.parent.pdf(points)

    def get_recurrence_coefficients(self, order):
        """Return Jacobi (here Legendre) recurrence coefficients up to
        `order`."""
        return jacobi_recurrence_coefficients(self.shape_parameter_A, self.shape_parameter_B, self.lower, self.upper, order)

    def get_icdf(self, xx):
        """Evaluate the inverse CDF (quantile function) at `xx`."""
        return self.parent.ppf(xx)

    def get_samples(self, m=None):
        """Draw `m` random samples (default 500000)."""
        number = 500000 if m is None else m
        return self.parent.rvs(size=number)
class ExcelReader():
    """Load an Excel/CSV file into an in-memory DuckDB table so it can
    be queried with SQL.

    The source is read twice: once untyped to discover the column
    layout, then again with a per-column converter applied.
    """

    def __init__(self, file_path):
        file_name = os.path.basename(file_path)
        self.file_name_without_extension = os.path.splitext(file_name)[0]
        (encoding, confidence) = detect_encoding(file_path)
        logger.info(f'Detected Encoding: {encoding} (Confidence: {confidence})')
        self.excel_file_name = file_name
        self.extension = os.path.splitext(file_name)[1]
        # First pass discovers the column count; second pass applies the
        # column formatter to every column.
        if (file_path.endswith('.xlsx') or file_path.endswith('.xls')):
            df_tmp = pd.read_excel(file_path, index_col=False)
            self.df = pd.read_excel(file_path, index_col=False, converters={i: csv_colunm_foramt for i in range(df_tmp.shape[1])})
        elif file_path.endswith('.csv'):
            df_tmp = pd.read_csv(file_path, index_col=False, encoding=encoding)
            self.df = pd.read_csv(file_path, index_col=False, encoding=encoding, converters={i: csv_colunm_foramt for i in range(df_tmp.shape[1])})
        else:
            raise ValueError('Unsupported file format.')
        self.df.replace('', np.nan, inplace=True)
        # Map original column names to their normalised display form.
        self.columns_map = {}
        for column_name in df_tmp.columns:
            self.columns_map.update({column_name: excel_colunm_format(column_name)})
            try:
                # Best-effort numeric coercion per column; columns that
                # fail are simply left as-is.
                if (not pd.api.types.is_datetime64_ns_dtype(self.df[column_name])):
                    self.df[column_name] = pd.to_numeric(self.df[column_name])
                    self.df[column_name] = self.df[column_name].fillna(0)
            except Exception as e:
                print(("can't transfor numeric column" + column_name))
        # Normalise column names: trim whitespace, spaces -> underscores.
        self.df = self.df.rename(columns=(lambda x: x.strip().replace(' ', '_')))
        self.db = duckdb.connect(database=':memory:', read_only=False)
        self.table_name = 'excel_data'
        # Expose the dataframe to DuckDB under a fixed table name.
        self.db.register(self.table_name, self.df)
        result = self.db.execute(f'DESCRIBE {self.table_name}')
        columns = result.fetchall()
        for column in columns:
            print(column)

    def run(self, sql):
        """Execute `sql` against the registered table.

        Returns (column_names, rows); raises ValueError wrapping any
        execution failure.
        """
        try:
            # Strip quotes around the table name so DuckDB resolves the
            # registered (unquoted) view name.
            if (f'"{self.table_name}"' in sql):
                sql = sql.replace(f'"{self.table_name}"', self.table_name)
            sql = add_quotes_to_chinese_columns(sql)
            print(f'excute sql:{sql}')
            results = self.db.execute(sql)
            colunms = []
            for descrip in results.description:
                colunms.append(descrip[0])
            return (colunms, results.fetchall())
        except Exception as e:
            logger.error(f'excel sql run error!, {str(e)}')
            raise ValueError(f'''Data Query Exception!
SQL[{sql}].
Error:{str(e)}''')

    def get_df_by_sql_ex(self, sql):
        """Run `sql` and wrap the result set in a DataFrame."""
        (colunms, values) = self.run(sql)
        return pd.DataFrame(values, columns=colunms)

    def get_sample_data(self):
        """Return the first 5 rows as (columns, rows)."""
        return self.run(f'SELECT * FROM {self.table_name} LIMIT 5;')
def sdiv(evm: Evm) -> None:
    """EVM SDIV opcode: signed integer division of the top two stack
    items, truncating toward zero.

    Pops dividend then divisor (interpreted as two's-complement signed
    256-bit values), charges GAS_LOW, pushes the quotient and advances
    the program counter.
    """
    # Stack order: dividend on top, divisor beneath it.
    dividend = pop(evm.stack).to_signed()
    divisor = pop(evm.stack).to_signed()
    charge_gas(evm, GAS_LOW)
    if (divisor == 0):
        # EVM convention: division by zero yields 0, no exception.
        quotient = 0
    elif ((dividend == (- U255_CEIL_VALUE)) and (divisor == (- 1))):
        # -2**255 / -1 does not fit in 256-bit two's complement; the
        # result wraps back to -2**255.
        quotient = (- U255_CEIL_VALUE)
    else:
        # Divide magnitudes, then reapply the sign of the exact result.
        sign = get_sign((dividend * divisor))
        quotient = (sign * (abs(dividend) // abs(divisor)))
    push(evm.stack, U256.from_signed(quotient))
    evm.pc += 1
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.