class InteriorWall(Wall):
name: str
material: str
def __init__(self, material: str):
super().__init__(material)
self.name = f'Interior wall made out of {material}'
def setName(self, name: str) -> None:
self.name = name
def __str__(self) -> str:
return self.name
def __repr__(self) -> str:
return str(self) |
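# Usage sketch (hedged): `Wall` is not shown in this snippet, so a minimal
# stand-in base class is defined here purely so the example runs standalone.
class Wall:
    def __init__(self, material: str):
        self.material = material

wall = InteriorWall('drywall')
print(wall)        # Interior wall made out of drywall
wall.setName('north partition')
print(repr(wall))  # north partition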
class CallInfo():
def __init__(self, function_name, args, keywords, args_arg, keywords_arg, implicit_arg, constructor):
self.function_name = function_name
self.args = args
self.keywords = keywords
self.args_arg = args_arg
self.keywords_arg = keywords_arg
self.implicit_arg = implicit_arg
self.constructor = constructor
def to_string(self):
function = self.function_name
if self.implicit_arg:
function = ((self.args[0] + '.') + self.function_name)
params = []
start = 0
if (self.implicit_arg or self.constructor):
start = 1
if self.args[start:]:
params.extend(self.args[start:])
if self.keywords:
params.extend([f'{name}={value}' for (name, value) in self.keywords])
if (self.args_arg is not None):
params.append(('*' + self.args_arg))
if self.keywords_arg:
params.append(('**' + self.keywords_arg))
return '{}({})'.format(function, ', '.join(params))
    @staticmethod
    def read(primary, pyname, definition_info, code):
is_method_call = CallInfo._is_method_call(primary, pyname)
is_constructor = CallInfo._is_class(pyname)
is_classmethod = CallInfo._is_classmethod(pyname)
info = _FunctionParser(code, (is_method_call or is_classmethod))
(args, keywords) = info.get_parameters()
args_arg = None
keywords_arg = None
if (args and args[(- 1)].startswith('**')):
keywords_arg = args[(- 1)][2:]
del args[(- 1)]
if (args and args[(- 1)].startswith('*')):
args_arg = args[(- 1)][1:]
del args[(- 1)]
if is_constructor:
args.insert(0, definition_info.args_with_defaults[0][0])
return CallInfo(info.get_function_name(), args, keywords, args_arg, keywords_arg, (is_method_call or is_classmethod), is_constructor)
    @staticmethod
    def _is_method_call(primary, pyname):
return ((primary is not None) and isinstance(primary.get_object().get_type(), pyobjects.PyClass) and CallInfo._is_method(pyname))
    @staticmethod
    def _is_class(pyname):
return ((pyname is not None) and isinstance(pyname.get_object(), pyobjects.PyClass))
    @staticmethod
    def _is_method(pyname):
if ((pyname is not None) and isinstance(pyname.get_object(), pyobjects.PyFunction)):
return (pyname.get_object().get_kind() == 'method')
return False
    @staticmethod
    def _is_classmethod(pyname):
if ((pyname is not None) and isinstance(pyname.get_object(), pyobjects.PyFunction)):
return (pyname.get_object().get_kind() == 'classmethod')
return False |
class representation():
def __init__(self, x, smiles=None):
if (smiles is not None):
self.smiles = []
for (i, smile) in enumerate(smiles):
if smile.endswith('.smi'):
smile = smile[:(- 4)]
self.smiles.append(smile)
else:
self.smiles = None
self.x = x
def __str__(self):
return self.to_string()
    @classmethod
    def from_pyxtal(cls, struc, standard=False):
if (standard and (not struc.standard_setting)):
pmg = struc.to_pymatgen()
struc.from_seed(pmg, molecules=struc.molecules, standard=True)
symmetry = [struc.mol_sites[0].wp.hall_number]
lat = struc.lattice.encode()
vector = [(symmetry + lat)]
smiles = []
for site in struc.mol_sites:
vector.append(site.encode())
smiles.append(site.molecule.smile)
x = vector
return cls(x, smiles)
    @classmethod
    def from_string(cls, inputs, smiles, composition=None):
if (composition is None):
composition = ([1] * len(smiles))
inputs = [float(tmp) for tmp in inputs.split()]
hn = int(inputs[0])
if (hn <= 2):
n_cell = 8
elif (hn <= 107):
n_cell = 6
elif (hn <= 348):
n_cell = 5
elif (hn <= 488):
n_cell = 4
else:
n_cell = 3
cell = ([hn] + inputs[1:(n_cell - 1)])
x = [cell]
n_site = int(inputs[(n_cell - 1)])
if (n_site != sum(composition)):
msg = 'Composition is inconsistent: {:d}/{:d}\n'.format(sum(composition), n_site)
msg += str(inputs)
raise ValueError(msg)
n_cell += 1
for (i, smile) in enumerate(smiles):
if smile.endswith('.smi'):
smile = smile[:(- 4)]
for c in range(composition[i]):
if (smile in ['Cl-']):
n_mol = 4
else:
n_torsion = len(find_rotor_from_smile(smile))
n_mol = (7 + n_torsion)
inputs[((n_cell + n_mol) - 2)] = int(inputs[((n_cell + n_mol) - 2)])
x.append(inputs[(n_cell - 1):((n_cell + n_mol) - 1)])
n_cell += n_mol
return cls(x, smiles)
def to_standard_setting(self):
xtal = self.to_pyxtal()
rep0 = representation.from_pyxtal(xtal, standard=True)
self.x = rep0.x
def to_pyxtal(self, smiles=None, composition=None):
from pyxtal import pyxtal
if (smiles is None):
smiles = self.smiles
if (composition is None):
composition = ([1] * len(smiles))
if ((sum(composition) + 1) != len(self.x)):
msg = 'Composition is inconsistent:\n'
msg += (str(composition) + '\n')
msg += self.to_string()
raise ValueError(msg)
v = self.x[0]
struc = pyxtal(molecular=True)
(struc.group, number) = (Group(v[0], use_hall=True), v[0])
ltype = struc.group.lattice_type
if (ltype == 'triclinic'):
(a, b, c, alpha, beta, gamma) = (v[1], v[2], v[3], v[4], v[5], v[6])
elif (ltype == 'monoclinic'):
(a, b, c, alpha, beta, gamma) = (v[1], v[2], v[3], 90, v[4], 90)
elif (ltype == 'orthorhombic'):
(a, b, c, alpha, beta, gamma) = (v[1], v[2], v[3], 90, 90, 90)
elif (ltype == 'tetragonal'):
(a, b, c, alpha, beta, gamma) = (v[1], v[1], v[2], 90, 90, 90)
elif (ltype == 'hexagonal'):
(a, b, c, alpha, beta, gamma) = (v[1], v[1], v[2], 90, 90, 120)
else:
(a, b, c, alpha, beta, gamma) = (v[1], v[1], v[1], 90, 90, 90)
try:
struc.lattice = Lattice.from_para(a, b, c, alpha, beta, gamma, ltype=ltype)
        except Exception:
print(a, b, c, alpha, beta, gamma, ltype)
raise ValueError('Problem in Lattice')
struc.numMols = ([0] * len(smiles))
struc.molecules = []
struc.mol_sites = []
count = 1
for (i, comp) in enumerate(composition):
smile = smiles[i]
if smile.endswith('.smi'):
smile = smile[:(- 4)]
for j in range(comp):
v = self.x[count]
dicts = {}
dicts['smile'] = smile
dicts['type'] = i
dicts['dim'] = 3
dicts['PBC'] = [1, 1, 1]
dicts['hn'] = struc.group.hall_number
dicts['index'] = v[0]
dicts['lattice'] = struc.lattice.matrix
dicts['lattice_type'] = ltype
dicts['center'] = v[1:4]
if (smile not in ['Cl-']):
dicts['orientation'] = np.array(v[4:7])
dicts['rotor'] = v[7:(- 1)]
dicts['reflect'] = int(v[(- 1)])
site = mol_site.from_1D_dicts(dicts)
site.type = i
struc.mol_sites.append(site)
struc.numMols[i] += site.wp.multiplicity
count += 1
struc.molecules.append(site.molecule)
struc._get_formula()
struc.source = '1D rep.'
struc.valid = True
struc.standard_setting = site.wp.is_standard_setting()
return struc
def to_string(self, time=None, eng=None, tag=None):
x = self.x
strs = '{:3d} '.format(int(x[0][0]))
if (x[0][0] <= 348):
num = 4
elif (x[0][0] <= 488):
num = 3
else:
num = 2
for c in x[0][1:num]:
strs += '{:5.2f} '.format(c)
for c in x[0][num:]:
strs += '{:5.1f} '.format(c)
strs += '{:d} '.format((len(x) - 1))
for i in range(1, len(x)):
strs += '{:d} '.format(x[i][0])
for v in x[i][1:4]:
strs += '{:4.2f} '.format(v)
for v in x[i][4:(- 1)]:
strs += '{:6.1f} '.format(v)
strs += '{:d} '.format(int(x[i][(- 1)]))
if (time is not None):
strs += '{:5.2f}'.format(time)
if (eng is not None):
strs += '{:11.3f}'.format(eng)
if (tag is not None):
strs += ' {:s}'.format(tag)
return strs
def same_smiles(self, smiles):
        if (len(self.smiles) == len(smiles)):
            for (s1, s2) in zip(self.smiles, smiles):
                if (s1 != s2):
                    return False
            return True
        else:
            return False
def get_dist(self, rep):
from pyxtal.symmetry import Wyckoff_position as WP
        if (not self.same_smiles(rep.smiles)):
msg = 'different smiles'
print(msg)
return None
elif (len(self.x) != len(rep.x)):
msg = 'different number of sites'
print(msg)
return None
elif (self.x[0][0] != rep.x[0][0]):
msg = 'different space group numbers'
print(msg)
return None
else:
diffs = []
wp = WP.from_group_and_index(self.x[0][0], 0, use_hall=True)
for i in range(len(self.x)):
diff = np.zeros(len(self.x[i]))
tmp1 = np.array(self.x[i])
tmp2 = np.array(rep.x[i])
if (i == 0):
diff_cell = (tmp2 - tmp1)
diffs.extend(diff_cell)
else:
xyzs = wp.apply_ops(tmp2[:3])
diff_xyzs = (xyzs - tmp1[:3])
diff_xyzs -= np.round(diff_xyzs)
id = np.argmin(np.linalg.norm(diff_xyzs, axis=1))
diff_xyz = diff_xyzs[id]
diff_ori = (tmp2[3:6] - tmp1[3:6])
diff_ori /= [360.0, 180.0, 360.0]
diff_ori -= np.round(diff_ori)
diff_ori *= [360.0, 180.0, 360.0]
diff_tor = (tmp2[6:] - tmp1[6:])
diff_tor /= 360.0
diff_tor -= np.round(diff_tor)
diff_tor *= 360.0
diffs.extend(diff_xyz)
diffs.extend(diff_ori)
diffs.extend(diff_tor)
return np.array(diffs) |
def parse_opts(treestr, category_index=None):
dash_depth = 0
opt_list = treestr.split('\n')
kept_opts = []
    if (category_index is not None):
if (not is_category(treestr, category_index)):
return True
dash_depth = (dashcount(opt_list[category_index]) + 1)
opt_list = opt_list[(category_index + 1):]
cur_index = 0
for option in opt_list:
if (dashcount(option) == dash_depth):
            if (category_index is None):
kept_opts.append((cur_index, option[dash_depth:]))
else:
kept_opts.append((((cur_index + category_index) + 1), option[dash_depth:]))
if (dashcount(option) < dash_depth):
return kept_opts
cur_index += 1
return kept_opts |
class Ui_Form(object):
def setupUi(self, Form):
if (not Form.objectName()):
Form.setObjectName(u'Form')
Form.resize(470, 466)
self.model1_choose = QGroupBox(Form)
self.model1_choose.setObjectName(u'model1_choose')
self.model1_choose.setGeometry(QRect(30, 10, 371, 80))
self.model1_ShuttleNet = QRadioButton(self.model1_choose)
self.model1_ShuttleNet.setObjectName(u'model1_ShuttleNet')
self.model1_ShuttleNet.setGeometry(QRect(20, 20, 161, 20))
self.model1_ShuttleNet.setChecked(True)
self.model1_custom = QRadioButton(self.model1_choose)
self.model1_custom.setObjectName(u'model1_custom')
self.model1_custom.setGeometry(QRect(20, 50, 91, 20))
self.model1_custom_path = QLineEdit(self.model1_choose)
self.model1_custom_path.setObjectName(u'model1_custom_path')
self.model1_custom_path.setEnabled(False)
self.model1_custom_path.setGeometry(QRect(110, 50, 251, 20))
self.model1_load_custom = QToolButton(self.model1_choose)
self.model1_load_custom.setObjectName(u'model1_load_custom')
self.model1_load_custom.setEnabled(False)
self.model1_load_custom.setGeometry(QRect(340, 52, 16, 16))
self.model1_ShuttleNet_player = QComboBox(self.model1_choose)
self.model1_ShuttleNet_player.addItem('')
self.model1_ShuttleNet_player.setObjectName(u'model1_ShuttleNet_player')
self.model1_ShuttleNet_player.setGeometry(QRect(200, 20, 161, 22))
self.model2_choose = QGroupBox(Form)
self.model2_choose.setObjectName(u'model2_choose')
self.model2_choose.setEnabled(True)
self.model2_choose.setGeometry(QRect(30, 100, 371, 80))
self.model2_ShuttleNet = QRadioButton(self.model2_choose)
self.model2_ShuttleNet.setObjectName(u'model2_ShuttleNet')
self.model2_ShuttleNet.setGeometry(QRect(20, 20, 161, 20))
self.model2_ShuttleNet.setChecked(True)
self.model2_custom = QRadioButton(self.model2_choose)
self.model2_custom.setObjectName(u'model2_custom')
self.model2_custom.setGeometry(QRect(20, 50, 91, 20))
self.model2_custom_path = QLineEdit(self.model2_choose)
self.model2_custom_path.setObjectName(u'model2_custom_path')
self.model2_custom_path.setEnabled(False)
self.model2_custom_path.setGeometry(QRect(110, 50, 251, 20))
self.model2_load_custom = QToolButton(self.model2_choose)
self.model2_load_custom.setObjectName(u'model2_load_custom')
self.model2_load_custom.setEnabled(False)
self.model2_load_custom.setGeometry(QRect(340, 52, 16, 16))
self.model2_ShuttleNet_player = QComboBox(self.model2_choose)
self.model2_ShuttleNet_player.addItem('')
self.model2_ShuttleNet_player.setObjectName(u'model2_ShuttleNet_player')
self.model2_ShuttleNet_player.setGeometry(QRect(200, 20, 161, 22))
self.confirm = QPushButton(Form)
self.confirm.setObjectName(u'confirm')
self.confirm.setGeometry(QRect(320, 280, 75, 24))
self.error_message = QLabel(Form)
self.error_message.setObjectName(u'error_message')
self.error_message.setGeometry(QRect(190, 240, 201, 20))
self.error_message.setAlignment(((Qt.AlignRight | Qt.AlignTrailing) | Qt.AlignVCenter))
self.output_filename = QLineEdit(Form)
self.output_filename.setObjectName(u'output_filename')
self.output_filename.setGeometry(QRect(110, 200, 281, 20))
self.label = QLabel(Form)
self.label.setObjectName(u'label')
self.label.setGeometry(QRect(50, 190, 53, 41))
self.label.setAlignment(Qt.AlignCenter)
self.label_2 = QLabel(Form)
self.label_2.setObjectName(u'label_2')
self.label_2.setGeometry(QRect(50, 240, 71, 21))
self.rally_count = QLineEdit(Form)
self.rally_count.setObjectName(u'rally_count')
self.rally_count.setGeometry(QRect(120, 240, 61, 20))
self.progressBar = QProgressBar(Form)
self.progressBar.setObjectName(u'progressBar')
self.progressBar.setGeometry(QRect(50, 280, 261, 23))
self.progressBar.setValue(0)
self.retranslateUi(Form)
QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QCoreApplication.translate('Form', u'Form', None))
self.model1_choose.setTitle(QCoreApplication.translate('Form', u'Agent 1', None))
self.model1_ShuttleNet.setText(QCoreApplication.translate('Form', u'Default agent ', None))
self.model1_custom.setText(QCoreApplication.translate('Form', u'Other agent', None))
self.model1_load_custom.setText(QCoreApplication.translate('Form', u'...', None))
self.model1_ShuttleNet_player.setItemText(0, QCoreApplication.translate('Form', u'(choose the opponent)', None))
self.model2_choose.setTitle(QCoreApplication.translate('Form', u'Agent 2', None))
self.model2_ShuttleNet.setText(QCoreApplication.translate('Form', u'Default agent ', None))
self.model2_custom.setText(QCoreApplication.translate('Form', u'Other agent', None))
self.model2_load_custom.setText(QCoreApplication.translate('Form', u'...', None))
self.model2_ShuttleNet_player.setItemText(0, QCoreApplication.translate('Form', u'(choose the opponent)', None))
self.confirm.setText(QCoreApplication.translate('Form', u'generate', None))
self.error_message.setText('')
        self.label.setText(QCoreApplication.translate('Form', u'Output\nFilename', None))
self.label_2.setText(QCoreApplication.translate('Form', u'Rally Count', None))
self.rally_count.setText(QCoreApplication.translate('Form', u'1000', None)) |
def run_iterative_averaging(seed, num_nodes, failure_prob, max_iterations, averaging_algo, target_precision=None):
np.random.seed(seed)
weights = np.random.normal(0, 1, num_nodes).astype(np.float64)
history = np.zeros(((max_iterations + 1),), dtype=np.float64)
iter_num = 0
history[iter_num] = cur_precision = weights.var()
while ((iter_num < max_iterations) and ((target_precision is None) or (cur_precision >= target_precision))):
iter_num += 1
averaging_nodes = (np.random.uniform(0, 1, num_nodes) >= failure_prob)
weights = averaging_algo(weights, averaging_nodes, iter_num)
assert (weights.size == num_nodes)
history[iter_num] = cur_precision = weights.var()
return (history, iter_num) |
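# Usage sketch (hedged): `averaging_algo` is any callable
# (weights, active_mask, iter_num) -> new weights of the same size. The toy
# algorithm below, which sets every non-failed node to the mean of the
# non-failed set, is an assumption for illustration; `np` is NumPy, as in the
# function above.
import numpy as np

def mean_over_active(weights, active_mask, iter_num):
    out = weights.copy()
    if active_mask.any():
        out[active_mask] = weights[active_mask].mean()
    return out

history, n_iters = run_iterative_averaging(
    seed=0, num_nodes=50, failure_prob=0.1, max_iterations=200,
    averaging_algo=mean_over_active, target_precision=1e-12)
print(n_iters, history[n_iters])  # the variance (precision proxy) shrinks over iterations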
class DummyOAuth2Test(OAuth2Test):
backend_path = 'social_core.tests.backends.test_dummy.DummyOAuth2'
    user_data_url = ''  # URL value truncated in the source
expected_username = 'foobar'
access_token_body = json.dumps({'access_token': 'foobar', 'token_type': 'bearer'})
    user_data_body = json.dumps({'id': 1, 'username': 'foobar', 'url': '', 'first_name': 'Foo', 'last_name': 'Bar', 'email': ''})  # the 'url' value was truncated in the source
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
def test_tokens(self):
user = self.do_login()
self.assertEqual(user.social[0].access_token, 'foobar')
def test_revoke_token(self):
self.strategy.set_settings({'SOCIAL_AUTH_REVOKE_TOKENS_ON_DISCONNECT': True})
self.do_login()
user = User.get(self.expected_username)
user.password = 'password'
HTTPretty.register_uri(self._method(self.backend.REVOKE_TOKEN_METHOD), self.backend.REVOKE_TOKEN_URL, status=200)
do_disconnect(self.backend, user) |
def test_initial_file_object(rgb_file_object):
with FilePath(rgb_file_object) as vsifile:
with vsifile.open() as src:
assert (src.driver == 'GTiff')
assert (src.count == 3)
assert (src.dtypes == ('uint8', 'uint8', 'uint8'))
assert (src.read().shape == (3, 718, 791)) |
def tilt_mask(size, tilt_ang1, tilt_ang2=None, tilt_axis=1, light_axis=2, sphere_mask=True):
assert (tilt_axis != light_axis)
if (tilt_ang2 is None):
tilt_ang2 = float(N.abs(tilt_ang1))
tilt_ang1 = (- tilt_ang2)
else:
assert (tilt_ang1 < 0)
assert (tilt_ang2 > 0)
tilt_ang1 = ((tilt_ang1 / 180.0) * N.pi)
tilt_ang2 = ((tilt_ang2 / 180.0) * N.pi)
g = AIVU.grid_displacement_to_center(size=size, mid_co=AIVU.fft_mid_co(siz=size))
plane_axis = set([0, 1, 2])
plane_axis.difference_update([light_axis, tilt_axis])
assert (len(plane_axis) == 1)
plane_axis = list(plane_axis)[0]
x_light = g[light_axis]
x_plane = g[plane_axis]
m = N.zeros(size, dtype=float)
m[N.logical_and((x_light <= (N.tan(tilt_ang1) * x_plane)), (x_light >= (N.tan(tilt_ang2) * x_plane)))] = 1.0
m[N.logical_and((x_light >= (N.tan(tilt_ang1) * x_plane)), (x_light <= (N.tan(tilt_ang2) * x_plane)))] = 1.0
if sphere_mask:
m *= MU.sphere_mask(m.shape)
return m |
class CSGameExporter(GameExporter):
_busy: bool = False
def is_busy(self) -> bool:
return self._busy
def export_can_be_aborted(self) -> bool:
return False
def _before_export(self):
assert (not self._busy)
self._busy = True
def _after_export(self):
self._busy = False
def _do_export_game(self, patch_data: dict, export_params: GameExportParams, progress_update: status_update_lib.ProgressUpdateCallable):
assert isinstance(export_params, CSGameExportParams)
new_patch = copy.copy(patch_data)
if (new_patch['mychar'] is not None):
new_patch['mychar'] = str(RandovaniaGame.CAVE_STORY.data_path.joinpath(patch_data['mychar']))
try:
caver_patcher.patch_files(new_patch, export_params.output_path, progress_update)
finally:
json_lib.write_path(export_params.output_path.joinpath('data', 'patcher_data.json'), patch_data) |
class GrafanaOAuth2Test(OAuth2Test):
backend_path = 'social_core.backends.grafana.GrafanaOAuth2'
    user_data_url = ''  # URL value truncated in the source
access_token_body = json.dumps({'access_token': 'foobar', 'token_type': 'bearer'})
user_data_body = json.dumps({'login': 'fooboy', 'email': '', 'name': 'Foo Bar'})
expected_username = 'fooboy'
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline() |
@pytest.mark.skipif(os.name != 'posix', reason='requires a POSIX shell')  # decorator truncated in the source to '_on_posix'; this guard is a plausible reconstruction
def test_run_shell_raise_on_fail():
assert (run_shell('true') == (None, None, 0))
assert (run_shell('true', raise_on_fail=False) == (None, None, 0))
with pytest.raises(subprocess.CalledProcessError):
run_shell('false')
assert (run_shell('false', raise_on_fail=False) == (None, None, 1)) |
def test_diff_newline_at_end(pytester: Pytester) -> None:
pytester.makepyfile("\n def test_diff():\n assert 'asdf' == 'asdf\\n'\n ")
result = pytester.runpytest()
result.stdout.fnmatch_lines("\n *assert 'asdf' == 'asdf\\n'\n * - asdf\n * ? -\n * + asdf\n ") |
class SEModule(nn.Module):
def __init__(self, channels, reduction):
super(SEModule, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc1 = nn.Conv2d(channels, (channels // reduction), kernel_size=1, padding=0, bias=False)
self.relu = nn.ReLU(inplace=True)
self.fc2 = nn.Conv2d((channels // reduction), channels, kernel_size=1, padding=0, bias=False)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
input = x
x = self.avg_pool(x)
x = self.fc1(x)
x = self.relu(x)
x = self.fc2(x)
x = self.sigmoid(x)
return (input * x) |
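# Usage sketch (hedged): squeeze-and-excitation reweights channels, so the
# output shape matches the input. Assumes the torch / torch.nn imports used above.
import torch

se = SEModule(channels=64, reduction=16)
x = torch.randn(2, 64, 32, 32)
y = se(x)
assert y.shape == x.shape  # (2, 64, 32, 32)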
def _module_name_from_path(path: Path, root: Path) -> str:
path = path.with_suffix('')
try:
relative_path = path.relative_to(root)
except ValueError:
path_parts = path.parts[1:]
else:
path_parts = relative_path.parts
if ((len(path_parts) >= 2) and (path_parts[(- 1)] == '__init__')):
path_parts = path_parts[:(- 1)]
return '.'.join(path_parts) |
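# Usage sketch (hedged): the helper maps file paths to dotted module names,
# collapsing package __init__ files and falling back to a root-less split
# when the path is outside `root`.
from pathlib import Path

assert _module_name_from_path(Path('/repo/pkg/mod.py'), Path('/repo')) == 'pkg.mod'
assert _module_name_from_path(Path('/repo/pkg/__init__.py'), Path('/repo')) == 'pkg'
assert _module_name_from_path(Path('/elsewhere/x.py'), Path('/repo')) == 'elsewhere.x'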
def make_support(question, source='wiki40b', method='dense', n_results=10):
if (source == 'none'):
(support_doc, hit_lst) = (' <P> '.join(['' for _ in range(11)]).strip(), [])
elif (method == 'dense'):
(support_doc, hit_lst) = query_qa_dense_index(question, qar_model, qar_tokenizer, passages, gpu_dense_index, n_results)
else:
(support_doc, hit_lst) = query_es_index(question, es_client, index_name='english_wiki40b_snippets_100w', n_results=n_results)
support_list = [(res['article_title'], res['section_title'].strip(), res['score'], res['passage_text']) for res in hit_lst]
question_doc = 'question: {} context: {}'.format(question, support_doc)
return (question_doc, support_list) |
def blend(first, second, coefficient=0.5):
(first, second) = (color_to_rgb(first), color_to_rgb(second))
r = int(((coefficient * first.R) + ((1 - coefficient) * second.R)))
g = int(((coefficient * first.G) + ((1 - coefficient) * second.G)))
b = int(((coefficient * first.B) + ((1 - coefficient) * second.B)))
return rgb(r, g, b) |
class CachingFileBackend(SimpleFileBackend):
def __init__(self, config: 'Configuration', cache_manager: t.Optional[CacheManager]=None):
super().__init__(config)
self.cache_manager = (cache_manager or CacheManager())
def add_package(self, filename: str, stream: t.BinaryIO) -> None:
super().add_package(filename, stream)
self.cache_manager.invalidate_root_cache(self.roots[0])
def remove_package(self, pkg: PkgFile) -> None:
super().remove_package(pkg)
self.cache_manager.invalidate_root_cache(pkg.root)
def get_all_packages(self) -> t.Iterable[PkgFile]:
return itertools.chain.from_iterable((self.cache_manager.listdir(r, listdir) for r in self.roots))
def digest(self, pkg: PkgFile) -> t.Optional[str]:
if ((self.hash_algo is None) or (pkg.fn is None)):
return None
return self.cache_manager.digest_file(pkg.fn, self.hash_algo, digest_file) |
def plot_distribution(*distributions, states=None, label=None, figsize=(9, 3), fig=None, ax=None, lineplot_threshold=64, title='State distribution', y_label='Pr(state)', validate=True, labels=None, **kwargs):
if (validate and (not all((np.allclose(d.sum(), 1, rtol=0.0001) for d in distributions)))):
raise ValueError('a distribution does not sum to 1!')
defaults = dict()
kwargs = {**defaults, **kwargs}
if ((fig is None) and (ax is None)):
(fig, ax) = plt.subplots(figsize=figsize)
if (fig is None):
fig = plt.gcf()
if (ax is None):
ax = plt.gca()
    distributions = [pd.Series(np.asarray(d).flatten()) for d in distributions]
d = distributions[0]
if (validate and (not all(((distributions[0].index == d.index).all() for d in distributions)))):
raise ValueError('distribution indices do not match')
N = log2(np.prod(d.shape))
if (states is None):
if (N.is_integer() and (len(d) <= lineplot_threshold)):
N = int(N)
states = list(all_states_str(N))
if (label is None):
label = string.ascii_uppercase[:N]
else:
states = np.arange(len(d))
if (labels is None):
labels = list(map(str, range(len(distributions))))
data = pd.concat([pd.DataFrame(dict(probability=d, state=states, hue=([label] * len(d)))) for (d, label) in zip(distributions, labels)]).reset_index(drop=True)
if (len(d) > lineplot_threshold):
ax = _plot_distribution_line(data, ax, hue='hue', **kwargs)
else:
ax = _plot_distribution_bar(data, ax, label, hue='hue', **kwargs)
ax.set_title(title)
ax.set_ylabel(y_label, labelpad=12)
ax.set_xlabel('state', labelpad=12)
ax.legend(bbox_to_anchor=(1.1, 1.05))
return (fig, ax) |
def topkp_decoding(inp_ids, attn_mask, model, tokenizer):
topkp_output = model.generate(input_ids=inp_ids, attention_mask=attn_mask, max_length=256, do_sample=True, top_k=40, top_p=0.8, num_return_sequences=3, no_repeat_ngram_size=2, early_stopping=True)
Questions = [tokenizer.decode(out, skip_special_tokens=True, clean_up_tokenization_spaces=True) for out in topkp_output]
return [Question.strip().capitalize() for Question in Questions] |
def test_shell_command_completion_does_path_completion_when_after_command(cmd2_app, request):
test_dir = os.path.dirname(request.module.__file__)
text = os.path.join(test_dir, 'conftest')
line = 'shell cat {}'.format(text)
endidx = len(line)
begidx = (endidx - len(text))
first_match = complete_tester(text, line, begidx, endidx, cmd2_app)
assert ((first_match is not None) and (cmd2_app.completion_matches == [(text + '.py ')])) |
def get_args_parser():
parser = argparse.ArgumentParser('Set transformer detector', add_help=False)
parser.add_argument('--lr', default=0.0001, type=float)
parser.add_argument('--lr_backbone', default=1e-05, type=float)
parser.add_argument('--batch_size', default=2, type=int)
parser.add_argument('--weight_decay', default=0.0001, type=float)
parser.add_argument('--epochs', default=300, type=int)
parser.add_argument('--lr_drop', default=200, type=int)
parser.add_argument('--clip_max_norm', default=0.1, type=float, help='gradient clipping max norm')
parser.add_argument('--backbone', default='resnet50', type=str, help='Name of the convolutional backbone to use')
parser.add_argument('--dilation', action='store_true', help='If true, we replace stride with dilation in the last convolutional block (DC5)')
parser.add_argument('--position_embedding', default='sine', type=str, choices=('sine', 'learned'), help='Type of positional embedding to use on top of the image features')
parser.add_argument('--enc_layers', default=6, type=int, help='Number of encoding layers in the transformer')
parser.add_argument('--dec_layers', default=6, type=int, help='Number of decoding layers in the transformer')
parser.add_argument('--dim_feedforward', default=2048, type=int, help='Intermediate size of the feedforward layers in the transformer blocks')
parser.add_argument('--hidden_dim', default=256, type=int, help='Size of the embeddings (dimension of the transformer)')
parser.add_argument('--dropout', default=0.1, type=float, help='Dropout applied in the transformer')
parser.add_argument('--nheads', default=8, type=int, help="Number of attention heads inside the transformer's attentions")
parser.add_argument('--num_queries', default=100, type=int, help='Number of query slots')
parser.add_argument('--pre_norm', action='store_true')
parser.add_argument('--masks', action='store_true', default=False, help='Train segmentation head if the flag is provided')
parser.add_argument('--frozen_weights', type=str, default=None, help='Path to the pretrained model. If set, only the mask head will be trained')
parser.add_argument('--no_aux_loss', dest='aux_loss', action='store_false', help='Disables auxiliary decoding losses (loss at each layer)')
    parser.add_argument('--optimizer', help='Choose the type of optimization algorithm, AdamW as default', default='AdamW', choices=['AdamW', 'LaProp'], type=str)
parser.add_argument('--set_cost_class', default=1, type=float, help='Class coefficient in the matching cost')
parser.add_argument('--set_cost_bbox', default=5, type=float, help='L1 box coefficient in the matching cost')
parser.add_argument('--set_cost_giou', default=2, type=float, help='giou box coefficient in the matching cost')
parser.add_argument('--mask_loss_coef', default=1, type=float)
parser.add_argument('--dice_loss_coef', default=1, type=float)
parser.add_argument('--bbox_loss_coef', default=5, type=float)
parser.add_argument('--giou_loss_coef', default=2, type=float)
parser.add_argument('--eos_coef', default=0.1, type=float, help='Relative classification weight of the no-object class')
parser.add_argument('--dataset_file', default='multi')
parser.add_argument('--num_classes', default=1, type=int, help='Number of classes - here, not id for no_object (default: 1)')
parser.add_argument('--coco_path', type=str)
parser.add_argument('--coco_panoptic_path', type=str)
parser.add_argument('--remove_difficult', action='store_true')
parser.add_argument('--output_dir', default='', help='path where to save, empty for no saving')
parser.add_argument('--device', default='cuda', help='device to use for training / testing')
parser.add_argument('--gpu_id', default=(- 1), type=int, help='id of gpu to use for training / testing (if -1 use all available gpus)')
parser.add_argument('--seed', default=42, type=int)
parser.add_argument('--resume', default='', help='resume from checkpoint')
parser.add_argument('--start_epoch', default=0, type=int, metavar='N', help='start epoch')
parser.add_argument('--eval', action='store_true')
parser.add_argument('--neptune', action='store_true', default=False)
parser.add_argument('--num_workers', default=2, type=int)
parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes')
parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training')
return parser |
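# Usage sketch (hedged): parse an empty argv to inspect the defaults.
parser = get_args_parser()
args = parser.parse_args([])
print(args.lr, args.backbone, args.num_queries)  # 0.0001 resnet50 100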
class EventQueue():
def __init__(self):
self.__data = []
def put(self, item):
if (item is not None):
heapq.heappush(self.__data, item)
def put_all(self, iterable):
for item in iterable:
heapq.heappush(self.__data, item)
def get(self):
return heapq.heappop(self.__data)
def empty(self):
return (len(self.__data) == 0)
def peek(self):
return self.__data[0]
def show(self):
for item in self.__data:
print(item) |
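# Usage sketch (hedged): heapq keeps the smallest item first, and put()
# silently drops None. Assumes `import heapq`, as the class above does.
import heapq

q = EventQueue()
q.put_all([5, 1, 3])
q.put(None)          # ignored
print(q.peek())      # 1
print(q.get())       # 1
print(q.empty())     # False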
def extract_tw_template():
with open('templates.txt', 'r') as f:
room_intro_templ = {}
phrase_replace = {}
objects_replace = {}
room_desc_templ = {}
d_templ = {}
for (line_index, line) in enumerate(f):
if (line_index in range(2, 30)):
room_intro_templ[(line_index - 1)] = line.split(':')[1]
elif (line_index in range(34, 46)):
(key_phrase, vals) = line.split(':')
phrase_replace[key_phrase] = vals.split(';')
elif (line_index == 295):
prefix = line.split(':')[1].split(';')
elif (line_index == 310):
suffix = line.split(':')[1].split(';')
elif (line_index in range(171, 186)):
(key_phrase, vals) = line.split(':')
objects_replace[key_phrase] = vals.split(';')
elif (line_index in range(127, 136)):
(key_index, vals) = line.split(':')
room_desc_templ[key_index] = vals
elif (line_index in range(138, 143)):
(key_index, vals) = line.split(':')
d_templ[key_index] = vals
if (len(vals.split(';')) == 2):
d_templ[(key_index + '_0')] = vals.split(';')[0]
d_templ[(key_index + '_1')] = vals.split(';')[1]
del d_templ[key_index]
del room_intro_templ[18]
del room_desc_templ['c3'], room_desc_templ['c6'], room_desc_templ['c9']
to_remove = []
for i in range(len(suffix)):
if (('#' in suffix[i]) or (len(suffix[i]) < 5) or ('TextWorld' in suffix[i])):
to_remove += [i]
suffix_copy = [k for (i, k) in enumerate(suffix) if (i not in to_remove)]
suffix = suffix_copy
to_remove = []
for i in range(len(prefix)):
if (('#' in prefix[i]) or (len(prefix[i]) < 5) or ('TextWorld' in prefix[i])):
to_remove += [i]
prefix_copy = [k for (i, k) in enumerate(prefix) if (i not in to_remove)]
prefix = prefix_copy
room_intro_templ_copy = room_intro_templ.copy()
for (k, v) in room_intro_templ_copy.items():
if (len(v.split(';')) >= 2):
del room_intro_templ[k]
return (room_intro_templ, room_desc_templ, suffix, prefix, objects_replace, d_templ, phrase_replace) |
class TestDuration(unittest.TestCase):
def test_wav(self):
actual = file_info.duration(INPUT_FILE)
expected = 10.0
self.assertEqual(expected, actual)
def test_wav_pathlib(self):
actual = file_info.duration(Path(INPUT_FILE))
expected = 10.0
self.assertEqual(expected, actual)
def test_spacey_wav(self):
actual = file_info.duration(SPACEY_FILE)
expected = 10.0
self.assertEqual(expected, actual)
def test_aiff(self):
actual = file_info.duration(INPUT_FILE2)
expected = 10.0
self.assertEqual(expected, actual)
def test_empty(self):
actual = file_info.duration(EMPTY_FILE)
expected = None
self.assertEqual(expected, actual) |
def time_offset_finder(min_switch_ind, final_i_index, i_time, m_time):
assert (type(min_switch_ind) == int), 'min_switch_ind should be an int.'
assert (type(final_i_index) == int), 'final_i_index should be an int.'
assert (type(i_time) == list), 'i_time should be a list.'
assert (type(m_time) == list), 'm_time should be a list.'
time_offset = (i_time[final_i_index] - m_time[min_switch_ind])
time_offset = float(time_offset)
return time_offset |
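# Usage sketch (hedged): the offset is just the difference between the two
# timestamps selected by the given indices.
i_time = [0.0, 1.0, 2.5]
m_time = [0.25, 1.25]
print(time_offset_finder(1, 2, i_time, m_time))  # 2.5 - 1.25 = 1.25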
class Worker(object):
def __init__(self):
self._sched = BackgroundScheduler()
self._operations = []
self._stop = Event()
self._terminated = Event()
self._raven_client = None
if (app.config.get('EXCEPTION_LOG_TYPE', 'FakeSentry') == 'Sentry'):
worker_name = ('%s:worker-%s' % (socket.gethostname(), self.__class__.__name__))
self._raven_client = Client(app.config.get('SENTRY_DSN', ''), name=worker_name)
def is_healthy(self):
return (not self._stop.is_set())
def is_terminated(self):
return self._terminated.is_set()
def ungracefully_terminated(self):
pass
def add_operation(self, operation_func, operation_sec):
        @wraps(operation_func)  # decorator truncated in the source to '(operation_func)'; functools.wraps is the likely original
def _operation_func():
try:
with UseThenDisconnect(app.config):
return operation_func()
except Exception:
logger.exception('Operation raised exception')
if self._raven_client:
logger.debug('Logging exception to Sentry')
self._raven_client.captureException()
self._operations.append((_operation_func, operation_sec))
def _setup_and_wait_for_shutdown(self):
signal.signal(signal.SIGTERM, self.terminate)
signal.signal(signal.SIGINT, self.terminate)
while (not self._stop.wait(1)):
pass
def start(self):
logging.config.fileConfig(logfile_path(debug=False), disable_existing_loggers=False)
if (not app.config.get('SETUP_COMPLETE', False)):
logger.info('Product setup is not yet complete; skipping worker startup')
self._setup_and_wait_for_shutdown()
return
if (app.config.get('REGISTRY_STATE', 'normal') == 'readonly'):
logger.info('Product is in read-only mode; skipping worker startup')
self._setup_and_wait_for_shutdown()
return
logger.debug('Scheduling worker.')
self._sched.start()
for (operation_func, operation_sec) in self._operations:
start_date = (datetime.now() + timedelta(seconds=0.001))
if app.config.get('STAGGER_WORKERS'):
start_date += timedelta(seconds=randint(1, operation_sec))
logger.debug('First run scheduled for %s', start_date)
self._sched.add_job(operation_func, 'interval', seconds=operation_sec, start_date=start_date, max_instances=1)
self._setup_and_wait_for_shutdown()
logger.debug('Waiting for running tasks to complete.')
self._sched.shutdown()
logger.debug('Finished.')
self._terminated.set()
def terminate(self, signal_num=None, stack_frame=None, graceful=False):
if self._terminated.is_set():
sys.exit(1)
else:
logger.debug('Shutting down worker.')
self._stop.set()
if (not graceful):
self.ungracefully_terminated()
def join(self):
self.terminate(graceful=True) |
class Extension(Converter):
async def convert(self, ctx: Context, argument: str) -> str:
if ((argument == '*') or (argument == '**')):
return argument
argument = argument.lower()
if (argument in bot_instance.all_extensions):
return argument
if ((qualified_arg := f'{exts.__name__}.{argument}') in bot_instance.all_extensions):
return qualified_arg
matches = []
for ext in bot_instance.all_extensions:
if (argument == unqualify(ext)):
matches.append(ext)
if (len(matches) > 1):
matches.sort()
names = '\n'.join(matches)
raise BadArgument(f''':x: `{argument}` is an ambiguous extension name. Please use one of the following fully-qualified names.```
{names}```''')
if matches:
return matches[0]
raise BadArgument(f':x: Could not find the extension `{argument}`.') |
@device_dtype_float_test(only64=True, additional_kwargs={'clss': [IntegrationModule, IntegrationNNModule]})  # decorator name truncated in the source; reconstructed from the surviving argument list
def test_quad(dtype, device, clss):
torch.manual_seed(100)
random.seed(100)
nr = 2
fwd_options = {'method': 'leggauss', 'n': 100}
a = torch.nn.Parameter(torch.rand((nr,), dtype=dtype, device=device).requires_grad_())
b = torch.nn.Parameter(torch.randn((nr,), dtype=dtype, device=device).requires_grad_())
c = torch.randn((nr,), dtype=dtype, device=device).requires_grad_()
xl = torch.zeros((1,), dtype=dtype, device=device).requires_grad_()
xu = (torch.ones((1,), dtype=dtype, device=device) * 0.5).requires_grad_()
module = clss(a, b)
y = quad(module.forward, xl, xu, params=(c,), **fwd_options)
ytrue = ((torch.sin(((a * xu) + (b * c))) - torch.sin(((a * xl) + (b * c)))) / a)
assert torch.allclose(y, ytrue)
def getloss(a, b, c, xl, xu):
module = clss(a, b)
y = quad(module.forward, xl, xu, params=(c,), **fwd_options)
return y
gradcheck(getloss, (a, b, c, xl, xu))
gradgradcheck(getloss, (a, b, c, xl, xu))
gradcheck(getloss, (a, b.detach(), c, xl, xu)) |
class Solution(object):
def mergeTrees(self, t1, t2):
if (t1 is None):
return t2
if (t2 is None):
return t1
t1.val += t2.val
t1.left = self.mergeTrees(t1.left, t2.left)
t1.right = self.mergeTrees(t1.right, t2.right)
return t1 |
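# Usage sketch (hedged): LeetCode's TreeNode is assumed by mergeTrees; a
# minimal stand-in is defined here so the example runs on its own.
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right

t1 = TreeNode(1, TreeNode(3, TreeNode(5)), TreeNode(2))
t2 = TreeNode(2, TreeNode(1, None, TreeNode(4)), TreeNode(3, None, TreeNode(7)))
merged = Solution().mergeTrees(t1, t2)
print(merged.val, merged.left.val, merged.right.val)  # 3 4 5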
@dataclass(frozen=False)  # decorator name truncated in the source; the annotated fields below imply dataclasses.dataclass
class CollaborationState():
optimizer_step: int
samples_accumulated: int
target_batch_size: int
num_peers: int
num_clients: int
eta_next_step: float
next_fetch_time: float
def ready_for_step(self):
return ((self.samples_accumulated >= self.target_batch_size) or (get_dht_time() >= self.eta_next_step))
def register_step(self, local_step: int):
self.optimizer_step = max(local_step, self.optimizer_step)
self.samples_accumulated = 0
self.eta_next_step = float('inf') |
class BaseParameterisedDistribution(nn.Module, metaclass=abc.ABCMeta):
def update(self, *input, **kwargs) -> T:
raise NotImplementedError
def forward(self, *input, **kwargs):
num_samples = kwargs.pop('num_samples', 1)
self.update(*input, **kwargs)
if self.training:
return self.sample_via_reparam(num_samples=num_samples).squeeze(1)
else:
return self.mode()
def sample_via_reparam(self, num_samples: int=1) -> list:
raise NotImplementedError
def sample_no_grad(self, num_samples: int=1) -> list:
with torch.no_grad():
return self.sample_via_reparam(num_samples)
def mode(self) -> torch.Tensor:
raise NotImplementedError
def kl_with_other(self, other) -> torch.Tensor:
raise NotImplementedError
def nlog_like_of_obs(self, obs: torch.Tensor) -> torch.Tensor:
raise NotImplementedError
def convolve_with_function(self, obs: torch.Tensor, function: similarity_funcs.BaseSimilarityFunctions) -> torch.Tensor:
raise NotImplementedError |
def run(nsis=False, ace=False, pdfjs=True, legacy_pdfjs=False, fancy_dmg=False, pdfjs_version=None, dicts=False, gh_token=None):
if nsis:
download_nsis_plugins()
if pdfjs:
update_pdfjs(pdfjs_version, legacy=legacy_pdfjs, gh_token=gh_token)
if ace:
update_ace()
if fancy_dmg:
update_dmg_makefile()
if dicts:
test_dicts() |
class Key(BasePathMixin):
tag = ext_x_key
def __init__(self, method, base_uri, uri=None, iv=None, keyformat=None, keyformatversions=None):
self.method = method
self.uri = uri
self.iv = iv
self.keyformat = keyformat
self.keyformatversions = keyformatversions
self.base_uri = base_uri
def __str__(self):
output = [('METHOD=%s' % self.method)]
if self.uri:
output.append(('URI="%s"' % self.uri))
if self.iv:
output.append(('IV=%s' % self.iv))
if self.keyformat:
output.append(('KEYFORMAT="%s"' % self.keyformat))
if self.keyformatversions:
output.append(('KEYFORMATVERSIONS="%s"' % self.keyformatversions))
return ((self.tag + ':') + ','.join(output))
def __eq__(self, other):
if (not other):
return False
return ((self.method == other.method) and (self.uri == other.uri) and (self.iv == other.iv) and (self.base_uri == other.base_uri) and (self.keyformat == other.keyformat) and (self.keyformatversions == other.keyformatversions))
def __ne__(self, other):
return (not self.__eq__(other)) |
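# Usage sketch (hedged): `ext_x_key` and `BasePathMixin` come from the
# surrounding M3U8-style module (ext_x_key is presumably '#EXT-X-KEY'), so
# this only illustrates the tag rendering that __str__ performs.
key = Key(method='AES-128', base_uri='http://example.com',
          uri='http://example.com/key.bin', iv='0xDEADBEEF')
print(key)  # #EXT-X-KEY:METHOD=AES-128,URI="http://example.com/key.bin",IV=0xDEADBEEF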
def build_progress_bar(args, iterator, epoch: Optional[int]=None, prefix: Optional[str]=None, default: str='tqdm', no_progress_bar: str='none'):
if getattr(args, 'no_progress_bar', False):
default = no_progress_bar
if (getattr(args, 'distributed_rank', 0) == 0):
tensorboard_logdir = getattr(args, 'tensorboard_logdir', None)
else:
tensorboard_logdir = None
return progress_bar(iterator, log_format=args.log_format, log_interval=args.log_interval, epoch=epoch, prefix=prefix, tensorboard_logdir=tensorboard_logdir, default_log_format=default) |
def _get_elts(arg, context):
def is_iterable(n):
return isinstance(n, (nodes.List, nodes.Tuple, nodes.Set))
try:
inferred = next(arg.infer(context))
except (InferenceError, StopIteration) as exc:
raise UseInferenceDefault from exc
if isinstance(inferred, nodes.Dict):
items = inferred.items
elif is_iterable(inferred):
items = []
for elt in inferred.elts:
if (not is_iterable(elt)):
raise UseInferenceDefault()
if (len(elt.elts) != 2):
raise UseInferenceDefault()
if (not isinstance(elt.elts[0], (nodes.Tuple, nodes.Const, nodes.Name))):
raise UseInferenceDefault()
items.append(tuple(elt.elts))
else:
raise UseInferenceDefault()
return items |
class TestReproducibility(unittest.TestCase):
def _test_reproducibility(self, name, extra_flags=None):
if (extra_flags is None):
extra_flags = []
with tempfile.TemporaryDirectory(name) as data_dir:
with contextlib.redirect_stdout(StringIO()):
test_binaries.create_dummy_data(data_dir)
test_binaries.preprocess_translation_data(data_dir)
stdout = StringIO()
with contextlib.redirect_stdout(stdout):
test_binaries.train_translation_model(data_dir, 'fconv_iwslt_de_en', (['--dropout', '0.0', '--log-format', 'json', '--log-interval', '1', '--max-epoch', '3'] + extra_flags))
stdout = stdout.getvalue()
(train_log, valid_log) = map(json.loads, stdout.split('\n')[(- 5):(- 3)])
os.rename(os.path.join(data_dir, 'checkpoint1.pt'), os.path.join(data_dir, 'checkpoint_last.pt'))
stdout = StringIO()
with contextlib.redirect_stdout(stdout):
test_binaries.train_translation_model(data_dir, 'fconv_iwslt_de_en', (['--dropout', '0.0', '--log-format', 'json', '--log-interval', '1', '--max-epoch', '3'] + extra_flags))
stdout = stdout.getvalue()
(train_res_log, valid_res_log) = map(json.loads, stdout.split('\n')[(- 5):(- 3)])
def cast(s):
return round(float(s), 3)
for k in ['train_loss', 'train_ppl', 'train_num_updates', 'train_gnorm']:
self.assertEqual(cast(train_log[k]), cast(train_res_log[k]))
for k in ['valid_loss', 'valid_ppl', 'valid_num_updates', 'valid_best_loss']:
self.assertEqual(cast(valid_log[k]), cast(valid_res_log[k]))
def test_reproducibility(self):
self._test_reproducibility('test_reproducibility')
    @unittest.skipIf((not torch.cuda.is_available()), 'test requires a GPU')
def test_reproducibility_fp16(self):
self._test_reproducibility('test_reproducibility_fp16', ['--fp16', '--fp16-init-scale', '4096'])
    @unittest.skipIf((not torch.cuda.is_available()), 'test requires a GPU')
def test_reproducibility_memory_efficient_fp16(self):
self._test_reproducibility('test_reproducibility_memory_efficient_fp16', ['--memory-efficient-fp16', '--fp16-init-scale', '4096']) |
class Conv2d(nn.Conv2d, Module):
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True):
super(Conv2d, self).__init__(in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias)
def forward(self, x, params=None, episode=None):
if (params is None):
x = super(Conv2d, self).forward(x)
else:
(weight, bias) = (params.get('weight'), params.get('bias'))
if (weight is None):
weight = self.weight
if (bias is None):
bias = self.bias
x = F.conv2d(x, weight, bias, self.stride, self.padding, self.dilation, self.groups)
return x |
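# Usage sketch (hedged): the `params` dict overrides the module's own
# weight/bias, a common pattern for meta-learning "fast weight" updates. The
# `Module` mixin base and `F` (torch.nn.functional) come from the surrounding
# module; torch is assumed.
import torch

conv = Conv2d(3, 8, kernel_size=3, padding=1)
x = torch.randn(2, 3, 16, 16)
y_default = conv(x)                               # uses self.weight / self.bias
fast = {'weight': torch.zeros_like(conv.weight)}  # bias falls back to self.bias
y_fast = conv(x, params=fast)
assert y_default.shape == y_fast.shape == (2, 8, 16, 16)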
class EvaluationPlan():
def __init__(self, metrics: Iterable[SupportsMetricCompute], validators: Optional[Iterable[SupportsMetricValidate]]=None, composite_metrics: Optional[Iterable[SupportsCompositeMetricCompute]]=None, intervention_validators: Optional[List[str]]=None):
self.metrics = metrics
self.validators = (validators or [])
self.composite_metrics = (composite_metrics or [])
self.intervention_validators = (intervention_validators or [])
self._validate_plan()
def metrics_dict(self) -> Dict[(str, SupportsMetricCompute)]:
return {m.metric_name: m for m in self.metrics}
def validators_dict(self) -> Dict[(str, SupportsMetricValidate)]:
return {v.validator_name: v for v in self.validators}
def composite_metrics_dict(self) -> Dict[(str, SupportsCompositeMetricCompute)]:
return {cm.composite_metric_name: cm for cm in self.composite_metrics}
def _validate_plan(self) -> None:
metric_names = [m.metric_name for m in self.metrics]
validator_names = [v.validator_name for v in self.validators]
composite_metric_names = [cm.composite_metric_name for cm in self.composite_metrics]
if (len(set(metric_names)) != len(metric_names)):
raise RuntimeError('You cannot have repeated metric names.')
if (len(set(validator_names)) != len(validator_names)):
raise RuntimeError('You cannot have repeated validator names.')
if (len(set(composite_metric_names)) != len(composite_metric_names)):
raise RuntimeError('You cannot have repeated composite metric names.')
for vname in self.intervention_validators:
if (vname not in validator_names):
raise RuntimeError(f"Validator '{vname}' not found in validators list.")
metric_calculators = self.metrics_dict()
for validator in self.validators:
for metric_requirement in validator.requires_metric:
if (metric_requirement not in metric_calculators):
raise RuntimeError(f"Metric '{metric_requirement}' required by validator '{validator.validator_name}'.")
validators_specified = self.validators_dict()
for cm in self.composite_metrics:
for metric_requirement in cm.requires_metric:
if (metric_requirement not in metric_calculators):
raise RuntimeError(f"Metric '{metric_requirement}' required by composite metric '{cm.composite_metric_name}'.")
for validator_requirement in cm.requires_validator:
if (validator_requirement not in validators_specified):
raise RuntimeError(f"Validator '{validator_requirement}' required by composite metric '{cm.composite_metric_name}'.")
def evaluate(self, simulation_output: SimulationOutputCLE) -> Dict[(str, torch.Tensor)]:
results: Dict[(str, torch.Tensor)] = {}
for metric_calculator in self.metrics:
metric_result = metric_calculator.compute(simulation_output)
results[metric_calculator.metric_name] = metric_result
return results
def evaluate_composite(self, simulation_output: SimulationOutputCLE, scene_metrics: Dict[(str, torch.Tensor)], scene_validation: Dict[(str, ValidatorOutput)]) -> Dict[(str, float)]:
results: Dict[(str, float)] = {}
for cm in self.composite_metrics:
required_metrics = cm.requires_metric
required_validators = cm.requires_validator
scene_metrics_required = {metric_name: scene_metrics[metric_name] for metric_name in required_metrics}
scene_validators_required = {validator_name: scene_validation[validator_name] for validator_name in required_validators}
cm_result = cm.compute(scene_metrics_required, scene_validators_required, simulation_output)
results[cm.composite_metric_name] = cm_result
return results
def process_interventions(self, results: Dict[(str, ValidatorOutput)]) -> Dict[(str, ValidatorOutput)]:
min_intervention_name = None
        min_intervention_frame = float('inf')
for ivname in self.intervention_validators:
voutput: ValidatorOutput = results[ivname]
failed_frames: List[int] = voutput.failed_frames
if (len(failed_frames) <= 0):
continue
inner_min_failed_frame = min(failed_frames)
            if (inner_min_failed_frame < min_intervention_frame):
                min_intervention_frame = inner_min_failed_frame
min_intervention_name = ivname
if (min_intervention_name is not None):
new_results = {ivname: ValidatorOutput(True, []) for ivname in results.keys()}
new_results[min_intervention_name] = results[min_intervention_name]
return new_results
else:
return results
def validate(self, scene_metrics: Dict[(str, torch.Tensor)], simulation_output: SimulationOutputCLE) -> Dict[(str, ValidatorOutput)]:
results: Dict[(str, ValidatorOutput)] = {}
for metric_validator in self.validators:
required_metrics = metric_validator.requires_metric
scene_metrics_required = {metric_name: scene_metrics[metric_name] for metric_name in required_metrics}
validator_result = metric_validator.validate(scene_metrics_required, simulation_output)
results[metric_validator.validator_name] = validator_result
if (len(self.intervention_validators) > 0):
results = self.process_interventions(results)
return results |
def createEditor(parent, filename=None):
if (filename is None):
global newFileCounter
newFileCounter += 1
editor = PyzoEditor(parent)
editor.document().setModified(True)
editor.removeTrailingWS = True
editor._name = '<tmp {}>'.format(newFileCounter)
else:
if (not os.path.isfile(filename)):
raise IOError(("File does not exist '%s'." % filename))
        with open(filename, 'rb') as f:
            bb = f.read()
encoding = determineEncoding(bb)
text = bb.decode(encoding, 'replace')
lineEndings = determineLineEnding(text)
editor = PyzoEditor(parent)
editor.setPlainText(text)
editor.lineEndings = lineEndings
editor.encoding = encoding
editor.document().setModified(False)
editor._filename = filename
editor._name = os.path.split(filename)[1]
(indentWidth, trailing) = determineIndentationAndTrailingWS(text)
editor.removeTrailingWS = (trailing < 10)
if (indentWidth == (- 1)):
editor.setIndentWidth(pyzo.config.settings.defaultIndentWidth)
editor.setIndentUsingSpaces(False)
elif indentWidth:
editor.setIndentWidth(indentWidth)
editor.setIndentUsingSpaces(True)
if editor._filename:
editor._modifyTime = os.path.getmtime(editor._filename)
if editor._filename:
ext = os.path.splitext(editor._filename)[1]
parser = Manager.suggestParser(ext, text)
editor.setParser(parser)
else:
editor.setParser(pyzo.config.settings.defaultStyle)
return editor |
def _upgrade_package(venv: Venv, package_name: str, pip_args: List[str], is_main_package: bool, force: bool, upgrading_all: bool) -> int:
package_metadata = venv.package_metadata[package_name]
if (package_metadata.package_or_url is None):
raise PipxError(f'Internal Error: package {package_name} has corrupt pipx metadata.')
package_or_url = parse_specifier_for_upgrade(package_metadata.package_or_url)
old_version = package_metadata.package_version
venv.upgrade_package(package_name, package_or_url, pip_args, include_dependencies=package_metadata.include_dependencies, include_apps=package_metadata.include_apps, is_main_package=is_main_package, suffix=package_metadata.suffix)
package_metadata = venv.package_metadata[package_name]
display_name = f'{package_metadata.package}{package_metadata.suffix}'
new_version = package_metadata.package_version
if package_metadata.include_apps:
expose_resources_globally('app', constants.LOCAL_BIN_DIR, package_metadata.app_paths, force=force, suffix=package_metadata.suffix)
expose_resources_globally('man', constants.LOCAL_MAN_DIR, package_metadata.man_paths, force=force)
if package_metadata.include_dependencies:
for (_, app_paths) in package_metadata.app_paths_of_dependencies.items():
expose_resources_globally('app', constants.LOCAL_BIN_DIR, app_paths, force=force, suffix=package_metadata.suffix)
for (_, man_paths) in package_metadata.man_paths_of_dependencies.items():
expose_resources_globally('man', constants.LOCAL_MAN_DIR, man_paths, force=force)
if (old_version == new_version):
if upgrading_all:
pass
else:
print(pipx_wrap(f'''
{display_name} is already at latest version {old_version}
(location: {str(venv.root)})
'''))
return 0
else:
print(pipx_wrap(f'''
upgraded package {display_name} from {old_version} to
{new_version} (location: {str(venv.root)})
'''))
return 1 |
def checkSuccess(vmObject, actionData):
retVal = False
if (('SUCCESS_TYPE' in actionData) and ('SUCCESS_METRIC' in actionData)):
if (actionData['SUCCESS_TYPE'] == 'PROCESS'):
retVal = sampleLib.checkForProcess(vmObject, actionData['SUCCESS_METRIC'])
else:
print('NO SUCCESS_TYPE OR SUCCESS METRIC IN THE DICTIONARY')
return retVal |
def main(pepno='426'):
print(('Comparing PEP %s version sort to setuptools.' % pepno))
(projects, public) = get_projects(VERSION_CACHE)
print()
Analysis('release versions', public, releases_only=True).print_report()
print()
Analysis('public versions', public).print_report()
print()
Analysis('all versions', projects).print_report() |
class FairseqDropout(nn.Module):
def __init__(self, p, module_name=None):
super().__init__()
self.p = p
self.module_name = module_name
self.apply_during_inference = False
def forward(self, x, inplace: bool=False):
if (self.training or self.apply_during_inference):
return F.dropout(x, p=self.p, training=True, inplace=inplace)
else:
return x
def make_generation_fast_(self, name: str, retain_dropout: bool=False, retain_dropout_modules: Optional[List[str]]=None, **kwargs):
if retain_dropout:
if ((retain_dropout_modules is not None) and (self.module_name is None)):
logger.warning('Cannot enable dropout during inference for module {} because module_name was not set'.format(name))
elif ((retain_dropout_modules is None) or (self.module_name in retain_dropout_modules)):
logger.info('Enabling dropout during inference for module: {}'.format(name))
self.apply_during_inference = True
else:
logger.info('Disabling dropout for module: {}'.format(name)) |
def test_columns_no_desc(vuln_data):
columns_format = format.ColumnsFormat(False)
    expected_columns = 'Name Version ID Fix Versions\n---- ------- ------ ------------\nfoo 1.0 VULN-0 1.1,1.4\nfoo 1.0 VULN-1 1.0\nbar 0.1 VULN-2'
assert (columns_format.format(vuln_data, list()) == expected_columns) |
class ResNetHead(nn.Module):
def __init__(self, block_module, stages, num_groups=1, width_per_group=64, stride_in_1x1=True, stride_init=None, res2_out_channels=256, dilation=1):
super(ResNetHead, self).__init__()
stage2_relative_factor = (2 ** (stages[0].index - 1))
stage2_bottleneck_channels = (num_groups * width_per_group)
out_channels = (res2_out_channels * stage2_relative_factor)
in_channels = (out_channels // 2)
bottleneck_channels = (stage2_bottleneck_channels * stage2_relative_factor)
block_module = _TRANSFORMATION_MODULES[block_module]
self.stages = []
stride = stride_init
for stage in stages:
name = ('layer' + str(stage.index))
if (not stride):
stride = (int((stage.index > 1)) + 1)
module = _make_stage(block_module, in_channels, bottleneck_channels, out_channels, stage.block_count, num_groups, stride_in_1x1, first_stride=stride, dilation=dilation)
stride = None
self.add_module(name, module)
self.stages.append(name)
def forward(self, x):
for stage in self.stages:
x = getattr(self, stage)(x)
return x |
def parse_col(toks, start_idx, tables_with_alias, schema, default_tables=None):
tok = toks[start_idx]
if (tok == '*'):
return ((start_idx + 1), schema.idMap[tok])
if ('.' in tok):
(alias, col) = tok.split('.')
key = ((tables_with_alias[alias] + '.') + col)
return ((start_idx + 1), schema.idMap[key])
assert ((default_tables is not None) and (len(default_tables) > 0)), 'Default tables should not be None or empty'
for alias in default_tables:
table = tables_with_alias[alias]
if (tok in schema.schema[table]):
key = ((table + '.') + tok)
return ((start_idx + 1), schema.idMap[key])
assert False, 'Error col: {}'.format(tok) |
@login_required  # decorator truncated in the source to '_required'; Django's login_required fits this view
def new_template(request, orgslugname=None):
pytitionuser = get_session_user(request)
ctx = {'user': pytitionuser}
if orgslugname:
redirection = 'org_new_template'
try:
org = Organization.objects.get(slugname=orgslugname)
ctx['org'] = org
except Organization.DoesNotExist:
raise Http404(_('Organization does not exist'))
if (org not in pytitionuser.organization_set.all()):
return HttpResponseForbidden(_('You are not allowed to view this organization dashboard'))
try:
permissions = Permission.objects.get(organization=org, user=pytitionuser)
ctx['user_permissions'] = permissions
except Permission.DoesNotExist:
return HttpResponse(_("Internal error, cannot find your permissions attached to this organization ('{orgname}')".format(orgname=org.name)), status=500)
if (not permissions.can_create_templates):
return HttpResponseForbidden(_("You don't have the permission to create a Template in this organization"))
ctx['base_template'] = 'petition/org_base.html'
else:
redirection = 'user_new_template'
ctx['base_template'] = 'petition/user_base.html'
if (request.method == 'POST'):
template_name = request.POST.get('template_name', '')
if (template_name != ''):
if orgslugname:
template = PetitionTemplate(name=template_name, org=org)
else:
template = PetitionTemplate(name=template_name, user=pytitionuser)
template.save()
return redirect('edit_template', template.id)
else:
messages.error(request, _('You need to provide a template name.'))
return redirect(redirection)
else:
return render(request, 'petition/new_template.html', ctx) |
def parse_args():
parser = argparse.ArgumentParser(description='AB3DMOT')
parser.add_argument('--det_name', type=str, default='pointrcnn', help='we provide pointrcnn on KITTI, megvii for nuScenes')
parser.add_argument('--dataset', type=str, default='KITTI', help='nuScenes, KITTI')
parser.add_argument('--split', type=str, default='val', help='train, val, test')
parser.add_argument('--suffix', type=str, default='H1', help='additional string of the folder to be combined')
parser.add_argument('--num_hypo', type=int, default=1, help='number of hypothesis to combine')
args = parser.parse_args()
return args |
def _write_image_series(metric_file, relative_image_report_dir, series):
for (image_id, metric) in series.iteritems():
relative_image_report_path = _image_report_path(relative_image_report_dir, image_id)
metric_file.write(('<a href="%s">%s</a>: %f, \n' % (relative_image_report_path, image_id, metric))) |
def test_tonality():
duration = 60.0
fs = 10025.0
samples = int((fs * duration))
times = (np.arange(samples) / fs)
signal = Signal(np.sin((((2.0 * np.pi) * 1000.0) * times)), fs)
tonality = Tonality(signal, signal.fs)
tonality.spectrum
tonality.plot_spectrum()
tonality.frequency_resolution
tonality.effective_analysis_bandwidth
with pytest.raises(ValueError):
print(tonality.overview())
tonality.results_as_dataframe()
assert (len(list(tonality.noise_pauses)) == 0)
assert (len(list(tonality.tones)) == 0)
assert (len(list(tonality.critical_bands)) == 0)
tonality.determine_noise_pauses().analyse()
tonality.critical_band_at(900.0)
tonality.dominant_tone
print(tonality.overview())
tonality.results_as_dataframe()
tonality.plot_results() |
def parse_ace_2004(tokenizer: Tokenizer) -> None:
output_dir_path = 'data/ace2004/'
os.makedirs(output_dir_path, mode=0o755, exist_ok=True)
output_file_list = ['ace2004.train', 'ace2004.dev', 'ace2004.test']
for (split_info_file, output_file) in zip(SPLIT_INFO_FILE_LIST, output_file_list):
output_lines = []
doc_count = 0
sent_count = 0
token_count = 0
for tag in TAG_SET:
TAG_SET[tag] = Stat()
with open((SPLIT_INFO_DIR_PATH + split_info_file), 'r') as f:
for line in f:
basename = (CORPUS_DIR_PATH + line.strip())
output_lines_doc = parse_document(basename, tokenizer)
output_lines.extend(output_lines_doc)
doc_count += 1
sent_count += (len(output_lines_doc) // 3)
for idx in range(0, len(output_lines_doc), 3):
token_count += len(output_lines_doc[idx].split(' '))
with open((output_dir_path + output_file), 'w') as f:
f.writelines(output_lines)
print('')
print('--- {}'.format(output_file))
print('# of documents:\t{:6d}'.format(doc_count))
print('# of sentences:\t{:6d}'.format(sent_count))
print('# of tokens:\t{:6d}'.format(token_count))
total = 0
total_layer = []
total_ignored = 0
for (_, stat) in TAG_SET.items():
total += stat.total
for (depth, num) in enumerate(stat.layer):
if (len(total_layer) == depth):
total_layer.append(0)
total_layer[depth] += num
total_ignored += stat.ignored
print('total # of mentions:\t{}\t(layer:\t{},\tignored:\t{})'.format(total, total_layer, total_ignored))
for (tag, stat) in TAG_SET.items():
print('\t{}:\t{:5d}\t(layer:\t{},\tignored:\t{})'.format(tag, stat.total, stat.layer, stat.ignored))
ave_labels = 0
for (_, stat) in TAG_SET.items():
ave_labels += stat.num_labels
ave_labels /= (token_count * len(TAG_SET))
print('average # of labels:\t{:.2f}'.format(ave_labels)) |
def get_rel_pos_cls(cfg: MaxxVitTransformerCfg, window_size):
rel_pos_cls = None
if (cfg.rel_pos_type == 'mlp'):
rel_pos_cls = partial(RelPosMlp, window_size=window_size, hidden_dim=cfg.rel_pos_dim)
elif (cfg.rel_pos_type == 'bias'):
rel_pos_cls = partial(RelPosBias, window_size=window_size)
return rel_pos_cls |
class MeanShift(nn.Conv2d):
def __init__(self, rgb_range, rgb_mean=(0.4488, 0.4371, 0.404), rgb_std=(1.0, 1.0, 1.0), sign=(- 1)):
super(MeanShift, self).__init__(3, 3, kernel_size=1)
std = torch.Tensor(rgb_std)
self.weight.data = (torch.eye(3).view(3, 3, 1, 1) / std.view(3, 1, 1, 1))
self.bias.data = (((sign * rgb_range) * torch.Tensor(rgb_mean)) / std)
for p in self.parameters():
p.requires_grad = False |
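# A hedged usage sketch for MeanShift above: sign=-1 subtracts the RGB mean and
# sign=+1 adds it back, so the two cancel; rgb_range=1.0 is an assumed convention.
import torch
sub_mean = MeanShift(rgb_range=1.0, sign=(- 1))
add_mean = MeanShift(rgb_range=1.0, sign=1)
x = torch.rand(2, 3, 8, 8)
assert torch.allclose(add_mean(sub_mean(x)), x, atol=1e-06) |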
@attr.s(eq=False, hash=False, slots=True)
class ParkingLot():
_parked: OrderedDict[(Task, None)] = attr.ib(factory=OrderedDict, init=False)
def __len__(self) -> int:
return len(self._parked)
def __bool__(self) -> bool:
return bool(self._parked)
@_core.enable_ki_protection
async def park(self) -> None:
task = _core.current_task()
self._parked[task] = None
task.custom_sleep_data = self
def abort_fn(_: _core.RaiseCancelT) -> _core.Abort:
del task.custom_sleep_data._parked[task]
return _core.Abort.SUCCEEDED
(await _core.wait_task_rescheduled(abort_fn))
def _pop_several(self, count: (int | float)) -> Iterator[Task]:
if isinstance(count, float):
if math.isinf(count):
count = len(self._parked)
else:
raise ValueError('Cannot pop a non-integer number of tasks.')
else:
count = min(count, len(self._parked))
for _ in range(count):
(task, _) = self._parked.popitem(last=False)
(yield task)
@_core.enable_ki_protection
def unpark(self, *, count: (int | float)=1) -> list[Task]:
tasks = list(self._pop_several(count))
for task in tasks:
_core.reschedule(task)
return tasks
def unpark_all(self) -> list[Task]:
return self.unpark(count=len(self))
@_core.enable_ki_protection
def repark(self, new_lot: ParkingLot, *, count: (int | float)=1) -> None:
if (not isinstance(new_lot, ParkingLot)):
raise TypeError('new_lot must be a ParkingLot')
for task in self._pop_several(count):
new_lot._parked[task] = None
task.custom_sleep_data = new_lot
def repark_all(self, new_lot: ParkingLot) -> None:
return self.repark(new_lot, count=len(self))
def statistics(self) -> ParkingLotStatistics:
return ParkingLotStatistics(tasks_waiting=len(self._parked)) |
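# A hedged usage sketch: the class above mirrors trio.lowlevel.ParkingLot, so the
# published primitive is used here; trio being installed is an assumption.
import trio
from trio.lowlevel import ParkingLot

async def main():
    lot = ParkingLot()
    async with trio.open_nursery() as nursery:
        nursery.start_soon(lot.park)
        await trio.sleep(0.1)  # give the child task a chance to park
        assert len(lot) == 1
        lot.unpark_all()  # wake the parked task so the nursery can finish

trio.run(main) |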
class Migration(migrations.Migration):
dependencies = [('tasks', '0028_data_migration')]
operations = [migrations.AlterField(model_name='task', name='sites', field=models.ManyToManyField(blank=True, help_text='The sites this task belongs to (in a multi site setup).', to='sites.Site', verbose_name='Sites'))] |
def init_logger(filename, level='INFO'):
formatter = logging.Formatter('[ %(levelname)s : %(asctime)s ] - %(message)s')
logger = logging.getLogger(((__name__ + '.') + filename))
logger.setLevel(getattr(logging, level))
filehandler = logging.FileHandler(filename)
filehandler.setFormatter(formatter)
logger.addHandler(filehandler)
return logger |
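# A hedged usage sketch for init_logger: the logger name is derived from the
# file path, and each call attaches a fresh FileHandler to it.
import os
import tempfile
path = os.path.join(tempfile.mkdtemp(), 'demo.log')
log = init_logger(path, level='INFO')
log.info('hello')
for handler in log.handlers:
    handler.flush()
with open(path) as f:
    assert 'hello' in f.read() |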
class Browser():
def __init__(self, window, handle, browser, parent):
self.window = window
self.handle = handle
self.browser = browser
self.parent = parent
self.text_select = window.text_select
self.uid = window.uid
self.loaded = window.events.loaded
self.shown = window.events.shown
self.inner_hwnd = self.browser.GetWindowHandle()
self.eval_events = {}
self.js_bridge = JSBridge(window, self.eval_events)
self.initialized = False
def initialize(self):
if self.initialized:
return
self.cookie_manager = cef.CookieManager.GetGlobalManager()
self.cookie_visitor = CookieVisitor()
self.browser.GetJavascriptBindings().Rebind()
self.browser.ExecuteJavascript(parse_api_js(self.window, 'cef'))
if (not self.text_select):
self.browser.ExecuteJavascript(disable_text_select)
self.browser.ExecuteJavascript(dom.src)
sleep(0.1)
self.initialized = True
self.loaded.set()
def close(self):
self.browser.CloseBrowser(True)
def resize(self, width, height):
screen = self.parent.RectangleToScreen(self.parent.ClientRectangle)
height_diff = ((screen.Top - self.parent.Top) + 12)
width_diff = ((self.parent.Right - screen.Right) + 12)
windll.user32.SetWindowPos(self.inner_hwnd, 0, 0, 0, (width - width_diff), (height - height_diff), ((2 | 4) | 16))
self.browser.NotifyMoveOrResizeStarted()
def evaluate_js(self, code, unique_id):
self.loaded.wait()
self.eval_events[unique_id] = Event()
eval_script = "\n try {{\n {0}\n }} catch(e) {{\n console.error(e.stack);\n window.external.return_result(null, '{1}');\n }}\n ".format(code, unique_id)
result = self.browser.ExecuteJavascript(eval_script)
self.eval_events[unique_id].wait()
result = copy(self.js_bridge.results[unique_id])
del self.eval_events[unique_id]
del self.js_bridge.results[unique_id]
return result
def get_cookies(self):
self.loaded.wait()
self.cookie_visitor.cookies = []
self.cookie_visitor.lock = Event()
self.cookie_manager.VisitUrlCookies(self.browser.GetUrl(), True, self.cookie_visitor)
self.cookie_visitor.lock.wait()
return self.cookie_visitor.cookies
def get_current_url(self):
self.loaded.wait()
return self.browser.GetUrl()
def load_url(self, url):
self.initialized = False
self.loaded.clear()
self.browser.LoadUrl(url)
def load_html(self, html):
self.initialized = False
self.loaded.clear()
self.browser.LoadUrl('data:text/html,{0}'.format(html))
def focus(self):
self.browser.SendFocusEvent(True) |
class ZoneRecordDal(object):
def list_zone_header():
zones = DnsHeader.query.all()
results = [zone.zone_name for zone in zones if (not zone.zone_name.endswith('.IN-ADDR.ARPA'))]
return sorted(results)
def list_zone_ttl():
pattern = re.compile('\\$TTL\\s+(\\d+)\\s?')
zone_ttl = {}
for (zone, header) in db.session.query(DnsHeader.zone_name, DnsHeader.header_content):
if (pattern.search(header) is None):
raise BadParam(('Cannot get ttl of zone %s' % zone), msg_ch=(u'zone %s ttl' % zone))
zone_ttl[zone] = pattern.search(header).group(1)
return zone_ttl
def select_zone(domain):
zones = set([zone.zone_name for zone in DnsSerial.query.all()])
for index in range(1, len(domain.split('.'))):
best_match = domain.split('.', index)[(- 1)]
if (best_match in zones):
return best_match
raise BadParam(('No zone for domain: %s' % domain), msg_ch=u'zone')
def get_zone_header(zone_name):
header = DnsHeader.query.filter_by(zone_name=zone_name).first()
if (not header):
raise BadParam(('no header for zone: %s' % zone_name))
serial = DnsSerial.query.filter_by(zone_name=zone_name).first()
if (not serial):
conf = DnsZoneConf.query.filter_by(zone_name=zone_name).first()
if ((not conf) or (conf.zone_type != 0)):
raise BadParam(('no serial for zone: %s' % zone_name))
serial_num = 0  # assumed default; the original value was lost from the source
else:
serial_num = serial.serial_num
return dict(header=header.header_content, serial_num=serial_num)
def get_zone_need_update(group_name):
return [item.zone_name for item in DnsSerial.query.filter_by(zone_group=group_name).filter((DnsSerial.update_serial_num < DnsSerial.serial_num))]
def _get_records_of_view_zone(isp_map):
cname_ttl = CONF.view.cname_ttl
records = DnsRecord.query.filter_by(zone_name=VIEW_ZONE).order_by(DnsRecord.domain_name, DnsRecord.record).all()
res = {isp: [] for isp in isp_map.keys()}
for record in records:
for isp in isp_map.keys():
res[isp].append({'name': record.domain_name, 'record': record.record, 'type': record.record_type, 'ttl': record.ttl})
states = ViewDomainNameState.query.order_by(ViewDomainNameState.domain_name).all()
for state in states:
res[state.isp].append({'name': state.domain_name, 'record': _make_glbs_cname(state.domain_name, isp_map[state.isp]), 'type': 'CNAME', 'ttl': cname_ttl})
return res
def _get_records_of_view_domain(zone, isp_map):
states = ViewDomainNameState.query.all()
records = ViewRecords.query.filter_by(zone_name=zone).order_by(ViewRecords.domain_name, ViewRecords.property).all()
merge_states = {}
for state in states:
if (state.domain_name not in merge_states):
merge_states[state.domain_name] = []
state.enabled_rooms = json.loads(state.enabled_rooms)
merge_states[state.domain_name].append(state)
active_records = []
for record in records:
if (record.record_type not in ('A', 'AAAA', 'CNAME')):
raise BadParam('ViewRecord type error: only [A, AAAA, CNAME] allow.')
states = merge_states[record.domain_name]
for state in states:
if (state.state == 'disabled'):
continue
if ((state.state == 'A') and (record.record_type in ('A', 'AAAA')) and (record.property in state.enabled_rooms)):
active_records.append({'name': _make_glbs_cname(state.domain_name, isp_map[state.isp]), 'record': record.record, 'type': record.record_type, 'ttl': record.ttl})
elif ((record.record_type == 'CNAME') and (str(record.id) == state.state)):
active_records.append({'name': _make_glbs_cname(state.domain_name, isp_map[state.isp]), 'record': record.record, 'type': record.record_type, 'ttl': record.ttl})
return sorted(active_records, key=(lambda record: (record['name'], record['record'])))
def _get_records_of_ordinary_zone(zone_name):
records = DnsRecord.query.filter_by(zone_name=zone_name).all()
record_info = []
for record in records:
if (record.ttl != 0):
record_info.append({'name': record.domain_name, 'record': record.record, 'type': record.record_type, 'ttl': record.ttl})
else:
record_info.append({'name': record.domain_name, 'record': record.record, 'type': record.record_type})
return sorted(record_info, key=(lambda x: (x['name'], x['record'])))
def get_zone_records(zone_name):
isp_map = {item.name_in_english: item.abbreviation for item in ViewIsps.query.all()}
if (zone_name == VIEW_ZONE):
return ZoneRecordDal._get_records_of_view_zone(isp_map)
elif (zone_name in NORMAL_TO_CNAME.values()):
return ZoneRecordDal._get_records_of_view_domain(zone_name, isp_map)
else:
return ZoneRecordDal._get_records_of_ordinary_zone(zone_name)
@_on_success
def update_serial_num(zone_name):
item = DnsSerial.query.filter_by(zone_name=zone_name).first()
if (not item):
raise BadParam(('No such zone: %s' % zone_name))
serial_num = item.serial_num
item.update_serial_num = serial_num
return serial_num
def has_no_mx_txt_record(zone, domain_name):
zone_header = ZoneRecordDal.get_zone_header(zone)['header']
pattern = '\\s{0}[\\s\\d]+IN\\s+MX|\\s{0}[\\s\\d]+IN\\s+TXT'.format(domain_name.replace(('.' + zone), ''))
if re.search(pattern, zone_header):
raise BadParam(('%s has mx or txt record in zone: %s' % (domain_name, zone)))
def check_dns_restriction(zone, domain_name, record_type):
if ((record_type == 'A') or (record_type == 'AAAA')):
if DnsRecord.query.filter_by(domain_name=domain_name, record_type='CNAME').first():
raise BadParam(('%s has CNAME record.' % domain_name), msg_ch=u'CNAME')
if (record_type == 'CNAME'):
if DnsRecord.query.filter_by(domain_name=domain_name).first():
raise BadParam(('%s already has other records.' % domain_name), msg_ch=u'CNAME')
ZoneRecordDal.has_no_mx_txt_record(zone, domain_name)
def check_zone_syntax(zone_name, header_content):
tmp_file = os.path.join(CONF.tmp_dir, zone_name)
with open(tmp_file, 'w') as f:
f.write(header_content)
if (CONF.etc.env != 'dev'):
err_file = tempfile.mktemp(prefix='err_', dir='/tmp')
if (os.system(('named-checkzone -k fail %s %s >%s 2>&1' % (zone_name, tmp_file, err_file))) != 0):
with open(err_file) as f:
error_log = f.read()
raise BadParam(('Check header failed:%s' % error_log))
def check_zone_header(zone_name, header_content):
headers = DnsHeader.query.filter_by(zone_name=zone_name).all()
if (len(headers) != 1):
raise BadParam(('No such zone header: %s' % zone_name), msg_ch=u'')
zone_name = zone_name.strip()
if ('pre_serial' not in header_content):
raise BadParam('check header failed: no pre_serial', msg_ch=u'pre_serial,serial')
if (not header_content.endswith('\n')):
raise BadParam('check header failed: header must end with a newline', msg_ch=u'')
header_content = header_content.replace('pre_serial', '1')
ZoneRecordDal.check_zone_syntax(zone_name, header_content)
pattern = '(?<=[\\s;])(([\\w-]+\\.)*[-\\w]+)(\\s+\\d+)?\\s+IN\\s+(MX|TXT)'
if re.search('\\sIN\\s(MX|TXT)\\s', header_content):
domains = set(['{}.{}'.format(domain.group(1), zone_name) for domain in re.finditer(pattern, header_content)])
records = DnsRecord.query.filter(DnsRecord.domain_name.in_(domains)).filter((DnsRecord.record_type == 'CNAME')).all()
if records:
conflict_domain = [dns.name for dns in records]
raise BadParam(('%s already has a CNAME record, cannot add MX or TXT record.' % conflict_domain), msg_ch=u'%sCNAME,MX/TXT')
@_on_success
def increase_serial_num(zone_name):
serials = DnsSerial.query.filter_by(zone_name=zone_name).all()
if (len(serials) != 1):
raise BadParam(('Zone serial should be unique: %s' % zone_name), msg_ch=u'zone serial')
serial = serials[0]
serial.serial_num += 1
return serial.serial_num
def update_zone_header(zone_name, header_content):
ZoneRecordDal.check_zone_header(zone_name, header_content)
old_header = DnsHeader.query.filter_by(zone_name=zone_name).first().header_content
if (md5(header_content.encode('utf-8')).hexdigest() == md5(old_header.encode('utf-8')).hexdigest()):
raise BadParam(('No change for this header: %s' % zone_name), msg_ch=u'')
with db.session.begin(subtransactions=True):
headers = DnsHeader.query.filter_by(zone_name=zone_name).all()
if (len(headers) != 1):
raise BadParam(('Zone header should be unique: %s' % zone_name), msg_ch=u'header')
DnsHeader.query.filter_by(zone_name=zone_name).update({'header_content': header_content})
serial_num = ZoneRecordDal.increase_serial_num(zone_name)
return serial_num
@_on_success
def add_record(domain_name, record, record_type, ttl, username):
zone = ZoneRecordDal.select_zone(domain_name)
ZoneRecordDal.check_dns_restriction(zone, domain_name, record_type)
if DnsRecord.query.filter_by(domain_name=domain_name, record=record).first():
raise BadParam('This record already exists for the domain name.', msg_ch=u'')
other_record = DnsRecord.query.filter_by(domain_name=domain_name).first()
if other_record:
if (ttl == 0):
ttl = other_record.ttl
insert_record = DnsRecord(domain_name=domain_name, record=record, zone_name=zone, update_user=username, record_type=record_type, ttl=ttl)
db.session.add(insert_record)
return ZoneRecordDal.increase_serial_num(zone)
@_on_success
def auto_add_record(domain_name, region, username):
zone = ZoneRecordDal.select_zone(domain_name)
records = IpPool.query.outerjoin(DnsRecord, (DnsRecord.record == IpPool.fixed_ip)).add_columns(IpPool.fixed_ip, DnsRecord.record).filter((IpPool.region == region), DnsRecord.record.is_(None), IpPool.allocated.is_(True)).order_by(IpPool.fixed_ip)
for item in records:
ip = item.fixed_ip
if (CONF.etc.env != 'dev'):
(responses, no_responses) = multi_ping([ip], timeout=10, retry=2, ignore_lookup_errors=True)
if responses:
IpPool.query.filter_by(fixed_ip=ip).update({'allocated': False})
log.error(('%s should have been set allocated=False since it is ping-able.' % ip))
continue
with db.session.begin(subtransactions=True):
try:
iprecord = IpPool.query.filter_by(fixed_ip=ip).with_for_update(nowait=True, of=IpPool).first()
except Exception:
log.error(('%s has been locked by other process' % ip))
continue
if DnsRecord.query.filter_by(record=ip).first():
continue
record_type = 'A'
if iprecord.is_ipv6:
record_type = 'AAAA'
insert_record = DnsRecord(domain_name=domain_name, record=ip, zone_name=zone, update_user=username, record_type=record_type)
db.session.add(insert_record)
return ZoneRecordDal.increase_serial_num(zone)
else:
raise BadParam(('No unused ip for region:%s.' % region), msg_ch=u'ip')
@_on_success
def modify_record(domain_name, origin_record, update_dict, username):
zone = ZoneRecordDal.select_zone(domain_name)
if (update_dict.get('record_type', None) == 'CNAME'):
ZoneRecordDal.has_no_mx_txt_record(zone, domain_name)
records = DnsRecord.query.filter_by(domain_name=domain_name, record=origin_record).all()
if (len(records) > 1):
raise BadParam('More than one record, check database!', msg_ch='')
if (len(records) == 0):
raise BadParam('Cannot find this record!', msg_ch='')
update_dict.pop('check_record', None)
update_dict['update_user'] = username
DnsRecord.query.filter_by(domain_name=domain_name, record=origin_record).update(update_dict)
if (update_dict.get('record_type') in ['A', 'AAAA']):
if (DnsRecord.query.filter_by(domain_name=domain_name, record=update_dict['record']).count() > 1):
raise BadParam(('Domain %s already have record %s' % (domain_name, update_dict['record'])), msg_ch='')
ttl = update_dict.get('ttl')
if (ttl is not None):
DnsRecord.query.filter_by(domain_name=domain_name).update({'ttl': ttl})
if (update_dict.get('record_type') == 'CNAME'):
DnsRecord.query.filter((DnsRecord.domain_name == domain_name), (DnsRecord.record_type != 'A')).delete()
return ZoneRecordDal.increase_serial_num(zone)
@_on_success
def delete_record(domain_name, record, record_type):
dns_records = DnsRecord.query.filter_by(domain_name=domain_name, record=record, record_type=record_type).all()
if (len(dns_records) == 0):
raise BadParam(('No such a record:[domain name:%s, record:%s, type:%s]' % (domain_name, record, record_type)), msg_ch=u'')
zone = dns_records[0].zone_name
DnsRecord.query.filter_by(domain_name=domain_name, record=record, record_type=record_type).delete()
return ZoneRecordDal.increase_serial_num(zone)
def get_domain_records(**kwargs):
return [item.json_serialize() for item in DnsRecord.query.filter_by(**kwargs)]
def search_domain_records(field, pattern):
return [item.json_serialize() for item in DnsRecord.query.filter(getattr(DnsRecord, field).like(pattern))] |
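# A hedged standalone sketch of the longest-suffix match that select_zone above
# performs against DnsSerial rows, with a plain set standing in for the table.
def match_zone(domain, zones):
    parts = domain.split('.')
    for index in range(1, len(parts)):
        candidate = '.'.join(parts[index:])
        if candidate in zones:
            return candidate
    return None

assert match_zone('a.b.example.com', {'example.com', 'b.example.com'}) == 'b.example.com' |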
def set_session(user_info, session=flask.session, permanent=True):
session.permanent = bool(permanent)
session.update({'authenticated': True, 'id': user_info['id'], 'name': user_info['name'], 'role': user_info['role'], 'perms': user_info['permission'], 'template': user_info['template']})
return session |
def test_project_label_promotion(gl, group):
_id = uuid.uuid4().hex
data = {'name': f'test-project-{_id}', 'namespace_id': group.id}
project = gl.projects.create(data)
label_name = 'promoteme'
promoted_label = project.labels.create({'name': label_name, 'color': '#112233'})
promoted_label.promote()
assert any(((label.name == label_name) for label in group.labels.list()))
group.labels.delete(label_name)
assert (not any(((label.name == label_name) for label in group.labels.list()))) |
class BaseRequest(ABC):
session: ClientSession
log = logging.getLogger('aiosnow.request')
def __init__(self, api_url: str, session: ClientSession, fields: dict=None, headers: dict=None, params: dict=None, resolve: bool=False):
self.api_url = api_url
self.session = session
self.fields = (fields or {})
self.url_segments: List[str] = []
self._resolve = resolve
self._default_headers = {'Content-type': CONTENT_TYPE, **(headers or {})}
self._default_params = (params or {})
self._req_id = f'REQ_{hex(int(round((time.time() * 1000))))}'
@property
def params(self) -> dict:
params = dict(sysparm_display_value='all')
if self.fields:
params['sysparm_fields'] = ','.join(self.fields)
return {**params, **self._default_params}
@property
def url(self) -> str:
api_url = self.api_url
if self.url_segments:
api_url += ('/' + '/'.join(map(str, self.url_segments)))
return f'{api_url}?{urlencode(self.params)}'
def __repr__(self) -> str:
pass
async def send(self, *args: Any, **kwargs: Any) -> Tuple[(Response, dict)]:
pass
@property
def _method(self) -> str:
pass
def _request_id(self) -> str:
return hex(id(self))
def _format_repr(self, params: str='') -> str:
return f'<{self.__class__.__name__} {urlparse(self.url).path} [{params}]>'
async def _do_send(self, *args: Any, **kwargs: Any) -> Any:
return (await self.session.request(*args, **kwargs))
async def _send(self, headers_extra: dict=None, **kwargs: Any) -> Response:
headers = self._default_headers
headers.update(**(headers_extra or {}))
kwargs['headers'] = headers
method = kwargs.pop('method', self._method)
decode = kwargs.pop('decode', True)
try:
self.log.debug(f'{self._req_id}: {self}')
response = (await self._do_send(method, self.url, **kwargs))
self.log.debug(f'{self._req_id}: {response}')
except client_exceptions.ClientConnectionError as exc:
raise ClientConnectionError(str(exc)) from exc
if ((method == methods.DELETE) and (response.status == 204)):
return response
if (not decode):
response.data = (await response.read())
elif (not response.content_type.startswith(CONTENT_TYPE)):
raise UnexpectedContentType(f'Unexpected content-type in response: {response.content_type}, expected: {CONTENT_TYPE}, probable causes: instance down or REST API disabled')
else:
(await response.load_document())
return response |
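# A hedged sketch exercising the params/url properties above; passing None for
# the aiohttp session is safe here because these properties never touch it, and
# the instance URL is a made-up example.
req = BaseRequest(api_url='https://example.test/api/now/table/incident', session=None, fields={'number': None, 'short_description': None})
assert 'sysparm_display_value=all' in req.url
assert 'sysparm_fields=number%2Cshort_description' in req.url |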
@register_model
def hardcorenas_e(pretrained=False, **kwargs):
arch_def = [['ds_r1_k3_s1_e1_c16_nre'], ['ir_r1_k5_s2_e3_c24_nre_se0.25', 'ir_r1_k5_s1_e3_c24_nre_se0.25'], ['ir_r1_k5_s2_e6_c40_nre_se0.25', 'ir_r1_k5_s1_e4_c40_nre_se0.25', 'ir_r1_k5_s1_e4_c40_nre_se0.25', 'ir_r1_k3_s1_e3_c40_nre_se0.25'], ['ir_r1_k5_s2_e4_c80_se0.25', 'ir_r1_k3_s1_e6_c80_se0.25'], ['ir_r1_k5_s1_e6_c112_se0.25', 'ir_r1_k5_s1_e6_c112_se0.25', 'ir_r1_k5_s1_e6_c112_se0.25', 'ir_r1_k5_s1_e3_c112_se0.25'], ['ir_r1_k5_s2_e6_c192_se0.25', 'ir_r1_k5_s1_e6_c192_se0.25', 'ir_r1_k5_s1_e6_c192_se0.25', 'ir_r1_k3_s1_e6_c192_se0.25'], ['cn_r1_k1_s1_c960']]
model = _gen_hardcorenas(pretrained=pretrained, variant='hardcorenas_e', arch_def=arch_def, **kwargs)
return model |
def set_default(default_zone=None, connection_retries=None, connection_pool=None, connection_timeout=None, default_rs_host=None, default_uc_host=None, default_rsf_host=None, default_api_host=None, default_upload_threshold=None, default_query_region_host=None, default_query_region_backup_hosts=None, default_backup_hosts_retry_times=None):
if default_zone:
_config['default_zone'] = default_zone
_is_customized_default['default_zone'] = True
if default_rs_host:
_config['default_rs_host'] = default_rs_host
_is_customized_default['default_rs_host'] = True
if default_rsf_host:
_config['default_rsf_host'] = default_rsf_host
_is_customized_default['default_rsf_host'] = True
if default_api_host:
_config['default_api_host'] = default_api_host
_is_customized_default['default_api_host'] = True
if default_uc_host:
_config['default_uc_host'] = default_uc_host
_is_customized_default['default_uc_host'] = True
_config['default_query_region_host'] = default_uc_host
_is_customized_default['default_query_region_host'] = True
_config['default_query_region_backup_hosts'] = []
_is_customized_default['default_query_region_backup_hosts'] = True
if default_query_region_host:
_config['default_query_region_host'] = default_query_region_host
_is_customized_default['default_query_region_host'] = True
_config['default_query_region_backup_hosts'] = []
_is_customized_default['default_query_region_backup_hosts'] = True
if default_query_region_backup_hosts:
_config['default_query_region_backup_hosts'] = default_query_region_backup_hosts
_is_customized_default['default_query_region_backup_hosts'] = True
if default_backup_hosts_retry_times:
_config['default_backup_hosts_retry_times'] = default_backup_hosts_retry_times
_is_customized_default['default_backup_hosts_retry_times'] = True
if connection_retries:
_config['connection_retries'] = connection_retries
_is_customized_default['connection_retries'] = True
if connection_pool:
_config['connection_pool'] = connection_pool
_is_customized_default['connection_pool'] = True
if connection_timeout:
_config['connection_timeout'] = connection_timeout
_is_customized_default['connection_timeout'] = True
if default_upload_threshold:
_config['default_upload_threshold'] = default_upload_threshold
_is_customized_default['default_upload_threshold'] = True |
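# A hedged usage sketch: _config and _is_customized_default are module-level
# dicts in the config module this function belongs to; empty stubs stand in here,
# and the uc host value is a made-up example.
_config = {}
_is_customized_default = {}
set_default(connection_timeout=30, default_uc_host='uc.example.test')
assert _config['connection_timeout'] == 30
assert _config['default_query_region_host'] == 'uc.example.test'  # the uc host doubles as the query-region host |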
class KwsTimeWeight(_lexicographic_weight.KwsTimeWeight):
def __new__(cls, *args):
if (len(args) == 0):
return _lexicographic_weight.KwsTimeWeight()
if (len(args) == 1):
if (isinstance(args[0], tuple) and (len(args[0]) == 2)):
args = args[0]
else:
args = (args[0].value1, args[0].value2)
args = (TropicalWeight(args[0]), TropicalWeight(args[1]))
return _lexicographic_weight.KwsTimeWeight.from_components(*args) |
@pytest.mark.django_db
@pytest.mark.parametrize('site_name', ['site1', 'site2'])
def test_clear_site_cache_check_site_cache_size(site_name: str, settings) -> None:
assert (len(site_models.SITE_CACHE) == 0)
site = Site.objects.create(domain='foo.com', name=site_name)
settings.SITE_ID = site.id
assert (Site.objects.get_current() == site)
assert (len(site_models.SITE_CACHE) == 1) |
class VGMF_Fusion(nn.Module):
def __init__(self, opt={}):
super(VGMF_Fusion, self).__init__()
self.gate = nn.Linear(1024, opt['embed']['embed_dim'])
def forward(self, sv, kv):
sv = l2norm(sv, dim=(- 1))
kv = l2norm(kv, dim=(- 1))
sw_s = torch.sigmoid(self.gate(torch.cat([sv, kv], dim=(- 1))))
ones = torch.ones(sw_s.shape).cuda()
sw_k = (ones - sw_s)
out = ((sw_s * sv) + (sw_k * kv))
return out |
class GenericEnv(VirtualEnv):
def __init__(self, path: Path, base: (Path | None)=None, child_env: (Env | None)=None) -> None:
self._child_env = child_env
super().__init__(path, base=base)
def find_executables(self) -> None:
patterns = [('python*', 'pip*')]
if self._child_env:
minor_version = f'{self._child_env.version_info[0]}.{self._child_env.version_info[1]}'
major_version = f'{self._child_env.version_info[0]}'
patterns = [(f'python{minor_version}', f'pip{minor_version}'), (f'python{major_version}', f'pip{major_version}')]
python_executable = None
pip_executable = None
for (python_pattern, pip_pattern) in patterns:
if (python_executable and pip_executable):
break
if (not python_executable):
python_executables = sorted((p.name for p in self._bin_dir.glob(python_pattern) if re.match('python(?:\\d+(?:\\.\\d+)?)?(?:\\.exe)?$', p.name)))
if python_executables:
executable = python_executables[0]
if executable.endswith('.exe'):
executable = executable[:(- 4)]
python_executable = executable
if (not pip_executable):
pip_executables = sorted((p.name for p in self._bin_dir.glob(pip_pattern) if re.match('pip(?:\\d+(?:\\.\\d+)?)?(?:\\.exe)?$', p.name)))
if pip_executables:
pip_executable = pip_executables[0]
if pip_executable.endswith('.exe'):
pip_executable = pip_executable[:(- 4)]
if python_executable:
self._executable = python_executable
if pip_executable:
self._pip_executable = pip_executable
def get_paths(self) -> dict[(str, str)]:
output = self.run_python_script(GET_PATHS_FOR_GENERIC_ENVS)
paths: dict[(str, str)] = json.loads(output)
return paths
def execute(self, bin: str, *args: str, **kwargs: Any) -> int:
command = (self.get_command_from_bin(bin) + list(args))
env = kwargs.pop('env', dict(os.environ))
if (not self._is_windows):
return os.execvpe(command[0], command, env=env)
exe = subprocess.Popen(command, env=env, **kwargs)
exe.communicate()
return exe.returncode
def _run(self, cmd: list[str], **kwargs: Any) -> str:
return super(VirtualEnv, self)._run(cmd, **kwargs)
def is_venv(self) -> bool:
return (self._path != self._base) |
class GlobalAttentionGeneral(nn.Module):
def __init__(self, idf, cdf):
super(GlobalAttentionGeneral, self).__init__()
self.sm = nn.Softmax(dim=1)
self.mask = None
def applyMask(self, mask):
self.mask = mask
def forward(self, input, context_key, content_value):
(ih, iw) = (input.size(2), input.size(3))
queryL = (ih * iw)
(batch_size, sourceL) = (context_key.size(0), context_key.size(2))
target = input.view(batch_size, (- 1), queryL)
targetT = torch.transpose(target, 1, 2).contiguous()
sourceT = context_key
attn = torch.bmm(targetT, sourceT)
text_weighted = None
attn = attn.view((batch_size * queryL), sourceL)
if (self.mask is not None):
mask = self.mask.repeat(queryL, 1)
attn.data.masked_fill_(mask.data, (- float('inf')))
attn = self.sm(attn)
attn = attn.view(batch_size, queryL, sourceL)
attn = torch.transpose(attn, 1, 2).contiguous()
weightedContext = torch.bmm(content_value, attn)
weightedContext = weightedContext.view(batch_size, (- 1), ih, iw)
attn = attn.view(batch_size, (- 1), ih, iw)
return (weightedContext, attn) |
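# A hedged usage sketch for GlobalAttentionGeneral: the shapes follow the bmm
# calls in forward (input B x idf x ih x iw, context_key / content_value B x idf x sourceL).
import torch
attn_layer = GlobalAttentionGeneral(idf=8, cdf=8)
x = torch.rand(2, 8, 4, 4)
key = torch.rand(2, 8, 5)
value = torch.rand(2, 8, 5)
(weighted, attn) = attn_layer(x, key, value)
assert (weighted.shape == (2, 8, 4, 4))
assert (attn.shape == (2, 5, 4, 4)) |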
def get_project_dependency_packages(locker: Locker, project_requires: list[Dependency], root_package_name: NormalizedName, project_python_marker: (BaseMarker | None)=None, extras: Collection[NormalizedName]=()) -> Iterator[DependencyPackage]:
if (project_python_marker is not None):
marked_requires: list[Dependency] = []
for require in project_requires:
require = require.clone()
require.marker = require.marker.intersect(project_python_marker)
marked_requires.append(require)
project_requires = marked_requires
repository = locker.locked_repository()
locked_extras = {canonicalize_name(extra): [canonicalize_name(dependency) for dependency in dependencies] for (extra, dependencies) in locker.lock_data.get('extras', {}).items()}
extra_package_names = get_extra_package_names(repository.packages, locked_extras, extras)
selected = []
for dependency in project_requires:
try:
package = repository.find_packages(dependency=dependency)[0]
except IndexError:
continue
if (package.optional and (package.name not in extra_package_names)):
continue
selected.append(dependency)
for (package, dependency) in get_project_dependencies(project_requires=selected, locked_packages=repository.packages, root_package_name=root_package_name):
(yield DependencyPackage(dependency=dependency, package=package)) |
def _format_replace(replace: Optional[ReplaceTypes]=None) -> dict[(Variable, Variable)]:
items: dict[(Variable, Variable)]
if isinstance(replace, dict):
items = cast(dict[(Variable, Variable)], replace)
elif isinstance(replace, Iterable):
items = dict(replace)
elif (replace is None):
items = {}
else:
raise ValueError(f'replace is neither a dictionary, list, tuple or None ! The value provided is {replace},of type {type(replace)}')
return items |
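# A hedged usage sketch for _format_replace: Variable can be any hashable here,
# since the function only normalizes the container shape.
assert _format_replace(None) == {}
assert _format_replace({'a': 'b'}) == {'a': 'b'}
assert _format_replace([('x', 'y')]) == {'x': 'y'} |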
class ArgKindsPlugin(Plugin):
def get_function_hook(self, fullname: str) -> (Callable[([FunctionContext], Type)] | None):
if ('func' in fullname):
return extract_arg_kinds_from_function
return None
def get_method_hook(self, fullname: str) -> (Callable[([MethodContext], Type)] | None):
if ('Class.method' in fullname):
return extract_arg_kinds_from_method
return None |
class UserDBHandler(BaseHandler):
@tornado.web.authenticated
async def get(self, userid):
adminflg = False
user = (await self.db.user.get(userid, fields=('role',)))
if (user and (user['role'] == 'admin')):
adminflg = True
(await self.render('DB_manage.html', userid=userid, adminflg=adminflg))
return
@tornado.web.authenticated
async def post(self, userid):
try:
async with self.db.transaction() as sql_session:
user = (await self.db.user.get(userid, fields=('role', 'email'), sql_session=sql_session))
envs = {}
for (k, _) in self.request.body_arguments.items():
envs[k] = self.get_body_argument(k)
mail = envs['adminmail']
pwd = envs['adminpwd']
now = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
if ('backupbtn' in envs):
if ((await self.db.user.challenge_MD5(mail, pwd, sql_session=sql_session)) and (user['email'] == mail)):
if (user and (user['role'] == 'admin')):
if (config.db_type != 'sqlite3'):
raise Exception(u'Database backup is only supported for sqlite3, not MySQL!')
filename = config.sqlite3.path
savename = 'database_{now}.db'.format(now=now)
if (not aio_import):
raise Exception(u'aiofiles is required for backup but is not available!')
conn_src = sqlite3.connect(filename, check_same_thread=False)
conn_target = sqlite3.connect(savename, check_same_thread=False)
def progress(status, remaining, total):
logger_Web_Handler.info(f'Sqlite_Backup: Copied {(total - remaining)} of {total} pages...')
conn_src.backup(conn_target, progress=progress)
conn_target.commit()
conn_src.close()
conn_target.close()
try:
self.set_header('Content-Type', 'application/octet-stream; charset=UTF-8')
self.set_header('Content-Disposition', ('attachment; filename=' + savename).encode('utf-8'))
content_length = os.stat(savename).st_size
self.set_header('Content-Length', content_length)
async with aiofiles.open(savename, 'rb') as f:
self.set_header('Content-Type', 'application/octet-stream')
self.set_header('Content-Disposition', ('attachment; filename=' + savename).encode('utf-8'))
chunk_size = ((1024 * 1024) * 1)
while True:
chunk = (await f.read(chunk_size))
if (not chunk):
break
try:
self.write(chunk)
(await self.flush())
except iostream.StreamClosedError:
raise Exception('Stream closed')
finally:
del chunk
(await self.finish())
finally:
(await gen.sleep(3))
os.remove(savename)
else:
raise Exception(u'Only the admin can back up the database!')
else:
raise Exception(u'Invalid email or password!')
if ((await self.db.user.challenge_MD5(mail, pwd, sql_session=sql_session)) and (user['email'] == mail)):
if ('backuptplsbtn' in envs):
tpls = []
for tpl in (await self.db.tpl.list(userid=userid, fields=('id', 'siteurl', 'sitename', 'banner', 'note', 'fork', '_groups', 'har', 'tpl', 'variables', 'init_env'), limit=None, sql_session=sql_session)):
tpl['tpl'] = (await self.db.user.decrypt(userid, tpl['tpl'], sql_session=sql_session))
tpl['har'] = (await self.db.user.decrypt(userid, tpl['har'], sql_session=sql_session))
tpls.append(tpl)
tasks = []
for task in (await self.db.task.list(userid, fields=('id', 'tplid', 'retry_count', 'retry_interval', 'note', 'disabled', '_groups', 'init_env', 'env', 'ontimeflg', 'ontime', 'pushsw', 'newontime'), limit=None, sql_session=sql_session)):
task['init_env'] = (await self.db.user.decrypt(userid, task['init_env'], sql_session=sql_session))
task['env'] = ((await self.db.user.decrypt(userid, task['env'], sql_session=sql_session)) if task['env'] else None)
tasks.append(task)
backupdata = {}
backupdata['tpls'] = tpls
backupdata['tasks'] = tasks
savename = '{mail}_{now}.json'.format(mail=user['email'], now=now)
if (not aio_import):
raise Exception(u'aiofiles is required for backup but is not available!')
async with aiofiles.open(savename, 'w', encoding='utf-8') as fp:
(await fp.write(json.dumps(backupdata, ensure_ascii=False, indent=4)))
fp.close()
self.set_header('Content-Type', 'application/octet-stream; charset=UTF-8')
self.set_header('Content-Disposition', ('attachment; filename=' + savename).encode('utf-8'))
async with aiofiles.open(savename, 'rb') as f:
chunk_size = ((1024 * 1024) * 1)
while True:
data = (await f.read(chunk_size))
if (not data):
break
self.write(data)
(await self.flush())
os.remove(savename)
(await self.finish())
return
if ('recoverytplsbtn' in envs):
if ('recfile' in self.request.files):
envs['recfile'] = self.request.files['recfile'][0]['body']
if (envs['recfile'][:6] == b'SQLite'):
db_dir = os.path.dirname(config.sqlite3.path)
db_restore = os.path.join(db_dir, 'database_restore.db')
with open(db_restore, 'wb') as f:
f.write(envs['recfile'])
db_backup = os.path.join(db_dir, 'database_backup.db')
db_now = os.path.join(db_dir, 'database.db')
conn_src = sqlite3.connect(db_now, check_same_thread=False)
conn_target = sqlite3.connect(db_backup, check_same_thread=False)
def progress(status, remaining, total):
logger_Web_Handler.info(f'Sqlite_Backup: Copied {(total - remaining)} of {total} pages...')
conn_src.backup(conn_target, progress=progress)
conn_target.commit()
conn_src.close()
conn_target.close()
conn_src = sqlite3.connect(db_restore, check_same_thread=False)
conn_target = sqlite3.connect(db_now, check_same_thread=False)
def progress(status, remaining, total):
logger_Web_Handler.info(f'Sqlite_Restore: Copied {(total - remaining)} of {total} pages...')
conn_src.backup(conn_target, progress=progress)
conn_target.commit()
conn_src.close()
conn_target.close()
(await self.render('utils_run_result.html', log=u'Database restored, please restart QD!\r\nPS: the previous database.db was saved as database_backup.db; if the restore went wrong, rename database_backup.db back to database.db!', title=u'Database restore', flg='success'))
return
else:
try:
tpls = json.loads(envs['recfile'])['tpls']
tasks = json.loads(envs['recfile'])['tasks']
except:
raise Exception(u'Invalid backup file, parsing failed! Please upload a valid JSON (or SQLite .db) backup file.')
ids = []
for newtpl in tpls:
userid2 = int(userid)
har = (await self.db.user.encrypt(userid2, newtpl['har'], sql_session=sql_session))
tpl = (await self.db.user.encrypt(userid2, newtpl['tpl'], sql_session=sql_session))
variables = newtpl['variables']
init_env = newtpl.get('init_env', '{}')
newid = (await self.db.tpl.add(userid2, har, tpl, variables, init_env=init_env, sql_session=sql_session))
(await self.db.tpl.mod(newid, fork=newtpl['fork'], siteurl=newtpl['siteurl'], sitename=newtpl['sitename'], note=newtpl['note'], _groups=u'', banner=newtpl['banner'], sql_session=sql_session))
for task in tasks:
if (task['tplid'] == newtpl['id']):
task['tplid'] = newid
for newtask in tasks:
userid2 = int(userid)
newtask['init_env'] = (await self.db.user.encrypt(userid2, newtask['init_env'], sql_session=sql_session))
newtask['env'] = (await self.db.user.encrypt(userid2, newtask['env'], sql_session=sql_session))
newtask['retry_count'] = newtask.get('retry_count', config.task_max_retry_count)
newtask['retry_interval'] = newtask.get('retry_interval')
taskid = (await self.db.task.add(newtask['tplid'], userid, newtask['env'], sql_session=sql_session))
(await self.db.task.mod(taskid, disabled=newtask['disabled'], init_env=newtask['init_env'], session=None, retry_count=newtask['retry_count'], retry_interval=newtask['retry_interval'], note=newtask['note'], _groups=u'', ontimeflg=newtask['ontimeflg'], ontime=newtask['ontime'], pushsw=newtask['pushsw'], newontime=newtask['newontime'], sql_session=sql_session))
(await self.render('utils_run_result.html', log=u'Templates and tasks restored successfully!', title=u'Restore', flg='success'))
return
else:
raise Exception(u'Please choose a backup file to upload!')
else:
raise Exception(u'Invalid email or password!')
except Exception as e:
if config.traceback_print:
traceback.print_exc()
if (str(e).find('get user need id or email') > (- 1)):
e = u'Invalid email or password'
self.set_status(400)
self.set_header('Error-Message', base64.b64encode(str(e).encode('utf-8')))
(await self.render('utils_run_result.html', log=str(e), title=u'Error', flg='danger'))
logger_Web_Handler.error('UserID: %s backup or restore Database failed! Reason: %s', (userid or '-1'), str(e))
return
return |
class DetectionNetworkBase(object):
def __init__(self, cfgs, is_training):
self.cfgs = cfgs
self.base_network_name = cfgs.NET_NAME
self.is_training = is_training
self.batch_size = (cfgs.BATCH_SIZE if is_training else 1)
if (cfgs.METHOD == 'H'):
self.num_anchors_per_location = (len(cfgs.ANCHOR_SCALES) * len(cfgs.ANCHOR_RATIOS))
else:
self.num_anchors_per_location = ((len(cfgs.ANCHOR_SCALES) * len(cfgs.ANCHOR_RATIOS)) * len(cfgs.ANCHOR_ANGLES))
self.method = cfgs.METHOD
self.losses_dict = {}
self.drawer = DrawBoxTensor(cfgs)
self.backbone = BuildBackbone(cfgs, is_training)
self.pretrain_zoo = PretrainModelZoo()
def build_backbone(self, input_img_batch):
return self.backbone.build_backbone(input_img_batch)
def rpn_cls_net(self, inputs, scope_list, reuse_flag, level):
rpn_conv2d_3x3 = inputs
for i in range(self.cfgs.NUM_SUBNET_CONV):
rpn_conv2d_3x3 = slim.conv2d(inputs=rpn_conv2d_3x3, num_outputs=self.cfgs.FPN_CHANNEL, kernel_size=[3, 3], stride=1, activation_fn=(None if self.cfgs.USE_GN else tf.nn.relu), weights_initializer=self.cfgs.SUBNETS_WEIGHTS_INITIALIZER, biases_initializer=self.cfgs.SUBNETS_BIAS_INITIALIZER, scope='{}_{}'.format(scope_list[0], i), trainable=self.is_training, reuse=reuse_flag)
if self.cfgs.USE_GN:
rpn_conv2d_3x3 = tf.contrib.layers.group_norm(rpn_conv2d_3x3)
rpn_conv2d_3x3 = tf.nn.relu(rpn_conv2d_3x3)
rpn_box_scores = slim.conv2d(rpn_conv2d_3x3, num_outputs=(self.cfgs.CLASS_NUM * self.num_anchors_per_location), kernel_size=[3, 3], stride=1, weights_initializer=self.cfgs.SUBNETS_WEIGHTS_INITIALIZER, biases_initializer=self.cfgs.FINAL_CONV_BIAS_INITIALIZER, scope=scope_list[2], activation_fn=None, trainable=self.is_training, reuse=reuse_flag)
rpn_box_scores = tf.reshape(rpn_box_scores, [self.batch_size, (- 1), self.cfgs.CLASS_NUM], name='rpn_{}_classification_reshape'.format(level))
rpn_box_probs = tf.sigmoid(rpn_box_scores, name='rpn_{}_classification_sigmoid'.format(level))
return (rpn_box_scores, rpn_box_probs)
def rpn_reg_net(self, inputs, scope_list, reuse_flag, level):
rpn_conv2d_3x3 = inputs
for i in range(self.cfgs.NUM_SUBNET_CONV):
rpn_conv2d_3x3 = slim.conv2d(inputs=rpn_conv2d_3x3, num_outputs=self.cfgs.FPN_CHANNEL, kernel_size=[3, 3], weights_initializer=self.cfgs.SUBNETS_WEIGHTS_INITIALIZER, biases_initializer=self.cfgs.SUBNETS_BIAS_INITIALIZER, stride=1, activation_fn=(None if self.cfgs.USE_GN else tf.nn.relu), scope='{}_{}'.format(scope_list[1], i), trainable=self.is_training, reuse=reuse_flag)
if self.cfgs.USE_GN:
rpn_conv2d_3x3 = tf.contrib.layers.group_norm(rpn_conv2d_3x3)
rpn_conv2d_3x3 = tf.nn.relu(rpn_conv2d_3x3)
rpn_delta_boxes = slim.conv2d(rpn_conv2d_3x3, num_outputs=(5 * self.num_anchors_per_location), kernel_size=[3, 3], stride=1, weights_initializer=self.cfgs.SUBNETS_WEIGHTS_INITIALIZER, biases_initializer=self.cfgs.SUBNETS_BIAS_INITIALIZER, scope=scope_list[3], activation_fn=None, trainable=self.is_training, reuse=reuse_flag)
rpn_delta_boxes = tf.reshape(rpn_delta_boxes, [self.batch_size, (- 1), 5], name='rpn_{}_regression_reshape'.format(level))
return rpn_delta_boxes
def rpn_net(self, feature_pyramid, name):
rpn_delta_boxes_list = []
rpn_scores_list = []
rpn_probs_list = []
with tf.variable_scope(name):
with slim.arg_scope([slim.conv2d], weights_regularizer=slim.l2_regularizer(self.cfgs.WEIGHT_DECAY)):
for level in self.cfgs.LEVEL:
if self.cfgs.SHARE_NET:
reuse_flag = (None if (level == self.cfgs.LEVEL[0]) else True)
scope_list = ['conv2d_3x3_cls', 'conv2d_3x3_reg', 'rpn_classification', 'rpn_regression']
else:
reuse_flag = None
scope_list = [('conv2d_3x3_cls_' + level), ('conv2d_3x3_reg_' + level), ('rpn_classification_' + level), ('rpn_regression_' + level)]
(rpn_box_scores, rpn_box_probs) = self.rpn_cls_net(feature_pyramid[level], scope_list, reuse_flag, level)
rpn_delta_boxes = self.rpn_reg_net(feature_pyramid[level], scope_list, reuse_flag, level)
rpn_scores_list.append(rpn_box_scores)
rpn_probs_list.append(rpn_box_probs)
rpn_delta_boxes_list.append(rpn_delta_boxes)
return (rpn_delta_boxes_list, rpn_scores_list, rpn_probs_list)
def make_anchors(self, feature_pyramid, use_tf=True):
with tf.variable_scope('make_anchors'):
anchor = GenerateAnchors(self.cfgs, self.method)
if (use_tf and (self.method == 'H')):
anchor_list = anchor.generate_all_anchor_tf(feature_pyramid)
else:
anchor_list = anchor.generate_all_anchor(feature_pyramid)
return anchor_list
def add_anchor_img_smry(self, img, anchors, labels, method):
positive_anchor_indices = tf.reshape(tf.where(tf.greater_equal(labels, 1)), [(- 1)])
positive_anchor = tf.gather(anchors, positive_anchor_indices)
pos_in_img = self.drawer.only_draw_boxes(img_batch=img, boxes=positive_anchor, method=method)
tf.summary.image('positive_anchor', pos_in_img)
def get_restorer(self):
checkpoint_path = tf.train.latest_checkpoint(os.path.join(self.cfgs.TRAINED_CKPT, self.cfgs.VERSION))
if (checkpoint_path is not None):
if self.cfgs.RESTORE_FROM_RPN:
print('___restore from rpn___')
model_variables = slim.get_model_variables()
restore_variables = ([var for var in model_variables if (not var.name.startswith('FastRCNN_Head'))] + [slim.get_or_create_global_step()])
for var in restore_variables:
print(var.name)
restorer = tf.train.Saver(restore_variables)
else:
restorer = tf.train.Saver()
print('model restore from :', checkpoint_path)
else:
if (self.cfgs.NET_NAME in self.pretrain_zoo.pth_zoo):
return (None, None)
checkpoint_path = self.cfgs.PRETRAINED_CKPT
print('model restore from pretrained mode, path is :', checkpoint_path)
model_variables = slim.get_model_variables()
def name_in_ckpt_rpn(var):
return var.op.name
def name_in_ckpt_fastrcnn_head(var):
return '/'.join(var.op.name.split('/')[1:])
nameInCkpt_Var_dict = {}
for var in model_variables:
if var.name.startswith(('Fast-RCNN/' + self.base_network_name)):
var_name_in_ckpt = name_in_ckpt_fastrcnn_head(var)
nameInCkpt_Var_dict[var_name_in_ckpt] = var
elif var.name.startswith(self.base_network_name):
var_name_in_ckpt = name_in_ckpt_rpn(var)
nameInCkpt_Var_dict[var_name_in_ckpt] = var
else:
continue
restore_variables = nameInCkpt_Var_dict
for (key, item) in restore_variables.items():
print('var_in_graph: ', item.name)
print('var_in_ckpt: ', key)
print((20 * '___'))
restorer = tf.train.Saver(restore_variables)
print((20 * '****'))
print('restore from pretrained_weighs in IMAGE_NET')
return (restorer, checkpoint_path) |
class TestSpiderDev170(unittest.TestCase):
@timeout(ONE_TEST_TIMEOUT)  # decorator name stripped in the source; a timeout decorator is assumed
def test_spider_dev(self):
split_name = 'dev'
i_query = 170
db_id = get_db_id(split_name, i_query)
(rdf_graph, schema) = get_graph_and_schema(split_name, db_id)
sql_query = get_sql_query(split_name, i_query)
correct_sparql_query = textwrap.dedent(' SELECT ?country_name\n WHERE\n {\n ?countries arc:countries:CountryName ?country_name.\n MINUS{\n ?car_makers arc:car_makers:Country ?car_maker_country.\n ?car_maker_country arc:car_makers:Country:countries:CountryId ?countries.\n }\n }')
qdmr = get_qdmr_from_break(split_name, i_query)
grounding = {}
grounding[GroundingIndex(0, 0, 'cars')] = GroundingKey.make_table_grounding('cars_data')
grounding[GroundingIndex(1, 1, 'that had 8 cylinders')] = GroundingKey.make_comparative_grounding('=', '8', GroundingKey.make_column_grounding('cars_data', 'Cylinders'))
grounding[GroundingIndex(2, 1, 'that were produced before 1980')] = GroundingKey.make_comparative_grounding('<', '1980', GroundingKey.make_column_grounding('cars_data', 'Year'))
grounding[GroundingIndex(4, 0, 'mpg of #REF')] = GroundingKey.make_column_grounding('cars_data', 'MPG')
sparql_query = create_sparql_query_from_qdmr(qdmr, schema, rdf_graph, grounding)
result_correct = QueryResult.execute_query_sql(sql_query, schema)
result = QueryResult.execute_query_to_rdf(sparql_query, rdf_graph, schema, virtuoso_server=VIRTUOSO_SPARQL_SERVICE)
(equal, message) = result.is_equal_to(result_correct, require_column_order=True, require_row_order=False, return_message=True)
self.assertTrue(equal, message) |
class ChocolateBoiler():
__empty: bool
__boiled: bool
__uniqueInstance = None
def __init__(self):
self.__empty = True
self.__boiled = False
def getInstance():
if (ChocolateBoiler.__uniqueInstance == None):
print('Creating unique instance of Chocolate Boiler')
ChocolateBoiler.__uniqueInstance = ChocolateBoiler()
print('Returning instance of Chocolate Boiler')
return ChocolateBoiler.__uniqueInstance
def fill(self) -> None:
if self.isEmpty():
self.__empty = False
self.__boiled = False
def drain(self) -> None:
if ((not self.isEmpty()) and self.isBoiled()):
self.__empty = True
def boil(self) -> None:
if ((not self.isEmpty()) and (not self.isBoiled())):
self.__boiled = True
def isEmpty(self) -> bool:
return self.__empty
def isBoiled(self) -> bool:
return self.__boiled |
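# A hedged usage sketch: the singleton accessor plus one fill/boil/drain cycle.
boiler = ChocolateBoiler.getInstance()
assert boiler is ChocolateBoiler.getInstance()  # the same instance both times
boiler.fill()
boiler.boil()
boiler.drain()
assert boiler.isEmpty() |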
def skip_until(src: str, pos: Pos, expect: str, *, error_on: FrozenSet[str], error_on_eof: bool) -> Pos:
try:
new_pos = src.index(expect, pos)
except ValueError:
new_pos = len(src)
if error_on_eof:
raise suffixed_err(src, new_pos, f'Expected {expect!r}') from None
if (not error_on.isdisjoint(src[pos:new_pos])):
while (src[pos] not in error_on):
pos += 1
raise suffixed_err(src, pos, f'Found invalid character {src[pos]!r}')
return new_pos |
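# A hedged usage sketch for skip_until (a tomllib-style scanner helper): find
# the newline terminating a key/value pair without crossing a carriage return.
src = 'key = "value"\nnext'
pos = skip_until(src, 0, '\n', error_on=frozenset('\r'), error_on_eof=True)
assert pos == 13 and src[pos] == '\n' |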
class FilericeCom(XFSDownloader):
__name__ = 'FilericeCom'
__type__ = 'downloader'
__version__ = '0.01'
__status__ = 'testing'
__pattern__ = r'https?://(?:www\.)?filerice\.com/\w+'  # pattern truncated in the source; host reconstructed from PLUGIN_DOMAIN
__config__ = [('enabled', 'bool', 'Activated', True), ('use_premium', 'bool', 'Use premium account if available', True), ('fallback', 'bool', 'Fallback to free download if premium fails', True), ('chk_filesize', 'bool', 'Check file size', True), ('max_wait', 'int', 'Reconnect if waiting time is greater than minutes', 10)]
__description__ = 'Filerice.com downloader plugin'
__license__ = 'GPLv3'
__authors__ = [('GammaC0de', 'nitzo2001[AT]yahoo[DOT]com')]
NAME_PATTERN = '<div class="name">[^>]*>(?P<N>.+?)<'
SIZE_PATTERN = '<span>Size (?P<S>[\\d.,]+) (?P<U>[\\w^_]+)<'
WAIT_PATTERN = '<span class="seconds">(\\d+)</span>'
DL_LIMIT_PATTERN = '>You have to wait (.+?) till next download<'
PLUGIN_DOMAIN = 'filerice.com' |
class XFixes():
selection_mask = ((SelectionEventMask.SetSelectionOwner | SelectionEventMask.SelectionClientClose) | SelectionEventMask.SelectionWindowDestroy)
def __init__(self, conn):
self.conn = conn
self.ext = conn.conn(xcffib.xfixes.key)
self.ext.QueryVersion(xcffib.xfixes.MAJOR_VERSION, xcffib.xfixes.MINOR_VERSION)
def select_selection_input(self, window, selection='PRIMARY'):
_selection = self.conn.atoms[selection]
self.conn.xfixes.ext.SelectSelectionInput(window.wid, _selection, self.selection_mask) |
def get_reference_facial_points(output_size=None, inner_padding_factor=0.0, outer_padding=(0, 0), default_square=False):
tmp_5pts = np.array(REFERENCE_FACIAL_POINTS)
tmp_crop_size = np.array(DEFAULT_CROP_SIZE)
if default_square:
size_diff = (max(tmp_crop_size) - tmp_crop_size)
tmp_5pts += (size_diff / 2)
tmp_crop_size += size_diff
if (output_size and (output_size[0] == tmp_crop_size[0]) and (output_size[1] == tmp_crop_size[1])):
return tmp_5pts
if ((inner_padding_factor == 0) and (outer_padding == (0, 0))):
if (output_size is None):
return tmp_5pts
else:
raise FaceWarpException('No paddings to do, output_size must be None or {}'.format(tmp_crop_size))
if (not (0 <= inner_padding_factor <= 1.0)):
raise FaceWarpException('Not (0 <= inner_padding_factor <= 1.0)')
if (((inner_padding_factor > 0) or (outer_padding[0] > 0) or (outer_padding[1] > 0)) and (output_size is None)):
output_size = (tmp_crop_size * (1 + (inner_padding_factor * 2))).astype(np.int32)
output_size += np.array(outer_padding)
if (not ((outer_padding[0] < output_size[0]) and (outer_padding[1] < output_size[1]))):
raise FaceWarpException('Not (outer_padding[0] < output_size[0]and outer_padding[1] < output_size[1])')
if (inner_padding_factor > 0):
size_diff = ((tmp_crop_size * inner_padding_factor) * 2)
tmp_5pts += (size_diff / 2)
tmp_crop_size += np.round(size_diff).astype(np.int32)
size_bf_outer_pad = (np.array(output_size) - (np.array(outer_padding) * 2))
if ((size_bf_outer_pad[0] * tmp_crop_size[1]) != (size_bf_outer_pad[1] * tmp_crop_size[0])):
raise FaceWarpException('Must have (output_size - outer_padding)= some_scale * (crop_size * (1.0 + inner_padding_factor)')
scale_factor = (size_bf_outer_pad[0].astype(np.float32) / tmp_crop_size[0])
tmp_5pts = (tmp_5pts * scale_factor)
tmp_crop_size = size_bf_outer_pad
reference_5point = (tmp_5pts + np.array(outer_padding))
tmp_crop_size = output_size
return reference_5point |
class CartPoleEnv(gym.Env):
metadata = {'render.modes': ['human', 'rgb_array'], 'video.frames_per_second': 50}
def __init__(self):
self.gravity = 9.8
self.masscart = 1.0
self.masspole = 0.1
self.total_mass = (self.masspole + self.masscart)
self.length = 0.5
self.polemass_length = (self.masspole * self.length)
self.force_mag = 10.0
self.tau = 0.02
self.theta_threshold_radians = (((12 * 2) * math.pi) / 360)
self.x_threshold = 2.4
high = np.array([(self.x_threshold * 2), np.finfo(np.float32).max, (self.theta_threshold_radians * 2), np.finfo(np.float32).max])
self.action_space = spaces.Discrete(2)
self.observation_space = spaces.Box((- high), high)
self._seed()
self.reset()
self.viewer = None
self.steps_beyond_done = None
self._configure()
def _configure(self, display=None):
self.display = display
def _seed(self, seed=None):
(self.np_random, seed) = seeding.np_random(seed)
return [seed]
def _step(self, action):
assert self.action_space.contains(action), ('%r (%s) invalid' % (action, type(action)))
state = self.state
(x, x_dot, theta, theta_dot) = state
force = (self.force_mag if (action == 1) else (- self.force_mag))
costheta = math.cos(theta)
sintheta = math.sin(theta)
temp = ((force + (((self.polemass_length * theta_dot) * theta_dot) * sintheta)) / self.total_mass)
thetaacc = (((self.gravity * sintheta) - (costheta * temp)) / (self.length * ((4.0 / 3.0) - (((self.masspole * costheta) * costheta) / self.total_mass))))
xacc = (temp - (((self.polemass_length * thetaacc) * costheta) / self.total_mass))
x = (x + (self.tau * x_dot))
x_dot = (x_dot + (self.tau * xacc))
theta = (theta + (self.tau * theta_dot))
theta_dot = (theta_dot + (self.tau * thetaacc))
self.state = (x, x_dot, theta, theta_dot)
done = ((x < (- self.x_threshold)) or (x > self.x_threshold) or (theta < (- self.theta_threshold_radians)) or (theta > self.theta_threshold_radians))
done = bool(done)
if (not done):
reward = 1.0
elif (self.steps_beyond_done is None):
self.steps_beyond_done = 0
reward = 1.0
else:
if (self.steps_beyond_done == 0):
logger.warn("You are calling 'step()' even though this environment has already returned done = True. You should always call 'reset()' once you receive 'done = True' -- any further steps are undefined behavior.")
self.steps_beyond_done += 1
reward = 0.0
return (np.array(self.state), reward, done, {})
def _reset(self):
self.state = self.np_random.uniform(low=(- 0.05), high=0.05, size=(4,))
self.steps_beyond_done = None
return np.array(self.state)
def _render(self, mode='human', close=False):
if close:
if (self.viewer is not None):
self.viewer.close()
self.viewer = None
return
screen_width = 600
screen_height = 400
world_width = (self.x_threshold * 2)
scale = (screen_width / world_width)
carty = 100
polewidth = 10.0
polelen = (scale * 1.0)
cartwidth = 50.0
cartheight = 30.0
if (self.viewer is None):
from gym.envs.classic_control import rendering
self.viewer = rendering.Viewer(screen_width, screen_height, display=self.display)
(l, r, t, b) = (((- cartwidth) / 2), (cartwidth / 2), (cartheight / 2), ((- cartheight) / 2))
axleoffset = (cartheight / 4.0)
cart = rendering.FilledPolygon([(l, b), (l, t), (r, t), (r, b)])
self.carttrans = rendering.Transform()
cart.add_attr(self.carttrans)
self.viewer.add_geom(cart)
(l, r, t, b) = (((- polewidth) / 2), (polewidth / 2), (polelen - (polewidth / 2)), ((- polewidth) / 2))
pole = rendering.FilledPolygon([(l, b), (l, t), (r, t), (r, b)])
pole.set_color(0.8, 0.6, 0.4)
self.poletrans = rendering.Transform(translation=(0, axleoffset))
pole.add_attr(self.poletrans)
pole.add_attr(self.carttrans)
self.viewer.add_geom(pole)
self.axle = rendering.make_circle((polewidth / 2))
self.axle.add_attr(self.poletrans)
self.axle.add_attr(self.carttrans)
self.axle.set_color(0.5, 0.5, 0.8)
self.viewer.add_geom(self.axle)
self.track = rendering.Line((0, carty), (screen_width, carty))
self.track.set_color(0, 0, 0)
self.viewer.add_geom(self.track)
x = self.state
cartx = ((x[0] * scale) + (screen_width / 2.0))
self.carttrans.set_translation(cartx, carty)
self.poletrans.set_rotation((- x[2]))
return self.viewer.render(return_rgb_array=(mode == 'rgb_array')) |
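# A hedged usage sketch for the legacy-gym CartPoleEnv above: the old underscored
# API (_reset/_step) is called directly; an old gym release with classic_control
# and the _reset/_step dispatch is assumed.
env = CartPoleEnv()
obs = env._reset()
(obs, reward, done, info) = env._step(env.action_space.sample())
assert obs.shape == (4,)
assert reward == 1.0  # the first step from a near-zero state cannot terminate |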
class ScenePlot(Plot2D):
def __init__(self, scene, **kwargs):
Plot2D.__init__(self, scene, **kwargs)
self.components_available = {'displacement': {'name': 'LOS Displacement', 'eval': (lambda sc: sc.displacement)}, 'theta': {'name': 'LOS Theta', 'eval': (lambda sc: sc.theta)}, 'phi': {'name': 'LOS Phi', 'eval': (lambda sc: sc.phi)}, 'dE': {'name': 'Displacement dE', 'eval': (lambda sc: sc.cartesian.dE)}, 'dN': {'name': 'Displacement dN', 'eval': (lambda sc: sc.cartesian.dN)}, 'dU': {'name': 'Displacement dU', 'eval': (lambda sc: sc.cartesian.dU)}}
self._component = 'displacement'
def plot(self, component='displacement', **kwargs):
self._initImagePlot(**kwargs)
self.component = component
self.title = self.components_available[component]
if self._show_plt:
plt.show()
@property
def component(self):
return self._component
def component(self, component):
try:
if (component not in self.components_available.keys()):
raise AttributeError(('Invalid component %s' % component))
self.data = self.components_available[component]['eval'](self._scene)
except AttributeError:
raise AttributeError(('Could not access component %s' % component)) |
def get_sents_qa_num(file_path, mode):
# The delimiter token between segments was garbled to '' in this copy, and
# str.split('') raises ValueError; SEP below is a hypothetical stand-in and
# should be replaced with the real token.
SEP = '\t'
sents = []
with open(file_path, 'r') as f:
for line in f.readlines():
line = line.strip()
tmp_sum = []
line = line.split(SEP)[:-1]
if (mode == 'final'):
for i in range(len(line) // 2):
tmp_sum.append(line[2 * i] + SEP + line[2 * i + 1] + SEP)
if ((len(line) % 2) == 1):
tmp_sum.append(line[-1] + SEP)
sents.append(tmp_sum)
else:
sents.append([(s + SEP) for s in line])
return sents |
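# pyfa-style preferences page: populatePanel builds the Market & Prices panel, and each handler persists its control's value directly into the fitting-service options or price settings.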
class PFMarketPref(PreferenceView):
def __init__(self):
self.priceSettings = MarketPriceSettings.getInstance()
def populatePanel(self, panel):
self.title = _t('Market & Prices')
self.mainFrame = gui.mainFrame.MainFrame.getInstance()
self.sFit = Fit.getInstance()
helpCursor = wx.Cursor(wx.CURSOR_QUESTION_ARROW)
mainSizer = wx.BoxSizer(wx.VERTICAL)
self.stTitle = wx.StaticText(panel, wx.ID_ANY, _t('Market && Prices'), wx.DefaultPosition, wx.DefaultSize, 0)
self.stTitle.Wrap((- 1))
self.stTitle.SetFont(wx.Font(12, 70, 90, 90, False, wx.EmptyString))
mainSizer.Add(self.stTitle, 0, (wx.EXPAND | wx.ALL), 5)
self.m_staticline1 = wx.StaticLine(panel, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_HORIZONTAL)
mainSizer.Add(self.m_staticline1, 0, ((wx.EXPAND | wx.TOP) | wx.BOTTOM), 5)
delayTimer = wx.BoxSizer(wx.HORIZONTAL)
self.stMarketDelay = wx.StaticText(panel, wx.ID_ANY, _t('Market Search Delay (ms):'), wx.DefaultPosition, wx.DefaultSize, 0)
self.stMarketDelay.Wrap((- 1))
if ('wxGTK' not in wx.PlatformInfo):
self.stMarketDelay.SetCursor(helpCursor)
self.stMarketDelay.SetToolTip(wx.ToolTip(_t('The delay between a keystroke and the market search. Can help reduce lag when typing fast in the market search box.')))
delayTimer.Add(self.stMarketDelay, 0, (wx.ALL | wx.ALIGN_CENTER_VERTICAL), 5)
self.intDelay = IntCtrl(panel, max=1000, limited=True)
delayTimer.Add(self.intDelay, 0, wx.ALL, 5)
mainSizer.Add(delayTimer, 0, ((wx.EXPAND | wx.TOP) | wx.RIGHT), 10)
self.intDelay.SetValue(self.sFit.serviceFittingOptions['marketSearchDelay'])
self.intDelay.Bind(wx.lib.intctrl.EVT_INT, self.onMarketDelayChange)
self.cbMarketShortcuts = wx.CheckBox(panel, wx.ID_ANY, _t('Show market shortcuts'), wx.DefaultPosition, wx.DefaultSize, 0)
mainSizer.Add(self.cbMarketShortcuts, 0, ((wx.EXPAND | wx.TOP) | wx.RIGHT), 10)
self.cbMarketShortcuts.SetValue((self.sFit.serviceFittingOptions['showMarketShortcuts'] or False))
self.cbMarketShortcuts.Bind(wx.EVT_CHECKBOX, self.onCBShowShortcuts)
priceSizer = wx.BoxSizer(wx.HORIZONTAL)
self.stDefaultSystem = wx.StaticText(panel, wx.ID_ANY, _t('Default Market Prices:'), wx.DefaultPosition, wx.DefaultSize, 0)
self.stDefaultSystem.Wrap((- 1))
priceSizer.Add(self.stDefaultSystem, 0, (wx.ALL | wx.ALIGN_CENTER_VERTICAL), 5)
if ('wxGTK' not in wx.PlatformInfo):
self.stDefaultSystem.SetCursor(helpCursor)
self.stDefaultSystem.SetToolTip(wx.ToolTip(_t('The source you choose will be tried first, but subsequent sources will be used if the preferred source fails. The system you choose will also be tried first, and if no data is available, global price will be used.')))
self.chPriceSource = wx.Choice(panel, choices=sorted(Price.sources.keys()))
self.chPriceSystem = wx.Choice(panel, choices=list(Price.systemsList.keys()))
priceSizer.Add(self.chPriceSource, 1, (wx.ALL | wx.EXPAND), 5)
priceSizer.Add(self.chPriceSystem, 1, (wx.ALL | wx.EXPAND), 5)
mainSizer.Add(priceSizer, 0, ((wx.EXPAND | wx.TOP) | wx.RIGHT), 10)
self.chPriceSource.SetStringSelection(self.sFit.serviceFittingOptions['priceSource'])
self.chPriceSource.Bind(wx.EVT_CHOICE, self.onPricesSourceSelection)
self.chPriceSystem.SetStringSelection(self.sFit.serviceFittingOptions['priceSystem'])
self.chPriceSystem.Bind(wx.EVT_CHOICE, self.onPriceSelection)
self.tbTotalPriceBox = wx.StaticBoxSizer(wx.VERTICAL, panel, _t('Total Price Includes'))
self.tbTotalPriceDrones = wx.CheckBox(panel, (- 1), _t('Drones'), wx.DefaultPosition, wx.DefaultSize, 1)
self.tbTotalPriceDrones.SetValue(self.priceSettings.get('drones'))
self.tbTotalPriceDrones.Bind(wx.EVT_CHECKBOX, self.OnTotalPriceDroneChange)
self.tbTotalPriceBox.Add(self.tbTotalPriceDrones, 0, ((wx.LEFT | wx.RIGHT) | wx.TOP), 5)
self.tbTotalPriceCargo = wx.CheckBox(panel, (- 1), _t('Cargo'), wx.DefaultPosition, wx.DefaultSize, 1)
self.tbTotalPriceCargo.SetValue(self.priceSettings.get('cargo'))
self.tbTotalPriceCargo.Bind(wx.EVT_CHECKBOX, self.OnTotalPriceCargoChange)
self.tbTotalPriceBox.Add(self.tbTotalPriceCargo, 0, (wx.LEFT | wx.RIGHT), 5)
self.tbTotalPriceCharacter = wx.CheckBox(panel, (- 1), _t('Implants && Boosters'), wx.DefaultPosition, wx.DefaultSize, 1)
self.tbTotalPriceCharacter.SetValue(self.priceSettings.get('character'))
self.tbTotalPriceCharacter.Bind(wx.EVT_CHECKBOX, self.OnTotalPriceCharacterChange)
self.tbTotalPriceBox.Add(self.tbTotalPriceCharacter, 0, ((wx.LEFT | wx.RIGHT) | wx.BOTTOM), 5)
mainSizer.Add(self.tbTotalPriceBox, 0, ((wx.EXPAND | wx.TOP) | wx.RIGHT), 10)
self.rbMarketSearch = wx.RadioBox(panel, (- 1), _t('Market Search && Recent Items'), wx.DefaultPosition, wx.DefaultSize, [_t('No changes to meta buttons'), _t('Enable all meta buttons for a duration of search / recents'), _t('Enable all meta buttons')], 1, wx.RA_SPECIFY_COLS)
self.rbMarketSearch.SetSelection(self.priceSettings.get('marketMGSearchMode'))
mainSizer.Add(self.rbMarketSearch, 0, ((wx.RIGHT | wx.TOP) | wx.EXPAND), 10)
self.rbMarketSearch.Bind(wx.EVT_RADIOBOX, self.OnMarketSearchChange)
# Distinct attribute (hypothetical name) so this radio box is not shadowed by rbMarketEmpty below.
self.rbMarketSelect = wx.RadioBox(panel, (- 1), _t('Market Group Selection'), wx.DefaultPosition, wx.DefaultSize, [_t('No changes to meta buttons'), _t('Enable all meta buttons')], 1, wx.RA_SPECIFY_COLS)
self.rbMarketSelect.SetSelection(self.priceSettings.get('marketMGMarketSelectMode'))
mainSizer.Add(self.rbMarketSelect, 0, ((wx.EXPAND | wx.TOP) | wx.RIGHT), 10)
self.rbMarketSelect.Bind(wx.EVT_RADIOBOX, self.OnMarketGroupSelectionChange)
self.rbMarketEmpty = wx.RadioBox(panel, (- 1), _t('Empty Market View'), wx.DefaultPosition, wx.DefaultSize, [_t('No changes to meta buttons'), _t('Enable leftmost available meta button'), _t('Enable all available meta buttons')], 1, wx.RA_SPECIFY_COLS)
self.rbMarketEmpty.SetSelection(self.priceSettings.get('marketMGEmptyMode'))
mainSizer.Add(self.rbMarketEmpty, 0, ((wx.EXPAND | wx.TOP) | wx.RIGHT), 10)
self.rbMarketEmpty.Bind(wx.EVT_RADIOBOX, self.OnMarketEmptyChange)
self.rbMarketJump = wx.RadioBox(panel, (- 1), _t('Item Market Group Jump'), wx.DefaultPosition, wx.DefaultSize, [_t('No changes to meta buttons'), _t("Enable item's meta button"), _t("Enable item's meta button, disable others"), _t('Enable all meta buttons')], 1, wx.RA_SPECIFY_COLS)
self.rbMarketJump.SetSelection(self.priceSettings.get('marketMGJumpMode'))
mainSizer.Add(self.rbMarketJump, 0, (((wx.EXPAND | wx.TOP) | wx.RIGHT) | wx.BOTTOM), 10)
self.rbMarketJump.Bind(wx.EVT_RADIOBOX, self.OnMarketJumpChange)
panel.SetSizer(mainSizer)
panel.Layout()
def onMarketDelayChange(self, event):
self.sFit.serviceFittingOptions['marketSearchDelay'] = self.intDelay.GetValue()
event.Skip()
def onCBShowShortcuts(self, event):
self.sFit.serviceFittingOptions['showMarketShortcuts'] = self.cbMarketShortcuts.GetValue()
def getImage(self):
return BitmapLoader.getBitmap('settings_market', 'gui')
def onPriceSelection(self, event):
system = self.chPriceSystem.GetString(self.chPriceSystem.GetSelection())
self.sFit.serviceFittingOptions['priceSystem'] = system
fitID = self.mainFrame.getActiveFit()
self.sFit.refreshFit(fitID)
wx.PostEvent(self.mainFrame, GE.FitChanged(fitIDs=(fitID,)))
event.Skip()
def onPricesSourceSelection(self, event):
source = self.chPriceSource.GetString(self.chPriceSource.GetSelection())
self.sFit.serviceFittingOptions['priceSource'] = source
def OnTotalPriceDroneChange(self, event):
self.priceSettings.set('drones', event.GetInt())
fitID = self.mainFrame.getActiveFit()
wx.PostEvent(self.mainFrame, GE.FitChanged(fitIDs=(fitID,)))
def OnTotalPriceCargoChange(self, event):
self.priceSettings.set('cargo', event.GetInt())
fitID = self.mainFrame.getActiveFit()
wx.PostEvent(self.mainFrame, GE.FitChanged(fitIDs=(fitID,)))
def OnTotalPriceCharacterChange(self, event):
self.priceSettings.set('character', event.GetInt())
fitID = self.mainFrame.getActiveFit()
wx.PostEvent(self.mainFrame, GE.FitChanged(fitIDs=(fitID,)))
def OnMarketJumpChange(self, event):
self.priceSettings.set('marketMGJumpMode', event.GetInt())
def OnMarketEmptyChange(self, event):
self.priceSettings.set('marketMGEmptyMode', event.GetInt())
def OnMarketSearchChange(self, event):
self.priceSettings.set('marketMGSearchMode', event.GetInt())
def OnMarketGroupSelectionChange(self, event):
self.priceSettings.set('marketMGMarketSelectMode', event.GetInt()) |
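# Inception-v4 17x17 grid block: a 1x1 branch, two factorized 1x7/7x1 conv branches, and an average-pool branch, concatenated along channels (384 + 256 + 256 + 128 = 1024 out).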
class InceptionB(nn.Module):
def __init__(self):
super(InceptionB, self).__init__()
self.branch0 = BasicConv2d(1024, 384, kernel_size=1, stride=1)
self.branch1 = nn.Sequential(BasicConv2d(1024, 192, kernel_size=1, stride=1), BasicConv2d(192, 224, kernel_size=(1, 7), stride=1, padding=(0, 3)), BasicConv2d(224, 256, kernel_size=(7, 1), stride=1, padding=(3, 0)))
self.branch2 = nn.Sequential(BasicConv2d(1024, 192, kernel_size=1, stride=1), BasicConv2d(192, 192, kernel_size=(7, 1), stride=1, padding=(3, 0)), BasicConv2d(192, 224, kernel_size=(1, 7), stride=1, padding=(0, 3)), BasicConv2d(224, 224, kernel_size=(7, 1), stride=1, padding=(3, 0)), BasicConv2d(224, 256, kernel_size=(1, 7), stride=1, padding=(0, 3)))
self.branch3 = nn.Sequential(nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False), BasicConv2d(1024, 128, kernel_size=1, stride=1))
def forward(self, x):
x0 = self.branch0(x)
x1 = self.branch1(x)
x2 = self.branch2(x)
x3 = self.branch3(x)
out = torch.cat((x0, x1, x2, x3), 1)
return out |
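# mutagen-style Ogg Opus container: plugs the Opus-specific info/comment/error classes into the generic OggFileType machinery; score() rates how likely the header belongs to an Opus stream.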
class OggOpus(OggFileType):
_Info = OggOpusInfo
_Tags = OggOpusVComment
_Error = OggOpusHeaderError
_mimes = ['audio/ogg', 'audio/ogg; codecs=opus']
info = None
tags = None
@staticmethod
def score(filename, fileobj, header):
return (header.startswith(b'OggS') * (b'OpusHead' in header)) |
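# Builds a small fixture DataFrame and casts the string ts column to a Spark timestamp; TIMESTAMP_COLUMN and DataType are assumed to be provided by the surrounding test helpers.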
def make_dataframe(spark_context, spark_session):
data = [{'id': 1, 'ts': '2016-04-11 11:31:11', 'feature1': 200, 'feature2': 200, 'nonfeature': 0}, {'id': 1, 'ts': '2016-04-11 11:44:12', 'feature1': 300, 'feature2': 300, 'nonfeature': 0}, {'id': 1, 'ts': '2016-04-11 11:46:24', 'feature1': 300, 'feature2': 400, 'nonfeature': 0}, {'id': 1, 'ts': '2016-04-11 12:03:21', 'feature1': 400, 'feature2': 500, 'nonfeature': 0}, {'id': 1, 'ts': '2016-04-22 12:03:21', 'feature1': 1000, 'feature2': 1100, 'nonfeature': 0}]
df = spark_session.read.json(spark_context.parallelize(data, 1))
df = df.withColumn(TIMESTAMP_COLUMN, df.ts.cast(DataType.TIMESTAMP.spark))
return df |
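# NOTE: kmf, mycc and eris are used but never defined in this excerpt; the original pyscf test module appears to create them once during module-level setup.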
class KnownValues(unittest.TestCase):
def test_n3_diffuse(self):
cell = make_test_cell.test_cell_n3_diffuse()
nmp = [1, 1, 2]
ehf2 = kmf.e_tot
self.assertAlmostEqual(ehf2, (- 6.), 6)
ecc2 = mycc.e_corr
self.assertAlmostEqual(ecc2, (- 0.), 6)
eom = EOMIP(mycc)
imds = eom.make_imds(eris=eris)
(e1_obt, v) = eom.ipccsd(nroots=3, left=True, kptlist=[0], imds=imds)
self.assertAlmostEqual(e1_obt[0][0], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][1], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][2], (- 1.), 6)
(e1_obt, v) = eom.ipccsd(nroots=3, kptlist=[0], imds=imds)
self.assertAlmostEqual(e1_obt[0][0], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][1], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][2], (- 1.), 6)
(e1_obt, v) = eom.ipccsd(nroots=3, koopmans=True, kptlist=[1], imds=imds)
self.assertAlmostEqual(e1_obt[0][0], (- 0.), 6)
self.assertAlmostEqual(e1_obt[0][1], (- 0.), 6)
self.assertAlmostEqual(e1_obt[0][2], (- 0.), 6)
eom = EOMEA(mycc)
imds = eom.make_imds(eris=eris)
(e2_obt, v) = eom.eaccsd(nroots=3, kptlist=[0], imds=imds)
self.assertAlmostEqual(e2_obt[0][0], 1., 6)
self.assertAlmostEqual(e2_obt[0][1], 1., 6)
self.assertAlmostEqual(e2_obt[0][2], 1., 6)
(e2_obt, v) = eom.eaccsd(nroots=3, left=True, kptlist=[0], imds=imds)
self.assertAlmostEqual(e2_obt[0][0], 1., 6)
self.assertAlmostEqual(e2_obt[0][1], 1., 6)
self.assertAlmostEqual(e2_obt[0][2], 1., 6)
(e2_obt, v) = eom.eaccsd(nroots=3, koopmans=True, kptlist=[1], imds=imds)
self.assertAlmostEqual(e2_obt[0][0], 1., 6)
self.assertAlmostEqual(e2_obt[0][1], 1., 6)
self.assertAlmostEqual(e2_obt[0][2], 1., 6)
eom = EOMEE(mycc)
imds = eom.make_imds(eris=eris)
(ee, v) = eom.eeccsd(nroots=3, kptlist=[0], imds=imds)
self.assertAlmostEqual(ee[0][0], 0., 6)
self.assertAlmostEqual(ee[0][1], 0., 6)
self.assertAlmostEqual(ee[0][2], 0., 6)
(ee, v) = eom.eeccsd(nroots=3, kptlist=[1], imds=imds)
self.assertAlmostEqual(ee[0][0], 0., 6)
self.assertAlmostEqual(ee[0][1], 0., 6)
self.assertAlmostEqual(ee[0][2], 0., 6)
def test_n3_diffuse_frozen(self):
ehf2 = kmf.e_tot
self.assertAlmostEqual(ehf2, (- 6.), 6)
mycc_frozen = cc.KGCCSD(kmf, frozen=[[0, 1], [0, 1, 2, 3]])
mycc_frozen.conv_tol = 1e-07
mycc_frozen.conv_tol_normt = 1e-07
eris = mycc_frozen.ao2mo()
eris.mo_energy = [eris.fock[ikpt].diagonal().real for ikpt in range(mycc_frozen.nkpts)]
(ecc2, t1, t2) = mycc_frozen.kernel(eris=eris)
self.assertAlmostEqual(ecc2, (- 0.), 6)
eom = EOMIP(mycc_frozen)
imds = eom.make_imds(eris=eris)
(e1_obt, v) = eom.ipccsd(nroots=3, koopmans=False, kptlist=[0], imds=imds)
self.assertAlmostEqual(e1_obt[0][0], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][1], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][2], (- 1.), 6)
(e1_obt, v) = eom.ipccsd(nroots=3, koopmans=True, kptlist=[1], imds=imds)
self.assertAlmostEqual(e1_obt[0][0], (- 0.), 6)
self.assertAlmostEqual(e1_obt[0][1], (- 0.), 6)
self.assertAlmostEqual(e1_obt[0][2], (- 0.), 6)
eom = EOMEA(mycc_frozen)
imds = eom.make_imds(eris=eris)
(e2_obt, v) = eom.eaccsd(nroots=3, kptlist=[0], imds=imds)
self.assertAlmostEqual(e2_obt[0][0], 1., 6)
self.assertAlmostEqual(e2_obt[0][1], 1., 6)
self.assertAlmostEqual(e2_obt[0][2], 1., 6)
eom = EOMEA(mycc_frozen)
(e2_obt, v) = eom.eaccsd(nroots=3, koopmans=True, kptlist=[1], imds=imds)
self.assertAlmostEqual(e2_obt[0][0], 1., 6)
self.assertAlmostEqual(e2_obt[0][1], 1., 6)
self.assertAlmostEqual(e2_obt[0][2], 1., 6)
def test_n3_diffuse_star(self):
cell = make_test_cell.test_cell_n3_diffuse()
nmp = [1, 1, 2]
ehf2 = kmf.e_tot
self.assertAlmostEqual(ehf2, (- 6.), 6)
ecc2 = mycc.e_corr
self.assertAlmostEqual(ecc2, (- 0.), 6)
eom = EOMIP(mycc)
e1_obt = eom.ipccsd_star(nroots=3, koopmans=True, kptlist=[0], eris=eris)
self.assertAlmostEqual(e1_obt[0][0], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][1], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][2], (- 1.), 6)
eom = EOMEA(mycc)
e1_obt = eom.eaccsd_star(nroots=2, koopmans=True, kptlist=[0, 1], eris=eris)
self.assertAlmostEqual(e1_obt[0][0], 1., 6)
self.assertAlmostEqual(e1_obt[0][1], 1., 6)
self.assertAlmostEqual(e1_obt[1][0], 1., 6)
self.assertAlmostEqual(e1_obt[1][1], 1., 6)
def test_n3_diffuse_Ta(self):
cell = make_test_cell.test_cell_n3_diffuse()
nmp = [1, 1, 2]
ehf2 = kmf.e_tot
self.assertAlmostEqual(ehf2, (- 6.), 6)
ecc2 = mycc.e_corr
self.assertAlmostEqual(ecc2, (- 0.), 6)
eom = EOMIP_Ta(mycc)
imds = eom.make_imds(eris=eris)
(e1_obt, v) = eom.ipccsd(nroots=3, koopmans=True, kptlist=[0], imds=imds)
self.assertAlmostEqual(e1_obt[0][0], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][1], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][2], (- 1.), 6)
e1_obt = eom.ipccsd_star(nroots=3, koopmans=True, kptlist=[0], imds=imds)
self.assertAlmostEqual(e1_obt[0][0], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][1], (- 1.), 6)
self.assertAlmostEqual(e1_obt[0][2], (- 1.), 6)
eom = EOMEA_Ta(mycc)
imds = eom.make_imds(eris=eris)
(e2_obt, v) = eom.eaccsd(nroots=3, koopmans=True, kptlist=[0], imds=imds)
self.assertAlmostEqual(e2_obt[0][0], 1., 6)
self.assertAlmostEqual(e2_obt[0][1], 1., 6)
self.assertAlmostEqual(e2_obt[0][2], 1., 6)
(e2_obt, v) = eom.eaccsd(nroots=3, koopmans=True, kptlist=[1], imds=imds)
self.assertAlmostEqual(e2_obt[0][0], 1., 6)
self.assertAlmostEqual(e2_obt[0][1], 1., 6)
self.assertAlmostEqual(e2_obt[0][2], 1., 6)
e2_obt = eom.eaccsd_star(nroots=3, koopmans=True, kptlist=[1], imds=imds)
self.assertAlmostEqual(e2_obt[0][0], 1., 6)
self.assertAlmostEqual(e2_obt[0][1], 1., 6)
self.assertAlmostEqual(e2_obt[0][2], 1., 6) |
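# Polling loop: on each interval it snapshots cluster capacity and per-namespace usage from the Kubernetes core and metrics APIs, then appends non-allocatable, hourly-rate, usage and efficiency samples to their RRD files.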
def create_metrics_puller():
try:
while True:
KOA_LOGGER.debug('[puller] collecting new samples')
KOA_CONFIG.load_rbac_auth_token()
k8s_usage = K8sUsage()
k8s_usage.extract_namespaces_and_initialize_usage(pull_k8s('/api/v1/namespaces'))
k8s_usage.extract_nodes(pull_k8s('/api/v1/nodes'))
k8s_usage.extract_node_metrics(pull_k8s('/apis/metrics.k8s.io/v1beta1/nodes'))
k8s_usage.extract_pods(pull_k8s('/api/v1/pods'))
k8s_usage.extract_pod_metrics(pull_k8s('/apis/metrics.k8s.io/v1beta1/pods'))
k8s_usage.consolidate_ns_usage()
k8s_usage.dump_nodes()
if ((k8s_usage.cpuCapacity > 0.0) and (k8s_usage.memCapacity > 0.0)):
now_epoch = calendar.timegm(time.gmtime())
cpu_non_allocatable = compute_usage_percent_ratio((k8s_usage.cpuCapacity - k8s_usage.cpuAllocatable), k8s_usage.cpuCapacity)
mem_non_allocatable = compute_usage_percent_ratio((k8s_usage.memCapacity - k8s_usage.memAllocatable), k8s_usage.memCapacity)
rrd = Rrd(db_files_location=KOA_CONFIG.db_location, dbname=KOA_CONFIG.db_non_allocatable)
rrd.add_sample(timestamp_epoch=now_epoch, cpu_usage=cpu_non_allocatable, mem_usage=mem_non_allocatable)
hourly_rate = (- 1)
if (KOA_CONFIG.billing_hourly_rate > 0):
hourly_rate = KOA_CONFIG.billing_hourly_rate
elif (k8s_usage.cloudCostAvailable is not None):
hourly_rate = k8s_usage.hourlyRate
rrd = Rrd(db_files_location=KOA_CONFIG.db_location, dbname=KOA_CONFIG.db_billing_hourly_rate)
rrd.add_sample(timestamp_epoch=now_epoch, cpu_usage=hourly_rate, mem_usage=hourly_rate)
for (ns, ns_usage) in k8s_usage.usageByNamespace.items():
rrd = Rrd(db_files_location=KOA_CONFIG.db_location, dbname=ns)
cpu_usage = compute_usage_percent_ratio(ns_usage.cpu, k8s_usage.cpuCapacity)
mem_usage = compute_usage_percent_ratio(ns_usage.mem, k8s_usage.memCapacity)
rrd.add_sample(timestamp_epoch=now_epoch, cpu_usage=cpu_usage, mem_usage=mem_usage)
cpu_efficiency = 1.0
mem_efficiency = 1.0
request_capacities = k8s_usage.requestByNamespace.get(ns, None)
if (request_capacities is not None):
if (request_capacities.cpu > 0.0):
cpu_efficiency = round((ns_usage.cpu / request_capacities.cpu), 2)
if (request_capacities.mem > 0.0):
mem_efficiency = round((ns_usage.mem / request_capacities.mem), 2)
if ((cpu_efficiency > 0.0) or (mem_efficiency > 0.0)):
rrd = Rrd(db_files_location=KOA_CONFIG.db_location, dbname=KOA_CONFIG.usage_efficiency_db(ns))
rrd.add_sample(timestamp_epoch=now_epoch, cpu_usage=cpu_efficiency, mem_usage=mem_efficiency)
time.sleep(int(KOA_CONFIG.polling_interval_sec))
except Exception as ex:
exception_type = type(ex).__name__
KOA_LOGGER.error('%s Exception in create_metrics_puller => %s', exception_type, traceback.format_exc()) |
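# effdet-style training setup: builds the train/eval datasets, an optional host-side AnchorLabeler (labeling is left to the bench when args.bench_labeler is set), the two prefetching loaders, and an evaluator over the eval set.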
def create_datasets_and_loaders(args, model_config, neptune=None):
input_config = resolve_input_config(args, model_config=model_config)
(dataset_train, dataset_eval) = create_dataset(args.dataset, args.root, args.ann_name)
labeler = None
if (not args.bench_labeler):
labeler = AnchorLabeler(Anchors.from_config(model_config), model_config.num_classes, match_threshold=0.5)
loader_train = create_loader(dataset_train, input_size=input_config['input_size'], batch_size=args.batch_size, is_training=True, use_prefetcher=args.prefetcher, re_prob=args.reprob, re_mode=args.remode, re_count=args.recount, interpolation=(args.train_interpolation or input_config['interpolation']), fill_color=input_config['fill_color'], mean=input_config['mean'], std=input_config['std'], num_workers=args.workers, distributed=args.distributed, pin_mem=args.pin_mem, anchor_labeler=labeler)
if (args.val_skip > 1):
dataset_eval = SkipSubset(dataset_eval, args.val_skip)
loader_eval = create_loader(dataset_eval, input_size=input_config['input_size'], batch_size=args.batch_size, is_training=False, use_prefetcher=args.prefetcher, interpolation=input_config['interpolation'], fill_color=input_config['fill_color'], mean=input_config['mean'], std=input_config['std'], num_workers=args.workers, distributed=args.distributed, pin_mem=args.pin_mem, anchor_labeler=labeler)
evaluator = create_evaluator(args.dataset, loader_eval.dataset, neptune, distributed=args.distributed, pred_yxyx=False)
return (loader_train, loader_eval, evaluator) |
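# metadata_json.contains(username) is only a broad SQL prefilter; the exact service_username comparison inside the loop decides which user actually matches.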
def get_quay_user_from_federated_login_name(username):
results = FederatedLogin.select().where(FederatedLogin.metadata_json.contains(username))
user_id = None
for result in results:
if (json.loads(result.metadata_json).get('service_username') == username):
user_id = result.user_id
return (get_namespace_user_by_user_id(user_id) if user_id else None) |
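# Fixture factory for the filter-list tests: one deny-list and one allow-list FilterList, plus a Filter whose None-valued settings defer to its parent allow list.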
def get_test_sequences() -> dict[str, TestSequence]:
filter_list1_deny_dict = {'name': 'testname', 'list_type': 0, 'guild_pings': [], 'filter_dm': True, 'dm_pings': [], 'remove_context': False, 'bypass_roles': [], 'enabled': True, 'dm_content': '', 'dm_embed': '', 'infraction_type': 'NONE', 'infraction_reason': '', 'infraction_duration': timedelta(seconds=0), 'infraction_channel': 0, 'disabled_channels': [], 'disabled_categories': [], 'enabled_channels': [], 'enabled_categories': [], 'send_alert': True}
filter_list1_allow_dict = filter_list1_deny_dict.copy()
filter_list1_allow_dict['list_type'] = 1
filter_list1_allow = FilterList(**filter_list1_allow_dict)
return {'filter_list1': TestSequence(FilterList, 'filterlist', filter_list1_deny_dict, ignored_fields=('filters', 'created_at', 'updated_at')), 'filter_list2': TestSequence(FilterList, 'filterlist', {'name': 'testname2', 'list_type': 1, 'guild_pings': ['Moderators'], 'filter_dm': False, 'dm_pings': ['here'], 'remove_context': True, 'bypass_roles': ['123456'], 'enabled': False, 'dm_content': 'testing testing', 'dm_embed': 'one two three', 'infraction_type': 'TIMEOUT', 'infraction_reason': 'stop testing', 'infraction_duration': timedelta(seconds=10.5), 'infraction_channel': 123, 'disabled_channels': ['python-general'], 'disabled_categories': ['CODE JAM'], 'enabled_channels': ['mighty-mice'], 'enabled_categories': ['Lobby'], 'send_alert': False}, ignored_fields=('filters', 'created_at', 'updated_at')), 'filter': TestSequence(Filter, 'filter', {'content': 'bad word', 'description': 'This is a really bad word.', 'additional_settings': "{'hi': 'there'}", 'guild_pings': None, 'filter_dm': None, 'dm_pings': None, 'remove_context': None, 'bypass_roles': None, 'enabled': None, 'dm_content': None, 'dm_embed': None, 'infraction_type': None, 'infraction_reason': None, 'infraction_duration': None, 'infraction_channel': None, 'disabled_channels': None, 'disabled_categories': None, 'enabled_channels': None, 'enabled_categories': None, 'send_alert': None, 'filter_list': filter_list1_allow}, ignored_fields=('created_at', 'updated_at'))} |
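# U-Net-style decoder block: upsample via transposed conv or bilinear upsampling followed by a 3x3 conv, pad to the skip tensor's spatial size, concatenate, then apply a residual double conv.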
class up_res(nn.Module):
def __init__(self, up_in_ch, up_out_ch, cat_in_ch, cat_out_ch, if_convt=False):
super(up_res, self).__init__()
self.if_convt = if_convt
if self.if_convt:
self.up = nn.ConvTranspose2d(up_in_ch, up_out_ch, 2, stride=2)
else:
self.up = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False)
self.conv1 = nn.Conv2d(up_in_ch, up_out_ch, (3, 3))
self.conv2 = resconv2(cat_in_ch, cat_out_ch)
def forward(self, x1, x2):
x1 = self.up(x1)
if (not self.if_convt):
x1 = self.conv1(x1)
diffY = (x2.size()[2] - x1.size()[2])
diffX = (x2.size()[3] - x1.size()[3])
x1 = F.pad(x1, ((diffX // 2), int(math.ceil((diffX / 2.0))), (diffY // 2), int(math.ceil((diffY / 2.0)))))
x = torch.cat([x2, x1], dim=1)
del x2, x1
x = self.conv2(x)
return x |
class _ProcessMemoryInfo(object):
pagesize = PAGESIZE
def __init__(self) -> None:
self.pid = getpid()
self.rss = 0
self.vsz = 0
self.pagefaults = 0
self.os_specific = []
self.data_segment = 0
self.code_segment = 0
self.shared_segment = 0
self.stack_segment = 0
self.available = self.update()
def __repr__(self) -> str:
return ('<%s vsz=%d rss=%d>' % (self.__class__.__name__, self.vsz, self.rss))
def update(self) -> bool:
return False
def __sub__(self, other: '_ProcessMemoryInfo') -> Iterable[Tuple[str, int]]:
diff = [('Resident set size (delta)', self.rss - other.rss), ('Virtual size (delta)', self.vsz - other.vsz)]
return diff |
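# Normalizes a feedparser entry into the fields expected downstream, falling back to now() when the feed has no parsed publish time and to an empty dict when mandatory attributes are missing.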
def make_validate_dict(item: feedparser.FeedParserDict) -> dict:
published_parsed = item.get('published_parsed', None)
if published_parsed:
published_at = datetime.fromtimestamp(mktime(published_parsed))
else:
published_at = datetime.now()
try:
result = {'title': item.title, 'description': item.summary, 'link': item.link, 'published_at': published_at}
except Exception:
result = {}
return result |
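# Kivy install wizard: each *_dialog method opens the matching popup, and waiting_dialog runs the task on a worker thread behind a modal info bubble, routing errors back to the UI via Clock.schedule_once.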
class InstallWizard(BaseWizard, Widget):
__events__ = ('on_wizard_complete',)
def on_wizard_complete(self, storage, db):
pass
def waiting_dialog(self, task, msg, on_finished=None):
def target():
try:
task()
except Exception as err:
self.logger.exception('')
self.show_error(str(err))
Clock.schedule_once((lambda dt: app.info_bubble.hide(now=True)), (- 1))
if on_finished:
def protected_on_finished():
try:
on_finished()
except Exception as e:
self.logger.exception('')
self.show_error(str(e))
Clock.schedule_once((lambda dt: protected_on_finished()), (- 1))
app = App.get_running_app()
app.show_info_bubble(text=msg, icon='atlas://electrum/gui/kivy/theming/light/important', pos=Window.center, width='200sp', arrow_pos=None, modal=True)
t = threading.Thread(target=target)
t.start()
def terminate(self, *, storage=None, db=None, aborted=False):
if ((storage is None) and (not aborted)):
(storage, db) = self.create_storage(self.path)
self.dispatch('on_wizard_complete', storage, db)
def choice_dialog(self, **kwargs):
choices = kwargs['choices']
if (len(choices) > 1):
WizardChoiceDialog(self, **kwargs).open()
else:
f = kwargs['run_next']
f(choices[0][0])
def multisig_dialog(self, **kwargs):
WizardMultisigDialog(self, **kwargs).open()
def show_seed_dialog(self, **kwargs):
ShowSeedDialog(self, **kwargs).open()
def line_dialog(self, **kwargs):
LineDialog(self, **kwargs).open()
def derivation_and_script_type_gui_specific_dialog(self, **kwargs):
ChoiceLineDialog(self, **kwargs).open()
def confirm_seed_dialog(self, **kwargs):
kwargs['title'] = _('Confirm Seed')
kwargs['message'] = _('Please retype your seed phrase, to confirm that you properly saved it')
ConfirmSeedDialog(self, **kwargs).open()
def restore_seed_dialog(self, **kwargs):
RestoreSeedDialog(self, **kwargs).open()
def confirm_dialog(self, **kwargs):
WizardConfirmDialog(self, **kwargs).open()
def tos_dialog(self, **kwargs):
WizardTOSDialog(self, **kwargs).open()
def email_dialog(self, **kwargs):
WizardEmailDialog(self, **kwargs).open()
def otp_dialog(self, **kwargs):
if kwargs['otp_secret']:
WizardNewOTPDialog(self, **kwargs).open()
else:
WizardKnownOTPDialog(self, **kwargs).open()
def add_xpub_dialog(self, **kwargs):
kwargs['message'] += (' ' + _('Use the camera button to scan a QR code.'))
AddXpubDialog(self, **kwargs).open()
def add_cosigner_dialog(self, **kwargs):
kwargs['title'] = (_('Add Cosigner') + (' %d' % kwargs['index']))
kwargs['message'] = _("Please paste your cosigner's master public key, or scan it using the camera button.")
AddXpubDialog(self, **kwargs).open()
def show_xpub_dialog(self, **kwargs):
ShowXpubDialog(self, **kwargs).open()
def show_message(self, msg):
self.show_error(msg)
def show_error(self, msg):
app = App.get_running_app()
Clock.schedule_once((lambda dt: app.show_error(msg)))
def request_password(self, run_next, force_disable_encrypt_cb=False):
if force_disable_encrypt_cb:
run_next(None, False)
return
def on_success(old_pw, pw):
assert (old_pw is None)
run_next(pw, True)
def on_failure():
self.show_error(_('Password mismatch'))
self.run('request_password', run_next)
app = App.get_running_app()
popup = PasswordDialog(app, check_password=(lambda x: True), on_success=on_success, on_failure=on_failure, is_change=True, is_password=True, message=_('Choose a password'))
popup.open()
def action_dialog(self, action, run_next):
f = getattr(self, action)
f() |
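# Test double for github.Host: get/post/delete answer from a canned (method, url) -> payload mapping, asserting on POST payloads instead of touching the network.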
class OfflineHost(github.Host):
def __init__(self, *args, network, **kwargs):
super().__init__(*args, **kwargs)
self._network = network
async def get(self, client, url):
return self._network[('GET', url)]
async def post(self, client, url, payload):
expected = self._network[('POST', url)]
assert (expected == payload), f'{payload!r} != {expected!r}'
async def delete(self, client, url):
assert self._network[('DELETE', url)] |
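# Resumes a visual-QC rating session: previously saved ratings and notes are reloaded from the CSV, already-rated subject IDs are removed from the work list, and the run exits early if nothing is left to review.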
def restore_previous_ratings(qcw):
incomplete_list = list(qcw.id_list)
prev_done = []
(ratings_file, backup_name_ratings) = get_ratings_path_info(qcw)
if pexists(ratings_file):
(ratings, notes) = load_ratings_csv(ratings_file)
prev_done = set(ratings.keys())
incomplete_list = list((set(qcw.id_list) - prev_done))
else:
ratings = dict()
notes = dict()
if (len(prev_done) > 0):
print('\nRatings for {}/{} subjects were restored.'.format(len(prev_done), len(qcw.id_list)))
if (len(incomplete_list) < 1):
print('No subjects to review/rate - exiting.')
sys.exit(0)
else:
print('To be reviewed : {}\n'.format(len(incomplete_list)))
return (ratings, notes, incomplete_list) |
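# Tabulates T-gate counts of multi-controlled AND gates over 2..n_max+1 controls; MultiAnd, get_named_qubits and t_complexity are assumed to come from the Qualtran-style toolkit imported in the original module.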
def Generate(n_max: int=10):
n_controls = []
t_count = []
for n in range(2, (n_max + 2)):
n_controls.append(n)
gate = MultiAnd(cvs=((1,) * n))
op = gate.on_registers(**get_named_qubits(gate.signature))
c = t_complexity(op)
t_count.append(c.t)
return (n_controls, t_count) |
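# Decorator factory: the wrapped reader/writer accepts exactly one of stream=,
# filename= or string= (readers only); files are opened in binary and closed
# even when the wrapped function returns a generator, and writers given no
# target collect their output in a BytesIO and return it as a str.
# Hypothetical usage sketch:
#   @expand_stream_args('r')
#   def load(stream=None):
#       return stream.read()
#   load(string='abc')  # 'abc' is wrapped in a BytesIO and passed as stream=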
def expand_stream_args(mode):
def wrap(f):
def g(*args, **kwargs):
stream = kwargs.pop('stream', None)
filename = kwargs.get('filename', None)
if (mode != 'r'):
filename = kwargs.pop('filename', None)
string = kwargs.pop('string', None)
assert (sum(((x is not None) for x in (stream, filename, string))) <= 1)
if (stream is not None):
kwargs['stream'] = stream
return f(*args, **kwargs)
elif (filename is not None):
stream = open(filename, (mode + 'b'))
kwargs['stream'] = stream
retval = f(*args, **kwargs)
if isinstance(retval, types.GeneratorType):
def wrap_generator(gen):
try:
for x in gen:
(yield x)
except GeneratorExit:
pass
stream.close()
return wrap_generator(retval)
else:
stream.close()
return retval
elif (string is not None):
assert (mode == 'r'), 'Keyword argument string=... cannot be used in dumper function.'
kwargs['stream'] = BytesIO(string.encode('utf-8'))
return f(*args, **kwargs)
else:
assert (mode == 'w'), 'Use keyword argument stream=... or filename=... in loader function.'
sout = BytesIO()
f(*args, stream=sout, **kwargs)
return sout.getvalue().decode('utf-8')
g.__doc__ = f.__doc__
return g
return wrap |