code stringlengths 281 23.7M |
|---|
# NOTE(review): decorator reconstructed from stripped residue '.django_db' —
# confirm against upstream.
@pytest.mark.django_db
def test_extract_parent_from_hash_failure():
    """A recipient profile with no parent data should yield all-None parent fields."""
    recipient_id = 'acb93cfc-e4f8-ecd5-5ac3-fa62f115e8f5-C'
    recipient_hash = TEST_RECIPIENT_PROFILES[recipient_id]['recipient_hash']
    baker.make('recipient.RecipientProfile', **TEST_RECIPIENT_PROFILES[recipient_id])
    parents = recipients.extract_parents_from_hash(recipient_hash)
    # The single returned parent record carries no identifying information.
    assert parents[0]['parent_duns'] is None
    assert parents[0]['parent_name'] is None
    assert parents[0]['parent_id'] is None
class ViewStridedOpTestCase(unittest.TestCase):
    """Tests that view ops (reshape/flatten) feeding bmm/gemm ops are fused.

    Each test builds a small graph, compiles it, asserts the expected number
    of tensors/ops in the optimized graph (fewer means the views were fused
    into the strided op), then compares runtime results against a PyTorch
    reference.
    """

    def _gen_view_bmm_module(self, input0: Tensor, input1: Tensor, test_name: str, dtype: str, expected_num_tensors: int, expected_num_ops: int, num_bmms: int=1) -> Model:
        # Build `num_bmms` bmm_rcr outputs sharing the same inputs, compile,
        # and verify the optimized graph's tensor/op counts.
        Ys = []
        for i in range(num_bmms):
            Y = ops.bmm_rcr()(input0, input1)
            Y._attrs['name'] = f'output{str(i)}'
            Y._attrs['is_output'] = True
            Ys.append(Y)
        target = detect_target()
        module = compile_model(Ys, target, './tmp', f'{test_name}_{dtype}')
        sorted_graph = module.debug_sorted_graph
        self.assertEqual(len(sorted_graph), expected_num_tensors)
        sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
        self.assertEqual(len(sorted_ops), expected_num_ops)
        return module

    def _test_view_and_bmm(self, module: Model, x0_pt: torch.Tensor, x1_pt: torch.Tensor, ys: List[torch.Tensor], x0_shape: List[int], x1_shape: List[int]):
        # PyTorch reference for bmm_rcr: A @ B^T (B transposed on its last
        # two dims). All outputs are expected to equal the same product.
        y_pts = []
        for _ in range(len(ys)):
            y_pt = torch.matmul(x0_pt, x1_pt.transpose(1, 2))
            y_pts.append(y_pt)
        inputs = [x0_pt.reshape(*x0_shape), x1_pt.reshape(*x1_shape)]
        module.run_with_tensors(inputs, ys)
        for (y, y_pt) in zip(ys, y_pts):
            self.assertTrue(torch.allclose(y, y_pt, atol=0.01, rtol=0.01))

    # NOTE(review): residue of a stripped @parameterized.expand(...) decorator;
    # as written this line is a syntax error. Restore from upstream. Also both
    # name components use tensor0 — the second was probably meant to be tensor1.
    ([param(f'single_{test_utils.get_src_op_name(tensor0)}_{test_utils.get_src_op_name(tensor0)}_bmm_fusion', tensor0, tensor1, tensor0._attrs['dtype']) for (tensor0, tensor1) in zip(_gen_fusible_view_ops_before_strided_op('input0', batch_dim=IntVar([1, 128, 256], 'batch_size'), n1=13, n2=46), _gen_fusible_view_ops_before_strided_op('input1', batch_dim=IntImm(1), n1=5, n2=46))], name_func=custom_name_func)
    def test_single_view_and_bmm_fusible(self, test_name: str, input0: Tensor, input1: Tensor, dtype: str):
        # One view per input feeding one bmm: both views should fuse
        # (3 tensors, 1 op remain).
        orig_a_shape = test_utils.get_src_input(input0)._attrs['shape']
        orig_b_shape = test_utils.get_src_input(input1)._attrs['shape']
        module = self._gen_view_bmm_module(input0, input1, test_name, dtype, expected_num_tensors=3, expected_num_ops=1)
        a_shape = input0._attrs['shape']
        b_shape = input1._attrs['shape']
        for batch_size in a_shape[0]._attrs['values']:
            x0_pt = get_random_torch_tensor([batch_size, a_shape[1].value(), a_shape[2].value()], dtype)
            x1_pt = get_random_torch_tensor([dim.value() for dim in b_shape], dtype)
            y = get_torch_empty_tensor([batch_size, a_shape[1].value(), b_shape[1].value()], dtype)
            dim_to_value_dict = {'batch_size': batch_size}
            self._test_view_and_bmm(module, x0_pt, x1_pt, [y], test_utils.get_shape(orig_a_shape, dim_to_value_dict), test_utils.get_shape(orig_b_shape, dim_to_value_dict))

    # NOTE(review): stripped @parameterized.expand(...) residue — see above.
    ([param(f'single_{test_utils.get_src_op_name(tensor0)}_{test_utils.get_src_op_name(tensor0)}_multi_bmm_fusion', tensor0, tensor1, tensor0._attrs['dtype']) for (tensor0, tensor1) in zip(_gen_fusible_view_ops_before_strided_op('input0', batch_dim=IntVar([1, 128, 256], 'batch_size'), n1=13, n2=46), _gen_fusible_view_ops_before_strided_op('input1', batch_dim=IntImm(1), n1=5, n2=46))], name_func=custom_name_func)
    def test_single_view_and_multi_bmm_fusible(self, test_name: str, input0: Tensor, input1: Tensor, dtype: str):
        # Same views consumed by two bmms: views still fuse
        # (4 tensors, 2 ops remain).
        orig_a_shape = test_utils.get_src_input(input0)._attrs['shape']
        orig_b_shape = test_utils.get_src_input(input1)._attrs['shape']
        module = self._gen_view_bmm_module(input0, input1, test_name, dtype, expected_num_tensors=4, expected_num_ops=2, num_bmms=2)
        a_shape = input0._attrs['shape']
        b_shape = input1._attrs['shape']
        for batch_size in a_shape[0]._attrs['values']:
            x0_pt = get_random_torch_tensor([batch_size, a_shape[1].value(), a_shape[2].value()], dtype)
            x1_pt = get_random_torch_tensor([dim.value() for dim in b_shape], dtype)
            y0 = get_torch_empty_tensor([batch_size, a_shape[1].value(), b_shape[1].value()], dtype)
            y1 = y0.clone()
            dim_to_value_dict = {'batch_size': batch_size}
            self._test_view_and_bmm(module, x0_pt, x1_pt, [y0, y1], test_utils.get_shape(orig_a_shape, dim_to_value_dict), test_utils.get_shape(orig_b_shape, dim_to_value_dict))

    def _test_multi_view_and_multi_bmm_fusible(self, dtype='float16'):
        # Two identical reshapes of each input feeding two bmms: all four
        # views should fuse (4 tensors, 2 ops remain).
        batch_dim = IntVar([1, 128, 256], 'batch_size')
        N0 = 13
        N1 = 46
        N2 = 5
        X0 = test_utils.gen_input_tensor([batch_dim, (N0 * N1)], name='input0', dtype=dtype)
        X1 = test_utils.gen_input_tensor([1, (N2 * N1)], name='input1', dtype=dtype)
        X2 = ops.reshape()(X0, [(- 1), N0, N1])
        X3 = ops.reshape()(X0, [(- 1), N0, N1])
        X4 = ops.reshape()(X1, [(- 1), N2, N1])
        X5 = ops.reshape()(X1, [(- 1), N2, N1])
        orig_a_shape = X0._attrs['shape']
        orig_b_shape = X1._attrs['shape']
        Ys = []
        Y0 = ops.bmm_rcr()(X2, X4)
        Y1 = ops.bmm_rcr()(X3, X5)
        Ys = [Y0, Y1]
        for (i, Y) in enumerate(Ys):
            Y._attrs['name'] = f'output{str(i)}'
            Y._attrs['is_output'] = True
        target = detect_target()
        module = compile_model(Ys, target, './tmp', f'multi_view_multi_bmm_fusion_{dtype}')
        sorted_graph = module.debug_sorted_graph
        self.assertEqual(len(sorted_graph), 4)
        sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
        self.assertEqual(len(sorted_ops), 2)
        a_shape = X2._attrs['shape']
        b_shape = X4._attrs['shape']
        for batch_size in a_shape[0]._attrs['values']:
            x0_pt = get_random_torch_tensor([batch_size, a_shape[1].value(), a_shape[2].value()], dtype)
            x1_pt = get_random_torch_tensor([dim.value() for dim in b_shape], dtype)
            y0 = get_torch_empty_tensor([batch_size, a_shape[1].value(), b_shape[1].value()], dtype)
            y1 = y0.clone()
            dim_to_value_dict = {'batch_size': batch_size}
            self._test_view_and_bmm(module, x0_pt, x1_pt, [y0, y1], test_utils.get_shape(orig_a_shape, dim_to_value_dict), test_utils.get_shape(orig_b_shape, dim_to_value_dict))

    def test_multi_view_and_multi_bmm_fusible(self):
        self._test_multi_view_and_multi_bmm_fusible()

    # NOTE(review): residue of a stripped decorator, presumably
    # @unittest.skipIf(detect_target().name() == 'rocm', 'Not supported by ROCM.')
    # — confirm against upstream.
    ((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
    def test_multi_view_and_multi_bmm_fusible_fp32_sm80(self):
        self._test_multi_view_and_multi_bmm_fusible(dtype='float')

    # NOTE(review): stripped @parameterized.expand(...) residue — see above.
    ([param(f'multi_{test_utils.get_src_op_name(tensor0)}_{test_utils.get_src_op_name(tensor0)}_bmm_fusion', tensor0, tensor1, tensor0._attrs['dtype']) for (tensor0, tensor1) in zip(_gen_multiple_fusible_view_ops_before_strided_op('input0', batch_dim=IntVar([1, 128, 256], 'batch_size'), n1=13, n2=46), _gen_multiple_fusible_view_ops_before_strided_op('input1', batch_dim=IntImm(1), n1=5, n2=46))], name_func=custom_name_func)
    def test_multiple_view_and_bmm_fusible(self, test_name: str, input0: Tensor, input1: Tensor, dtype: str):
        # Chains of two view ops per input: the whole chain should fuse
        # (3 tensors, 1 op remain). Hence get_src_input is applied twice.
        orig_a_shape = test_utils.get_src_input(test_utils.get_src_input(input0))._attrs['shape']
        orig_b_shape = test_utils.get_src_input(test_utils.get_src_input(input1))._attrs['shape']
        module = self._gen_view_bmm_module(input0, input1, test_name, dtype, expected_num_tensors=3, expected_num_ops=1)
        a_shape = input0._attrs['shape']
        b_shape = input1._attrs['shape']
        for batch_size in a_shape[0]._attrs['values']:
            x0_pt = get_random_torch_tensor([batch_size, a_shape[1].value(), a_shape[2].value()], dtype)
            x1_pt = get_random_torch_tensor([dim.value() for dim in b_shape], dtype)
            y = get_torch_empty_tensor([batch_size, a_shape[1].value(), b_shape[1].value()], dtype)
            dim_to_value_dict = {'batch_size': batch_size}
            self._test_view_and_bmm(module, x0_pt, x1_pt, [y], test_utils.get_shape(orig_a_shape, dim_to_value_dict), test_utils.get_shape(orig_b_shape, dim_to_value_dict))

    # NOTE(review): stripped @parameterized.expand(...) residue — see above.
    ([param(f'non_fusible_{test_utils.get_src_op_name(tensor0)}_{test_utils.get_src_op_name(tensor0)}_bmm_fusion', tensor0, tensor1, tensor0._attrs['dtype']) for (tensor0, tensor1) in zip(_gen_non_fusible_view_ops_before_strided_op('input0', batch_dim=IntVar([2, 128, 256], 'batch_size'), n1=13, n2=46), _gen_non_fusible_view_ops_before_strided_op('input1', batch_dim=IntVar([2, 128, 256], 'batch_size'), n1=5, n2=46))], name_func=custom_name_func)
    def test_non_fusible_view_and_bmm(self, test_name: str, input0: Tensor, input1: Tensor, dtype: str):
        # Views that cannot fuse (they change the batch dim): both view ops
        # survive alongside the bmm (5 tensors, 3 ops).
        orig_a_shape = test_utils.get_src_input(input0)._attrs['shape']
        orig_b_shape = test_utils.get_src_input(input1)._attrs['shape']
        module = self._gen_view_bmm_module(input0, input1, test_name, dtype, expected_num_tensors=5, expected_num_ops=3)
        a_shape = input0._attrs['shape']
        b_shape = input1._attrs['shape']
        for batch_size in a_shape[0]._attrs['values']:
            x0_pt = get_random_torch_tensor([batch_size, a_shape[1].value(), a_shape[2].value()], dtype)
            x1_pt = get_random_torch_tensor([batch_size, b_shape[1].value(), b_shape[2].value()], dtype)
            y = get_torch_empty_tensor([batch_size, a_shape[1].value(), b_shape[1].value()], dtype)
            # The original (pre-view) batch dim is half the post-view one here.
            dim_to_value_dict = {'batch_size': int((batch_size / 2))}
            self._test_view_and_bmm(module, x0_pt, x1_pt, [y], test_utils.get_shape(orig_a_shape, dim_to_value_dict), test_utils.get_shape(orig_b_shape, dim_to_value_dict))

    def _test_single_view_and_gemm_fusible(self, dtype='float16'):
        # Views feeding three gemm flavors (rcr, rcr_bias, rrr): views fuse,
        # leaving 6 tensors and 3 ops.
        batch_dim = IntVar([1, 128, 256], 'batch_size')
        N0 = 13
        N1 = 46
        N2 = 6
        X0 = test_utils.gen_input_tensor([batch_dim, (N0 * N1)], name='input0', dtype=dtype)
        X1 = test_utils.gen_input_tensor([1, (N2 * N1)], name='input1', dtype=dtype)
        X2 = test_utils.gen_input_tensor([N2], name='input2', dtype=dtype)
        X3 = ops.reshape()(X0, [(- 1), N0, N1])
        X4 = ops.reshape()(X1, [N2, N1])
        X5 = ops.reshape()(X1, [N1, N2])
        Ys = []
        Y0 = ops.gemm_rcr()(X3, X4)
        Y1 = ops.gemm_rcr_bias()(X3, X4, X2)
        Y2 = ops.gemm_rrr()(X3, X5)
        Ys = [Y0, Y1, Y2]
        for (i, Y) in enumerate(Ys):
            Y._attrs['name'] = f'output{str(i)}'
            Y._attrs['is_output'] = True
        target = detect_target()
        module = compile_model(Ys, target, './tmp', f'single_view_gemm_fusion_{dtype}')
        sorted_graph = module.debug_sorted_graph
        self.assertEqual(len(sorted_graph), 6)
        sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
        self.assertEqual(len(sorted_ops), 3)
        for batch_size in batch_dim._attrs['values']:
            x0_pt = get_random_torch_tensor([batch_size, (N0 * N1)], dtype)
            x1_pt = get_random_torch_tensor([1, (N2 * N1)], dtype)
            x2_pt = get_random_torch_tensor([N2], dtype)
            x3_pt = torch.reshape(x0_pt, [(- 1), N0, N1])
            x4_pt = torch.reshape(x1_pt, [N2, N1])
            x5_pt = torch.reshape(x1_pt, [N1, N2])
            # linear(x, W) computes x @ W^T, matching gemm_rcr;
            # gemm_rrr needs W transposed back.
            y0_pt = torch.nn.functional.linear(x3_pt, x4_pt)
            y1_pt = (torch.nn.functional.linear(x3_pt, x4_pt) + x2_pt)
            y2_pt = torch.nn.functional.linear(x3_pt, x5_pt.transpose(0, 1))
            y_pts = [y0_pt, y1_pt, y2_pt]
            ys = [get_torch_empty_tensor([batch_size, N0, N2], dtype), get_torch_empty_tensor([batch_size, N0, N2], dtype), get_torch_empty_tensor([batch_size, N0, N2], dtype)]
            inputs = [x0_pt, x1_pt, x2_pt]
            module.run_with_tensors(inputs, ys)
            for (y, y_pt) in zip(ys, y_pts):
                self.assertTrue(torch.allclose(y, y_pt, atol=0.01, rtol=0.01))

    def test_single_view_and_gemm_fusible(self):
        self._test_single_view_and_gemm_fusible()

    # NOTE(review): stripped @unittest.skipIf(...) residue — see above.
    ((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
    def test_single_view_and_gemm_fusible_fp32_sm80(self):
        self._test_single_view_and_gemm_fusible(dtype='float')
# NOTE(review): decorators reconstructed from stripped residue
# ('_required', '_passes_test(is_installer, ...)') — confirm against upstream.
@login_required
@user_passes_test(is_installer, 'installer_registration')
def installation(request, event_slug):
    """Register a hardware installation for an event.

    GET renders empty forms; POST validates and persists a Hardware plus an
    Installation row, optionally emails post-install instructions to the
    attendee, and redirects to the event index on success. On any failure the
    partially-created rows are deleted and the form is re-rendered with errors.
    """
    installation_form = InstallationForm(event_slug, (request.POST or None), prefix='installation')
    hardware_form = HardwareForm((request.POST or None), prefix='hardware')
    forms = [installation_form, hardware_form]
    errors = []
    if request.POST:
        if (hardware_form.is_valid() and installation_form.is_valid()):
            # Fix: pre-initialize so the cleanup in the except-branch cannot
            # raise UnboundLocalError when save() itself fails before the
            # corresponding assignment has happened.
            hardware = None
            install = None
            try:
                hardware = hardware_form.save()
                install = installation_form.save()
                install.hardware = hardware
                event = get_object_or_404(Event, event_slug=event_slug)
                install.event = event
                install.installer = EventUser.objects.filter(user=request.user).filter(event=event).first()
                install.save()
                # Best-effort post-install email: an SMTP failure is logged
                # and reported but does not abort the registration.
                postinstall_email = InstallationMessage.objects.filter(event=event).first()
                if postinstall_email:
                    try:
                        utils_email.send_installation_email(event.name, postinstall_email, install.attendee)
                    except SMTPException as error:
                        logger.error(error)
                        messages.error(request, _("The email couldn't sent successfully, please retry later or contact a organizer"))
                messages.success(request, _('The installation has been registered successfully. Happy Hacking!'))
                event_index_url = reverse('index', args=[event_slug])
                return redirect(event_index_url)
            except Exception as error_message:
                logger.error(error_message)
                # Roll back partially-created rows so a failed registration
                # leaves no orphans behind.
                if (hardware is not None):
                    Hardware.delete(hardware)
                if (install is not None):
                    Installation.delete(install)
        # Reached on invalid forms or after a registration failure.
        messages.error(request, _("The installation couldn't be registered (check form errors)"))
        errors = get_forms_errors(forms)
    return render(request, 'installation/installation-form.html', update_event_info(event_slug, {'forms': forms, 'errors': errors, 'multipart': False}))
def main():
    """Walk args.path for files matching args.pattern, extract each asset
    bundle with the external 'WebExtract' tool, convert the extracted data
    files with 'binary2text', and index the parsed objects into a fresh
    SQLite database at args.output.

    Relies on module-level `args` (parsed CLI options) and helper tools not
    visible in this chunk.
    """
    if (sys.version_info[0] < 3):
        print('\nPython 3 is required!\n')
        return
    if (args.debug == True):
        # Install a SIGINT handler that drops into the debugger.
        import signal
        signal.signal(signal.SIGINT, debug_signal_handler)
    # Always start from a clean database file.
    if os.path.exists(args.output):
        os.remove(args.output)
    db = sqlite3.connect(args.output)
    file_index = FileIndex(db)
    processor = ObjectProcessor(file_index)
    processor.init_database(db)
    bundle_id = 0
    cursor = db.cursor()
    if os.path.isdir(args.path):
        for (root, dirs, files) in os.walk(args.path):
            for f in files:
                if fnmatch.fnmatch(f, args.pattern):
                    filepath = os.path.join(root, f)
                    ret_code = 0
                    # NOTE(review): Python ints are immutable, so the callee
                    # cannot update `ret_code` through this argument — the
                    # `ret_code == 0` checks below always see 0. Verify the
                    # contract of run_tool_with_timeout / run_tool.
                    run_tool_with_timeout('WebExtract', filepath, ret_code, 60, 0)
                    # WebExtract writes its output next to the input file.
                    datapath = (filepath + '_data')
                    if ((ret_code == 0) and os.path.isdir(datapath)):
                        bundle_id += 1
                        bundle_name = os.path.relpath(filepath, os.path.realpath(args.path))
                        bundle_size = os.path.getsize(filepath)
                        cursor.execute('\n INSERT INTO asset_bundles(id, name, file_size)\n VALUES(?,?,?)\n ', (bundle_id, bundle_name, bundle_size))
                        db.commit()
                        debug_print(('Processing ' + bundle_name), 1)
                        for f2 in os.listdir(datapath):
                            datafile = os.path.join(datapath, f2)
                            run_tool('binary2text', datafile, ret_code, 2)
                            if ((ret_code == 0) and os.path.isfile((datafile + '.txt'))):
                                debug_print(('Parsing ' + f2), 3)
                                p = Parser(file_index)
                                objs = p.parse((datafile + '.txt'))
                                processor.process_objects(bundle_id, objs, db, f2, args.store_raw)
                        # Remove the intermediate extraction directory unless
                        # the user asked to keep it.
                        if (not args.keep_temp):
                            shutil.rmtree(datapath)
    else:
        print('Path is not a directory!')
    db.close()
class Test(unittest.TestCase):
    """Unit tests for the fracttypes type system: type objects, array/element
    mappings, casts, C type names, and Var initial-value code generation."""

    def testTypeCtor(self):
        c = fracttypes.Type(id=72, suffix='i', printf='%d', typename='int', default=0, slots=1, cname='int')
        self.assertEqual('i', c.suffix)

    def testTypeLists(self):
        # typeObjectList and typeList must stay in sync.
        self.assertEqual(len(fracttypes.typeObjectList), len(fracttypes.typeList))

    def testTypes(self):
        # Each type constant indexes its matching type object.
        self.assertEqual('bool', fracttypes.typeObjectList[fracttypes.Bool].typename)
        self.assertEqual('int', fracttypes.typeObjectList[fracttypes.Int].typename)
        self.assertEqual('float', fracttypes.typeObjectList[fracttypes.Float].typename)
        self.assertEqual('complex', fracttypes.typeObjectList[fracttypes.Complex].typename)
        self.assertEqual('color', fracttypes.typeObjectList[fracttypes.Color].typename)
        self.assertEqual('string', fracttypes.typeObjectList[fracttypes.String].typename)
        self.assertEqual('hyper', fracttypes.typeObjectList[fracttypes.Hyper].typename)
        self.assertEqual('gradient', fracttypes.typeObjectList[fracttypes.Gradient].typename)
        self.assertEqual('image', fracttypes.typeObjectList[fracttypes.Image].typename)
        self.assertEqual('intarray', fracttypes.typeObjectList[fracttypes.IntArray].typename)

    def testArrayTypeOf(self):
        # arrayTypeOf / elementTypeOf are inverses for supported elements;
        # unsupported element types (Image) raise TranslationError.
        pairs = [(fracttypes.Int, fracttypes.IntArray), (fracttypes.Float, fracttypes.FloatArray), (fracttypes.Complex, fracttypes.ComplexArray)]
        for (element, array) in pairs:
            self.assertEqual(array, fracttypes.arrayTypeOf(element))
            self.assertEqual(element, fracttypes.elementTypeOf(array))
        self.assertRaises(fracttypes.TranslationError, fracttypes.arrayTypeOf, fracttypes.Image)

    def testTypeIDs(self):
        # A type object's typeid equals its index in typeObjectList.
        for i in range(len(fracttypes.typeObjectList)):
            self.assertEqual(i, fracttypes.typeObjectList[i].typeid)

    def testPrintfOfType(self):
        self.assertEqual('%d', fracttypes.typeObjectList[fracttypes.Bool].printf)
        self.assertEqual('%d', fracttypes.typeObjectList[fracttypes.Int].printf)
        self.assertEqual('%g', fracttypes.typeObjectList[fracttypes.Float].printf)
        self.assertEqual(None, fracttypes.typeObjectList[fracttypes.String].printf)

    def testCastsToSelf(self):
        # Every type is castable to itself.
        for type in fracttypes.typeList:
            self.assertEqual(True, fracttypes.canBeCast(type, type))

    def testArrayCasts(self):
        # VoidArray casts to a concrete array type, never the reverse.
        arraytypes = [fracttypes.IntArray]
        for t in arraytypes:
            self.assertEqual(True, fracttypes.canBeCast(fracttypes.VoidArray, t))
            self.assertEqual(False, fracttypes.canBeCast(t, fracttypes.VoidArray))

    def testCType(self):
        expected = {fracttypes.Int: 'int', fracttypes.Float: 'double', fracttypes.Complex: 'double', fracttypes.Hyper: 'double', fracttypes.Bool: 'int', fracttypes.Color: 'double', fracttypes.String: '<Error>', fracttypes.Gradient: 'void *', fracttypes.VoidArray: 'void *', fracttypes.IntArray: 'int *'}
        for (k, v) in list(expected.items()):
            self.assertEqual(v, fracttypes.typeObjectList[k].cname)

    def testFloatInitVal(self):
        # Without a param slot the literal value is emitted; with one, the
        # generated code reads from the parameter block instead.
        float_type = fracttypes.typeObjectList[fracttypes.Float]
        v = fracttypes.Var(fracttypes.Float, 1.234)
        self.assertEqual(['1.'], float_type.init_val(v))
        self.assertEqual(['1.'], v.init_val())
        v.param_slot = 3
        self.assertEqual(['t__pfo->p[3].doubleval'], float_type.init_val(v))
        self.assertEqual(['t__pfo->p[3].doubleval'], v.init_val())

    def testPartNames(self):
        # Suffixes used for the scalar components of compound types.
        v = fracttypes.Var(fracttypes.Float)
        self.assertEqual([''], v.part_names)
        v = fracttypes.Var(fracttypes.Complex)
        self.assertEqual(['_re', '_im'], v.part_names)
        v = fracttypes.Var(fracttypes.Color)
        self.assertEqual(['_re', '_i', '_j', '_k'], v.part_names)

    def testComplexInitVal(self):
        # Complex consumes two consecutive param slots.
        v = fracttypes.Var(fracttypes.Complex, [1.234, (- 7.89)])
        self.assertEqual(['1.', '-7.'], v.init_val())
        v.param_slot = 3
        self.assertEqual(['t__pfo->p[3].doubleval', 't__pfo->p[4].doubleval'], v.init_val())

    def testGradientInitVal(self):
        # A gradient has no literal form: init_val requires a param slot.
        v = fracttypes.Var(fracttypes.Gradient, 0)
        self.assertRaises(fracttypes.TranslationError, v.init_val)
        v.param_slot = 3
        self.assertEqual(['t__pfo->p[3].gradient'], v.init_val())

    def testIntInitVal(self):
        v = fracttypes.Var(fracttypes.Int, 1)
        self.checkIntInitVal(v)

    def testBoolInitVal(self):
        # Bool shares the int init-val code path.
        v = fracttypes.Var(fracttypes.Bool, 1)
        self.checkIntInitVal(v)

    def checkIntInitVal(self, v):
        self.assertEqual(['1'], v.init_val())
        v.param_slot = 3
        self.assertEqual(['t__pfo->p[3].intval'], v.init_val())

    def testColorInitVal(self):
        v = fracttypes.Var(fracttypes.Color, [1.234, (- 7.89), 11.1, .0])
        self.checkQuadInitVal(v)

    def testHyperInitVal(self):
        # Hyper shares the four-component init-val code path with Color.
        v = fracttypes.Var(fracttypes.Hyper, [1.234, (- 7.89), 11.1, .0])
        self.checkQuadInitVal(v)

    def checkQuadInitVal(self, v):
        # Four components consume four consecutive param slots.
        self.assertEqual(['1.', '-7.', '11.', '.'], v.init_val())
        v.param_slot = 3
        self.assertEqual(['t__pfo->p[3].doubleval', 't__pfo->p[4].doubleval', 't__pfo->p[5].doubleval', 't__pfo->p[6].doubleval'], v.init_val())

    def testFunc(self):
        # The generated function name encodes arg and return types (ii -> i).
        f = function.Func([fracttypes.Int, fracttypes.Int], fracttypes.Int, 'wibble', pos=7)
        self.assertEqual([fracttypes.Int, fracttypes.Int], f.args)
        self.assertEqual(fracttypes.Int, f.ret)
        self.assertEqual(7, f.pos)
        self.assertEqual('wibble', f.cname)
        self.assertEqual('wibble_ii_i', f.genFunc)
        self.assertEqual([], f.implicit_args)
        f.set_implicit_arg('fish')
        f.set_implicit_arg('blouse')
        self.assertEqual(['fish', 'blouse'], f.implicit_args)
# NOTE(review): decorator reconstructed from stripped residue
# '_renderer(wrap_type=...)' — confirm against upstream.
@default_renderer(wrap_type=DatasetSummaryMetric)
class DatasetSummaryMetricRenderer(MetricRenderer):
    """Renders a DatasetSummaryMetric as a two- (or, with a reference
    dataset, three-) column summary table plus a header."""

    # Display label paired with the attribute of the per-dataset stats object
    # it reads, in table row order.
    _ROW_FIELDS = (
        ('id column', 'id_column'),
        ('target column', 'target'),
        ('prediction column', 'prediction'),
        ('date column', 'date_column'),
        ('number of columns', 'number_of_columns'),
        ('number of rows', 'number_of_rows'),
        ('missing values', 'number_of_missing_values'),
        ('categorical columns', 'number_of_categorical_columns'),
        ('numeric columns', 'number_of_numeric_columns'),
        ('text columns', 'number_of_text_columns'),
        ('datetime columns', 'number_of_datetime_columns'),
        ('empty columns', 'number_of_empty_columns'),
        ('constant columns', 'number_of_constant_columns'),
        ('almost constant features', 'number_of_almost_constant_columns'),
        ('duplicated columns', 'number_of_duplicated_columns'),
        ('almost duplicated features', 'number_of_almost_duplicated_columns'),
    )

    @staticmethod
    def _get_table(metric_result: DatasetSummaryMetricResult) -> BaseWidgetInfo:
        # Fix: the original had neither a `self` parameter nor @staticmethod,
        # so the bound call self._get_table(metric_result) raised TypeError.
        # The sixteen hand-written row/append pairs are now driven by
        # _ROW_FIELDS, keeping current and reference columns in lockstep.
        column_names = ['Metric', 'Current']
        rows = [[label, getattr(metric_result.current, attr)] for (label, attr) in DatasetSummaryMetricRenderer._ROW_FIELDS]
        if metric_result.reference is not None:
            column_names.append('Reference')
            for (row, (_, attr)) in zip(rows, DatasetSummaryMetricRenderer._ROW_FIELDS):
                row.append(getattr(metric_result.reference, attr))
        return table_data(title='', column_names=column_names, data=rows)

    def render_html(self, obj: DatasetSummaryMetric) -> List[BaseWidgetInfo]:
        metric_result = obj.get_result()
        return [header_text(label='Dataset Summary'), self._get_table(metric_result)]
def test_multiple_functions(mesh, pvd):
    """Writing a different set of functions than the first write must fail."""
    dg_space = FunctionSpace(mesh, 'DG', 0)
    cg_space = FunctionSpace(mesh, 'CG', 1)
    foo = Function(dg_space, name='foo')
    bar = Function(cg_space, name='bar')
    # First write fixes the expected function set/order for this file.
    pvd.write(foo, bar)
    # A subset, or the same pair in a different order, is rejected.
    with pytest.raises(ValueError):
        pvd.write(foo)
    with pytest.raises(ValueError):
        pvd.write(bar, foo)
class Analyse(QObject):
    """Qt worker object that runs an ERT smoother update from a source
    ensemble into a target ensemble, reporting progress via signals."""

    # Emitted when the run ends: (error message, source ensemble name).
    finished = Signal(str, str)
    progress_update = Signal(RunModelEvent)

    def __init__(self, ert: EnKFMain, target_fs: EnsembleAccessor, source_fs: EnsembleReader):
        QObject.__init__(self)
        self._ert = ert
        self._target_fs = target_fs
        self._source_fs = source_fs

    # NOTE(review): the bare '()' below is residue of a stripped decorator,
    # presumably '@Slot()' — confirm against upstream.
    ()
    def run(self):
        # Any failure is captured and reported through `finished`, not raised.
        error: Optional[str] = None
        config = self._ert.ert_config
        rng = np.random.default_rng(_seed_sequence(config.random_seed))
        update_settings = UpdateSettings(std_cutoff=config.analysis_config.std_cutoff, alpha=config.analysis_config.enkf_alpha, misfit_preprocess=False, min_required_realizations=config.analysis_config.minimum_required_realizations)
        try:
            smoother_update(self._source_fs, self._target_fs, str(uuid.uuid4()), self._ert.update_configuration, update_settings, config.analysis_config.es_module, rng, self.smoother_event_callback, log_path=config.analysis_config.log_path)
        except ErtAnalysisError as e:
            error = str(e)
        except Exception as e:
            error = f'Unknown exception occurred with error: {str(e)}'
        self.finished.emit(error, self._source_fs.name)

    def smoother_event_callback(self, event: AnalysisEvent) -> None:
        # Translate analysis events into run-model progress events; this
        # worker always reports iteration 0.
        if isinstance(event, AnalysisStatusEvent):
            self.progress_update.emit(RunModelStatusEvent(iteration=0, msg=event.msg))
        elif isinstance(event, AnalysisTimeEvent):
            self.progress_update.emit(RunModelTimeEvent(iteration=0, elapsed_time=event.elapsed_time, remaining_time=event.remaining_time))
# NOTE(review): '@' restored on a stripped decorator residue; the blueprint
# name '_page' may itself be truncated — confirm against upstream.
@_page.route('/table/change_config', methods=['POST'])
def table_change_config():
    """Switch the active log config, saving the current state first.

    Expects a JSON body with 'uuid' and 'config_name'; returns a JSON
    status payload.
    """
    res = check_uuid(all_data['uuid'], request.json['uuid'])
    if res is not None:
        return jsonify(res)
    if 'config_name' not in request.json:
        return jsonify(status='fail', msg='There is no config_name in your request.')
    config_names = _get_config_names(all_data['root_log_dir'])
    # Fix: the original tested `config_names.index(name) != -1`, but
    # list.index() never returns -1 — it raises ValueError on a miss, so the
    # failure branch was unreachable and an unknown name crashed the request.
    if request.json['config_name'] not in config_names:
        return jsonify(status='fail', msg='There is no config named:{}.'.format(request.json['config_name']))
    # Persist the current state before switching configs.
    log_dir = all_data['root_log_dir']
    log_config_name = all_data['log_config_name']
    save_all_data(all_data, log_dir, log_config_name)
    all_data['log_config_name'] = request.json['config_name']
    return jsonify(status='success', msg='')
class TestDSLBase(unittest.TestCase):
    """Base class for DSL tests: resets the globally-registered contexts
    between tests and provides helpers for running collected examples."""

    def setUp(self):
        # Clear global Context/Example registration state between tests.
        reset()

    def run_example(self, example: Example) -> None:
        _ExampleRunner(example, QuietFormatter(import_module_names=[__name__])).run()

    def run_all_examples(self):
        for each_context in Context.all_top_level_contexts:
            for example in each_context.all_examples:
                self.run_example(example)

    def run_first_context_first_example(self):
        self.run_example(Context.all_top_level_contexts[0].all_examples[0])

    def run_first_context_all_examples(self):
        for example in Context.all_top_level_contexts[0].all_examples:
            self.run_example(example)

    def _print_context_hierarchy(self, contexts=None, indent=''):
        # Debug helper: recursively dump the registered context tree.
        if (contexts is None):
            print('')
            contexts = Context.all_top_level_contexts
        for ctx in contexts:
            print('{}Context: "{}" (skip={}, focus={})'.format(indent, str(ctx), ctx.skip, ctx.focus))
            for name in ctx.shared_contexts.keys():
                print(' {}Shared context: "{}"'.format(indent, name))
            for example in ctx.all_examples:
                print(' {}Example: "{}"'.format(indent, example))
            if ctx.children_contexts:
                self._print_context_hierarchy(ctx.children_contexts, '{} '.format(indent))
def crlf_get_url_method(uri, headers, scanid=None):
    """Probe `uri` for CRLF injection over GET.

    Appends each known CRLF payload to the URI path and looks for the
    injected 'CRLF-Test' marker among the response header names. The first
    confirmed finding is recorded via dbupdate and the scan stops.
    """
    crlf_payloads = fetch_crlf_payload()
    # Hoisted: the original re-parsed `uri` four times for every payload.
    parsed = urllib.parse.urlparse(uri)
    base_uri = parsed.scheme + '://' + parsed.netloc + parsed.path + '/'
    for payload in crlf_payloads:
        parsed_uri = base_uri + payload
        crlf_get_method = req.api_request(parsed_uri, 'GET', headers)
        for name in crlf_get_method.headers:
            if 'CRLF-Test' in name:
                attack_result = {'id': 13, 'scanid': scanid, 'url': parsed_uri, 'alert': 'CRLF injection', 'impact': 'High', 'req_headers': headers, 'req_body': 'NA', 'res_headers': crlf_get_method.headers, 'res_body': crlf_get_method.text}
                dbupdate.insert_record(attack_result)
                print('[+]{0} is vulnerable to CRLF injection'.format(parsed_uri))
                # Early exit after the first confirmed finding (original behavior).
                return
class TestOFPTableMod(unittest.TestCase):
    """Tests OFPTableMod message construction and wire serialization across
    boundary table_id/config values."""

    table_id = 3
    # NOTE(review): the value of `config` was lost in extraction — the next
    # line is a syntax error as-is. It is presumably an ofproto.OFPTC_*
    # constant; restore from upstream.
    config =

    def test_init(self):
        c = OFPTableMod(_Datapath, self.table_id, self.config)
        eq_(self.table_id, c.table_id)
        eq_(self.config, c.config)

    def _test_serialize(self, table_id, config):
        # Serialize and unpack the raw buffer, checking header and body fields.
        c = OFPTableMod(_Datapath, table_id, config)
        c.serialize()
        eq_(ofproto.OFP_VERSION, c.version)
        eq_(ofproto.OFPT_TABLE_MOD, c.msg_type)
        eq_(0, c.xid)
        # Combined struct format: OpenFlow header followed by table_mod body.
        fmt = (('!' + ofproto.OFP_HEADER_PACK_STR.replace('!', '')) + ofproto.OFP_TABLE_MOD_PACK_STR.replace('!', ''))
        res = struct.unpack(fmt, six.binary_type(c.buf))
        eq_(res[0], ofproto.OFP_VERSION)
        eq_(res[1], ofproto.OFPT_TABLE_MOD)
        eq_(res[2], len(c.buf))
        eq_(res[3], 0)
        eq_(res[4], table_id)
        eq_(res[5], config)

    def test_serialize_mid(self):
        self._test_serialize(self.table_id, self.config)

    def test_serialize_max(self):
        table_id = ofproto.OFPTT_ALL
        # NOTE(review): value lost in extraction (syntax error preserved) —
        # restore from upstream.
        config =
        self._test_serialize(table_id, config)

    def test_serialize_min(self):
        table_id = 0
        config = 0
        self._test_serialize(table_id, config)

    def test_serialize_p1(self):
        table_id = ofproto.OFPTT_MAX
        config = ofproto.OFPTC_TABLE_MISS_CONTINUE
        self._test_serialize(table_id, config)

    def test_serialize_p2(self):
        table_id = ofproto.OFPTT_MAX
        config = ofproto.OFPTC_TABLE_MISS_DROP
        self._test_serialize(table_id, config)

    def test_serialize_p3(self):
        table_id = ofproto.OFPTT_MAX
        config = ofproto.OFPTC_TABLE_MISS_MASK
        self._test_serialize(table_id, config)
def test_nested_pickling(tmp_path):
    """A nested function that pickles a module-level function should produce
    the same bytes when submitted through the executor."""
    def make_pickle() -> bytes:
        return pickle.dumps(hello_fn)

    expected = make_pickle()
    # Sanity: the pickle references this module and the target function.
    assert bytes(__name__, 'ascii') in expected
    assert b'hello_fn' in expected
    executor = LocalExecutor(tmp_path)
    job = executor.submit(make_pickle)
    assert job.result() == expected
def comp_appres(depth, res, a, n, srcpts=1, recpts=1, verb=1):
    """Compute DC apparent resistivity for a layered earth with empymod.

    Returns (apparent_resistivity, AB/2). The geometric factor
    pi*a*n*(n+1)*(n+2) matches a dipole-dipole electrode array — confirm
    against the intended survey layout. NOTE(review): srcpts/recpts are
    accepted but unused in this body.
    """
    # Outer electrode distance for dipole separation factor n and spacing a.
    AB = ((n + 1) * a)
    # 'freqtime': 1e-20 approximates the zero-frequency (DC) limit.
    # NOTE(review): the leading `.0` air-layer resistivity looks like a
    # mangled large value (air is conventionally ~2e14 Ohm·m) — verify.
    model = {'src': [((- a) / 2), (a / 2), 0, 0, 0.001, 0.001], 'rec': [(AB - (a / 2)), (AB + (a / 2)), (AB * 0), (AB * 0), 0.001, 0.001], 'depth': np.r_[(0, np.array(depth, ndmin=1))], 'freqtime': 1e-20, 'verb': verb, 'res': np.r_[(.0, np.array(res, ndmin=1))], 'strength': 1, 'htarg': {'pts_per_dec': (- 1)}}
    return ((((((np.real(empymod.bipole(**model)) * np.pi) * a) * n) * (n + 1)) * (n + 2)), (AB / 2))
class Exon():
    """A single exon with optional differential-expression statistics.

    Attributes:
        name: exon identifier (stringified).
        gene_symbol: symbol of the gene the exon belongs to (stringified).
        log2_fold_change: log2 fold change as float, or None when absent.
        p_value: p-value as float, or None when absent.
        data: per-exon payload list.
    """

    def __init__(self, name, gene_symbol, log2_fold_change=None, p_value=None, data=None):
        self.name = str(name)
        self.gene_symbol = str(gene_symbol)
        # Coerce to float only when provided; None means "not measured".
        if (log2_fold_change is not None):
            self.log2_fold_change = float(log2_fold_change)
        else:
            self.log2_fold_change = None
        if (p_value is not None):
            self.p_value = float(p_value)
        else:
            self.p_value = None
        # Fix: the original signature used the mutable default `data=[]`,
        # so every instance built without `data` shared one list.
        self.data = [] if data is None else data

    def __repr__(self):
        return ('%s:%s:log2FoldChange=%s;p-value=%s' % (self.name, self.gene_symbol, self.log2_fold_change, self.p_value))
def test_formatting_list():
    """_format_instances should yield uniquely-named (str, OLS) tuples."""
    instances = [OLS(), ('ols', OLS()), ('ols', OLS()), ['list', OLS()]]
    formatted = _format_instances(instances, False)
    seen = set()
    for item in formatted:
        assert isinstance(item, tuple)
        assert isinstance(item[0], str)
        assert isinstance(item[1], OLS)
        # Duplicate input names must have been de-duplicated.
        assert item[0] not in seen
        seen.add(item[0])
_required
_required
def console(request, hostname):
    """Render the console page for one VM, with the sidebar VM list."""
    context = collect_view_data(request, 'vm_list')
    context['vm'] = get_vm(request, hostname)
    vms = get_vms(request)
    context['vms'] = vms
    context['vms_tags'] = get_vms_tags(vms)
    context['can_edit'] = request.user.is_admin(request)
    # Let listeners decorate the context before rendering.
    view_vm_console.send(sender='gui.vm.views.console', request=request, context=context)
    return render(request, 'gui/vm/console.html', context)
def main():
    """Emit a randomized Verilog top module instantiating BRAM/FIFO primitives
    and record the chosen parameters in params.json."""
    print('\nmodule top();\n ')
    luts = LutMaker()
    body_lines = []
    params_list = []
    generators = (ramb18, ramb18_2x, ramb36, fifo18, fifo18_ramb18, fifo36)
    # One randomly-chosen primitive generator per tile.
    for tile_name, sites in gen_sites():
        chosen = random.choice(generators)
        params_list.append(chosen(tile_name, luts, body_lines, sites))
    for text in luts.create_wires_and_luts():
        print(text)
    for text in body_lines:
        print(text)
    print('endmodule')
    with open('params.json', 'w') as f:
        json.dump(params_list, f, indent=2)
class OptionPlotoptionsAreasplinerangeSonificationContexttracksMappingGapbetweennotes(Options):
    """Accessors for the gapBetweenNotes mapping configuration options.

    NOTE(review): each duplicated method name in the original was clearly a
    @property getter plus a @<name>.setter whose decorators were stripped —
    without them the second def silently shadowed the first. Reconstructed;
    confirm against upstream.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class flow_mod_failed_error_msg(error_msg):
    """OpenFlow 1.2 (wire version 3) flow_mod_failed error message.

    NOTE(review): this looks like loxi-generated, Python-2-era code — `data`
    defaults to a text string and pack() joins with ''.join; on Python 3 wire
    payloads would need bytes. Left untouched.
    """

    version = 3  # OpenFlow 1.2
    type = 1  # OFPT_ERROR
    err_type = 5  # OFPET_FLOW_MOD_FAILED

    def __init__(self, xid=None, code=None, data=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (code != None):
            self.code = code
        else:
            self.code = 0
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        # Serialize header + error body; the length field (packed[2]) is
        # back-patched once the total size is known.
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.err_type))
        packed.append(struct.pack('!H', self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    # NOTE(review): presumably a @staticmethod whose decorator was stripped
    # (it takes `reader`, not `self`, and is the conventional loxi unpack
    # entry point) — confirm against upstream.
    def unpack(reader):
        obj = flow_mod_failed_error_msg()
        _version = reader.read('!B')[0]
        assert (_version == 3)
        _type = reader.read('!B')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's length (4 header bytes
        # already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _err_type = reader.read('!H')[0]
        assert (_err_type == 5)
        obj.code = reader.read('!H')[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.code != other.code):
            return False
        if (self.data != other.data):
            return False
        return True

    def pretty_print(self, q):
        # Human-readable dump for the loxi pretty-printer `q`.
        q.text('flow_mod_failed_error_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('code = ')
                # Show the symbolic OFPFMFC_* name when the code is known.
                value_name_map = {0: 'OFPFMFC_UNKNOWN', 1: 'OFPFMFC_TABLE_FULL', 2: 'OFPFMFC_BAD_TABLE_ID', 3: 'OFPFMFC_OVERLAP', 4: 'OFPFMFC_EPERM', 5: 'OFPFMFC_BAD_TIMEOUT', 6: 'OFPFMFC_BAD_COMMAND', 7: 'OFPFMFC_BAD_FLAGS'}
                if (self.code in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.code], self.code)))
                else:
                    q.text(('%#x' % self.code))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
def divide(pitch_motif: PitchLine, duration_motif: DurationLine, n: int) -> List[Tuple[(PitchLine, DurationLine)]]:
    """Cut a (pitch, duration) motif into segments of equal total duration.

    The motif is scanned left to right and a segment boundary is placed every
    time the accumulated duration reaches ``sum(duration_motif) / n``. A note
    straddling a boundary is truncated at the boundary and its pitch restarts
    the next segment; a note longer than a whole segment fills intermediate
    segments on its own. Returns the list of ``(pitches, durations)`` pairs.
    """
    segments = []
    span = sum(duration_motif) / n
    seg_pitches = []
    seg_durations = []
    for idx, dur in enumerate(duration_motif):
        elapsed = sum(seg_durations)
        overshoot = (elapsed + dur) - span
        note = pitch_motif[idx]
        seg_pitches.append(note)
        if overshoot <= 0:
            # The note fits entirely inside the current segment.
            seg_durations.append(dur)
            continue
        # The note crosses the boundary: close this segment at exactly `span`.
        seg_durations.append(span - elapsed)
        segments.append((seg_pitches, seg_durations))
        seg_pitches = [note]
        overshoot -= span
        while overshoot > 0:
            # The remainder still covers at least one full segment.
            segments.append((seg_pitches, [span]))
            overshoot -= span
        seg_durations = [overshoot + span]
        if overshoot == 0:
            # The note ended exactly on a boundary: close and reset.
            segments.append((seg_pitches, seg_durations))
            seg_pitches = []
            seg_durations = []
    return segments
def test_add_vectors(mocker):
    """add_vectors should issue one upsert point per (vector, payload) pair,
    with sequential ids and the vector stored under the given vector name."""
    client_cls = mocker.patch('qdrant_client.QdrantClient', autospec=True)
    # Every env-var lookup resolves to a dummy so Qdrant() can initialise.
    mocker.patch('os.getenv', side_effect=(lambda x: 'dummy_value'))
    store = Qdrant(env_file_path='/path/to/your/env/file')
    client_cls.return_value.upsert.return_value = True
    embeddings = [[0.1, 0.2], [0.3, 0.4]]
    payloads = [{'id': 1}, {'id': 2}]
    store.add_vectors('test_collection', embeddings, 'test_vector', payloads)
    expected_points = [
        {'id': 0, 'vector': {'test_vector': [0.1, 0.2]}, 'payload': {'id': 1}},
        {'id': 1, 'vector': {'test_vector': [0.3, 0.4]}, 'payload': {'id': 2}},
    ]
    client_cls.return_value.upsert.assert_called_with(collection_name='test_collection', points=expected_points)
def test_topbottom(P2):
    """Solve a Poisson-type problem on the P2 space and check the solver
    reproduces the interpolated manufactured solution pointwise.

    NOTE(review): `ds_tb` looks like a top/bottom surface measure of an
    extruded mesh, and the Dirichlet data on side markers 1-4 matches the
    manufactured solution 10*(z-0.5)^2 — confirm against the mesh fixture.
    """
    u = TrialFunction(P2)
    v = TestFunction(P2)
    xs = SpatialCoordinate(P2.mesh())
    # Bilinear/linear forms: Laplacian with a source term and a surface term.
    a = (inner(grad(u), grad(v)) * dx)
    L = (((- inner(20, v)) * dx) + (inner(10, v) * ds_tb))
    bc_expr = ((10 * (xs[2] - 0.5)) * (xs[2] - 0.5))
    bcs = [DirichletBC(P2, bc_expr, 1), DirichletBC(P2, bc_expr, 2), DirichletBC(P2, bc_expr, 3), DirichletBC(P2, bc_expr, 4)]
    u = Function(P2)
    solve((a == L), u, bcs)
    u_exact = Function(P2)
    u_exact.interpolate(bc_expr)
    # Max-norm agreement between solver dofs and the interpolated exact field.
    assert (max(abs((u.dat.data - u_exact.dat.data))) < 1e-06)
def _unpack_string(code, read_fn):
    """Decode a msgpack-encoded string whose format byte is *code*.

    code: the single format byte already consumed from the stream.
    read_fn: callable(n) returning the next n raw bytes.
    Returns the decoded unicode string; raises InvalidStringException when
    the payload is not valid UTF-8, or a generic Exception when *code* is
    not a string format byte (caller bug).
    """
    if ((ord(code) & 224) == 160):
        # fixstr (0xa0-0xbf): length lives in the low 5 bits of the format byte.
        length = (ord(code) & (~ 224))
    elif (code == b'\xd9'):
        # str 8: one-byte length prefix.
        length = _struct_unpack('B', read_fn(1))[0]
    elif (code == b'\xda'):
        # str 16: big-endian two-byte length prefix.
        length = _struct_unpack('>H', read_fn(2))[0]
    elif (code == b'\xdb'):
        # str 32: big-endian four-byte length prefix.
        length = _struct_unpack('>I', read_fn(4))[0]
    else:
        raise Exception(('logic error, not string: 0x%02x' % ord(code)))
    try:
        return bytes.decode(read_fn(length), 'utf-8')
    except UnicodeDecodeError:
        raise InvalidStringException('unpacked string is not utf-8')
class OptionPlotoptionsVariablepieSonificationTracksMappingTremolo(Options):
    """Tremolo mapping options for variable-pie sonification tracks; each
    accessor returns a typed sub-options object managed by the Options base."""

    def depth(self) -> 'OptionPlotoptionsVariablepieSonificationTracksMappingTremoloDepth':
        # Sub-configuration stored under the 'depth' key.
        return self._config_sub_data('depth', OptionPlotoptionsVariablepieSonificationTracksMappingTremoloDepth)

    def speed(self) -> 'OptionPlotoptionsVariablepieSonificationTracksMappingTremoloSpeed':
        # Sub-configuration stored under the 'speed' key.
        return self._config_sub_data('speed', OptionPlotoptionsVariablepieSonificationTracksMappingTremoloSpeed)
def write_json_file(file_name: str, results: list) -> str:
    """Serialize *results* as indented JSON to a timestamped file.

    The output path is *file_name* plus a ``_M-D-YYYY_H-M.json`` suffix
    (components unpadded). Announces the write via click and returns the
    path actually written.
    """
    ts = datetime.now()
    suffix = f'_{ts.month}-{ts.day}-{ts.year}_{ts.hour}-{ts.minute}.json'
    out_path = f'{file_name}{suffix}'
    click.secho(f'[*] Writing results to {out_path}', fg='green')
    with open(out_path, 'w') as handle:
        json.dump(results, handle, indent=4)
    return out_path
class RealMatrix(BroadcastMatrixType, metaclass=MemoizedClass):
    """Matrix type with real-valued elements; memoized, so equal
    (rows, columns) pairs share a single instance."""

    def __init__(self, rows: int, columns: int) -> None:
        # Element type is fixed to `real_element`; only the shape varies.
        BroadcastMatrixType.__init__(self, real_element, rows, columns)

    def with_dimensions(self, rows: int, columns: int) -> BMGMatrixType:
        # Same element type with a new shape (memoization may return a cached instance).
        return RealMatrix(rows, columns)
def test_always_transact(accounts, tester, argv, web3, monkeypatch, history):
    """With `always_transact` enabled, a view call is executed as a transaction.

    NOTE(review): relies on brownie fixtures; stubbing `chain.undo` out keeps
    the extra transaction on-chain and in `history` — confirm against
    brownie's always_transact implementation.
    """
    owner = tester.owner()
    argv['always_transact'] = True
    height = web3.eth.block_number
    result = tester.owner()
    assert (owner == result)
    # Before the undo-stub, the call leaves no new block or history entry.
    assert (web3.eth.block_number == height == len(history))
    monkeypatch.setattr('brownie.network.chain.undo', (lambda : None))
    result = tester.owner()
    tx = history[(- 1)]
    assert (owner == result)
    # With undo disabled, the call shows up as a mined `owner` transaction.
    assert (web3.eth.block_number == (height + 1) == len(history))
    assert (tx.fn_name == 'owner')
class LinkerdHeaderMapping(AmbassadorTest):
    """End-to-end test of `add_linkerd_headers` interaction with per-Mapping
    request-header add/remove settings.

    Three backends are exercised:
    - target: inherits linkerd headers, overrides the default added header and
      removes a different header than the module default.
    - target_no_header: opts out of linkerd headers (`add_linkerd_headers: false`).
    - target_add_linkerd_header_only: empty add/remove maps, so only the
      linkerd destination header is injected.
    """
    target: ServiceType

    def init(self):
        self.target = HTTP()
        self.target_no_header = HTTP(name='noheader')
        self.target_add_linkerd_header_only = HTTP(name='addlinkerdonly')

    def config(self) -> Generator[(Union[(str, Tuple[(Node, str)])], None, None)]:
        # Module enables add_linkerd_headers globally plus default header add/remove;
        # each Mapping then overrides pieces of that behavior.
        (yield (self, self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: Module\nname: ambassador\nconfig:\n add_linkerd_headers: true\n defaults:\n add_request_headers:\n fruit:\n append: False\n value: orange\n remove_request_headers:\n - x-evil-header\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: {self.target_add_linkerd_header_only.path.k8s}\nhostname: "*"\nprefix: /target_add_linkerd_header_only/\nservice: {self.target_add_linkerd_header_only.path.fqdn}\nadd_request_headers: {{}}\nremove_request_headers: []\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: {self.target_no_header.path.k8s}\nhostname: "*"\nprefix: /target_no_header/\nservice: {self.target_no_header.path.fqdn}\nadd_linkerd_headers: false\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: {self.target.path.k8s}\nhostname: "*"\nprefix: /target/\nservice: {self.target.path.fqdn}\nadd_request_headers:\n fruit:\n append: False\n value: banana\nremove_request_headers:\n- x-evilness\n')))

    def queries(self):
        # Each query carries both "evil" headers so removal behavior is observable.
        (yield Query(self.url('target/'), headers={'x-evil-header': 'evilness', 'x-evilness': 'more evilness'}, expected=200))
        (yield Query(self.url('target_no_header/'), headers={'x-evil-header': 'evilness', 'x-evilness': 'more evilness'}, expected=200))
        (yield Query(self.url('target_add_linkerd_header_only/'), headers={'x-evil-header': 'evilness', 'x-evilness': 'more evilness'}, expected=200))

    def check(self):
        # target: linkerd header present, Mapping-level add/remove wins over defaults.
        assert self.results[0].backend
        assert self.results[0].backend.request
        assert (len(self.results[0].backend.request.headers['l5d-dst-override']) > 0)
        assert (self.results[0].backend.request.headers['l5d-dst-override'] == ['{}:80'.format(self.target.path.fqdn)])
        assert (len(self.results[0].backend.request.headers['fruit']) > 0)
        assert (self.results[0].backend.request.headers['fruit'] == ['banana'])
        assert (len(self.results[0].backend.request.headers['x-evil-header']) > 0)
        assert (self.results[0].backend.request.headers['x-evil-header'] == ['evilness'])
        assert ('x-evilness' not in self.results[0].backend.request.headers)
        # target_no_header: no linkerd header; module defaults still apply.
        assert self.results[1].backend
        assert self.results[1].backend.request
        assert ('l5d-dst-override' not in self.results[1].backend.request.headers)
        assert (len(self.results[1].backend.request.headers['fruit']) > 0)
        assert (self.results[1].backend.request.headers['fruit'] == ['orange'])
        assert ('x-evil-header' not in self.results[1].backend.request.headers)
        assert (len(self.results[1].backend.request.headers['x-evilness']) > 0)
        assert (self.results[1].backend.request.headers['x-evilness'] == ['more evilness'])
        # target_add_linkerd_header_only: linkerd header only, nothing else touched.
        assert self.results[2].backend
        assert self.results[2].backend.request
        assert (len(self.results[2].backend.request.headers['l5d-dst-override']) > 0)
        assert (self.results[2].backend.request.headers['l5d-dst-override'] == ['{}:80'.format(self.target_add_linkerd_header_only.path.fqdn)])
        assert (len(self.results[2].backend.request.headers['x-evil-header']) > 0)
        assert (self.results[2].backend.request.headers['x-evil-header'] == ['evilness'])
        assert (len(self.results[2].backend.request.headers['x-evilness']) > 0)
        assert (self.results[2].backend.request.headers['x-evilness'] == ['more evilness'])
class bsn_header(experimenter):
    """Base class for Big Switch Networks experimenter messages
    (loxi-style generated code).

    Wire constants: version=6, type=4 (experimenter message),
    experimenter=6035143 (0x5c16c7 — presumably the BSN vendor id; confirm).
    Concrete message classes register themselves in `subtypes`, and unpack()
    dispatches on the subtype field peeked from the wire.
    """
    subtypes = {}
    version = 6
    type = 4
    experimenter = 6035143

    def __init__(self, xid=None, subtype=None):
        # xid stays None when unset; subtype defaults to 0.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (subtype != None):
            self.subtype = subtype
        else:
            self.subtype = 0
        return

    def pack(self):
        """Serialize to the wire format; the length field (index 2) is
        back-patched once the total size is known.

        NOTE(review): ``''.join`` of ``struct.pack`` results implies Python 2
        (str) — on Python 3 this would need ``b''.join``. Confirm target.
        """
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a message from *reader*, dispatching to a registered subtype
        class when one matches the subtype peeked at byte offset 12.
        (Generated code: no ``self`` — the caller passes only the reader.)"""
        (subtype,) = reader.peek('!L', 12)
        subclass = bsn_header.subtypes.get(subtype)
        if subclass:
            return subclass.unpack(reader)
        obj = bsn_header()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's length (4 bytes already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        obj.subtype = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        # Equality over the variable fields only (class constants are implied).
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.subtype != other.subtype):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer *q*."""
        q.text('bsn_header {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
            q.breakable()
        q.text('}')
def _mutate_array_js(array, ev):
    """Apply an in-place mutation event *ev* to *array* on the client side.

    NOTE(review): this appears to be PScript code transpiled to JavaScript —
    ``array.splice(...)`` is the JS Array API, not a Python list method.
    Confirm before applying Python refactorings here.
    """
    # Duck-typed nd-array check: anything exposing .shape and .dtype.
    is_nd = (hasattr(array, 'shape') and hasattr(array, 'dtype'))
    mutation = ev.mutation
    index = ev.index
    objects = ev.objects
    if (is_nd is True):
        # nd arrays are fixed-size: no structural in-place mutation supported.
        if (mutation == 'set'):
            raise NotImplementedError('Cannot set nd array in-place')
        elif (mutation in ('extend', 'insert', 'remove')):
            raise NotImplementedError('Cannot resize nd arrays')
        elif (mutation == 'replace'):
            raise NotImplementedError('Cannot replace items in nd array')
    else:
        if (mutation == 'remove'):
            # For 'remove', objects is a count (a number), not a list of items.
            assert isinstance(objects, float)
        elif (not isinstance(objects, list)):
            raise TypeError('Inplace list/array mutating requires a list of objects.')
        if (mutation == 'set'):
            # Replace the entire contents.
            array.splice(0, len(array), *objects)
        elif (mutation == 'insert'):
            array.splice(index, 0, *objects)
        elif (mutation == 'remove'):
            # Here `objects` is the deletion count starting at `index`.
            array.splice(index, objects)
        elif (mutation == 'replace'):
            array.splice(index, len(objects), *objects)
        else:
            raise NotImplementedError(mutation)
def test_find_width_returns_empty_array_with_too_big_threshold(data_width):
    """find_width must return an empty (0, 2) array when the threshold (10)
    is never reached by the fixture signal, for both edge directions."""
    result = scared.signal_processing.find_width(data_width, scared.signal_processing.Direction.POSITIVE, 2, 10)
    assert np.array_equal(result, np.empty((0, 2)))
    result = scared.signal_processing.find_width(data_width, scared.signal_processing.Direction.NEGATIVE, 2, 10)
    assert np.array_equal(result, np.empty((0, 2)))
def test_environ():
    """Environ behaves like a mutable env mapping but rejects mutation of a
    key after that key has been read or deleted."""
    environ = Environ()
    environ['TESTING'] = 'True'
    environ['GONE'] = '123'
    del environ['GONE']
    # Reading/membership checks "freeze" the keys involved.
    assert (environ['TESTING'] == 'True')
    assert ('GONE' not in environ)
    # Mutating an already-accessed key must raise.
    with pytest.raises(EnvironError):
        environ['TESTING'] = 'False'
    with pytest.raises(EnvironError):
        del environ['GONE']
    # A fresh instance mirrors os.environ for iteration and length.
    environ = Environ()
    assert (list(iter(environ)) == list(iter(os.environ)))
    assert (len(environ) == len(os.environ))
def mk_suggested_vote(concise_casper, mk_vote):
    """Factory fixture: returns a vote-builder that fills in the parameters
    currently recommended by the casper contract (target hash, current epoch,
    recommended source epoch) for a given validator and key."""
    def mk_suggested_vote(validator_index, validation_key):
        # Arguments are evaluated left-to-right, so the contract is queried in
        # the same order as before: target hash, current epoch, source epoch.
        return mk_vote(
            validator_index,
            concise_casper.recommended_target_hash(),
            concise_casper.current_epoch(),
            concise_casper.recommended_source_epoch(),
            validation_key,
        )
    return mk_suggested_vote
class Flag():
    """Descriptor for a command-line flag or positional argument.

    aliases may be a single string or a list of strings; description is the
    help text; dtype names the expected value type (default 'bool').
    Raises if `required` is combined with a dash-prefixed first alias.
    """

    def __init__(self, aliases, description, required=False, dtype='bool'):
        # A bare string is shorthand for a single-alias list.
        self.aliases = [aliases] if isinstance(aliases, str) else aliases
        self.description = description
        self.required = required
        self.dtype = dtype
        # NOTE: message kept verbatim; the condition fires for dash-prefixed
        # (i.e. non-positional) aliases marked required.
        if self.required and (self.aliases[0][0] == '-'):
            raise Exception('Positional arguments cannot be required!')
def parse_molden_atoms(data):
    """Extract atom info from parsed molden data.

    data: mapping with an 'atoms' list of dicts carrying 'symbol', 'xyz'
    (3 coordinates) and 'atomic_number'.
    Returns (symbols tuple, flat float ndarray of coordinates, int ndarray
    of nuclear charges).
    """
    entries = data['atoms']
    symbols = tuple(entry['symbol'] for entry in entries)
    # Flatten per-atom xyz triples into one 1-D coordinate array.
    flat_xyz = [component for entry in entries for component in entry['xyz']]
    coords = np.array(flat_xyz, dtype=float).flatten()
    nuc_charges = np.array([entry['atomic_number'] for entry in entries], dtype=int)
    return (symbols, coords, nuc_charges)
class MonoThreadEpisodeBatcher():
    """Runs complete episodes in the calling thread and returns them as one
    padded TemporalDictTensor batch.

    The agent must be history-less (``require_history()`` is False): at each
    step only the previous agent state, current observation and agent info
    are fed back in.
    """

    def __init__(self, create_agent, agent_args, create_env, env_args):
        self.agent = create_agent(**agent_args)
        assert (not self.agent.require_history())
        self.env = create_env(**env_args)
        self.n_envs = self.env.n_envs

    def execute(self, agent_info=DictTensor({}), env_info=DictTensor({})):
        # Only records the infos; the actual rollout happens lazily in get().
        self.agent_info = agent_info
        self.env_info = env_info

    def get(self):
        """Run all environments to termination and return the collected
        episode as a TemporalDictTensor (observations + agent states, with
        per-episode lengths).

        Convention: keys prefixed with '_' hold the "next" value of the
        corresponding unprefixed key (next observation / next agent state).
        Slot 0 of each buffer is cloned as a template so finished envs keep
        padding values while running envs are overwritten via boolean masks.
        """
        with torch.no_grad():
            (obs, is_running) = self.env.reset(self.env_info)
            # NOTE(review): n_elems is computed but never used below.
            n_elems = obs.n_elems()
            observations = [{k: obs[k] for k in obs.keys()}]
            states = []
            agent_state = None
            agent_info = self.agent_info
            if (agent_info is None):
                agent_info = DictTensor({})
            t = 0
            # Per-environment episode length, incremented while still running.
            length = torch.zeros(is_running.size()[0]).long()
            first_state = None
            first_info = agent_info
            while (is_running.size()[0] > 0):
                (old_agent_state, agent_output, new_agent_state) = self.agent(agent_state, obs, agent_info)
                if (len(states) == 0):
                    # First step: remember the initial state as the padding template.
                    first_state = old_agent_state
                    s = {k: old_agent_state[k] for k in old_agent_state.keys()}
                    s = {**s, **{k: agent_output[k] for k in agent_output.keys()}}
                    s = {**s, **{('_' + k): new_agent_state[k] for k in new_agent_state.keys()}}
                    states.append(s)
                else:
                    # Later steps: scatter the running envs' values into a clone
                    # of the step-0 template so finished envs keep padding.
                    s = {k: old_agent_state[k] for k in old_agent_state.keys()}
                    s = {**s, **{k: agent_output[k] for k in agent_output.keys()}}
                    s = {**s, **{('_' + k): new_agent_state[k] for k in new_agent_state.keys()}}
                    ns = {k: states[0][k].clone() for k in states[0]}
                    for k in states[0]:
                        ns[k][is_running] = s[k]
                    states.append(ns)
                # l_o / l_is_running: last observation of envs that just stepped;
                # obs / is_running: next observation of envs still running.
                ((l_o, l_is_running), (obs, is_running)) = self.env.step(agent_output)
                for k in l_o.keys():
                    observations[t][('_' + k)] = observations[0][k].clone()
                for k in l_o.keys():
                    observations[t][('_' + k)][l_is_running] = l_o[k]
                length[l_is_running] += 1
                t += 1
                if (is_running.size()[0] > 0):
                    observations.append({})
                    for k in obs.keys():
                        observations[t][k] = observations[0][k].clone()
                    for k in obs.keys():
                        observations[t][k][is_running] = obs[k]
                    # Rebuild full-width agent state/info, then narrow to the
                    # environments that are still running.
                    ag = {k: first_state[k].clone() for k in first_state.keys()}
                    for k in ag:
                        ag[k][l_is_running] = new_agent_state[k]
                    agent_state = DictTensor({k: ag[k][is_running] for k in ag})
                    ai = {k: first_info[k].clone() for k in first_info.keys()}
                    agent_info = DictTensor({k: ai[k][is_running] for k in ai})
            # Stack the per-step dicts along a new time dimension (dim=1).
            f_observations = {}
            for k in observations[0]:
                _all = [o[k].unsqueeze(1) for o in observations]
                f_observations[k] = torch.cat(_all, dim=1)
            f_states = {}
            for k in states[0]:
                _all = [o[k].unsqueeze(1) for o in states]
                f_states[k] = torch.cat(_all, dim=1)
            return TemporalDictTensor({**f_observations, **f_states}, lengths=length)

    def update(self, info):
        # Forward (e.g. new weights) to the underlying agent.
        self.agent.update(info)

    def close(self):
        # Nothing to release for the mono-thread batcher.
        pass
def autodoc_post_process_docstring(app, what, name, obj, options, lines):
    """Sphinx autodoc hook: rewrite markdown-flavoured docstrings into reST.

    Mutates *lines* in place: strips ANSI escape codes (when `ansi_clean` is
    available), converts ```python fences into `::` literal blocks, removes
    stray fences, turns single backticks into bold markers, and rewrites
    #-style headers into underlined reST titles. Exceptions are printed and
    re-raised so Sphinx reports the failing docstring.
    """
    try:
        if ansi_clean:
            # Optional dependency: scrub terminal escape codes first.
            for (il, line) in enumerate(lines):
                lines[il] = ansi_clean(line)

        def _sub_codeblock(match):
            # Re-emit a fenced code block as an indented reST literal block.
            code = match.group(1)
            return '::\n\n {}'.format('\n '.join((lne for lne in code.split('\n'))))
        # reST underline characters for header levels 1-4.
        underline_map = {1: '-', 2: '=', 3: '^', 4: '"'}

        def _sub_header(match):
            groupdict = match.groupdict()
            (hashes, title) = (groupdict['hashes'], groupdict['title'])
            title = title.strip()
            # Clamp header depth to the 4 underline styles defined above.
            lvl = min(max(1, len(hashes)), 4)
            return (f'''{title}
''' + (underline_map[lvl] * len(title)))
        doc = '\n'.join(lines)
        doc = re.sub('```python\\s*\\n+(.*?)```', _sub_codeblock, doc, flags=(re.MULTILINE + re.DOTALL))
        doc = re.sub('```', '', doc, flags=re.MULTILINE)
        doc = re.sub('`{1}', '**', doc, flags=re.MULTILINE)
        doc = re.sub('^(?P<hashes>#{1,4})\\s*?(?P<title>.*?)$', _sub_header, doc, flags=re.MULTILINE)
        newlines = doc.split('\n')
        # Replace the caller's list contents in place (autodoc contract).
        lines[:] = newlines[:]
    except Exception as err:
        print(f'Post-process docstring exception: {err}')
        raise
def build(abcd, node, net, gv, outfile, tpl=template_html, **args):
    """Render an ABCD model plus its Petri net into HTML fragments and,
    when *tpl* and *outfile* are given, write the filled template to disk.

    Returns the dict of fragments (filename, css, jscode, headers, abcd
    markup, tree markup, svg), updated with any extra **args.

    NOTE(review): the *abcd* parameter is rebound immediately and never read —
    it appears to be a dead parameter kept for API compatibility; confirm
    callers before removing.
    """
    abcd = ABCD2HTML(node)
    pnet = Net2HTML(net, gv, abcd)
    d = {'filename': node.st.filename, 'css': template_css, 'jscode': template_jscode, 'headers': template_headers, 'abcd': abcd.html(), 'tree': pnet.html(), 'svg': pnet.svg()}
    d.update(args)
    if ((tpl is not None) and outfile):
        with codecs.open(outfile, 'w', 'utf-8') as out:
            # Old-style %-template substitution against the fragment dict.
            out.write((tpl % d))
    return d
def as_ctrait(obj):
    """Convert *obj* into a CTrait.

    Classes exposing `instantiate_and_get_ctrait` are instantiated and
    converted; instances exposing `as_ctrait` convert themselves. Anything
    else raises TypeError.
    """
    if isinstance(obj, type):
        if hasattr(obj, 'instantiate_and_get_ctrait'):
            return obj.instantiate_and_get_ctrait()
    elif hasattr(obj, 'as_ctrait'):
        return obj.as_ctrait()
    raise TypeError('Object {!r} does not support conversion to CTrait'.format(obj))
def fix_citations(ao_no, citation_type, citations):
    """Apply manual corrections to the auto-extracted citation set for an
    advisory opinion.

    ao_no: advisory opinion number; citation_type: one of the lookup keys
    ('ao', 'statute', 'regulation'); citations: a set, mutated in place and
    returned. Known false positives are discarded and known misses added.
    """
    CITATION_EXCLUDE_LOOKUP = {'2017-03': {'ao': ['2010-11', '2011-12', '2015-16']}, '2019-11': {'statute': [(52, '301')]}}
    CITATION_INCLUDE_LOOKUP = {'1999-40': {'regulation': [(11, 110, 3)]}, '2019-14': {'regulation': [(11, 102, 1), (11, 102, 6), (11, 104, 1), (11, 300, 10)]}, '2019-11': {'regulation': [(11, 110, 11)]}, '2018-15': {'regulation': [(11, 112, 1), (11, 112, 5), (11, 113, 1), (11, 113, 2)]}}
    to_remove = CITATION_EXCLUDE_LOOKUP.get(ao_no, {}).get(citation_type)
    for bogus_citation in (to_remove or []):
        logger.debug('Removing citation {}'.format(bogus_citation))
        citations.discard(bogus_citation)
    to_add = CITATION_INCLUDE_LOOKUP.get(ao_no, {}).get(citation_type)
    for missing_citation in (to_add or []):
        logger.debug('Adding citation {}'.format(missing_citation))
        citations.add(missing_citation)
    return citations
class PythonicProtocolSpecification():
    """In-memory representation of a protocol specification as consumed by
    the Python code generator. Holds speech acts, performative metadata,
    dialogue rules and the typing imports the generated module will need."""

    def __init__(self) -> None:
        # Speech acts and performative metadata.
        self.speech_acts = {}
        self.all_performatives = []
        self.all_unique_contents = {}
        # Custom type bookkeeping.
        self.all_custom_types = []
        self.custom_custom_types = {}
        # Dialogue structure: entry points, reply graph, terminal states.
        self.initial_performatives = []
        self.reply = {}
        self.terminal_performatives = []
        self.roles = []
        self.end_states = []
        self.keep_terminal_state_dialogues = False
        # Which `typing` names the generated module imports by default.
        self.typing_imports = {
            'Set': True,
            'Tuple': True,
            'cast': True,
            'FrozenSet': False,
            'Dict': False,
            'Union': False,
            'Optional': False,
        }
def __find_commit_message__(sha):
    """Return the commit message body of *sha* via ``git show --pretty=%B``.

    NOTE(review): the decode('unicode_escape') → encode('latin-1') →
    decode('utf-8') round-trip looks like a legacy mojibake workaround; it
    can corrupt non-Latin-1 text. Confirm intent before changing.
    """
    # filter(None, ...) is a no-op here (no falsy items in the argv list).
    git_show_r = subprocess.Popen(filter(None, ['git', 'show', '-s', '--pretty=%B', '-w', sha]), bufsize=1, stdout=subprocess.PIPE).stdout
    commit_message = git_show_r.read()
    git_show_r.close()
    commit_message = commit_message.strip().decode('unicode_escape', 'ignore')
    commit_message = commit_message.encode('latin-1', 'replace')
    return commit_message.decode('utf-8', 'replace')
def run_yamllint(path, indent=0):
path = Path(path)
try:
import yamllint.config
import yamllint.linter
except ImportError:
return ''
result = []
with path.open('r', encoding='utf-8') as f:
problems = yamllint.linter.run(f, yamllint.config.YamlLintConfig(json.dumps(YAML_LINT_CONFIG)))
for problem in problems:
result.append(((((((' ' * indent) + str(path)) + ':') + str(problem.line)) + ': ') + problem.message))
return '\n'.join(result) |
class TestStylesheetsCSS(util.PluginTestCase):
    """Spell-check pipeline test for the stylesheets filter in CSS mode:
    only comment text should be spell-checked, not selectors/properties."""

    def setup_fs(self):
        # Config: feed *.txt files through the stylesheets filter as CSS,
        # grouping adjacent comments into one chunk.
        config = self.dedent("\n matrix:\n - name: css\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.stylesheets:\n stylesheets: css\n group_comments: true\n ").format(self.tempdir)
        self.mktemp('.stylesheets_css.yml', config, 'utf-8')

    def test_stylesheets_css(self):
        # Misspellings live only in the CSS comment; tokens like 'other_id'
        # in the selector must not be reported.
        bad_words = ['flga', 'graet']
        good_words = ['yes', 'word']
        template = self.dedent('\n /*\n {}\n */\n p#id.class, p.other_id.class {{\n color: white;\n }}\n ').format('\n'.join((bad_words + good_words)))
        self.mktemp('test.txt', template, 'utf-8')
        self.assert_spellcheck('.stylesheets_css.yml', bad_words)
class Command(BaseCommand):
    """Management command: upload the Postgres reference tables (practices,
    presentations, statistics, org hierarchies and the latest month of
    prescribing) into the 'hscic' BigQuery dataset."""

    def handle(self, *args, **kwargs):
        update_bnf_table()
        client = BQClient('hscic')
        # Straight table copies from Postgres models.
        table = client.get_table('practices')
        table.insert_rows_from_pg(models.Practice, schemas.PRACTICE_SCHEMA)
        table = client.get_table('presentation')
        table.insert_rows_from_pg(models.Presentation, schemas.PRESENTATION_SCHEMA, transformer=schemas.presentation_transform)
        # Practice statistics: the schema's first/last column names differ
        # from the model fields, so override them explicitly.
        table = client.get_table('practice_statistics')
        columns = [field.name for field in schemas.PRACTICE_STATISTICS_SCHEMA]
        columns[0] = 'date'
        columns[(- 1)] = 'practice_id'
        table.insert_rows_from_pg(models.PracticeStatistics, schema=schemas.PRACTICE_STATISTICS_SCHEMA, columns=columns, transformer=schemas.statistics_transform)
        # Append only months newer than what's already in the all-years table.
        sql = 'SELECT MAX(month) FROM {hscic}.practice_statistics_all_years'
        results = client.query(sql)
        if (results.rows[0][0] is None):
            # Empty table: start from a sentinel date far in the past.
            last_uploaded_practice_statistics_date = datetime.date(1900, 1, 1)
        else:
            last_uploaded_practice_statistics_date = results.rows[0][0].date()
        table = client.get_table('practice_statistics_all_years')
        sql = "SELECT *\n FROM {hscic}.practice_statistics\n WHERE month > TIMESTAMP('{date}')"
        substitutions = {'date': last_uploaded_practice_statistics_date}
        table.insert_rows_from_query(sql, write_disposition='WRITE_APPEND', substitutions=substitutions)
        # Organisational hierarchy tables.
        table = client.get_table('pcns')
        table.insert_rows_from_pg(models.PCN, schemas.PCN_SCHEMA)
        table = client.get_table('ccgs')
        table.insert_rows_from_pg(models.PCT, schemas.CCG_SCHEMA, transformer=schemas.ccgs_transform)
        table = client.get_table('stps')
        table.insert_rows_from_pg(models.STP, schemas.STP_SCHEMA)
        table = client.get_table('regional_teams')
        table.insert_rows_from_pg(models.RegionalTeam, schemas.REGIONAL_TEAM_SCHEMA)
        # Snapshot the latest month of prescribing into a per-month table.
        date = models.ImportLog.objects.latest_in_category('prescribing').current_at
        table = client.get_table(('prescribing_' + date.strftime('%Y_%m')))
        sql = "SELECT * FROM {hscic}.prescribing_v2\n WHERE month = TIMESTAMP('{date}')"
        substitutions = {'date': date}
        table.insert_rows_from_query(sql, substitutions=substitutions)
def find_ancestors(starting_resource, full_name):
    """Build the resource ancestry chain encoded in *full_name*, rooted at
    *starting_resource*.

    The starting resource itself is skipped if it re-appears in the parsed
    chain; components that fail to construct are silently dropped.
    Returns the list of resources, starting with *starting_resource*.
    """
    ancestors = [starting_resource]
    for res_type, res_id in utils.get_resources_from_full_name(full_name):
        # Don't duplicate the root when the full name includes it.
        if res_type == starting_resource.type and res_id == starting_resource.id:
            continue
        resource = resource_util.create_resource(res_id, res_type)
        if resource:
            ancestors.append(resource)
    return ancestors
def test_interference_graph_of_test_loop_test1_without_dead_code(construct_graph_test_loop_1, variable_x, variable_u, variable_v, aliased_variable_y):
    """Interference graph of loop test 1 after removing dead definitions.

    Two phi functions and one dead assignment are stripped from the CFG first;
    the graph must then contain exactly the remaining live SSA variables, and
    each variable's neighbors are checked exhaustively.
    """
    (nodes, cfg) = construct_graph_test_loop_1
    # Remove dead code: the phis defining x3/u2 and the dead write to u4.
    nodes[1].remove_instruction(Phi(variable_x[3], [variable_x[2], variable_x[4]]))
    nodes[1].remove_instruction(Phi(variable_u[2], [variable_u[1], variable_u[3]]))
    nodes[3].remove_instruction(Assignment(variable_u[4], Constant(0)))
    interference_graph = InterferenceGraph(cfg)
    # Node set: x1,x2,x4, u1,u3, v1-v3 and y1-y5 (x3, u2, u4 are gone).
    assert (set(interference_graph.nodes) == (((set((variable_x[1:3] + [variable_x[4]])) | {variable_u[1], variable_u[3]}) | set(variable_v[1:4])) | set(aliased_variable_y[1:6])))
    # Exhaustive adjacency check for every remaining variable.
    assert ((set(interference_graph.neighbors(variable_x[1])) == {aliased_variable_y[1]}) and (set(interference_graph.neighbors(variable_x[2])) == {aliased_variable_y[2]}) and (set(interference_graph.neighbors(variable_x[4])) == {aliased_variable_y[4], variable_v[2]}) and (set(interference_graph.neighbors(variable_u[1])) == {aliased_variable_y[2]}) and (set(interference_graph.neighbors(variable_u[3])) == {aliased_variable_y[4], variable_v[2]}) and (set(interference_graph.neighbors(variable_v[1])) == {aliased_variable_y[3]}) and (set(interference_graph.neighbors(variable_v[2])) == {aliased_variable_y[4], variable_x[4], variable_u[3], aliased_variable_y[5]}) and (set(interference_graph.neighbors(variable_v[3])) == {aliased_variable_y[5]}) and (set(interference_graph.neighbors(aliased_variable_y[1])) == {variable_x[1]}) and (set(interference_graph.neighbors(aliased_variable_y[2])) == {variable_x[2], variable_u[1]}) and (set(interference_graph.neighbors(aliased_variable_y[3])) == {variable_v[1]}) and (set(interference_graph.neighbors(aliased_variable_y[4])) == {variable_x[4], variable_u[3], variable_v[2]}) and (set(interference_graph.neighbors(aliased_variable_y[5])) == {variable_v[2], variable_v[3]}))
class FinanceForecastEvent(BaseDocType):
    """Elasticsearch document mapping for finance forecast events
    (earnings-forecast changes for a security)."""
    id = Keyword()
    timestamp = Date()
    reportPeriod = Date()
    securityId = Keyword()
    type = Keyword()
    description = Keyword()
    # Forecast EPS figures: previous value, change start point and delta.
    preEPS = Float()
    changeStart = Float()
    change = Float()

    class Meta():
        # Target index/doc type; `_all` disabled, strict dynamic mapping so
        # documents with unknown fields are rejected.
        index = 'finance_forecast_event'
        doc_type = 'doc'
        all = MetaField(enabled=False)
        dynamic = MetaField('strict')
def test_too_few_zvals(single_cell_roff_grid):
    """xtgeo_zcorn must raise when zvals holds fewer values than split_enz
    implies (all 8 corners split 4 ways, but only 5 z-values supplied)."""
    roff_grid = single_cell_roff_grid
    roff_grid.split_enz = np.full(8, fill_value=4, dtype=np.uint8).tobytes()
    roff_grid.zvals = np.ones(5, dtype=np.float32)
    with pytest.raises(ValueError, match='size of zdata'):
        roff_grid.xtgeo_zcorn()
class OptionSeriesVennClusterZones(Options):
    """Accessors for the cluster-zones options of a venn series; all state is
    delegated to the ``Options`` base via ``_config_get`` / ``_config`` /
    ``_config_sub_data``.

    Fix: removed a stray bare ``_.setter`` expression that sat between the
    ``from_`` getter and setter defs — it was the mangled remnant of an
    ``@from_.setter`` decorator and raised NameError when the class body
    executed at import time. With it gone the class matches its sibling
    option classes.

    NOTE(review): as in the sibling classes, each name is defined twice
    (getter-shaped then setter-shaped); without decorators the second def
    shadows the first, so only the setter form is reachable. These look like
    stripped ``@property`` / ``@name.setter`` pairs — confirm against the
    code generator.
    """

    def className(self):
        # Getter form: current configured value, defaulting to None.
        return self._config_get(None)

    def className(self, text: str):
        # Setter form: store the raw value (not emitted as JS code).
        self._config(text, js_type=False)

    def from_(self):
        return self._config_get(None)

    def from_(self, num: float):
        self._config(num, js_type=False)

    def marker(self) -> 'OptionSeriesVennClusterZonesMarker':
        # Typed sub-configuration stored under the 'marker' key.
        return self._config_sub_data('marker', OptionSeriesVennClusterZonesMarker)

    def to(self):
        return self._config_get(None)

    def to(self, num: float):
        self._config(num, js_type=False)
def test_missing_values_param_functionality():
    """DropConstantFeatures `missing_values` modes: 'raise' rejects NaN input,
    'ignore' excludes NaN when judging constancy, 'include' counts NaN as a
    regular value (so quasi-constant columns with NaN are dropped too)."""
    df = {'Name': ['tom', 'nick', 'krish', 'jack'], 'City': ['London', 'Manchester', 'Liverpool', 'Bristol'], 'Age': [20, 21, 19, 18], 'Marks': [0.9, 0.8, 0.7, 0.6], 'dob': pd.date_range('2020-02-24', periods=4, freq='T'), 'const_feat_num': [1, 1, 1, np.nan], 'const_feat_cat': ['a', 'a', 'a', 'a'], 'quasi_feat_num': [1, 1, 1, 2], 'quasi_feat_cat': ['a', 'a', 'a', np.nan]}
    df = pd.DataFrame(df)
    # 'raise': a NaN-containing frame must be rejected at fit time.
    transformer = DropConstantFeatures(missing_values='raise')
    with pytest.raises(ValueError):
        transformer.fit(df)
    # 'ignore': NaNs are dropped before the constancy check.
    transformer = DropConstantFeatures(missing_values='ignore').fit(df)
    constant = ['const_feat_num', 'const_feat_cat', 'quasi_feat_cat']
    assert (transformer.features_to_drop_ == constant)
    pd.testing.assert_frame_equal(df.drop(constant, axis=1), transformer.transform(df))
    # 'include' with tol=0.7: quasi-constant features are dropped as well.
    transformer = DropConstantFeatures(tol=0.7, missing_values='include').fit(df)
    qconstant = ['const_feat_num', 'const_feat_cat', 'quasi_feat_num', 'quasi_feat_cat']
    assert (transformer.features_to_drop_ == qconstant)
    pd.testing.assert_frame_equal(df.drop(qconstant, axis=1), transformer.transform(df))
def main():
    """Command-line entry point: collect builder, SDRAM-SoC and Vivado
    arguments, construct the MinSoC and run the full build."""
    parser = argparse.ArgumentParser(description='LiteX SoC on Arty')
    # Each helper registers its own argument group on the shared parser.
    builder_args(parser)
    soc_sdram_args(parser)
    vivado_build_args(parser)
    args = parser.parse_args()
    cls = MinSoC
    soc = cls(**soc_sdram_argdict(args))
    builder = Builder(soc, **builder_argdict(args))
    builder.build(**vivado_build_argdict(args))
def usage():
    """Print the license banner and command-line help, then exit(1).

    Depends on module-level `vernum` for the version string.
    """
    print('++')
    print((((('| ' + sys.argv[0]) + ' Version ') + vernum) + ' |'))
    print('| This program is free software; you can redistribute it and/or modify |')
    print('| it under the terms of the GNU General Public License as published by |')
    print('| the Free Software Foundation; either version 2 of the License, or |')
    print('| (at your option) any later version. |')
    print('| |')
    print('| Author: Garcia Sebastian, |')
    print('| Author: Veronica Valeros, vero. |')
    print('| www.mateslab.com.ar - Argentina |')
    print('++')
    print()
    print(('\nUsage: %s <options>' % sys.argv[0]))
    print('Options:')
    print(' -u, --url URL to start crawling.')
    print(' -m, --max-amount-to-crawl Max deep to crawl. Using breadth first algorithm')
    print(' -w, --write-to-file Save summary of crawling to a text file. Output directory is created automatically.')
    print(' -s, --subdomains Also scan subdomains matching with url domain.')
    print(' -r, --follow-redirect Do not follow redirect. By default follow redirection at main URL.')
    print(" -f, --fetch-files Download there every file detected in 'Files' directory. Overwrite existing content.")
    print(" -F, --file-extension Download files specified by comma separated extensions. This option also activates 'fetch-files' option. 'Ex.: -F pdf,xls,doc' ")
    print(" -d, --docs-files Download docs files:xls,pdf,doc,docx,txt,odt,gnumeric,csv, etc. This option also activates 'fetch-files' option.")
    print(" -E, --exclude-extensions Do not download files that matches with this extensions. Options '-f','-F' or '-d' needed.")
    print(' -h, --help Show this help message and exit.')
    print(' -V, --version Output version information and exit.')
    print(' -v, --verbose Be verbose')
    print(' -D, --debug Debug.')
    print()
    # Help is always followed by a non-zero exit.
    sys.exit(1)
def base_keyword(name, explicit_prefix=False, require_whitespace=False):
    """Build a grammar element matching *name* as a keyword.

    require_whitespace additionally demands a following whitespace character
    (via lookahead). When explicit_prefix is given (a suppressible grammar
    element) and *name* is a reserved/explicitly-allowed keyword variable,
    the keyword may optionally be preceded by that prefix.
    """
    lookahead = '(?=\\s)' if require_whitespace else ''
    base_kwd = regex_item(name + '\\b' + lookahead)
    if explicit_prefix and name in (reserved_vars + allow_explicit_keyword_vars):
        return combine(Optional(explicit_prefix.suppress()) + base_kwd)
    return base_kwd
class RxMessageBroker(object):
    """Registers nodes' reactive I/O channels and wires them together.

    Addresses whose producer is registered in-process are connected directly
    via rx streams (tracked in ``connected_rx``); every other address is
    bridged through the communication backend's publish/subscribe layer
    (tracked in ``connected_bnd``).  Channels awaiting connection live in
    ``disconnected`` until :meth:`connect_io` resolves them.
    """

    def __init__(self, owner, backend: 'Backend'):
        self.owner = owner
        self.backend = backend
        self.effective_log_level = backend.log_level
        # Condition used by __getattribute__ to serialize all method calls.
        self.cond = Condition()
        self.rx_connectable = dict()
        self.node_io = dict()
        self.disconnected = dict()
        self.connected_bnd = dict()
        self.connected_rx = dict()
        self._publishers = []
        self.subscribers = []
        self.disposables = []

    def __getattribute__(self, name):
        # Wrap every bound method so it executes while holding self.cond,
        # making the broker's public API thread-safe.
        attr = super(RxMessageBroker, self).__getattribute__(name)
        if isinstance(attr, types.MethodType):
            attr = thread_safe_wrapper(attr, self.cond)
        return attr

    def add_rx_objects(self, node_name, node=None, inputs=tuple(), outputs=tuple(), feedthrough=tuple(), state_inputs=tuple(), state_outputs=tuple(), targets=tuple(), node_inputs=tuple(), node_outputs=tuple(), disposable: rx.disposable.CompositeDisposable=None):
        """Register the reactive I/O channels of node *node_name*.

        Outputs (and state/node outputs) are immediately published on the
        backend; inputs, feedthroughs, state inputs, targets and node inputs
        start out in ``disconnected`` and are wired later by
        :meth:`connect_io`.  On the first registration of a node the *node*
        reference must be provided.
        """
        if disposable:
            self.disposables.append(disposable)
        if node:
            ns = node.ns
        else:
            ns = self.node_io[node_name]['node'].ns
        tick_address = (ns + '/engine/outputs/tick')
        # Every output address must be globally unique (the engine tick is
        # exempt because multiple registrations may reference it).
        for i in outputs:
            if (i['address'] == tick_address):
                continue
            assert (i['address'] not in self.rx_connectable), ('Non-unique output (%s). All output names must be unique.' % i['address'])
            self.rx_connectable[i['address']] = dict(rx=i['msg'], source=i, node_name=node_name, rate=i['rate'])
        if (node_name not in self.node_io):
            # Fix: the original message contained a '%s' placeholder that was
            # never formatted; interpolate the node name so the error is useful.
            assert (node is not None), ('No reference to Node "%s" was provided, during the first attempt to register it.' % node_name)
            self.node_io[node_name] = dict(node=node, inputs={}, outputs={}, feedthrough={}, state_inputs={}, state_outputs={}, targets={}, node_inputs={}, node_outputs={})
            self.disconnected[node_name] = dict(inputs={}, feedthrough={}, state_inputs={}, targets={}, node_inputs={})
            self.connected_bnd[node_name] = dict(inputs={}, feedthrough={}, state_inputs={}, targets={}, node_inputs={})
            self.connected_rx[node_name] = dict(inputs={}, feedthrough={}, state_inputs={}, targets={}, node_inputs={})
        # Channels registered during this call; merged into node_io at the end.
        n = dict(inputs={}, outputs={}, feedthrough={}, state_inputs={}, state_outputs={}, targets={}, node_inputs={}, node_outputs={})
        for i in inputs:
            address = i['address']
            cname_address = f"{i['name']}:{address}"
            self._assert_already_registered(cname_address, self.node_io[node_name], 'inputs')
            self._assert_already_registered(cname_address, n, 'inputs')
            n['inputs'][cname_address] = {'rx': i['msg'], 'disposable': None, 'source': i, 'dtype': i['dtype'], 'processor': i['processor'], 'window': i['window'], 'status': 'disconnected'}
            n['inputs'][(cname_address + '/reset')] = {'rx': i['reset'], 'disposable': None, 'source': i, 'dtype': 'int64', 'status': 'disconnected'}
        for i in outputs:
            address = i['address']
            cname_address = f"{i['name']}:{address}"
            self._assert_already_registered(cname_address, self.node_io[node_name], 'outputs')
            self._assert_already_registered(cname_address, n, 'outputs')
            n['outputs'][cname_address] = {'rx': i['msg'], 'disposable': None, 'source': i, 'dtype': i['dtype'], 'rate': i['rate'], 'processor': i['processor'], 'status': ''}
            n['outputs'][(cname_address + '/reset')] = {'rx': i['reset'], 'disposable': None, 'source': i, 'dtype': 'int64', 'status': ''}
            # Bridge each output (and its reset companion) onto the backend.
            i['msg_pub'] = self.backend.Publisher(i['address'], i['dtype'])
            d = i['msg'].subscribe(on_next=i['msg_pub'].publish, on_error=(lambda e: print('Error : {0}'.format(e))))
            self.disposables.append(d)
            self._publishers.append(i['msg_pub'])
            i['reset_pub'] = self.backend.Publisher((i['address'] + '/reset'), n['outputs'][(cname_address + '/reset')]['dtype'])
            d = i['reset'].subscribe(on_next=i['reset_pub'].publish, on_error=(lambda e: print('Error : {0}'.format(e))))
            self.disposables.append(d)
            self._publishers.append(i['reset_pub'])
        for i in feedthrough:
            address = i['address']
            cname_address = f"{i['feedthrough_to']}:{address}"
            self._assert_already_registered(cname_address, self.node_io[node_name], 'feedthrough')
            self._assert_already_registered(cname_address, n, 'feedthrough')
            n['feedthrough'][cname_address] = {'rx': i['msg'], 'disposable': None, 'source': i, 'dtype': i['dtype'], 'processor': i['processor'], 'window': i['window'], 'status': 'disconnected'}
            n['feedthrough'][(cname_address + '/reset')] = {'rx': i['reset'], 'disposable': None, 'source': i, 'dtype': 'int64', 'status': 'disconnected'}
        for i in state_outputs:
            address = i['address']
            cname_address = f"{i['name']}:{address}"
            self._assert_already_registered(cname_address, self.node_io[node_name], 'state_outputs')
            self._assert_already_registered(cname_address, n, 'state_outputs')
            n['state_outputs'][cname_address] = {'rx': i['msg'], 'disposable': None, 'source': i, 'dtype': i['dtype'], 'status': ''}
            if ('processor' in i):
                n['state_outputs'][cname_address]['processor'] = i['processor']
            i['msg_pub'] = self.backend.Publisher(i['address'], i['dtype'])
            d = i['msg'].subscribe(on_next=i['msg_pub'].publish, on_error=(lambda e: print('Error : {0}'.format(e))))
            self.disposables.append(d)
            self._publishers.append(i['msg_pub'])
        for i in state_inputs:
            address = i['address']
            try:
                cname_address = f"{i['name']}:{address}"
            except KeyError:
                # Entries without a 'name' are done-flags for the reset protocol.
                cname_address = f'done_flag:{address}'
            if ('msg' in i):
                self._assert_already_registered((cname_address + '/set'), self.node_io[node_name], 'state_inputs')
                self._assert_already_registered((cname_address + '/set'), n, 'state_inputs')
                n['state_inputs'][(cname_address + '/set')] = {'rx': i['msg'], 'disposable': None, 'source': i, 'dtype': i['dtype'], 'processor': i['processor'], 'status': 'disconnected'}
            if ((cname_address + '/done') not in n['state_outputs'].keys()):
                self._assert_already_registered((cname_address + '/done'), self.node_io[node_name], 'state_inputs')
                self._assert_already_registered((cname_address + '/done'), n, 'state_inputs')
                n['state_inputs'][(cname_address + '/done')] = {'rx': i['done'], 'disposable': None, 'source': i, 'dtype': 'bool', 'status': 'disconnected'}
        for i in targets:
            address = i['address']
            cname_address = f"{i['name']}:{address}"
            self._assert_already_registered((cname_address + '/set'), self.node_io[node_name], 'targets')
            self._assert_already_registered((cname_address + '/set'), n, 'targets')
            n['targets'][(cname_address + '/set')] = {'rx': i['msg'], 'disposable': None, 'source': i, 'dtype': i['dtype'], 'processor': i['processor'], 'status': 'disconnected'}
        for i in node_inputs:
            address = i['address']
            cname_address = f"{i['name']}:{address}"
            self._assert_already_registered(cname_address, self.node_io[node_name], 'node_inputs')
            self._assert_already_registered(cname_address, n, 'node_inputs')
            n['node_inputs'][cname_address] = {'rx': i['msg'], 'disposable': None, 'source': i, 'dtype': i['dtype'], 'status': 'disconnected'}
        for i in node_outputs:
            address = i['address']
            cname_address = f"{i['name']}:{address}"
            self._assert_already_registered(cname_address, self.node_io[node_name], 'node_outputs')
            self._assert_already_registered(cname_address, n, 'node_outputs')
            n['node_outputs'][cname_address] = {'rx': i['msg'], 'disposable': None, 'source': i, 'dtype': i['dtype'], 'status': ''}
            i['msg_pub'] = self.backend.Publisher(i['address'], i['dtype'])
            d = i['msg'].subscribe(on_next=i['msg_pub'].publish, on_error=(lambda e: print('Error : {0}'.format(e))))
            self.disposables.append(d)
            self._publishers.append(i['msg_pub'])
        # Merge the newly registered channels into the per-node bookkeeping;
        # input-like channels additionally start out as disconnected.
        for key in n.keys():
            self.node_io[node_name][key].update(n[key])
        for key in ('inputs', 'feedthrough', 'state_inputs', 'targets', 'node_inputs'):
            self.disconnected[node_name][key].update(n[key].copy())

    def print_io_status(self, node_names=None):
        """Pretty-print the connection status of every channel of the given
        node(s).  Outputs print cyan; inputs print red (disconnected), green
        (rx-connected) or blue (backend-connected)."""
        if (node_names is None):
            node_names = self.node_io.keys()
        elif isinstance(node_names, str):
            node_names = [node_names]
        for node_name in node_names:
            cprint((('OWNER "%s"' % self.owner).ljust(15, ' ') + ('| OVERVIEW NODE "%s" ' % node_name).ljust(180, ' ')), attrs=['bold', 'underline'])
            for key in ('inputs', 'feedthrough', 'state_inputs', 'targets', 'node_inputs', 'outputs', 'state_outputs', 'node_outputs'):
                if (len(self.node_io[node_name][key]) == 0):
                    continue
                for cname_address in self.node_io[node_name][key].keys():
                    color = None
                    if (key in ('outputs', 'node_outputs', 'state_outputs')):
                        color = 'cyan'
                    else:
                        # An input-like channel must appear in exactly one of
                        # the three connection-state tables.
                        if (cname_address in self.disconnected[node_name][key]):
                            color = 'red'
                        if (cname_address in self.connected_rx[node_name][key]):
                            assert (color is None), f'Duplicate connection status for address ({cname_address}).'
                            color = 'green'
                        if (cname_address in self.connected_bnd[node_name][key]):
                            assert (color is None), f'Duplicate connection status for address ({cname_address}).'
                            color = 'blue'
                        # Fix: interpolate the actual address (original printed
                        # the literal text "cname_address").
                        assert (color is not None), f'Address ({cname_address}) not found in self.(disconnected, connected_rx, connected_bnd).'
                    status = self.node_io[node_name][key][cname_address]['status']
                    entry = self.node_io[node_name][key][cname_address]
                    key_str = ('%s' % key).ljust(15, ' ')
                    address_str = ('| %s ' % cname_address).ljust(50, ' ')
                    dtype_str = ('| %s ' % entry['dtype']).ljust(10, ' ')
                    if ('processor' in entry):
                        processor_str = ('| %s ' % entry['processor'].__class__.__name__).ljust(23, ' ')
                    else:
                        processor_str = ('| %s ' % '').ljust(23, ' ')
                    if ('window' in entry):
                        window_str = ('| %s ' % entry['window']).ljust(8, ' ')
                    else:
                        window_str = ('| %s ' % '').ljust(8, ' ')
                    if ('rate' in entry):
                        rate_str = ('|' + ('%s' % entry['rate']).center(3, ' '))
                    else:
                        rate_str = ('|' + ''.center(3, ' '))
                    status_str = ('| %s' % status).ljust(60, ' ')
                    log_msg = ((((((key_str + rate_str) + address_str) + dtype_str) + processor_str) + window_str) + status_str)
                    cprint(log_msg, color)
        print(' '.center(140, ' '))

    def connect_io(self, print_status=True):
        """Connect every pending (disconnected) channel.

        Addresses produced in-process are subscribed directly to the producer's
        rx stream; all others are subscribed to a backend topic.  Status lines
        are printed unless *print_status* is falsy or the effective log level
        is above DEBUG.
        """
        if (self.effective_log_level > DEBUG):
            print_status = False
        for (node_name, node) in self.disconnected.items():
            num_disconnected = 0
            for (_key, addresses) in node.items():
                num_disconnected += len(addresses)
            if (num_disconnected == 0):
                continue
            (print_status and cprint((('OWNER "%s"' % self.owner).ljust(15, ' ') + ('| CONNECTING NODE "%s" ' % node_name).ljust(180, ' ')), attrs=['bold', 'underline']))
            for (key, addresses) in node.items():
                # list() because we pop from `addresses` while iterating.
                for cname_address in list(addresses.keys()):
                    (_, address) = self._split_cname_address(cname_address)
                    entry = addresses[cname_address]
                    assert (cname_address not in self.connected_rx[node_name][key]), f'Address ({cname_address}) of this node ({node_name}) already connected via rx.'
                    assert (cname_address not in self.connected_bnd[node_name][key]), f'Address ({cname_address}) of this node ({node_name}) already connected via backend.'
                    if (address in self.rx_connectable.keys()):
                        # Producer lives in-process: connect directly via rx.
                        color = 'green'
                        status = 'Rx'.ljust(4, ' ')
                        entry['rate'] = self.rx_connectable[address]['rate']
                        rate_str = f"|{str(entry['rate']).center(3, ' ')}"
                        node_str = f"| {self.rx_connectable[address]['node_name'].ljust(40, ' ')}"
                        dtype_str = f"| {self.rx_connectable[address]['source']['dtype']}".ljust(12, ' ')
                        processor_str = f"| {self.rx_connectable[address]['source']['processor'].__class__.__name__}".ljust(12, ' ')
                        status += ((node_str + dtype_str) + processor_str)
                        self.connected_rx[node_name][key][cname_address] = entry
                        T = self.rx_connectable[address]['rx']
                    else:
                        # Producer is remote: subscribe through the backend.
                        color = 'blue'
                        status = f'{self.backend.BACKEND} |'.ljust(5, ' ')
                        rate_str = ('|' + ''.center(3, ' '))
                        dtype = entry['dtype']
                        self.connected_bnd[node_name][key][cname_address] = entry
                        T = from_topic(self.backend, dtype, address, node_name, self.subscribers)
                    entry['disposable'] = T.subscribe(entry['rx'])
                    self.disposables.append(entry['disposable'])
                    entry['status'] = status
                    key_str = ('%s' % key).ljust(15, ' ')
                    address_str = ('| %s ' % cname_address).ljust(50, ' ')
                    dtype_str = ('| %s ' % entry['dtype']).ljust(10, ' ')
                    status_str = ('| Connected via %s' % status).ljust(60, ' ')
                    if ('processor' in entry):
                        processor_str = ('| %s ' % entry['processor'].__class__.__name__).ljust(23, ' ')
                    else:
                        processor_str = ('| %s ' % '').ljust(23, ' ')
                    if ('window' in entry):
                        window_str = ('| %s ' % entry['window']).ljust(8, ' ')
                    else:
                        window_str = ('| %s ' % '').ljust(8, ' ')
                    log_msg = ((((((key_str + rate_str) + address_str) + dtype_str) + processor_str) + window_str) + status_str)
                    (print_status and cprint(log_msg, color))
                    addresses.pop(cname_address)
        (print_status and print(''.center(140, ' ')))

    def _split_cname_address(self, cname_address):
        """Split 'cname:address' into (cname, address); cname may be absent."""
        res = cname_address.split(':')
        if (len(res) == 2):
            (cname, address) = res
        else:
            (cname, address) = (None, res[0])
        return (cname, address)

    def _assert_already_registered(self, name, d, component):
        """Raise if *name* was already registered under d[component]."""
        assert (name not in d[component]), f'Cannot re-register the same address ({name}) twice as "{component}".'

    def shutdown(self):
        """Dispose all rx subscriptions and unregister backend pubs/subs."""
        self.backend.logdebug(f'[{self.owner}] RxMessageBroker.shutdown() called.')
        [d.dispose() for d in self.disposables]
        [pub.unregister() for pub in self._publishers]
        [sub.unregister() for sub in self.subscribers]
# NOTE(review): the original line was a bare tuple expression — the telltale
# remnant of a stripped '@pytest.mark.parametrize' decorator, without which the
# test function would receive no arguments and error at collection time.  The
# decorator is restored below.  The two cases with id='package_or_' look
# truncated as well (the input 'package_or_' cannot yield package='pkg1');
# confirm against the upstream test file.
@pytest.mark.parametrize('value,expected', [param('abc', Key(key_or_group='abc'), id='abc'), param('abc/cde', Key(key_or_group='abc/cde'), id='abc/cde'), param('abc.cde', Key(key_or_group='abc.cde'), id='abc.cde'), param('ab-c/d-ef', Key(key_or_group='ab-c/d-ef'), id='ab-c/d-ef'), param('ab-c.d-ef', Key(key_or_group='ab-c.d-ef'), id='ab-c.d-ef'), param('$foo', Key(key_or_group='$foo'), id='dollar'), param('$foo.bar$.x$z', Key(key_or_group='$foo.bar$.x$z'), id='dollar_dotpath'), param('list.0', Key(key_or_group='list.0'), id='list.0'), param('package_or_', Key(key_or_group='package_or_group', package='pkg1'), id='package_or_'), param('package_or_', Key(key_or_group='package_or_group', package=''), id='package_or_'), param('package_or_$pkg1', Key(key_or_group='package_or_group', package='$pkg1'), id='package_dollar')])
def test_key(value: str, expected: Any) -> None:
    """Each override-key string should parse into the expected Key node."""
    ret = parse_rule(value, 'key')
    assert (ret == expected)
class TestSearchIndex(unittest.TestCase):
    """Tests for SearchIndex's set-similarity queries.

    Each query returns (set_index, similarity) pairs for every indexed set
    whose similarity with the query set meets the configured threshold.
    """

    def test_jaccard(self):
        # Jaccard similarity = |A ∩ B| / |A ∪ B|; threshold 0.1 keeps all hits.
        sets = [[1, 2, 3], [3, 4, 5], [2, 3, 4], [5, 6, 7]]
        index = SearchIndex(sets, similarity_func_name='jaccard', similarity_threshold=0.1)
        results = index.query([3, 5, 4])
        correct_results = set([(1, 1.0), (0, 0.2), (2, 0.5), (3, 0.2)])
        self.assertEqual(set(results), correct_results)

    def test_query_unique(self):
        # Each indexed set must appear at most once in the result list.
        sets = [[1, 2, 3], [3, 4, 5]]
        index = SearchIndex(sets, similarity_func_name='jaccard', similarity_threshold=0.1)
        results = index.query([3, 4, 8])
        correct_results = set([(0, (1 / 5)), (1, (2 / 4))])
        self.assertEqual(set(results), correct_results)

    def test_containment(self):
        # Containment = |A ∩ Q| / |A|; raising the threshold prunes results.
        sets = [[1, 2, 3], [3, 4, 5], [2, 3, 4], [5, 6, 7]]
        index = SearchIndex(sets, similarity_func_name='containment', similarity_threshold=0.1)
        results = index.query([3, 5, 4])
        correct_results = set([(1, 1.0), (0, (1.0 / 3.0)), (2, (2.0 / 3.0)), (3, (1.0 / 3.0))])
        self.assertEqual(set(results), correct_results)
        index = SearchIndex(sets, similarity_func_name='containment', similarity_threshold=0.5)
        results = index.query([3, 5, 4])
        correct_results = set([(1, 1.0), (2, (2.0 / 3.0))])
        self.assertEqual(set(results), correct_results)
class TestDatetime(unittest.TestCase):
    """Tests for the Datetime trait: defaults, valid/invalid assignment,
    None-handling via allow_none, and editor resolution."""

    def test_default(self):
        # Default is None unless a default epoch was declared on the trait.
        obj = HasDatetimeTraits()
        self.assertEqual(obj.simple_datetime, None)
        self.assertEqual(obj.epoch, UNIX_EPOCH)
        self.assertEqual(obj.alternative_epoch, NT_EPOCH)

    def test_assign_datetime(self):
        test_datetime = datetime.datetime(1975, 2, 13)
        obj = HasDatetimeTraits()
        obj.simple_datetime = test_datetime
        self.assertEqual(obj.simple_datetime, test_datetime)

    def test_assign_non_datetime(self):
        # A string, even ISO-formatted, must be rejected.
        obj = HasDatetimeTraits()
        with self.assertRaises(TraitError) as exception_context:
            obj.simple_datetime = '2021-02-05 12:00:00'
        message = str(exception_context.exception)
        self.assertIn('must be a datetime, but', message)

    def test_assign_date(self):
        # A plain date is not a datetime and must be rejected unchanged.
        obj = HasDatetimeTraits()
        with self.assertRaises(TraitError) as exception_context:
            obj.simple_datetime = datetime.date(1975, 2, 13)
        message = str(exception_context.exception)
        self.assertIn('must be a datetime, but', message)
        self.assertIsNone(obj.simple_datetime)

    def test_assign_none_with_allow_none_not_given(self):
        # Default behavior: None is not accepted; old value is kept.
        obj = HasDatetimeTraits(simple_datetime=UNIX_EPOCH)
        with self.assertRaises(TraitError) as exception_context:
            obj.simple_datetime = None
        self.assertEqual(obj.simple_datetime, UNIX_EPOCH)
        message = str(exception_context.exception)
        self.assertIn('must be a datetime, but', message)

    def test_assign_none_with_allow_none_false(self):
        obj = HasDatetimeTraits(none_prohibited=UNIX_EPOCH)
        with self.assertRaises(TraitError) as exception_context:
            obj.none_prohibited = None
        self.assertEqual(obj.none_prohibited, UNIX_EPOCH)
        message = str(exception_context.exception)
        self.assertIn('must be a datetime, but', message)

    def test_assign_none_with_allow_none_true(self):
        obj = HasDatetimeTraits(none_allowed=UNIX_EPOCH)
        self.assertIsNotNone(obj.none_allowed)
        obj.none_allowed = None
        self.assertIsNone(obj.none_allowed)

    # NOTE(review): the bare '_traitsui' below looks like the remnant of a
    # stripped decorator (presumably a 'requires_traitsui' skip-guard for
    # environments without traitsui installed) — confirm against upstream.
    _traitsui
    def test_get_editor(self):
        obj = HasDatetimeTraits()
        trait = obj.base_trait('epoch')
        editor_factory = trait.get_editor()
        self.assertIsInstance(editor_factory, traitsui.api.DatetimeEditor)
class TestAndroidApp(BaseProjectManagementTest):
    """Tests for the firebase-admin project-management Android app client.

    NOTE(review): this block carries source-stripping damage and will not
    compile as-is: (1) the URL constants below are truncated to a bare
    opening quote; (2) several assertions read ``excinfo.value.`` with the
    attribute name missing (likely ``http_response``); (3) ``android_app``
    looks like a ``@pytest.fixture`` whose decorator was dropped — the test
    methods all receive it as a parameter.  Restore from upstream before use.
    """
    _GET_METADATA_URL = '
    _SET_DISPLAY_NAME_URL = '
    _GET_CONFIG_URL = '
    _ADD_CERT_URL = '
    _LIST_CERTS_URL = '
    _DELETE_SHA_1_CERT_URL = '
    _DELETE_SHA_256_CERT_URL = '
    def android_app(self):
        # Presumably a pytest fixture building the app client under test.
        return project_management.android_app('1::android:deadbeef')
    def test_get_metadata_no_display_name(self, android_app):
        recorder = self._instrument_service(statuses=[200], responses=[ANDROID_APP_NO_DISPLAY_NAME_METADATA_RESPONSE])
        metadata = android_app.get_metadata()
        assert (metadata._name == 'projects/test-project-id/androidApps/1::android:deadbeef')
        assert (metadata.app_id == '1::android:deadbeef')
        assert (metadata.display_name is None)
        assert (metadata.project_id == 'test-project-id')
        assert (metadata.package_name == 'com.hello.world.android')
        assert (len(recorder) == 1)
        self._assert_request_is_correct(recorder[0], 'GET', TestAndroidApp._GET_METADATA_URL)
    def test_get_metadata(self, android_app):
        recorder = self._instrument_service(statuses=[200], responses=[ANDROID_APP_METADATA_RESPONSE])
        metadata = android_app.get_metadata()
        assert (metadata._name == 'projects/test-project-id/androidApps/1::android:deadbeef')
        assert (metadata.app_id == '1::android:deadbeef')
        assert (metadata.display_name == 'My Android App')
        assert (metadata.project_id == 'test-project-id')
        assert (metadata.package_name == 'com.hello.world.android')
        assert (len(recorder) == 1)
        self._assert_request_is_correct(recorder[0], 'GET', TestAndroidApp._GET_METADATA_URL)
    def test_get_metadata_unknown_error(self, android_app):
        # Unmapped HTTP statuses surface as UnknownError with the raw body.
        recorder = self._instrument_service(statuses=[428], responses=['precondition required error'])
        with pytest.raises(exceptions.UnknownError) as excinfo:
            android_app.get_metadata()
        message = 'Unexpected HTTP response with status: 428; body: precondition required error'
        assert (str(excinfo.value) == message)
        assert (excinfo.value.cause is not None)
        assert (excinfo.value. is not None)
        assert (len(recorder) == 1)
    def test_get_metadata_not_found(self, android_app):
        recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE])
        with pytest.raises(exceptions.NotFoundError) as excinfo:
            android_app.get_metadata()
        assert ('Failed to find the resource' in str(excinfo.value))
        assert (excinfo.value.cause is not None)
        assert (excinfo.value. is not None)
        assert (len(recorder) == 1)
    def test_set_display_name(self, android_app):
        recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})])
        new_display_name = 'A new display name!'
        android_app.set_display_name(new_display_name)
        assert (len(recorder) == 1)
        body = {'displayName': new_display_name}
        self._assert_request_is_correct(recorder[0], 'PATCH', TestAndroidApp._SET_DISPLAY_NAME_URL, body)
    def test_set_display_name_not_found(self, android_app):
        recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE])
        new_display_name = 'A new display name!'
        with pytest.raises(exceptions.NotFoundError) as excinfo:
            android_app.set_display_name(new_display_name)
        assert ('Failed to find the resource' in str(excinfo.value))
        assert (excinfo.value.cause is not None)
        assert (excinfo.value. is not None)
        assert (len(recorder) == 1)
    def test_get_config(self, android_app):
        recorder = self._instrument_service(statuses=[200], responses=[TEST_APP_CONFIG_RESPONSE])
        config = android_app.get_config()
        assert (config == 'hello world')
        assert (len(recorder) == 1)
        self._assert_request_is_correct(recorder[0], 'GET', TestAndroidApp._GET_CONFIG_URL)
    def test_get_config_not_found(self, android_app):
        recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE])
        with pytest.raises(exceptions.NotFoundError) as excinfo:
            android_app.get_config()
        assert ('Failed to find the resource' in str(excinfo.value))
        assert (excinfo.value.cause is not None)
        assert (excinfo.value. is not None)
        assert (len(recorder) == 1)
    def test_get_sha_certificates(self, android_app):
        recorder = self._instrument_service(statuses=[200], responses=[GET_SHA_CERTIFICATES_RESPONSE])
        certs = android_app.get_sha_certificates()
        assert (set(certs) == set([SHA_1_CERTIFICATE, SHA_256_CERTIFICATE]))
        assert (len(recorder) == 1)
        self._assert_request_is_correct(recorder[0], 'GET', TestAndroidApp._LIST_CERTS_URL)
    def test_get_sha_certificates_not_found(self, android_app):
        recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE])
        with pytest.raises(exceptions.NotFoundError) as excinfo:
            android_app.get_sha_certificates()
        assert ('Failed to find the resource' in str(excinfo.value))
        assert (excinfo.value.cause is not None)
        assert (excinfo.value. is not None)
        assert (len(recorder) == 1)
    def test_add_certificate_none_error(self, android_app):
        with pytest.raises(ValueError):
            android_app.add_sha_certificate(None)
    def test_add_sha_1_certificate(self, android_app):
        # The cert type is inferred from the hash length (SHA-1 here).
        recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})])
        android_app.add_sha_certificate(project_management.SHACertificate('aaaa'))
        assert (len(recorder) == 1)
        body = {'shaHash': 'aaaa', 'certType': 'SHA_1'}
        self._assert_request_is_correct(recorder[0], 'POST', TestAndroidApp._ADD_CERT_URL, body)
    def test_add_sha_256_certificate(self, android_app):
        recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})])
        android_app.add_sha_certificate(project_management.SHACertificate('aaaaaa1234'))
        assert (len(recorder) == 1)
        body = {'shaHash': 'aaaaaa1234', 'certType': 'SHA_256'}
        self._assert_request_is_correct(recorder[0], 'POST', TestAndroidApp._ADD_CERT_URL, body)
    def test_add_sha_certificates_already_exists(self, android_app):
        recorder = self._instrument_service(statuses=[409], responses=[ALREADY_EXISTS_RESPONSE])
        with pytest.raises(exceptions.AlreadyExistsError) as excinfo:
            android_app.add_sha_certificate(project_management.SHACertificate('aaaa'))
        assert ('The resource already exists' in str(excinfo.value))
        assert (excinfo.value.cause is not None)
        assert (excinfo.value. is not None)
        assert (len(recorder) == 1)
    def test_delete_certificate_none_error(self, android_app):
        with pytest.raises(ValueError):
            android_app.delete_sha_certificate(None)
    def test_delete_sha_1_certificate(self, android_app):
        recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})])
        android_app.delete_sha_certificate(SHA_1_CERTIFICATE)
        assert (len(recorder) == 1)
        self._assert_request_is_correct(recorder[0], 'DELETE', TestAndroidApp._DELETE_SHA_1_CERT_URL)
    def test_delete_sha_256_certificate(self, android_app):
        recorder = self._instrument_service(statuses=[200], responses=[json.dumps({})])
        android_app.delete_sha_certificate(SHA_256_CERTIFICATE)
        assert (len(recorder) == 1)
        self._assert_request_is_correct(recorder[0], 'DELETE', TestAndroidApp._DELETE_SHA_256_CERT_URL)
    def test_delete_sha_certificates_not_found(self, android_app):
        recorder = self._instrument_service(statuses=[404], responses=[NOT_FOUND_RESPONSE])
        with pytest.raises(exceptions.NotFoundError) as excinfo:
            android_app.delete_sha_certificate(SHA_1_CERTIFICATE)
        assert ('Failed to find the resource' in str(excinfo.value))
        assert (excinfo.value.cause is not None)
        assert (excinfo.value. is not None)
        assert (len(recorder) == 1)
    def test_raises_if_app_has_no_project_id(self):
        # Resolving an app without a project id must fail with ValueError.
        def evaluate():
            app = firebase_admin.initialize_app(testutils.MockCredential(), name='no_project_id')
            with pytest.raises(ValueError):
                project_management.android_app(app_id='1::android:deadbeef', app=app)
        testutils.run_without_project_id(evaluate)
class HTTPProxyTestCase(AioHTTPTestCase):
    """aiohttp test-case scaffold for a DNS-over-HTTPS proxy endpoint.

    NOTE(review): ``get_application`` is truncated in this source — the
    assignment ``(parser, args) =`` has no right-hand side and the ``return``
    has no value.  The original presumably parsed ``self.get_args()`` through
    the proxy's CLI parser and returned the configured aiohttp application;
    recover the body from upstream before relying on this class.
    """
    def setUp(self):
        super().setUp()
        self.endpoint = '/dns'
        # A fixed query id keeps the wire format deterministic across runs.
        self.dnsq = dns.message.make_query(qname='foo.example.com', rdtype='A')
        self.dnsq.id = 0
    def get_args(self):
        # CLI-style arguments used to configure the proxy under test.
        return ['--listen-port', '0', '--level', 'DEBUG', '--listen-address', '127.0.0.1', '--uri', '/dns', '--trusted']
    async def get_application(self):
        (parser, args) =
        return
def get_bert_feature(text, word2ph, device=config.bert_gen_config.device):
    """Return phone-level BERT features for *text* as a (dim, n_phones) tensor.

    Each token's hidden-state vector is repeated ``word2ph[i]`` times so that
    the feature sequence aligns with the phoneme sequence.  Models are cached
    per device in the module-level ``models`` dict.
    """
    # On macOS, upgrade a 'cpu' request to Apple's MPS backend when available.
    if sys.platform == 'darwin' and torch.backends.mps.is_available() and device == 'cpu':
        device = 'mps'
    if not device:
        device = 'cuda'
    # Lazily instantiate one model per device.
    if device not in models.keys():
        models[device] = DebertaV2Model.from_pretrained(LOCAL_PATH).to(device)
    with torch.no_grad():
        encoded = tokenizer(text, return_tensors='pt')
        encoded = {key: tensor.to(device) for key, tensor in encoded.items()}
        outputs = models[device](**encoded, output_hidden_states=True)
        # Keep the third-to-last hidden layer; [0] drops the batch dimension.
        token_features = torch.cat(outputs['hidden_states'][-3:-2], -1)[0].cpu()
    per_phone = []
    for token_idx, repeats in enumerate(word2ph):
        per_phone.append(token_features[token_idx].repeat(repeats, 1))
    return torch.cat(per_phone, dim=0).T
class TimeMonitor(Monitor, ABC):
    """Abstract monitor that records a time window of a simulation.

    Recording spans ``[start, stop]`` (stop defaults to the end of the
    simulation) and is downsampled by taking every ``interval``-th time step.
    """

    start: pydantic.NonNegativeFloat = pydantic.Field(0.0, title='Start Time', description='Time at which to start monitor recording.', units=SECOND)
    stop: pydantic.NonNegativeFloat = pydantic.Field(None, title='Stop Time', description='Time at which to stop monitor recording. If not specified, record until end of simulation.', units=SECOND)
    interval: pydantic.PositiveInt = pydantic.Field(None, title='Time Interval', description='Sampling rate of the monitor: number of time steps between each measurement. Set ``inverval`` to 1 for the highest possible resolution in time. Higher integer values downsample the data by measuring every ``interval`` time steps. This can be useful for reducing data storage as needed by the application.')

    # Fix: the two validators below were preceded by bare tuple expressions —
    # the remnants of stripped '@pydantic.validator' decorators.  Without the
    # decorators they were plain (never-invoked) methods; restored here so
    # pydantic actually registers them.
    @pydantic.validator('interval', always=True)
    def _warn_interval_default(cls, val, values):
        """Default ``interval`` to 1, warning unless start/stop were customized."""
        if (val is None):
            start = values.get('start')
            stop = values.get('stop')
            if ((start == 0.0) and (stop is None)):
                log.warning("The monitor 'interval' field was left as its default value, which will set it to 1 internally. A value of 1 means that the data will be sampled at every time step, which may potentially produce more data than desired, depending on the use case. To reduce data storage, one may downsample the data by setting 'interval > 1' or by choosing alternative 'start' and 'stop' values for the time sampling. If you intended to use the highest resolution time sampling, you may suppress this warning by explicitly setting 'interval=1' in the monitor.")
            val = 1
        return val

    @pydantic.validator('stop', always=True, allow_reuse=True)
    def stop_greater_than_start(cls, val, values):
        """Reject a stop time earlier than the start time."""
        start = values.get('start')
        if (val and (val < start)):
            raise SetupError('Monitor start time is greater than stop time.')
        return val

    def time_inds(self, tmesh: ArrayFloat1D) -> Tuple[(int, int)]:
        """Return (begin, end) indices into *tmesh* covering [start, stop]."""
        tmesh = np.array(tmesh)
        (tind_beg, tind_end) = (0, 0)
        if (tmesh.size == 0):
            return (tind_beg, tind_end)
        t_stop = self.stop
        if (t_stop is None):
            tind_end = int(tmesh.size)
            t_stop = tmesh[(- 1)]
        else:
            tend = np.nonzero((tmesh <= t_stop))[0]
            if (tend.size > 0):
                tind_end = int((tend[(- 1)] + 1))
        # Degenerate window (start ~= stop within one step): keep one sample.
        dt = (1e-20 if (np.array(tmesh).size < 2) else (tmesh[1] - tmesh[0]))
        if ((np.abs((self.start - t_stop)) < dt) and (self.start <= tmesh[(- 1)])):
            tind_beg = max((tind_end - 1), 0)
        else:
            tbeg = np.nonzero((tmesh[:tind_end] >= self.start))[0]
            tind_beg = (tbeg[0] if (tbeg.size > 0) else tind_end)
        return (tind_beg, tind_end)

    def num_steps(self, tmesh: ArrayFloat1D) -> int:
        """Number of recorded steps over *tmesh*, after interval downsampling."""
        (tind_beg, tind_end) = self.time_inds(tmesh)
        return int(((tind_end - tind_beg) / self.interval))
class PopupControl(HasPrivateTraits):
    """Base class for a transient popup window anchored below a wx control.

    Subclasses implement :meth:`create_control` to populate the popup.  The
    popup closes (and fires ``closed``) when it loses activation or when
    ``value`` is assigned.
    """

    # The wx control the popup is anchored to.
    control = Instance(wx.Window)
    # Minimum popup width/height in pixels.
    width = Int()
    height = Int()
    # Whether the popup frame gets a resize border.
    resizable = Bool(False)
    # Assigning a value schedules the popup to close.
    value = Any()
    # Event fired after the popup has been destroyed.
    closed = Event()
    # The popup frame itself.
    popup = Instance(wx.Window)

    def __init__(self, **traits):
        super().__init__(**traits)
        style = wx.SIMPLE_BORDER
        if self.resizable:
            style = wx.RESIZE_BORDER
        self.popup = popup = wx.Frame(None, (- 1), '', style=style)
        popup.Bind(wx.EVT_ACTIVATE, self._on_close_popup)
        self.create_control(popup)
        self._position_control()
        popup.Show()

    def create_control(self, parent):
        """Create the popup's contents inside *parent*.  Must be overridden.

        Fix: the base declaration previously took no argument even though
        __init__ calls ``self.create_control(popup)``, so a subclass that
        failed to override it would get a TypeError instead of the intended
        NotImplementedError.
        """
        raise NotImplementedError

    def dispose(self):
        """Hook for subclasses to release resources before the popup closes."""
        pass

    def _value_changed(self, value):
        # Defer the close so the assignment completes before teardown.
        do_later(self._close_popup)

    def _position_control(self):
        """Place the popup below the anchor control, flipping above or
        shrinking when it would run off the bottom of the screen."""
        (px, cy) = self.control.ClientToScreen(0, 0)
        (cdx, cdy) = self.control.GetSize()
        (pdx, pdy) = self.popup.GetSize()
        (pdx, pdy) = (max(pdx, cdx, self.width), max(pdy, self.height))
        py = (cy + cdy)
        if ((py + pdy) > SystemMetrics().screen_height):
            if ((cy - pdy) < 0):
                bottom = (SystemMetrics().screen_height - py)
                if (cy > bottom):
                    (py, pdy) = (0, cy)
                else:
                    pdy = bottom
            else:
                py = (cy - pdy)
        self.popup.SetSize(px, py, pdx, pdy)

    def _on_close_popup(self, event):
        # Close when the popup is deactivated (user clicked elsewhere).
        if (not event.GetActive()):
            self._close_popup()

    def _close_popup(self):
        # Unbind first so Destroy() doesn't re-enter via EVT_ACTIVATE.
        self.popup.Unbind(wx.EVT_ACTIVATE)
        self.dispose()
        self.closed = True
        self.popup.Destroy()
        self.popup = self.control = None
class DEP001MissingDependencyViolation(Violation):
    """DEP001: a module is imported but absent from the dependency definitions."""

    error_code: ClassVar[str] = 'DEP001'
    error_template: ClassVar[str] = "'{name}' imported but missing from the dependency definitions"
    issue: Module

    def get_error_message(self) -> str:
        """Render the human-readable message naming the offending module."""
        offending = self.issue.name
        return self.error_template.format(name=offending)
def test_bad_init_error(utils):
    """Constructing an analyser with an invalid config or empty name must
    raise InvalidAnalyserConfigError with an informative message."""
    missing_key = {}
    empty_list = {'elements_in': []}
    none_value = {'elements_in': None}
    valid_config = {'elements_in': ['selname']}
    with pytest.raises(InvalidAnalyserConfigError, match="must contain an 'elements_in' indicating the analyser's input"):
        ErrorThrowingAnalyser(missing_key, 'stub', LocalStorage(folder=utils.TEMP_ELEMENT_DIR))
    # Both an empty list and None fail the same 'at least one string' check.
    for bad_config in (empty_list, none_value):
        with pytest.raises(InvalidAnalyserConfigError, match="The 'elements_in' must be a list containing at least one string"):
            ErrorThrowingAnalyser(bad_config, 'stub', LocalStorage(folder=utils.TEMP_ELEMENT_DIR))
    with pytest.raises(InvalidAnalyserConfigError, match='You must provide a name for your analyser'):
        ErrorThrowingAnalyser(valid_config, '', LocalStorage(folder=utils.TEMP_ELEMENT_DIR))
class TestParseVersion(unittest.TestCase, TestFunction):
    """Table-driven tests for ``tutils.parse_version``.

    Each case maps an input version string to
    ``((major, minor, patch), prerelease_tag, prerelease_number)``.
    """

    cases = [
        (['1'], ((1, 0, 0), None, 0)),
        (['1.0'], ((1, 0, 0), None, 0)),
        (['1.0.0'], ((1, 0, 0), None, 0)),
        (['1.0.1'], ((1, 0, 1), None, 0)),
        (['1.2.3'], ((1, 2, 3), None, 0)),
        (['1.2.3r'], ((1, 2, 3), 'r', 0)),
        (['1.2.3r1'], ((1, 2, 3), 'r', 1)),
        (['1.2.3r12'], ((1, 2, 3), 'r', 12)),
        (['1.2r1'], ((1, 2, 0), 'r', 1)),
        (['1r1'], ((1, 0, 0), 'r', 1)),
    ]

    def test_parse_version(self):
        """Run every case through the shared TestFunction harness."""
        self.run_test(tutils.parse_version)
# NOTE(review): the original decorator line began with a bare '.parametrize(',
# the remnant of a stripped '@pytest.mark' prefix, leaving a SyntaxError.
# Restored below.
@pytest.mark.parametrize('arr,arr_type', [(numpy.zeros(0, dtype=numpy.float32), Floats1d), (numpy.zeros((0, 0), dtype=numpy.float32), Floats2d), (numpy.zeros((0, 0, 0), dtype=numpy.float32), Floats3d), (numpy.zeros((0, 0, 0, 0), dtype=numpy.float32), Floats4d), (numpy.zeros(0, dtype=numpy.int32), Ints1d), (numpy.zeros((0, 0), dtype=numpy.int32), Ints2d), (numpy.zeros((0, 0, 0), dtype=numpy.int32), Ints3d), (numpy.zeros((0, 0, 0, 0), dtype=numpy.int32), Ints4d)])
def test_array_validation_valid(arr, arr_type):
    """A correctly-shaped/dtyped array must validate and round-trip unchanged."""
    test_model = create_model('TestModel', arr=(arr_type, ...))
    result = test_model(arr=arr)
    assert numpy.array_equal(arr, result.arr)
class OptionSeriesPictorialDataDatalabelsTextpath(Options):
    """Text-path options for pictorial-series data labels.

    Fix: the accessors were bare duplicate ``def`` statements (the setter
    silently shadowing the getter) — the telltale remnant of stripped
    ``@property`` / ``@<name>.setter`` decorators, restored here.
    """

    @property
    def attributes(self):
        # SVG attributes applied to the text path; defaults to None.
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def enabled(self):
        # Whether the text path is enabled; defaults to False.
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class OptionPlotoptionsCylinderSonificationDefaultspeechoptionsMappingPitch(Options):
    """Pitch-mapping options for cylinder-series speech sonification.

    Fix: the accessors were bare duplicate ``def`` statements — stripped
    ``@property`` / ``@<name>.setter`` decorators, restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Data property to map from; Highcharts default is 'undefined'.
        return self._config_get('undefined')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get('undefined')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        return self._config_get('undefined')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def within(self):
        return self._config_get('undefined')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class OptionPlotoptionsBubbleSonificationContexttracksMappingRate(Options):
    """Generated wrapper for the Highcharts
    ``plotOptions.bubble.sonification.contextTracks.mapping.rate`` options.

    NOTE(review): each getter/setter pair below shares one name; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped from this copy, so as written each second ``def`` shadows
    the first. Confirm against the generated upstream source.
    """
    def mapFunction(self):
        # Getter: mapping function name (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stored verbatim (js_type=False -> plain value, not JS code).
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data point property to map to (default None).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)
    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)
    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def within(self):
        # Getter: scope the min/max apply within (default None).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
class OptionPlotoptionsArcdiagramSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
    """Generated wrapper for the Highcharts
    ``plotOptions.arcdiagram.sonification.defaultInstrumentOptions.mapping.highpass.resonance`` options.

    NOTE(review): each getter/setter pair below shares one name; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped from this copy, so as written each second ``def`` shadows
    the first. Confirm against the generated upstream source.
    """
    def mapFunction(self):
        # Getter: mapping function name (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stored verbatim (js_type=False -> plain value, not JS code).
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data point property to map to (default None).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)
    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)
    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def within(self):
        # Getter: scope the min/max apply within (default None).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
class OptionPlotoptionsVectorSonificationTracksActivewhen(Options):
    """Generated wrapper for the Highcharts
    ``plotOptions.vector.sonification.tracks.activeWhen`` options.

    NOTE(review): each getter/setter pair below shares one name; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped from this copy, so as written each second ``def`` shadows
    the first. Confirm against the generated upstream source.
    """
    def crossingDown(self):
        # Getter: activate when the property crosses below this value (default None).
        return self._config_get(None)
    def crossingDown(self, num: float):
        # Setter: stored verbatim (js_type=False -> plain value, not JS code).
        self._config(num, js_type=False)
    def crossingUp(self):
        # Getter: activate when the property crosses above this value (default None).
        return self._config_get(None)
    def crossingUp(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def max(self):
        # Getter: upper bound for the active range (default None).
        return self._config_get(None)
    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound for the active range (default None).
        return self._config_get(None)
    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def prop(self):
        # Getter: name of the data point property the bounds apply to (default None).
        return self._config_get(None)
    def prop(self, text: str):
        # Setter.
        self._config(text, js_type=False)
class SkeletonDetector(object):
    """Wraps tf-pose-estimation's TfPoseEstimator to detect 2D human skeletons.

    Supports the 'cmu' and 'mobilenet_thin' models. ``image_size`` is a
    'WxH' string parsed by the module-level helper.
    """
    def __init__(self, model='cmu', image_size='432x368'):
        assert (model in ['mobilenet_thin', 'cmu'])
        (self._w, self._h) = _get_input_img_size_from_string(image_size)
        self._model = model
        # Upsampling ratio applied to the network's heatmap output.
        self._resize_out_ratio = 4.0
        self._config = _set_config()
        self._tf_pose_estimator = TfPoseEstimator(get_graph_path(self._model), target_size=(self._w, self._h), tf_config=self._config)
        self._prev_t = time.time()
        self._cnt_image = 0
        self._logger = _set_logger()
    def detect(self, image):
        """Run pose inference on one image and return tf-pose `humans`."""
        self._cnt_image += 1
        if (self._cnt_image == 1):
            # Cache the input geometry from the first frame; _scale_h is later
            # used to restore the aspect ratio of normalized y coordinates.
            self._image_h = image.shape[0]
            self._image_w = image.shape[1]
            self._scale_h = ((1.0 * self._image_h) / self._image_w)
        t = time.time()
        humans = self._tf_pose_estimator.inference(image, resize_to_default=((self._w > 0) and (self._h > 0)), upsample_size=self._resize_out_ratio)
        elapsed = (time.time() - t)
        self._logger.info(('inference image in %.4f seconds.' % elapsed))
        return humans
    def draw(self, img_disp, humans):
        """Draw the detected skeletons (and optionally the FPS) onto img_disp in place."""
        img_disp = TfPoseEstimator.draw_humans(img_disp, humans, imgcopy=False)
        if IS_DRAW_FPS:
            cv2.putText(img_disp, 'fps = {:.1f}'.format((1.0 / (time.time() - self._prev_t))), (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)
        self._prev_t = time.time()
    def humans_to_skels_list(self, humans, scale_h=None):
        """Convert tf-pose `humans` into flat 36-element [x0, y0, x1, y1, ...] lists.

        Missing joints stay 0; y values are multiplied by scale_h to undo the
        width-normalization. Returns (skeletons, scale_h).
        """
        if (scale_h is None):
            scale_h = self._scale_h
        skeletons = []
        # Placeholder for undetected joints (0, despite the name).
        NaN = 0
        for human in humans:
            skeleton = ([NaN] * (18 * 2))
            for (i, body_part) in human.body_parts.items():
                idx = body_part.part_idx
                skeleton[(2 * idx)] = body_part.x
                skeleton[((2 * idx) + 1)] = (body_part.y * scale_h)
            skeletons.append(skeleton)
        return (skeletons, scale_h)
class RelationshipMutualAuthentication(ModelNormal):
    """Auto-generated OpenAPI model with a single 'mutual_authentication' attribute.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    expression statements below look like decorators whose leading text was
    stripped in this copy (likely ``@cached_property`` and
    ``@convert_js_args_to_python_args`` in the generated original); as
    written they would raise NameError at class-body execution. Confirm
    against the generated upstream source.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not listed in attribute_map."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Map of attribute name -> tuple of accepted types (lazy to avoid import cycles)."""
        lazy_import()
        return {'mutual_authentication': (RelationshipMutualAuthenticationMutualAuthentication,)}
    _property
    def discriminator():
        return None
    attribute_map = {'mutual_authentication': 'mutual_authentication'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing server data (may set read-only vars)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration asks for it.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects positional args and read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def test_hover_parameter_eqnospace():
    """Hover over a PARAMETER declared with no spaces around '=' shows its full declaration."""
    target = (test_dir / 'hover') / 'parameters.f90'
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    request += hover_req(target, 11, 28)
    errcode, results = run_request(request, fortls_args=['--sort_keywords'])
    assert errcode == 0
    expected = ['```fortran90\nINTEGER, PARAMETER :: var_no_space = 123\n```']
    validate_hover(results, expected)
# NOTE(review): the two decorators below lost their "@pytest.mark" prefix in
# this copy -- presumably "@pytest.mark.parametrize"; confirm against upstream.
.parametrize('request_1__group_by', ['a'])
.parametrize('request_2__group_by', ['b'])
def test_version_user_post(dashboard_user, request_1, request_2, endpoint):
    """POSTing two users/versions returns a diagonal duration matrix plus version metadata."""
    response = dashboard_user.post('dashboard/api/version_user/{0}'.format(endpoint.id), json={'data': {'users': ['a', 'b'], 'versions': [request_1.version_requested, request_2.version_requested]}})
    assert (response.status_code == 200)
    data = response.json
    # Each request belongs to a different user/version, so durations land on the diagonal.
    assert (data['data'] == [[request_1.duration, 0], [0, request_2.duration]])
    for (index, request) in enumerate([request_1, request_2]):
        assert (data['versions'][index] == {'date': request.time_requested.strftime('%a, %d %b %Y %H:%M:%S GMT'), 'version': request.version_requested})
def viz_synth_sample(gt_mel, gt_pitch, predict_mel, predict_mel_len, gt_mel_len, vocoder, return_image=False):
    """Visualize a synthesized sample against its ground truth.

    Trims both mels to their true lengths, plots prediction / target (and their
    absolute difference when shapes match), then vocodes both back to audio
    with loudness normalization applied on a best-effort basis.

    Args:
        gt_mel: ground-truth mel spectrogram (time, n_mels); assumed on a torch device.
        gt_pitch: optional pitch curve aligned to gt_mel, or None.
        predict_mel: predicted mel spectrogram (time, n_mels).
        predict_mel_len, gt_mel_len: 0-d tensors holding the valid lengths.
        vocoder: object exposing spec2wav(mel, pitch) -> waveform tensor.
        return_image: when True, rasterize the figure to an ndarray and close it;
            otherwise return the live matplotlib figure.

    Returns:
        (image_or_figure, wav_reconstruction, wav_prediction) -- waveforms as CPU tensors.
    """
    predict_mel_len = predict_mel_len.item()
    gt_mel_len = gt_mel_len.item()
    pitch = (gt_pitch[:gt_mel_len] if (gt_pitch is not None) else None)
    # Transpose to (n_mels, time) for plotting and vocoding.
    mel_target = gt_mel[:gt_mel_len].float().detach().T
    mel_prediction = predict_mel[:predict_mel_len].float().detach().T
    mels = [mel_prediction.cpu().numpy(), mel_target.cpu().numpy()]
    titles = ['Sampled Spectrogram', 'Ground-Truth Spectrogram']
    if (mel_prediction.shape == mel_target.shape):
        mels.append((mel_prediction - mel_target).abs().cpu().numpy())
        titles.append('Difference')
    fig_mels = plot_mel(mels, titles)
    wav_reconstruction = vocoder.spec2wav(mel_target, pitch)
    wav_prediction = vocoder.spec2wav(mel_prediction, pitch)
    # Loudness normalization is best-effort: fall back to the raw waveform on
    # failure. BUGFIX: catch Exception instead of a bare except, which also
    # swallowed KeyboardInterrupt/SystemExit.
    try:
        wav_reconstruction = loudness_norm(wav_reconstruction.cpu().float().numpy(), 44100, block_size=0.1)
    except Exception:
        wav_reconstruction = wav_reconstruction.cpu().float().numpy()
    try:
        wav_prediction = loudness_norm(wav_prediction.cpu().float().numpy(), 44100, block_size=0.1)
    except Exception:
        wav_prediction = wav_prediction.cpu().float().numpy()
    wav_reconstruction = torch.from_numpy(wav_reconstruction)
    wav_prediction = torch.from_numpy(wav_prediction)
    if return_image:
        # Rasterize the figure into an RGBA ndarray and release it.
        f = io.BytesIO()
        fig_mels.savefig(f, format='raw')
        image_mels = np.reshape(np.frombuffer(f.getvalue(), dtype=np.uint8), newshape=(int(fig_mels.bbox.bounds[3]), int(fig_mels.bbox.bounds[2]), (- 1)))
        plt.close(fig_mels)
    else:
        image_mels = fig_mels
    return (image_mels, wav_reconstruction, wav_prediction)
def test_reject_recursive_repeats_multithreaded():
    """Recursion guard must track re-entry per thread, not globally.

    Two greenlets/threads run the same decorated recursive call with staggered
    sleeps; both must see their own recursion rejected (or not) independently.
    """
    # NOTE(review): the bare `_recursive_repeats` line below looks like a
    # decorator whose leading text was stripped in this copy (likely
    # `@reject_recursive_repeats`); confirm against upstream.
    _recursive_repeats
    def recurse(sleep_now):
        time.sleep(sleep_now)
        try:
            # Re-entering the decorated function: expected to raise ValueError.
            recurse(0.05)
            return True
        except ValueError:
            return False
    thd1 = spawn(recurse, 0)
    thd2 = spawn(recurse, 0.02)
    assert (thd2.get() and thd1.get())
def test_aliased_problems_10():
    """IdentityElimination on a scanf-driven loop must respect aliased variables.

    Builds a 3-block CFG (start -> loop body with a Phi -> return), runs the
    decompiler's identity-elimination pass, and checks that redundant copy
    assignments are removed while the aliased y/x versions are substituted
    correctly in the Phi, the branch, and the return.
    """
    # Four SSA versions of x (x0 aliased) and three aliased versions of y.
    (x0, x1, x2, x3) = [Variable('x', Integer.int32_t(), i) for i in range(4)]
    x0.is_aliased = True
    (y0, y1, y2) = [Variable('y', Integer.int32_t(), i, is_aliased=True) for i in range(3)]
    cfg = ControlFlowGraph()
    cfg.add_nodes_from([(start := BasicBlock(0, instructions=[Assignment(ListOperation([]), Call(imp_function_symbol('scanf'), [UnaryOperation(OperationType.address, [y0.copy()])])), Assignment(y1.copy(), y0.copy())])), (loop_body := BasicBlock(1, instructions=[Phi(y2.copy(), [y1.copy(), x0.copy()]), Assignment(ListOperation([]), Call(imp_function_symbol('scanf'), [UnaryOperation(OperationType.address, [x0.copy()])])), Assignment(x2.copy(), x1.copy()), Branch(Condition(OperationType.greater, [x2.copy(), Constant(20)]))])), (end := BasicBlock(2, instructions=[Return([x2.copy()])]))])
    cfg.add_edges_from([UnconditionalEdge(start, loop_body), TrueCase(loop_body, end), FalseCase(loop_body, loop_body)])
    IdentityElimination().run(DecompilerTask('test', cfg))
    # The copy y1 = y0 is removed; the Phi now reads y0 directly.
    assert (start.instructions == [Assignment(ListOperation([]), Call(imp_function_symbol('scanf'), [UnaryOperation(OperationType.address, [y0.copy()])]))])
    # The copy x2 = x1 is removed; branch and return now use x1.
    assert (loop_body.instructions == [Phi(y2.copy(), [y0.copy(), x0.copy()]), Assignment(ListOperation([]), Call(imp_function_symbol('scanf'), [UnaryOperation(OperationType.address, [x0.copy()])])), Branch(Condition(OperationType.greater, [x1.copy(), Constant(20)]))])
    assert (end.instructions == [Return([x1.copy()])])
def main():
    """Ansible module entry point for configuring FortiOS alertemail settings.

    Builds the argument spec from the versioned schema, opens the httpapi
    connection, applies the configuration via fortios_alertemail(), and exits
    the module with changed/diff/version-warning information.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # alertemail_setting is a singleton object: no mkey (primary key) applies.
    mkeyname = None
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'alertemail_setting': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy schema-derived options into the module spec; the mkey (if any) is required.
    for attribute_name in module_spec['options']:
        fields['alertemail_setting']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['alertemail_setting']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Default logging to off when the option is absent.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'alertemail_setting')
        (is_error, has_changed, result, diff) = fortios_alertemail(module.params, fos)
    else:
        # httpapi connection is mandatory for this module.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            # Success, but surface the schema-version mismatch to the caller.
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
# NOTE(review): the three bare string expressions below look like
# "@mock.patch(...)" decorators whose prefix was stripped in this copy
# (their order matches the reversed mock arguments of the test); confirm
# against upstream.
('foremast.datapipeline.datapipeline.boto3.Session.client')
('foremast.datapipeline.datapipeline.get_details')
('foremast.datapipeline.datapipeline.get_properties')
def test_good_set_pipeline_definition(mock_get_properties, mock_get_details, mock_boto3):
    """set_pipeline_definition succeeds when details/properties lookups are mocked."""
    generated = {'project': 'test'}
    # Deep-copy so the shared fixture dict is never mutated by the code under test.
    properties = copy.deepcopy(TEST_PROPERTIES)
    mock_get_details.return_value.data = generated
    mock_get_properties.return_value = properties
    good_dp = AWSDataPipeline(app='test_app', env='test_env', region='us-east-1', prop_path='other')
    good_dp.pipeline_id = '1'
    assert good_dp.set_pipeline_definition()
class OptionSeriesTreemapSonificationTracksMappingVolume(Options):
    """Generated wrapper for the Highcharts
    ``series.treemap.sonification.tracks.mapping.volume`` options.

    NOTE(review): each getter/setter pair below shares one name; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped from this copy, so as written each second ``def`` shadows
    the first. Confirm against the generated upstream source.
    """
    def mapFunction(self):
        # Getter: mapping function name (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stored verbatim (js_type=False -> plain value, not JS code).
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data point property to map to (default None).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)
    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)
    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def within(self):
        # Getter: scope the min/max apply within (default None).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
def _circlify_level(data, target_enclosure, fields, level=1):
    """Pack one level of the hierarchy into target_enclosure, recursing into children.

    Returns a flat list of all placed circles (this level plus descendants);
    an empty input yields an empty list.
    """
    if not data:
        return []
    placed = []
    circles = _handle(data, 1, fields)
    packed = pack_A1_0([circle.r for circle in circles])
    enclosure = enclose(packed)
    assert enclosure is not None
    known_keys = [fields.id, fields.datum, fields.children]
    for circle, inner in zip(circles, packed):
        circle.level = level
        # Rescale the packed position from the unit enclosure into the target one.
        circle.circle = scale(inner, target_enclosure, enclosure)
        if circle.ex and fields.children in circle.ex:
            children = circle.ex[fields.children]
            placed += _circlify_level(children, circle.circle, fields, level + 1)
        elif __debug__:
            # Debug-only sanity check: warn about keys we do not understand.
            for key in circle.ex:
                if key not in known_keys:
                    log.warning("unexpected key '%s' in input is ignored", key)
        placed.append(circle)
    return placed
def create_index(index_name=None):
    """(Re)create the named Elasticsearch index with its settings, mappings and aliases.

    Falls back to CASE_INDEX when no name is given; logs an error and does
    nothing for names missing from INDEX_DICT. Any existing index of the same
    name is deleted first.
    """
    index_name = index_name or CASE_INDEX
    if index_name not in INDEX_DICT:
        logger.error(" Invalid index '{0}'.".format(index_name))
        return
    # Drop any stale index before recreating it.
    delete_index(index_name)
    mapping, alias1, alias2 = INDEX_DICT.get(index_name)[:3]
    body = {'settings': ANALYZER_SETTING, 'mappings': mapping}
    if alias1 and alias2:
        body['aliases'] = {alias1: {}, alias2: {}}
    es_client = create_es_client()
    logger.info(" Creating index '{0}'...".format(index_name))
    es_client.indices.create(index=index_name, body=body)
    logger.info(" The index '{0}' is created successfully.".format(index_name))
    if alias1 and alias2:
        logger.info(" The aliases for index '{0}': {1}, {2} are created successfully.".format(index_name, alias1, alias2))
class OptionSeriesNetworkgraphMarkerStates(Options):
    """Accessors for the networkgraph marker state sub-options
    (hover / inactive / normal / select)."""
    def hover(self) -> 'OptionSeriesNetworkgraphMarkerStatesHover':
        """Sub-configuration applied while a point is hovered."""
        sub = self._config_sub_data('hover', OptionSeriesNetworkgraphMarkerStatesHover)
        return sub
    def inactive(self) -> 'OptionSeriesNetworkgraphMarkerStatesInactive':
        """Sub-configuration applied while a point is inactive."""
        sub = self._config_sub_data('inactive', OptionSeriesNetworkgraphMarkerStatesInactive)
        return sub
    def normal(self) -> 'OptionSeriesNetworkgraphMarkerStatesNormal':
        """Sub-configuration for the default (normal) state."""
        sub = self._config_sub_data('normal', OptionSeriesNetworkgraphMarkerStatesNormal)
        return sub
    def select(self) -> 'OptionSeriesNetworkgraphMarkerStatesSelect':
        """Sub-configuration applied while a point is selected."""
        sub = self._config_sub_data('select', OptionSeriesNetworkgraphMarkerStatesSelect)
        return sub
class EsRaceStore(RaceStore):
    """Race store backed by Elasticsearch.

    Races are stored in monthly 'rally-races-YYYY-MM' indices; annotations
    live in a single 'rally-annotations' index. Most parameters (ids, dates,
    track, dry-run) come from the configuration via RaceStore helpers.
    """
    INDEX_PREFIX = 'rally-races-'
    def __init__(self, cfg, client_factory_class=EsClientFactory, index_template_provider_class=IndexTemplateProvider):
        super().__init__(cfg)
        self.client = client_factory_class(cfg).create()
        self.index_template_provider = index_template_provider_class(cfg)
    def store_race(self, race):
        """Index the race document, ensuring the races index template exists first."""
        doc = race.as_dict()
        self.client.put_template('rally-races', self.index_template_provider.races_template())
        self.client.index(index=self.index_name(race), item=doc, id=race.race_id)
    def index_name(self, race):
        """Return the monthly index name derived from the race's timestamp."""
        race_timestamp = race.race_timestamp
        return f'{EsRaceStore.INDEX_PREFIX}{race_timestamp:%Y-%m}'
    def add_annotation(self):
        """Store a chart annotation (or just print it when dry-run is enabled)."""
        def _at_midnight(race_timestamp):
            # Annotations are aligned to the day, so truncate the time to 00:00:00 UTC.
            TIMESTAMP_FMT = '%Y%m%dT%H%M%SZ'
            date = datetime.datetime.strptime(race_timestamp, TIMESTAMP_FMT)
            date = date.replace(hour=0, minute=0, second=0, tzinfo=datetime.timezone.utc)
            return date.strftime(TIMESTAMP_FMT)
        environment = self.environment_name
        race_timestamp = _at_midnight(self._race_timestamp())
        track = self._track()
        chart_type = self._chart_type()
        chart_name = self._chart_name()
        message = self._message()
        annotation_id = str(uuid.uuid4())
        dry_run = self._dry_run()
        if dry_run:
            console.println(f'Would add annotation with message [{message}] for environment=[{environment}], race timestamp=[{race_timestamp}], track=[{track}], chart type=[{chart_type}], chart name=[{chart_name}]')
        else:
            # Lazily create the annotations index (and its template) on first use.
            if (not self.client.exists(index='rally-annotations')):
                self.client.put_template('rally-annotations', self.index_template_provider.annotations_template())
                self.client.create_index(index='rally-annotations')
            self.client.index(index='rally-annotations', id=annotation_id, item={'environment': environment, 'race-timestamp': race_timestamp, 'track': track, 'chart': chart_type, 'chart-name': chart_name, 'message': message})
            console.println(f'Successfully added annotation [{annotation_id}].')
    def list_annotations(self):
        """Print a table of annotations for this environment within the configured date range."""
        environment = self.environment_name
        track = self._track()
        from_date = self._from_date()
        to_date = self._to_date()
        query = {'query': {'bool': {'filter': [{'term': {'environment': environment}}, {'range': {'race-timestamp': {'gte': from_date, 'lte': to_date, 'format': 'basic_date'}}}]}}}
        if track:
            query['query']['bool']['filter'].append({'term': {'track': track}})
        query['sort'] = [{'race-timestamp': 'desc'}, {'track': 'asc'}, {'chart': 'asc'}]
        query['size'] = self._max_results()
        result = self.client.search(index='rally-annotations', body=query)
        annotations = []
        # NOTE(review): assumes hits.total is an int here; list()/find_by_race_id
        # below handle the dict form too -- confirm the client's ES version.
        hits = result['hits']['total']
        if (hits == 0):
            console.println(f'No annotations found in environment [{environment}].')
        else:
            for hit in result['hits']['hits']:
                src = hit['_source']
                annotations.append([hit['_id'], src['race-timestamp'], src.get('track', ''), src.get('chart', ''), src.get('chart-name', ''), src['message']])
        if annotations:
            console.println('\nAnnotations:\n')
            console.println(tabulate.tabulate(annotations, headers=['Annotation Id', 'Timestamp', 'Track', 'Chart Type', 'Chart Name', 'Message']))
    def delete_annotation(self):
        """Delete one or more comma-separated annotation ids (or just report in dry-run)."""
        annotations = self._id().split(',')
        environment = self.environment_name
        if self._dry_run():
            if (len(annotations) == 1):
                console.println(f'Would delete annotation with id [{annotations[0]}] in environment [{environment}].')
            else:
                console.println(f'Would delete {len(annotations)} annotations: {annotations} in environment [{environment}].')
        else:
            for annotation_id in annotations:
                result = self.client.delete(index='rally-annotations', id=annotation_id)
                if (result['result'] == 'deleted'):
                    console.println(f'Successfully deleted [{annotation_id}].')
                else:
                    console.println(f'Did not find [{annotation_id}] in environment [{environment}].')
    def delete_race(self):
        """Delete one or more races (race, metrics and results docs) by race id."""
        races = self._id().split(',')
        environment = self.environment_name
        if self._dry_run():
            if (len(races) == 1):
                console.println(f'Would delete race with id {races[0]} in environment {environment}.')
            else:
                console.println(f'Would delete {len(races)} races: {races} in environment {environment}.')
        else:
            for race_id in races:
                selector = {'query': {'bool': {'filter': [{'term': {'environment': environment}}, {'term': {'race-id': race_id}}]}}}
                # Remove the race plus all associated metrics; success is judged
                # by whether any results documents were deleted.
                self.client.delete_by_query(index='rally-races-*', body=selector)
                self.client.delete_by_query(index='rally-metrics-*', body=selector)
                result = self.client.delete_by_query(index='rally-results-*', body=selector)
                if (result['deleted'] > 0):
                    console.println(f'Successfully deleted [{race_id}] in environment [{environment}].')
                else:
                    console.println(f'Did not find [{race_id}] in environment [{environment}].')
    def list(self):
        """Return recent races (newest first) filtered by track, benchmark name and date range."""
        track = self._track()
        name = self._benchmark_name()
        from_date = self._from_date()
        to_date = self._to_date()
        filters = [{'term': {'environment': self.environment_name}}, {'range': {'race-timestamp': {'gte': from_date, 'lte': to_date, 'format': 'basic_date'}}}]
        query = {'query': {'bool': {'filter': filters}}, 'size': self._max_results(), 'sort': [{'race-timestamp': {'order': 'desc'}}]}
        if track:
            query['query']['bool']['filter'].append({'term': {'track': track}})
        if name:
            # Match either the newer 'benchmark-name' tag or the legacy 'name' tag.
            query['query']['bool']['filter'].append({'bool': {'should': [{'term': {'user-tags.benchmark-name': name}}, {'term': {'user-tags.name': name}}]}})
        result = self.client.search(index=('%s*' % EsRaceStore.INDEX_PREFIX), body=query)
        hits = result['hits']['total']
        # ES 7+ reports total hits as {'value': N, ...}; older versions as an int.
        if isinstance(hits, dict):
            hits = hits['value']
        if (hits > 0):
            return [Race.from_dict(v['_source']) for v in result['hits']['hits']]
        else:
            return []
    def find_by_race_id(self, race_id):
        """Return the single race matching race_id; raise NotFound / RallyAssertionError otherwise."""
        query = {'query': {'bool': {'filter': [{'term': {'race-id': race_id}}]}}}
        result = self.client.search(index=('%s*' % EsRaceStore.INDEX_PREFIX), body=query)
        hits = result['hits']['total']
        if isinstance(hits, dict):
            hits = hits['value']
        if (hits == 1):
            return Race.from_dict(result['hits']['hits'][0]['_source'])
        elif (hits > 1):
            raise exceptions.RallyAssertionError(f'Expected exactly one race to match race id [{race_id}] but there were [{hits}] matches.')
        else:
            raise exceptions.NotFound(f'No race with race id [{race_id}]')
class ByteStringDecoder(SingleDecoder):
    """ABI decoder for the dynamic 'bytes' type.

    NOTE(review): `decoder_fn` takes no `self`/`cls` and the bare
    `_type_str('bytes')` line before `from_type_str` looks like a stripped
    decorator (likely `@staticmethod` and `@parse_type_str('bytes')` in the
    original); confirm against upstream.
    """
    # 'bytes' is a dynamically-sized ABI type.
    is_dynamic = True
    def decoder_fn(data):
        # The stream reader already trims to the declared length; pass through.
        return data
    def read_data_from_stream(self, stream):
        """Read a length-prefixed, 32-byte-padded byte string from the stream."""
        data_length = decode_uint_256(stream)
        padded_length = ceil32(data_length)
        data = stream.read(padded_length)
        if self.strict:
            # In strict mode the payload must be complete and zero-padded.
            if (len(data) < padded_length):
                raise InsufficientDataBytes(f'Tried to read {padded_length} bytes, only got {len(data)} bytes')
            padding_bytes = data[data_length:]
            if (padding_bytes != (b'\x00' * (padded_length - data_length))):
                raise NonEmptyPaddingBytes(f'Padding bytes were not empty: {repr(padding_bytes)}')
        return data[:data_length]
    def validate_padding_bytes(self, value, padding_bytes):
        # Padding is already validated in read_data_from_stream when strict.
        pass
    _type_str('bytes')
    def from_type_str(cls, abi_type, registry):
        return cls()
class Plugin(plugin.PluginProto):
    """RPIEasy environment plugin for the SI7021/HTU21D I2C temperature/humidity sensor."""
    PLUGIN_ID = 14
    PLUGIN_NAME = 'Environment - SI7021/HTU21D'
    PLUGIN_VALUENAME1 = 'Temperature'
    PLUGIN_VALUENAME2 = 'Humidity'

    def __init__(self, taskindex):
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_I2C
        self.vtype = rpieGlobals.SENSOR_TYPE_TEMP_HUM
        self.readinprogress = 0
        self.valuecount = 2
        self.senddataoption = True
        self.timeroption = True
        self.timeroptional = False
        self.formulaoption = True
        self._nextdataservetime = 0
        self.htu = None

    def plugin_init(self, enableplugin=None):
        """Initialize the sensor on the configured (or first available) I2C bus."""
        plugin.PluginProto.plugin_init(self, enableplugin)
        self.uservar[0] = 0
        self.uservar[1] = 0
        self.readinprogress = 0
        if self.enabled:
            try:
                i2cl = self.i2c
            except Exception:
                i2cl = -1
            try:
                i2cport = gpios.HWPorts.geti2clist()
                if i2cl == -1:
                    # No bus configured: default to the first detected bus.
                    i2cl = int(i2cport[0])
            except Exception:
                i2cport = []
            if len(i2cport) > 0 and i2cl > -1:
                self.htu = None
                init_error = ''
                try:
                    self.htu = HTU21D(i2cl)
                except Exception as e:
                    # BUGFIX: capture the message here. Python 3 deletes `e`
                    # when the except block ends, so the original reference to
                    # `e` in the error-logging branch below raised NameError.
                    init_error = str(e)
                    self.htu = None
                if self.htu:
                    try:
                        self.initialized = self.htu.init
                    except Exception:
                        self.htu = None
                if self.htu is None:
                    self.initialized = False
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('HTU21D/Si7021 can not be initialized! ' + init_error))

    def plugin_read(self):
        """Read temperature/humidity, publish the values, and return True when a read ran."""
        result = False
        if self.initialized and self.readinprogress == 0 and self.enabled:
            # Simple re-entrancy guard while talking to the sensor.
            self.readinprogress = 1
            try:
                temp = self.htu.read_temperature()
                hum = self.htu.read_humidity()
                self.set_value(1, temp, False)
                self.set_value(2, hum, False)
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('HTU21 read error! ' + str(e)))
                # Disable the task on a read failure, matching the original behavior.
                self.enabled = False
            self.plugin_senddata()
            self._lastdataservetime = rpieTime.millis()
            result = True
            self.readinprogress = 0
        return result
def test_unlink(ipfs_mock, np_path, mocker):
    """Unlinking a package removes it via ethpm.remove_package but leaves the
    on-disk contract sources in place; unlinking twice is safe."""
    ethpm.install_package(np_path, 'ipfs://testipfs-math')
    # Spy (not mock) so the real remove_package still runs.
    mocker.spy(ethpm, 'remove_package')
    cli_ethpm._unlink(np_path, 'math')
    assert (ethpm.remove_package.call_count == 1)
    # Contract sources survive the unlink.
    assert np_path.joinpath('contracts/math').exists()
    # A second unlink of the same package must not raise.
    cli_ethpm._unlink(np_path, 'math')
    assert (ethpm.remove_package.call_count == 2)
    assert np_path.joinpath('contracts/math').exists()
def test_reverse_iso8601_datetime():
    """datetime_from_iso8601 parses ISO-8601 strings into UTC-aware datetimes,
    including fractional seconds and non-UTC offsets (nose-style yield test)."""
    dates = [('2011-01-01T00:00:00+00:00', datetime(2011, 1, 1, tzinfo=pytz.utc)), ('2011-01-01T23:59:59+00:00', datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc)), ('2011-01-01T23:59:59.001000+00:00', datetime(2011, 1, 1, 23, 59, 59, 1000, tzinfo=pytz.utc)), ('2011-01-01T23:59:59+02:00', datetime(2011, 1, 1, 21, 59, 59, tzinfo=pytz.utc))]
    for (date_string, expected) in dates:
        # Yield one generated test case per input string.
        (yield (assert_equal, inputs.datetime_from_iso8601(date_string), expected))
class OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Generated wrapper for the Highcharts
    ``plotOptions.wordcloud.sonification.defaultInstrumentOptions.mapping.highpass.frequency`` options.

    NOTE(review): each getter/setter pair below shares one name; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped from this copy, so as written each second ``def`` shadows
    the first. Confirm against the generated upstream source.
    """
    def mapFunction(self):
        # Getter: mapping function name (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stored verbatim (js_type=False -> plain value, not JS code).
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data point property to map to (default None).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)
    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)
    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)
    def within(self):
        # Getter: scope the min/max apply within (default None).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
class OptionSeriesSolidgaugeDataDatalabelsTextpath(Options):
    """Generated wrapper for the Highcharts
    ``series.solidgauge.data.dataLabels.textPath`` options.

    NOTE(review): each getter/setter pair below shares one name; the
    ``@property`` / ``@<name>.setter`` decorators appear to have been
    stripped from this copy, so as written each second ``def`` shadows
    the first. Confirm against the generated upstream source.
    """
    def attributes(self):
        # Getter: SVG attributes for the text path (default None).
        return self._config_get(None)
    def attributes(self, value: Any):
        # Setter: stored verbatim (js_type=False -> plain value, not JS code).
        self._config(value, js_type=False)
    def enabled(self):
        # Getter: whether the text path is enabled (default False).
        return self._config_get(False)
    def enabled(self, flag: bool):
        # Setter.
        self._config(flag, js_type=False)
# NOTE(review): the decorator below lost its "@pytest.mark" prefix in this
# copy -- presumably "@pytest.mark.parametrize"; confirm against upstream.
.parametrize('metric,expected', [(None, 1), ('r2', 1), (get_scorer('neg_mean_squared_error'), 0)], ids=['none', 'R2', 'MSE'])
def test_cross_val_score_vector(trend, metric, expected):
    """cross_val_score on a Vector of two degree-1 Trends fit to synthetic trend
    data scores perfectly (R2=1, MSE=0) under each supported metric."""
    (coords, data) = trend[:2]
    model = Vector([Trend(degree=1), Trend(degree=1)])
    # Duplicate the same data for both vector components.
    scores = cross_val_score(model, coords, (data, data), scoring=metric)
    npt.assert_allclose(scores, expected, atol=1e-10)
# NOTE(review): the line below looks like a class decorator whose leading
# text was stripped in this copy (likely "@AnalysisPluginTestConfig(...)"
# with a module prefix); confirm against upstream.
.AnalysisPluginTestConfig(plugin_class=AnalysisPlugin)
class TestCweCheckerFunctions():
    """Unit tests for the cwe_checker analysis plugin's helper methods."""
    def test_parse_cwe_checker_output(self, analysis_plugin):
        """The checker's JSON output is grouped into a dict keyed by CWE id."""
        test_data = '[\n    {\n        "name": "CWE676",\n        "version": "0.1",\n        "addresses": [\n            ""\n        ],\n        "tids": [\n            "instr__2"\n        ],\n        "symbols": [\n            "FUN_00102ef0"\n        ],\n        "other": [\n            [\n                "dangerous_function",\n                "strlen"\n            ]\n        ],\n        "description": "(Use of Potentially Dangerous Function) FUN_00102ef0 () -> strlen"\n    }\n]'
        result = analysis_plugin._parse_cwe_checker_output(test_data)
        assert isinstance(result, dict)
        assert (len(result.keys()) == 1)
        assert isinstance(result['CWE676'], dict)
    def test_is_supported_arch(self, analysis_plugin):
        """x86-64 ELF binaries (per the file-type analysis string) are supported."""
        fo = FileObject()
        test_data = 'ELF 64-bit LSB shared object, x86-64, version 1 (SYSV), dynamically linked, interpreter /lib64/ld-linux-x86-64.so.2, for GNU/Linux 2.6.32, BuildID[sha1]=8e756708f62592be105b5e8b423080d38ddc8391, stripped'
        fo.processed_analysis = {'file_type': {'result': {'full': test_data}}}
        assert analysis_plugin._is_supported_arch(fo)
class CheckerThread(Thread):
    """Daemon thread that re-runs this script as a subprocess against a URL.

    While the subprocess runs, `result` holds its Popen handle; `shutdown()`
    disarms the parent callback and terminates the subprocess. When the
    subprocess finishes on its own (still armed), the parent's on_net_stop()
    is invoked.
    """
    def __init__(self, parent, url):
        Thread.__init__(self)
        self.daemon = True
        self._armed = True
        self._url = url
        self._parent = parent
        # BUGFIX: `result` was only assigned inside run(), so calling
        # shutdown() before the thread started raised AttributeError.
        self.result = None

    def shutdown(self):
        """Disarm the callback and terminate the checker subprocess, if any."""
        if self.result:
            self._armed = False
            self.result.terminate()

    def run(self):
        # Re-invoke this very file as a child process; stderr is captured.
        self.result = subprocess.Popen([sys.executable, __file__, self._url], stderr=subprocess.PIPE)
        self.result.wait()
        if self._armed:
            self._parent.on_net_stop()
        self.result = None
class TDMPCLearner(acme.Learner):
def __init__(self, spec: specs.EnvironmentSpec, networks: tdmpc_networks.TDMPCNetworks, random_key: jax.random.PRNGKeyArray, replay_client: reverb.Client, iterator: Iterator[TDMPCReplaySample], *, optimizer: optax.GradientTransformation, discount: float=0.99, min_std: float=0.05, per_beta: float=0.4, tau: float=0.01, loss_scale: Optional[LossScalesConfig]=None, rho: float=0.5, logger: Optional[loggers.Logger]=None, counter: Optional[counting.Counter]=None):
if (loss_scale is None):
loss_scale = LossScalesConfig()
self._discount = discount
self._min_std = min_std
self._tau = tau
self._loss_scale = loss_scale
self._rho = rho
self._networks = networks
self._optimizer = optimizer
self._per_beta = per_beta
self._replay_client = replay_client
(param_key, key) = jax.random.split(random_key)
params = tdmpc_networks.init_params(self._networks, spec, param_key)
opt_state = self._optimizer.init(params)
self._state = TrainingState(params=params, target_params=params, opt_state=opt_state, key=key, steps=0)
self._counter = (counter or counting.Counter())
self._logger = logger
def update_priorities(keys_and_priorities: Tuple[(jnp.ndarray, jnp.ndarray)]):
(keys, priorities) = keys_and_priorities
(keys, priorities) = tree.map_structure((lambda x: jax_utils.fetch_devicearray(x).reshape((((- 1),) + x.shape[2:]))), (keys, priorities))
replay_client.mutate_priorities(table=adders_reverb.DEFAULT_PRIORITY_TABLE, updates=dict(zip(keys, priorities)))
self._async_priority_updater = async_utils.AsyncExecutor(update_priorities)
self._iterator = iterator
self._timestamp = None
def save(self):
return self._state
def restore(self, state):
self._state = state
def _compute_loss(self, params: tdmpc_networks.TDMPCParams, target_params: tdmpc_networks.TDMPCParams, batch: reverb.ReplaySample, key: jax.random.PRNGKeyArray):
    """Computes the combined TD-MPC model, value and policy losses.

    Returns:
      (total_loss, (priorities, metrics)) — shaped for
      jax.value_and_grad(..., has_aux=True); `priorities` feed the PER
      updater, `metrics` are scalars for logging.
    """
    # Convert the replay batch to time-major sequences [T, B, ...].
    samples: adders_reverb.Step = jax_utils.batch_to_sequence(batch.data)
    observations = samples.observation
    actions = samples.action[:(- 1)]
    rewards = jnp.expand_dims(samples.reward[:(- 1)], axis=(- 1))

    def policy(params, obs, key):
        # Stochastic policy head with the configured exploration floor.
        return self._networks.pi(params, obs, self._min_std, key)

    def next_core(action, z):
        # One latent dynamics step: predicts (next latent, reward);
        # output format matches hk.static_unroll's (output, carry) contract.
        (next_z, reward) = self._networks.next(params, z, action)
        return ((next_z, reward), next_z)

    # vmap over the time axis; parameters are shared (in_axes=None).
    batched_policy = jax.vmap(policy, in_axes=(None, 0, 0))
    batched_critic = jax.vmap(self._networks.q, (None, 0, 0))
    batched_encoder = jax.vmap(self._networks.h, (None, 0))
    horizon = (observations.shape[0] - 1)
    # Online encodings of all observations, plus target-network encodings
    # used as regression targets for the consistency loss.
    z_prior = batched_encoder(params, observations)
    z_target = batched_encoder(target_params, observations)
    # Unroll the learned dynamics from the first latent through all actions.
    ((online_z_posterior, reward_pred), _) = hk.static_unroll(next_core, actions, z_prior[0])
    assert (online_z_posterior.shape[0] == horizon)
    # Predicted latent trajectory: first encoder latent, then model rollouts.
    z_predictions = jnp.concatenate([z_prior[:1], online_z_posterior], axis=0)
    (q1_t, q2_t) = batched_critic(params, z_predictions[:(- 1)], actions)
    (key, policy_key) = jax.random.split(key)
    policy_a_tp1 = batched_policy(params, z_prior[1:], jax.random.split(policy_key, z_prior[1:].shape[0]))
    # Double-Q TD target using target params.  NOTE(review): `q2_tp2` is
    # presumably a typo for `q2_tp1`; it is used consistently, so behavior
    # is unaffected.
    (q1_tp1, q2_tp2) = batched_critic(target_params, z_prior[1:], policy_a_tp1)
    td_target = (rewards + jax.lax.stop_gradient((self._discount * jnp.minimum(q1_tp1, q2_tp2))))
    # Per-step losses, each reduced to shape [T-1, B].
    consistency_loss = jnp.mean(_l2_loss(z_predictions[1:], z_target[1:]), axis=(- 1))
    reward_loss = jnp.squeeze(_l2_loss(reward_pred, rewards), axis=(- 1))
    value_loss = jnp.squeeze((_l2_loss(q1_t, td_target) + _l2_loss(q2_t, td_target)), axis=(- 1))
    # L1 TD errors become the new replay priorities.
    priorities = jnp.squeeze((_l1_loss(q1_t, td_target) + _l1_loss(q2_t, td_target)), axis=(- 1))
    # rho^t decay weights over the horizon, shaped [T, 1] for broadcasting.
    rhos = jnp.reshape(jnp.power(self._rho, jnp.arange(observations.shape[0])), ((- 1), 1))
    chex.assert_equal_shape([reward_loss, value_loss, priorities, consistency_loss])
    # Temporally-weighted sums over time, leaving per-sample [B] values.
    consistency_loss = jnp.sum((rhos[:(- 1)] * consistency_loss), axis=0)
    reward_loss = jnp.sum((rhos[:(- 1)] * reward_loss), axis=0)
    value_loss = jnp.sum((rhos[:(- 1)] * value_loss), axis=0)
    priorities = jnp.sum((rhos[:(- 1)] * priorities), axis=0)
    # PER importance-sampling weights, normalized by the batch maximum.
    probabilities = batch.info.probability
    importance_sampling_weights = (1 / probabilities).astype(jnp.float32)
    importance_sampling_weights **= self._per_beta
    importance_sampling_weights /= jnp.max(importance_sampling_weights)
    # Weighted, clipped sum of the three model-side loss terms.
    model_loss = (((self._loss_scale.consistency * jnp.clip(consistency_loss, 0, 10000.0)) + (self._loss_scale.reward * jnp.clip(reward_loss, 0, 10000.0))) + (self._loss_scale.value * jnp.clip(value_loss, 0, 10000.0)))
    weighted_model_loss = (importance_sampling_weights * model_loss)
    weighted_model_loss = jnp.mean(weighted_model_loss)
    # Scale gradients by 1/horizon so the loss magnitude is
    # horizon-independent.
    weighted_model_loss = optax.scale_gradient(weighted_model_loss, (1.0 / horizon))
    # Policy loss: maximize min-Q under frozen critic/latents so policy
    # gradients do not flow into the model.
    frozen_params = jax.lax.stop_gradient(params)
    z_policy = jax.lax.stop_gradient(z_predictions)
    policy_actions = batched_policy(params, z_policy, jax.random.split(key, z_predictions.shape[0]))
    (policy_q1, policy_q2) = jax.vmap(self._networks.q, (None, 0, 0))(frozen_params, z_policy, policy_actions)
    policy_q = jnp.squeeze(jnp.minimum(policy_q1, policy_q2), axis=(- 1))
    policy_loss = (- policy_q)
    chex.assert_rank([rhos, policy_q], 2)
    chex.assert_equal_rank([rhos, policy_q])
    policy_loss = jnp.mean(jnp.sum((rhos * policy_loss), axis=0))
    total_loss = (weighted_model_loss + policy_loss)
    # NOTE(review): 'model/consistentcy_loss' is misspelled; kept as-is
    # because downstream log consumers may key on the exact string.
    metrics = {'total_loss': jnp.mean(total_loss), 'policy_loss': jnp.mean(policy_loss), 'model/model_loss': jnp.mean(model_loss), 'model/weighted_model_loss': jnp.mean(weighted_model_loss), 'model/consistentcy_loss': jnp.mean(consistency_loss), 'model/reward_loss': jnp.mean(reward_loss), 'model/critic_loss': jnp.mean(value_loss)}
    return (total_loss, (priorities, metrics))
def _update(self, state, batch):
    """Runs one jitted SGD step.

    Args:
      state: current TrainingState.
      batch: replay sample already resident on device.

    Returns:
      (new_state, metrics, priorities) — `priorities` are handed to the
      async PER updater by `step`.
    """
    (key, random_key) = jax.random.split(state.key)
    grad_fn = jax.value_and_grad(self._compute_loss, has_aux=True)
    ((_, (priorities, metrics)), gradients) = grad_fn(state.params, state.target_params, batch, key)
    metrics['grad_norm'] = optax.global_norm(gradients)
    (updates, opt_state) = self._optimizer.update(gradients, state.opt_state, state.params)
    params = optax.apply_updates(state.params, updates)
    # Polyak-average the online params into the target network.
    target_params = optax.incremental_update(params, state.target_params, self._tau)
    steps = (state.steps + 1)
    new_state = TrainingState(params=params, target_params=target_params, opt_state=opt_state, key=random_key, steps=steps)
    return (new_state, metrics, priorities)

# BUG FIX: the original decorator line was corrupted into the bare tuple
# `(jax.jit, static_argnums=(0,))`, which is a syntax error.  Restore the
# intended jit compilation with `self` (argument 0) treated as static, done
# via explicit wrapping so no extra import is needed.
_update = jax.jit(_update, static_argnums=(0,))
def step(self):
    """Fetches one batch, runs an SGD step, and reports priorities/metrics."""
    splits = next(self._iterator)
    sample_keys = splits.host
    device_batch = splits.device
    self._state, metrics, priorities = self._update(self._state, device_batch)
    # Push refreshed PER priorities back to replay without blocking training.
    if self._replay_client:
        self._async_priority_updater.put((sample_keys, priorities))
    # Track wall-clock time per step for the counter (0 on the first step).
    now = time.time()
    elapsed = now - self._timestamp if self._timestamp else 0
    self._timestamp = now
    counts = self._counter.increment(steps=1, walltime=elapsed)
    if self._logger is not None:
        self._logger.write({**counts, **metrics})
def get_variables(self, names):
    """Returns the current online parameters for each requested name.

    Only the 'policy' collection is exposed; any other name raises KeyError.
    """
    exposed = {'policy': self._state.params}
    return [exposed[name] for name in names]
class ASDLParser(spark.GenericParser, object):
    # Parser for the ASDL schema language, built on spark's GenericParser.
    #
    # NOTE(review): spark derives the grammar from the docstrings of the
    # p_* rule methods; they appear to have been stripped from this copy of
    # the source — confirm against the upstream asdl.py.  Comments below are
    # deliberately `#` comments, not docstrings, so they cannot be mistaken
    # for grammar rules.

    def __init__(self):
        # 'module' is the grammar's start symbol.
        super(ASDLParser, self).__init__('module')

    def typestring(self, tok):
        # spark hook: classify a token by its `type` attribute.
        return tok.type

    def error(self, tok):
        # spark hook: called on a token the grammar cannot accept.
        raise ASDLSyntaxError(tok.lineno, tok)

    def p_module_0(self, arg):
        # Module with an empty body: (module-kw, name, version, '{', '}').
        (module, name, version, _0, _1) = arg
        if (module.value != 'module'):
            raise ASDLSyntaxError(module.lineno, msg=("expected 'module', found %s" % module))
        return Module(name, None, version)

    def p_module(self, arg):
        # Module with definitions: (module-kw, name, version, '{', defs, '}').
        (module, name, version, _0, definitions, _1) = arg
        if (module.value != 'module'):
            raise ASDLSyntaxError(module.lineno, msg=("expected 'module', found %s" % module))
        return Module(name, definitions, version)

    def p_version(self, arg):
        # Version clause: (version-kw, version-string).
        (version, V) = arg
        if (version.value != 'version'):
            # BUG FIX: the format string was "expected 'version', found %"
            # — an incomplete '%' conversion that raises ValueError instead
            # of the intended ASDLSyntaxError.  Use %s like the sibling
            # error messages.
            raise ASDLSyntaxError(version.lineno, msg=("expected 'version', found %s" % version))
        return V

    def p_definition_0(self, arg):
        # Single definition.
        (definition,) = arg
        return definition

    def p_definition_1(self, arg):
        # Definition list: concatenate onto the accumulated list.
        (definitions, definition) = arg
        return (definitions + definition)

    def p_definition(self, arg):
        # One named type definition: (id, '=', type).
        (id, _, type) = arg
        return [Type(id, type)]

    def p_type_0(self, arg):
        # A product type passes through unchanged.
        (product,) = arg
        return product

    def p_type_1(self, arg):
        # A bare sum of constructors.
        (sum,) = arg
        return Sum(sum)

    def p_type_2(self, arg):
        # A sum followed by an 'attributes' clause.
        (sum, id, _0, attributes, _1) = arg
        if (id.value != 'attributes'):
            raise ASDLSyntaxError(id.lineno, msg=('expected attributes, found %s' % id))
        if attributes:
            # Fields are parsed right-to-left; restore source order.
            attributes.reverse()
        return Sum(sum, attributes)

    def p_product(self, arg):
        # Product: '(' fields ')'; fields are parsed right-to-left.
        (_0, fields, _1) = arg
        fields.reverse()
        return Product(fields)

    def p_sum_0(self, arg):
        # Single constructor.
        (constructor,) = arg
        return [constructor]

    def p_sum_1(self, arg):
        # Constructor '|' sum.
        (constructor, _, sum) = arg
        return ([constructor] + sum)

    def p_sum_2(self, arg):
        # Same shape as p_sum_1; spark distinguishes the two by their
        # (stripped) grammar docstrings.
        (constructor, _, sum) = arg
        return ([constructor] + sum)

    def p_constructor_0(self, arg):
        # Constructor with no fields.
        (id,) = arg
        return Constructor(id)

    def p_constructor_1(self, arg):
        # Constructor with a parenthesized field list (parsed reversed).
        (id, _0, fields, _1) = arg
        fields.reverse()
        return Constructor(id, fields)

    def p_fields_0(self, arg):
        # Single field.
        (field,) = arg
        return [field]

    def p_fields_1(self, arg):
        # Comma-separated fields, accumulated in reverse.
        (field, _, fields) = arg
        return (fields + [field])

    def p_field_0(self, arg):
        # Unnamed field: just a type.
        (type,) = arg
        return Field(type)

    def p_field_1(self, arg):
        # Named field: type followed by a name.
        (type, name) = arg
        return Field(type, name)

    def p_field_2(self, arg):
        # Named sequence field (type '*' name).
        (type, _, name) = arg
        return Field(type, name, seq=1)

    def p_field_3(self, arg):
        # Named optional field (type '?' name).
        (type, _, name) = arg
        return Field(type, name, opt=1)

    def p_field_4(self, arg):
        # Unnamed sequence field (type '*').
        (type, _) = arg
        return Field(type, seq=1)

    def p_field_5(self, arg):
        # Unnamed optional field (type '?').
        (type, _) = arg
        return Field(type, opt=1)
def test_edit_event_null_field(db, client, admin_jwt):
    """PATCHing `location-name` to null should clear the field on the event."""
    event = get_event(db)
    assert event.location_name == 'Amsterdam'
    payload = json.dumps({
        'data': {
            'type': 'event',
            'id': str(event.id),
            'attributes': {'location-name': None},
        }
    })
    response = client.patch(
        f'/v1/events/{event.id}',
        content_type='application/vnd.api+json',
        headers=admin_jwt,
        data=payload,
    )
    # Re-read the row so we assert on persisted state, not the stale object.
    db.session.refresh(event)
    assert response.status_code == 200
    assert event.location_name is None
# BUG FIX: this line arrived corrupted as `_os(*metadata.platforms)` — a call
# to an undefined name.  Restored to the conventional RTA platform-guard
# decorator; confirm against the upstream script.
@common.requires_os(*metadata.platforms)
def main():
    """Emulates a DLL image-load chain for detection testing.

    Stages copies of EXE_FILE as winword.exe / WmiPrvSE.exe and a copy of
    user32.dll as wmiutils.dll under C:\\Users\\Public, loads the renamed
    DLL into the fake winword via Invoke-ImageLoad, then spawns powershell
    from the fake WmiPrvSE, and finally cleans up the staged files.
    """
    winword = 'C:\\Users\\Public\\winword.exe'
    user32 = 'C:\\Windows\\System32\\user32.dll'
    dll = 'C:\\Users\\Public\\wmiutils.dll'
    ps1 = 'C:\\Users\\Public\\Invoke-ImageLoad.ps1'
    wmiprvse = 'C:\\Users\\Public\\WmiPrvSE.exe'
    # Stage payloads: copy_file(source, destination).
    common.copy_file(EXE_FILE, winword)
    common.copy_file(user32, dll)
    common.copy_file(PS1_FILE, ps1)
    common.copy_file(EXE_FILE, wmiprvse)
    common.log('Loading wmiutils.dll into fake winword')
    common.execute([winword, '-c', f'Import-Module {ps1}; Invoke-ImageLoad {dll}'], timeout=10)
    common.execute([wmiprvse, '/c', 'powershell'], timeout=1, kill=True)
    # Cleanup.  NOTE(review): wmiprvse is not removed here — confirm whether
    # leaving it behind is intentional.
    common.remove_files(winword, dll, ps1)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.