code stringlengths 281 23.7M |
|---|
def extractNovelsformyBlogspotCom(item):
    """Parse a release post from novelsformy.blogspot.com.

    Returns a release message when a known series tag matches, None for
    previews or titles without volume/chapter info, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (vol or chp):
        return None
    tagmap = [
        ('Daughter of the emperor', 'Daughter of the emperor', 'translated'),
        ('Queen with a scalpel', 'Queen with a scalpel', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(
            item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class BaseHandler(tornado.web.RequestHandler):
    """Shared request handler: cookie-based user lookup plus error rendering."""

    def __init__(self, *request, **kwargs):
        # Generated URLs should not carry a host prefix.
        self.include_host = False
        super(BaseHandler, self).__init__(*request, **kwargs)

    def get_current_user(self):
        """Return the secure 'user_password' cookie, or None if unavailable."""
        try:
            return self.get_secure_cookie('user_password')
        except Exception:
            return None

    def write_error(self, status_code, **kwargs):
        """Log the failure; in debug mode also render a detailed error page."""
        logging.error('ERROR: %s: %s' % (status_code, kwargs))
        if 'exc_info' in kwargs:
            logging.info('Traceback: {}'.format(traceback.format_exception(*kwargs['exc_info'])))
        if not (self.settings.get('debug') and 'exc_info' in kwargs):
            return
        logging.error('rendering error page')
        exc_info = kwargs['exc_info']
        try:
            # Rendering may itself fail; never let the error page raise.
            self.render(
                'error.html',
                error=exc_info[1],
                trace_info=traceback.format_exception(*exc_info),
                request=self.request.__dict__,
            )
            logging.error('rendering complete')
        except Exception as e:
            logging.error(e)
class OptionSeriesFunnel3dSonificationTracks(Options):
    """Highcharts `series.funnel3d.sonification.tracks` option wrapper.

    NOTE(review): the source had duplicate getter/setter method pairs that
    shadowed each other; the missing @property / @<name>.setter decorators
    are restored here.
    """

    @property
    def activeWhen(self) -> 'OptionSeriesFunnel3dSonificationTracksActivewhen':
        """Sub-options controlling when this track is active."""
        return self._config_sub_data('activeWhen', OptionSeriesFunnel3dSonificationTracksActivewhen)

    @property
    def instrument(self):
        """Instrument used for playback (default 'piano')."""
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionSeriesFunnel3dSonificationTracksMapping':
        """Sub-options mapping data to audio parameters."""
        return self._config_sub_data('mapping', OptionSeriesFunnel3dSonificationTracksMapping)

    @property
    def midiName(self):
        """Track name used for MIDI export (default None)."""
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionSeriesFunnel3dSonificationTracksPointgrouping':
        """Sub-options for grouping points during sonification."""
        return self._config_sub_data('pointGrouping', OptionSeriesFunnel3dSonificationTracksPointgrouping)

    @property
    def roundToMusicalNotes(self):
        """Whether pitches are rounded to musical notes (default True)."""
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        """Whether the play marker is shown (default True)."""
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        """Track type (default 'instrument')."""
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
def test_circular_dependency_dfvs_2(circular_dependency_phi_functions, variable_v, variable_u):
    """The DFVS of the circular phi-dependency graph is phi functions 5 and 1."""
    phis = circular_dependency_phi_functions
    phis[6].substitute(variable_v[6], variable_u[5])
    dfvs = PhiDependencyGraph(phis).compute_directed_feedback_vertex_set_of()
    assert dfvs == InsertionOrderedSet([phis[5], phis[1]])
def customer_record(request, pk):
    """Render a single customer record; anonymous users are redirected home."""
    if not request.user.is_authenticated:
        messages.success(request, 'You Must Be Logged In To View That Page...')
        return redirect('home')
    record = Record.objects.get(id=pk)
    return render(request, 'record.html', {'customer_record': record})
class SkillBar(Html.Html):
    """Skill-bars component: an inner table with one row per record and a
    colored horizontal gauge sized from the record's value column.

    NOTE(review): `options` and `js` must be properties (`__init__` reads
    `self.options.value` and `js` caches into `self._js`); the stripped
    @property decorators are restored here.
    """
    name = 'Skill Bars'
    _option_cls = OptSliders.OptionsSkillbars

    def __init__(self, page: primitives.PageModel, data, y_column, x_axis, title, width, height, html_code, options, profile, verbose: bool=False):
        super(SkillBar, self).__init__(page, '', html_code=html_code, profile=profile, options=options, css_attrs={'width': width, 'height': height}, verbose=verbose)
        self.add_title(title, options={'content_table': False})
        # Inner table holding one (label, gauge[, value]) row per record.
        self.innerPyHTML = page.ui.layouts.table(options={'header': False})
        self.innerPyHTML.options.managed = False
        self.options.value = y_column
        self.options.label = x_axis
        for rec in data:
            # Gauge cell whose CSS width encodes the record value.
            value = page.ui.div(EntHtml4.NO_BREAK_SPACE).css({'width': ('%s%s' % (rec[y_column], options.get('unit', '%'))), 'margin-left': '2px', 'background': options.get('background', page.theme.success.light)})
            value.options.managed = False
            if options.get('values', False):
                # Third cell shows the numeric value next to the gauge.
                self.innerPyHTML += [rec[x_axis], value, ('%s%s' % (int(rec[y_column]), options.get('unit', 'px')))]
                self.innerPyHTML[-1][2].style.css.padding = '0 5px'
            else:
                self.innerPyHTML += [rec[x_axis], value]
            self.innerPyHTML[-1][1].attr['align'] = 'left'
            self.innerPyHTML[-1][0].style.css.padding = '0 5px'
            self.innerPyHTML[-1][1].style.css.width = '100%'
            if options.get('borders', False):
                self.innerPyHTML[-1][1].style.css.border = ('1px solid %s' % page.theme.greys[4])
                self.innerPyHTML[-1][1][0].style.css.margin_left = 0
        self.innerPyHTML.style.clear()
        self.css({'margin': '5px 0'})
        self.options.set_thresholds()

    @property
    def options(self) -> OptSliders.OptionsSkillbars:
        """Component options object."""
        return super().options

    @property
    def js(self) -> JsComponents.SkillBar:
        """Lazily-built JavaScript API wrapper for this component."""
        if self._js is None:
            self._js = JsComponents.SkillBar(self, js_code=self.dom.varName, page=self.page)
        return self._js

    def __str__(self):
        # Color each gauge by its percentage against the configured thresholds.
        for row in self.innerPyHTML:
            if row is None:
                break
            # Gauge CSS width like '75%' -> 75.
            percent = int(float(row[1][0].css('width')[:-1]))
            if percent > self.options.thresholds[1]:
                row[1][0].style.css.background = self.options.success
            elif percent > self.options.thresholds[0]:
                row[1][0].style.css.background = self.options.warning
            else:
                row[1][0].style.css.background = self.options.danger
            row[1][0].style.css.line_height = (row[1][0].style.css.line_height or 20)
            row[1][0].style.css.font_factor(-2)
            if self.options.percentage:
                # Display the percentage text inside the gauge itself.
                row[1][0]._vals = [row[1][0].css('width')]
                row[1][0].style.css.padding_left = 5
        return '<div %s>%s</div>' % (self.get_attrs(css_class_names=self.style.get_classes()), self.content)
def test_error_if_input_df_contains_na_in_transform(df_vartypes, df_na):
    """transform() must raise ValueError when the input DataFrame contains NaN."""
    # np.inf: the capitalized np.Inf alias was removed in NumPy 2.0.
    age_dict = {'Age': [0, 10, 20, 30, np.inf]}
    with pytest.raises(ValueError):
        transformer = ArbitraryDiscretiser(binning_dict=age_dict)
        transformer.fit(df_vartypes)
        transformer.transform(df_na[['Name', 'City', 'Age', 'Marks', 'dob']])
class Query(object):
    """Generated gRPC client stubs for ibc.applications.transfer.v1.Query.

    NOTE(review): the @staticmethod decorators were mangled away in the
    generated source (each method takes `request`, not `self`); restored here.
    """

    @staticmethod
    def DenomTrace(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        """Unary-unary call: query a single denomination trace."""
        return grpc.experimental.unary_unary(request, target, '/ibc.applications.transfer.v1.Query/DenomTrace', ibc_dot_applications_dot_transfer_dot_v1_dot_query__pb2.QueryDenomTraceRequest.SerializeToString, ibc_dot_applications_dot_transfer_dot_v1_dot_query__pb2.QueryDenomTraceResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DenomTraces(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        """Unary-unary call: query all denomination traces."""
        return grpc.experimental.unary_unary(request, target, '/ibc.applications.transfer.v1.Query/DenomTraces', ibc_dot_applications_dot_transfer_dot_v1_dot_query__pb2.QueryDenomTracesRequest.SerializeToString, ibc_dot_applications_dot_transfer_dot_v1_dot_query__pb2.QueryDenomTracesResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def Params(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        """Unary-unary call: query module parameters."""
        return grpc.experimental.unary_unary(request, target, '/ibc.applications.transfer.v1.Query/Params', ibc_dot_applications_dot_transfer_dot_v1_dot_query__pb2.QueryParamsRequest.SerializeToString, ibc_dot_applications_dot_transfer_dot_v1_dot_query__pb2.QueryParamsResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def DenomHash(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None):
        """Unary-unary call: query a denomination hash."""
        return grpc.experimental.unary_unary(request, target, '/ibc.applications.transfer.v1.Query/DenomHash', ibc_dot_applications_dot_transfer_dot_v1_dot_query__pb2.QueryDenomHashRequest.SerializeToString, ibc_dot_applications_dot_transfer_dot_v1_dot_query__pb2.QueryDenomHashResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
class SchemaType(_common.FlyteIdlEntity):
    """Flyte IDL schema type: an ordered collection of typed columns.

    NOTE(review): `to_flyte_idl` reads `self.name`/`self.type` attribute-style
    and `from_flyte_idl` takes `cls`, so the @property / @classmethod
    decorators (mangled in the source) are restored here.
    """

    class SchemaColumn(_common.FlyteIdlEntity):
        """A single named, typed column of a schema."""

        class SchemaColumnType(object):
            # Enum values mirrored from the protobuf definition.
            INTEGER = _types_pb2.SchemaType.SchemaColumn.INTEGER
            FLOAT = _types_pb2.SchemaType.SchemaColumn.FLOAT
            STRING = _types_pb2.SchemaType.SchemaColumn.STRING
            DATETIME = _types_pb2.SchemaType.SchemaColumn.DATETIME
            DURATION = _types_pb2.SchemaType.SchemaColumn.DURATION
            BOOLEAN = _types_pb2.SchemaType.SchemaColumn.BOOLEAN

        def __init__(self, name, type):
            """:param name: column name
            :param type: a SchemaColumnType value
            """
            self._name = name
            self._type = type

        @property
        def name(self):
            """Column name."""
            return self._name

        @property
        def type(self):
            """Column type (SchemaColumnType value)."""
            return self._type

        def to_flyte_idl(self):
            """Serialize to the protobuf SchemaColumn message."""
            return _types_pb2.SchemaType.SchemaColumn(name=self.name, type=self.type)

        @classmethod
        def from_flyte_idl(cls, proto):
            """Build a SchemaColumn from its protobuf message."""
            return cls(name=proto.name, type=proto.type)

    def __init__(self, columns):
        """:param columns: iterable of SchemaColumn."""
        self._columns = columns

    @property
    def columns(self):
        """Ordered columns of this schema."""
        return self._columns

    def to_flyte_idl(self):
        """Serialize to the protobuf SchemaType message."""
        return _types_pb2.SchemaType(columns=[c.to_flyte_idl() for c in self.columns])

    @classmethod
    def from_flyte_idl(cls, proto):
        """Build a SchemaType from its protobuf message."""
        return cls(columns=[SchemaType.SchemaColumn.from_flyte_idl(c) for c in proto.columns])
class ReshapeTestCase(unittest.TestCase):
    """End-to-end tests for the reshape op, checked against PyTorch references."""

    def _infer_shape(self, x, shape):
        """Resolve a target `shape` (at most one -1 entry) against input shape `x`."""
        new_shape = list(shape)
        cur_shape = x
        unknown_idx = -1
        prod = 1
        for idx, v in enumerate(new_shape):
            if v == -1:
                # Only one dimension may be left for inference.
                assert unknown_idx == -1
                unknown_idx = idx
            else:
                prod *= v
        numel = 1
        for dim in cur_shape:
            numel *= dim
        if unknown_idx == -1:
            assert numel == prod, f'When there is no unknown index, we expect dim products to be equal, got current shape numel={numel!r} != new shape prod={prod!r}'
        else:
            if prod <= 0:
                raise RuntimeError(f'cannot reshape tensor {x} with shape {shape}')
            assert numel % prod == 0
            new_shape[unknown_idx] = numel // prod
        return new_shape

    def _test_reshape(self, batch_size=(1, 3), X_shape=(16, 32, 64), Y_shape=((-1), 16, 16, 128), test_name='reshape', input_type='float16'):
        """Compile avg-pool + two reshapes, compare with PyTorch per batch size."""
        target = detect_target()
        X = Tensor(shape=[IntVar(values=list(batch_size), name='input_batch'), *X_shape], dtype=input_type, name='input_0', is_input=True)
        shape = list(Y_shape)
        OP1 = nn.AvgPool2d(kernel_size=3, stride=1, padding=1)
        OP2 = nn.Reshape()
        OP3 = nn.Reshape()
        Y1 = OP1(X)
        Y2 = OP2(Y1, shape)
        # Second reshape appends a trailing unit dimension.
        Y = OP3(Y2, shape + [1])
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, target, './tmp', test_name)
        for b in batch_size:
            # The PyTorch reference runs in NCHW; the module consumes NHWC.
            X_shape_pt = (X_shape[2], X_shape[0], X_shape[1])
            X_pt = get_random_torch_tensor(shape=(b, *X_shape_pt), dtype=input_type)
            OP_pt = torch.nn.AvgPool2d(kernel_size=3, stride=1, padding=1)
            Y1_pt = OP_pt(X_pt).permute([0, 2, 3, 1])
            Y2_pt = torch.reshape(Y1_pt, shape)
            Y_pt = torch.reshape(Y2_pt, shape + [1])
            x = X_pt.permute((0, 2, 3, 1)).contiguous()
            y = torch.empty_like(Y_pt)
            module.run_with_tensors([x], [y])
            self.assertTrue(torch.allclose(Y_pt, y, atol=0.01, rtol=0.01))

    def _test_reshape_single_op(self, X_shape=(16, 32, 64), Y_shape=((-1), 16, 16, 128), test_name='reshape', check_name_retention=False, input_type='float16'):
        """Compile a single reshape and check all dynamic-shape combinations."""
        target = detect_target()
        X_shape = [(dim if isinstance(dim, IntVar) else IntImm(dim)) for dim in X_shape]
        Y_shape = [(dim if isinstance(dim, IntVar) else IntImm(dim)) for dim in Y_shape]
        X = Tensor(shape=X_shape, dtype=input_type, name='input_0', is_input=True)
        OP = nn.Reshape()
        Y = OP(X, Y_shape)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, target, './tmp', test_name)
        # Enumerate every concrete shape combination of the dynamic dims.
        x_shapes = list(itertools.product(*[var._attrs['values'] for var in X_shape]))
        new_shapes = list(itertools.product(*[var._attrs['values'] for var in Y_shape]))
        if len(x_shapes) > len(new_shapes):
            assert len(new_shapes) == 1
            new_shapes = new_shapes * len(x_shapes)
        y_shapes = [self._infer_shape(x_shape, new_shape) for (x_shape, new_shape) in zip(x_shapes, new_shapes)]
        for x_shape, y_shape in zip(x_shapes, y_shapes):
            X_pt = get_random_torch_tensor(x_shape, input_type)
            Y_pt = torch.reshape(X_pt, y_shape)
            y = torch.empty_like(Y_pt)
            module.run_with_tensors([X_pt], [y])
            self.assertTrue(torch.allclose(Y_pt, y, atol=0.01, rtol=0.01))
        if check_name_retention:
            # The named batch dim must survive the reshape exactly once.
            self.assertTrue(1 == sum(('input_batch' == dim._attrs['name']) for dim in Y._attrs['shape']))

    def test_reshape(self):
        self._test_reshape(test_name='reshape0')
        self._test_reshape([4, 2], (4, 8, 8), ((-1),), 'reshape1')
        self._test_reshape([3, 1], (5, 4, 16), ((-1), 8), 'reshape2')
        self._test_reshape_single_op(X_shape=(IntVar(values=(1, 3), name='input_batch'), 16, 32, 64), Y_shape=((-1), 16, 16, 128), test_name='reshape3')
        self._test_reshape_single_op(X_shape=(1, 16, 32, 64), Y_shape=[1, 64, 16, 32], test_name='reshape4')
        self._test_reshape_single_op(X_shape=(IntVar(values=(2, 4), name='input_batch'), 0, 8), Y_shape=(0, 2, 4), test_name='reshape1')
        self._test_reshape_single_op(X_shape=(IntVar(values=(2, 4), name='input_batch'), 1, 120), Y_shape=(5, 4, (-1), 3, 2), test_name='reshape_name', check_name_retention=True)
        self._test_reshape_single_op(X_shape=(IntVar(values=(2, 4), name='input_batch'), 1, 120), Y_shape=(5, 4, IntVar(values=(2, 4), name='input_batch'), 3, (-1)), test_name='reshape_name_unknown_static_dim', check_name_retention=True)
        self._test_reshape_single_op(X_shape=(IntVar(values=(2, 4), name='input_batch'), 1, 120), Y_shape=(5, IntVar(values=(2, 4)), 3, 4, 2), test_name='reshape_name_no_unknown_dims', check_name_retention=True)
        self._test_reshape_single_op(X_shape=(IntVar(values=(20, 40), name='input_batch'), 1, 12), Y_shape=(4, 2, IntVar(values=(2, 4)), 3, 5), test_name='reshape_unsqueeze_intvar_dim')

    # Restored decorator: the source had a bare tuple expression here, the
    # residue of a mangled @unittest.skipIf(...).
    @unittest.skipIf(detect_target().name() == 'rocm', 'fp32 not supported in ROCm')
    def test_reshape_float32(self):
        self._test_reshape_single_op(input_type='float32', test_name='reshape_float32')

    def _test_reshape_shape(self, in_shape, out_shape, target_shape):
        """Check shape inference only (no compilation of kernels)."""
        X = Tensor(shape=in_shape, name='input_0', is_input=True)
        OP = nn.Reshape()
        Y = OP(X, target_shape)
        y_shape = Y.shape()
        self.assertEqual(len(y_shape), len(out_shape))
        for y, o in zip(y_shape, out_shape):
            self.assertEqual(y, o)

    def test_reshape_shape_symbolic(self):
        """Symbolic target dims propagate through reshape shape inference."""
        dummy_shape = Tensor(shape=[1, 2], name='dummy_shape', is_input=True)
        var1 = IntVar(values=[2, 4], name='var1')
        tensor1 = IntVarTensor(var1)
        X_shape = [var1, IntImm(256)]
        intvar = [ops.size()(dummy_shape, idx) for idx in range(2)]
        target_shape = [(intvar[1] * tensor1), IntImm(-1)]
        outdim0 = IntVar(values=[4, 8])
        outdim0._attrs['symbolic_value'] = (var1._attrs['symbolic_value'] * 2)
        answer_shape = [outdim0, IntImm(128)]
        self._test_reshape_shape(X_shape, answer_shape, target_shape)
def test_clean_html_latex(cli, build_resources):
    """`clean --html --latex` removes both subfolders but keeps _build itself."""
    books, _tocs = build_resources
    path = books.joinpath('clean_cache')
    result = cli.invoke(build, path.as_posix())
    assert result.exit_code == 0
    build_path = path.joinpath('_build')
    assert build_path.exists()
    # Fabricate a latex output dir so both targets exist before cleaning.
    os.mkdir(os.path.join(build_path, 'latex'))
    assert build_path.joinpath('latex').exists()
    assert build_path.joinpath('html').exists()
    result = cli.invoke(clean, ('--html', '--latex', path.as_posix()))
    assert result.exit_code == 0
    assert build_path.exists()
    assert not build_path.joinpath('latex').exists()
    assert not build_path.joinpath('html').exists()
def get_minimal_attendee(db, user=None, owner=False, event_status='published'):
    """Create a minimal attendee fixture, optionally making `user` the event owner."""
    order_user = None if owner else user
    attendee = AttendeeOrderTicketSubFactory(order__user=order_user, event__state=event_status)
    if owner:
        role, _ = get_or_create(Role, name='owner', title_name='Owner')
        UsersEventsRoles(user=user, event=attendee.event, role=role)
    db.session.commit()
    return attendee
class InlineResponse2007(ModelNormal):
    """Generated OpenAPI model for an inline response schema.

    NOTE(review): the @cached_property / @classmethod /
    @convert_js_args_to_python_args decorators were mangled in the generated
    source (bare `_property` / `_js_args_to_python_args` lines); restored here.
    """

    allowed_values = {}
    validations = {}

    @cached_property
    def additional_properties_type():
        # Types accepted for undeclared (additional) properties.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        # Declared property name -> tuple of accepted types.
        lazy_import()
        return {'data': ([ServiceAuthorizationResponseData],)}

    @cached_property
    def discriminator():
        return None

    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from deserialized server data (read-only attrs allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                # Silently discard unknown keys when configured to do so.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Instantiate from user data; read-only attributes are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def test_centered_product_frame_with_given_mean(traces):
    """CenteredProduct with an explicit mean reproduces the stored reference."""
    precomputed_mean = _read('centered_product_given_mean.npz')
    expected = _read('centered_product_result_given_mean_frame1_dist_10.npz')
    preprocess = scared.preprocesses.high_order.CenteredProduct(
        mean=precomputed_mean, frame_1=slice(None, 50), distance=10)
    assert np.array_equal(expected, preprocess(traces))
def test_base_tree(tree_node, tree_args, nuts):
    """The base-case tree has log weight 0 or -inf and identical endpoints."""
    nuts._multinomial_sampling = False
    log_slice = torch.log1p(-torch.rand(())) - tree_args.initial_energy
    tree_args = tree_args._replace(log_slice=log_slice)
    tree = nuts._build_tree_base_case(root=tree_node, args=tree_args)
    assert isinstance(tree, _Tree)
    neg_inf = torch.tensor(float('-inf'))
    zero = torch.tensor(0.0)
    assert torch.isclose(tree.log_weight, neg_inf) or torch.isclose(tree.log_weight, zero)
    assert tree.left == tree.right
class Polynomial(IntegrationTestFunction):
    """Polynomial integrand: f(x) = sum_k coeffs[k] * x**k per integration dim."""

    def __init__(self, expected_result=None, coeffs=None, integration_dim=1, domain=None, is_complex=False, backend=None, integrand_dims=1):
        """:param coeffs: polynomial coefficients, lowest order first
        (defaults to [2], i.e. the constant 2)."""
        super().__init__(expected_result, integration_dim, domain, is_complex, backend, integrand_dims)
        # Fix: avoid the shared-mutable-default-argument pitfall (was coeffs=[2]).
        if coeffs is None:
            coeffs = [2]
        if backend == 'tensorflow':
            # TensorFlow needs explicitly typed coefficients.
            if is_complex:
                coeffs = list(map(complex, coeffs))
            else:
                coeffs = list(map(float, coeffs))
        if not is_complex:
            self.coeffs = anp.array(coeffs, like=self.domain, dtype=self.domain.dtype)
        else:
            self.coeffs = anp.array(coeffs, like=self.domain)
        self.order = len(coeffs) - 1
        self.f = self._poly

    def _poly(self, x):
        """Evaluate the polynomial at `x` and sum over the integration dims."""
        if infer_backend(x) != 'tensorflow':
            exponentials = (x.reshape((x.shape + (1,))) ** anp.linspace(0, self.order, (self.order + 1), like=x, dtype=x.dtype))
            assert exponentials.dtype == x.dtype
        else:
            # TensorFlow: build the exponent vector with an explicit dtype.
            ks = anp.array(range(self.order + 1), dtype=x.dtype, like=x)
            exponentials = (x.reshape((x.shape + (1,))) ** ks)
            assert exponentials.dtype == x.dtype
        if exponentials.dtype != self.coeffs.dtype:
            # Only a real/complex mismatch is expected; promote to complex.
            assert self.is_complex
            exponentials = anp.cast(exponentials, self.coeffs.dtype)
        exponentials = anp.multiply(exponentials, self.coeffs)
        exponentials = anp.sum(exponentials, axis=2)
        return anp.sum(exponentials, axis=1)
# Restored decorators (mangled to bare `.requires_eclipse` / `.usefixtures` lines).
@pytest.mark.requires_eclipse
@pytest.mark.usefixtures('use_tmpdir', 'init_eclrun_config')
def test_no_hdf5_output_by_default_with_ecl100(source_root):
    """An ecl100 run must not produce an HDF5 summary file by default."""
    shutil.copy(os.path.join(source_root, 'test-data/eclipse/SPE1.DATA'), 'SPE1.DATA')
    econfig = ecl_config.Ecl100Config()
    ecl_run.run(econfig, ['SPE1.DATA', '--version=2019.3'])
    assert not os.path.exists('SPE1.h5')
def extractAlbert325TranslationsWordpressCom(item):
    """Parse a release post from albert325translations.wordpress.com.

    Returns a release message for a recognized tag, None for previews or
    titles lacking volume/chapter info, and False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (vol or chp) or 'preview' in item['title'].lower():
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'),
                                   ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_capture_serverless_s3_batch(event_s3_batch, context, elasticapm_client):
    """An S3-batch-triggered lambda yields one transaction containing one span."""
    os.environ['AWS_LAMBDA_FUNCTION_NAME'] = 'test_func'

    # Restored decorator (mangled to a bare `_serverless` line in the source).
    @capture_serverless
    def test_func(event, context):
        with capture_span('test_span'):
            time.sleep(0.01)
        return

    test_func(event_s3_batch, context)
    assert len(elasticapm_client.events[constants.TRANSACTION]) == 1
    transaction = elasticapm_client.events[constants.TRANSACTION][0]
    assert transaction['name'] == 'test_func'
    assert transaction['span_count']['started'] == 1
class Vectorizer():
def __init__(self, type_vectorizers=None, column_vectorizers=None):
self.type_vectorizers = ({} if (type_vectorizers is None) else type_vectorizers)
self.column_vectorizers = ({} if (column_vectorizers is None) else column_vectorizers)
def clear(self):
self.type_vectorizers = {}
self.column_vectorizers = {}
def contains_tsvector(self, tsvector_column):
if (not hasattr(tsvector_column.type, 'columns')):
return False
return any(((getattr(tsvector_column.table.c, column) in self) for column in tsvector_column.type.columns))
def __contains__(self, column):
try:
self[column]
return True
except KeyError:
return False
def __getitem__(self, column):
if (column in self.column_vectorizers):
return self.column_vectorizers[column]
type_class = column.type.__class__
if (type_class in self.type_vectorizers):
return self.type_vectorizers[type_class]
raise KeyError(column)
def __call__(self, type_or_column):
def outer(func):
(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
if (isclass(type_or_column) and issubclass(type_or_column, TypeEngine)):
self.type_vectorizers[type_or_column] = wrapper
elif isinstance(type_or_column, sa.Column):
self.column_vectorizers[type_or_column] = wrapper
elif isinstance(type_or_column, InstrumentedAttribute):
prop = type_or_column.property
if (not isinstance(prop, sa.orm.ColumnProperty)):
raise TypeError('Given InstrumentedAttribute does not wrap ColumnProperty. Only instances of ColumnProperty are supported for vectorizer.')
column = type_or_column.property.columns[0]
self.column_vectorizers[column] = wrapper
else:
raise TypeError('First argument should be either valid SQLAlchemy type, Column, ColumnProperty or InstrumentedAttribute object.')
return wrapper
return outer |
def iconButton_paintEvent(button: QtWidgets.QPushButton, pixmap: QtGui.QPixmap, event: QtGui.QPaintEvent):
    """Paint the button normally, then overlay the icon pixmap.

    The icon is centered inside a 30px slot offset 5px from the left edge and
    centered vertically in the button.
    """
    QtWidgets.QPushButton.paintEvent(button, event)
    # Round to the nearest pixel; drawPixmap takes integer coordinates.
    pos_x = 5 + int((30 - pixmap.width()) * 0.5 + 0.5)
    # Fix: '/' yields a float, which the int overload of drawPixmap rejects.
    pos_y = (button.height() - pixmap.height()) // 2
    painter = QtGui.QPainter(button)
    painter.setRenderHint(QtGui.QPainter.Antialiasing, True)
    painter.setRenderHint(QtGui.QPainter.SmoothPixmapTransform, True)
    painter.drawPixmap(pos_x, pos_y, pixmap)
class OptionPlotoptionsVariablepieSonificationContexttracksMappingRate(Options):
    """Highcharts variable-pie sonification `mapping.rate` options.

    NOTE(review): duplicate getter/setter method pairs shadowed each other in
    the generated source; the missing @property / setter decorators are
    restored here.
    """

    @property
    def mapFunction(self):
        """Mapping function (default None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Property to map to (default None)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum mapped value (default None)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum mapped value (default None)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping applies within (default None)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# Restored decorator (mangled to a bare `.use_numba` line in the source).
@pytest.mark.use_numba
def test_laplace_equation_cartesian():
    """The gravity tensor trace g_ee + g_nn + g_zz must vanish (Laplace)."""
    region = (2000.0, 10000.0, -3000.0, 5000.0)
    points = vd.scatter_points(region, size=30, extra_coords=-1000.0, random_state=0)
    masses = np.arange(points[0].size)
    coordinates = vd.grid_coordinates(region=region, spacing=1000.0, extra_coords=0)
    g_ee = point_gravity(coordinates, points, masses, field='g_ee')
    g_nn = point_gravity(coordinates, points, masses, field='g_nn')
    g_zz = point_gravity(coordinates, points, masses, field='g_zz')
    npt.assert_allclose(g_ee + g_nn, -g_zz)
def test_duration_validation():
    """Duration config values parse unit suffixes; malformed input -> error."""
    class MyConfig(_ConfigBase):
        microsecond = _DurationConfigValue('US', allow_microseconds=True)
        millisecond = _DurationConfigValue('MS')
        second = _DurationConfigValue('S')
        minute = _DurationConfigValue('M')
        default_unit_ms = _DurationConfigValue('DM', unitless_factor=0.001)
        wrong_pattern = _DurationConfigValue('WRONG_PATTERN')

    config = MyConfig({
        'US': '10us',
        'MS': '-10ms',
        'S': '5s',
        'M': '17m',
        'DM': '83',
        'WRONG_PATTERN': '5 ms',
    })
    assert config.microsecond == timedelta(microseconds=10)
    assert config.millisecond == timedelta(milliseconds=-10)
    assert config.second == timedelta(seconds=5)
    assert config.minute == timedelta(minutes=17)
    assert config.default_unit_ms == timedelta(milliseconds=83)
    # The space in '5 ms' does not match the pattern: value None, error logged.
    assert config.wrong_pattern is None
    assert 'WRONG_PATTERN' in config.errors
def get_job_environment() -> 'JobEnvironment':
    """Return the cluster environment the current job is running in.

    The `_TEST_CLUSTER_` environment variable, when set, overrides
    auto-detection and must name a known environment.

    Raises:
        RuntimeError: when no known environment reports itself as active.
    """
    envs = get_job_environments()
    if '_TEST_CLUSTER_' in os.environ:
        c = os.environ['_TEST_CLUSTER_']
        assert c in envs, f"Unknown $_TEST_CLUSTER_='{c}', available: {envs.keys()}."
        return envs[c]
    for env in envs.values():
        if env.activated():
            return env
    # Typo fix in the error message: 'runnning' -> 'running'.
    raise RuntimeError(f"Could not figure out which environment the job is running in. Known environments: {', '.join(envs.keys())}.")
def __check_single_track_as_recording(mb_recording, disc_track):
    """Score an ISRC match between a MusicBrainz recording and a disc track.

    Returns +1000 for a matching ISRC, -100 for a mismatch, and 0 when either
    side carries no ISRC information.
    """
    if not (hasattr(disc_track, 'isrc') and disc_track.isrc is not None):
        return 0
    mb_isrc_list = mb_recording.get('isrc-list')
    if mb_isrc_list is None:
        return 0
    return 1000 if disc_track.isrc in mb_isrc_list else -100
def deploy_storage_churn_contract(chain, nonce=0):
    """Deploy the storage-churn test contract and return its computed address.

    Builds an unsigned contract-creation transaction (empty `to` field) with
    the given nonce, signs it with the funded test account, mines one block,
    and derives the resulting contract address from sender address + nonce.

    NOTE(review): the bytecode blob is opaque fixture data and is kept
    byte-identical.
    """
    deploy_tx = chain.create_unsigned_transaction(nonce=nonce, gas_price=1234, gas=3000000, to=b'', value=0, data=decode_hex('fd5bf3fefd5bce01cb0dff59aadbdc1578063ef6537b5146100ef575b600080fd5b61007dfd5bd565bf35b6100bfafd5bb005b6100eddfd5be565b005b61011bfd5bb005bb3073ffffffffffffffffffffffffffffffffffffffff1663adbdffffffff1660e01bbfd5b505afc573d6000803e3d6000fd5bffffffffffffffffffffffffffffffffffffffff1663adbdffffffff1660e01bbffd5b505afd6000803e3d6000fd5bbbbb505b50565bbedbb565bb5056fea165627a7adba28fdce42aff2ea2bf5f72e20dcb00a6ed4094dd313bf8f5220029'))
    chain.apply_transaction(deploy_tx.as_signed_transaction(FUNDED_ACCT))
    chain.mine_block()
    return generate_contract_address(FUNDED_ACCT.public_key.to_canonical_address(), nonce)
class TestJzCzhz(util.ColorAssertsPyTest):
    """Round-trip conversion cases for the JzCzhz color space."""

    # (input color, expected jzczhz serialization) pairs.
    COLORS = [
        ('red', 'color(--jzczhz 0.13438 0.16252 43.502)'),
        ('orange', 'color(--jzczhz 0.16937 0.12698 75.776)'),
        ('yellow', 'color(--jzczhz 0.2096 0.1378 102)'),
        ('green', 'color(--jzczhz 0.09203 0.10932 132.99)'),
        ('blue', 'color(--jzczhz 0.09577 0.19029 257.61)'),
        ('indigo', 'color(--jzczhz 0.06146 0.10408 287.05)'),
        ('violet', 'color(--jzczhz 0.16771 0.08468 319.37)'),
        ('white', 'color(--jzczhz 0.22207 0.0002 none)'),
        ('gray', 'color(--jzczhz 0.11827 0.00014 none)'),
        ('black', 'color(--jzczhz 0 0 none)'),
        ('color(--jzczhz 1 0.3 270)', 'color(--jzczhz 1 0.3 270)'),
        ('color(--jzczhz 1 0.3 270 / 0.5)', 'color(--jzczhz 1 0.3 270 / 0.5)'),
        ('color(--jzczhz 50% 50% 50% / 50%)', 'color(--jzczhz 0.5 0.25 180 / 0.5)'),
        ('color(--jzczhz none none none / none)', 'color(--jzczhz none none none / none)'),
        ('color(--jzczhz 0% 0% 0%)', 'color(--jzczhz 0 0 0)'),
        ('color(--jzczhz 100% 100% 100%)', 'color(--jzczhz 1 0.5 360 / 1)'),
        ('color(--jzczhz -100% -100% -100%)', 'color(--jzczhz -1 -0.5 -360 / 1)'),
    ]

    # Restored decorator (mangled to a bare `.parametrize` line in the source).
    @pytest.mark.parametrize('color1,color2', COLORS)
    def test_colors(self, color1, color2):
        """Converting color1 to jzczhz must equal color2."""
        self.assertColorEqual(Color(color1).convert('jzczhz'), Color(color2))
def extractKieukieudaysCom(item):
    """Parse a release from kieukieudays.com; only the 'WATTT' tag is known."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (vol or chp) or 'preview' in item['title'].lower():
        return None
    if 'WATTT' not in item['tags']:
        return False
    return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
def run_tasks(tasks: Union[(str, Dict[(str, str)], List[Union[(Dict[(str, str)], str)]])]) -> None:
    """Run one or more named tasks.

    Accepts a single task name, a {name: command} mapping, or a list mixing
    both forms; a bare string runs a task whose command equals its name.
    """
    from src.task import run_task
    logging.debug('run_tasks(%s)', tasks)
    if isinstance(tasks, dict):
        for name, command in tasks.items():
            run_task(name, command)
    elif isinstance(tasks, list):
        for entry in tasks:
            if isinstance(entry, dict):
                for name, command in entry.items():
                    run_task(name, command)
            else:
                run_task(entry, entry)
    else:
        run_task(tasks, tasks)
class M3UConverter(FormatConverter):
    """Import/export playlists in the M3U / extended-M3U text format."""
    title = _('M3U Playlist')
    content_types = ['audio/x-mpegurl', 'audio/mpegurl']

    def __init__(self):
        FormatConverter.__init__(self, 'm3u')

    def export_to_file(self, playlist, path, options=None):
        """Write `playlist` to `path` (a URI) as an extended M3U file.

        Emits the #EXTM3U header, an optional #PLAYLIST name line, then one
        "#EXTINF:<seconds>,<joined title>" line plus the track path per track.
        NOTE(review): export joins [title, artist] while import_from_file
        parses the *last* ' - ' field as the title — verify round-trip
        ordering against the importer.
        """
        with GioFileOutputStream(Gio.File.new_for_uri(path)) as stream:
            stream.write('#EXTM3U\n')
            if playlist.name:
                stream.write('#PLAYLIST: {name}\n'.format(name=playlist.name))
            for track in playlist:
                title = [track.get_tag_display('title', join=True), track.get_tag_display('artist', join=True)]
                # Unknown length is written as -1, per M3U convention.
                length = int(round(float((track.get_tag_raw('__length') or (- 1)))))
                track_path = track.get_loc_for_io()
                track_path = self.get_track_export_path(path, track_path, options)
                stream.write('#EXTINF:{length},{title}\n{path}\n'.format(length=length, title=' - '.join(title), path=track_path))

    def import_from_file(self, path):
        """Read an M3U file at `path` (a URI) into a Playlist.

        #EXTINF metadata, when present, seeds each following track's length,
        title and artist tags; tags already present on the track win.

        Raises:
            UnknownPlaylistTrackError: when a parsed tag cannot be applied,
                annotated with the source line number.
        """
        playlist = Playlist(name=self.name_from_path(path))
        extinf = {}  # pending tag values parsed from the last #EXTINF line
        lineno = 0
        logger.debug('Importing M3U playlist: %s', path)
        with GioFileInputStream(Gio.File.new_for_uri(path)) as stream:
            for line in stream:
                lineno += 1
                line = line.strip()
                if (not line):
                    continue
                if line.upper().startswith('#PLAYLIST: '):
                    playlist.name = line[len('#PLAYLIST: '):]
                elif line.startswith('#EXTINF:'):
                    # Format: "#EXTINF:<seconds>,<artist> - <title>".
                    extinf_line = line[len('#EXTINF:'):]
                    parts = extinf_line.split(',', 1)
                    length = 0
                    if ((len(parts) > 1) and (int(parts[0]) > 0)):
                        length = int(float(parts[0]))
                    extinf['__length'] = length
                    # Split on the last ' - ': trailing field is the title.
                    parts = parts[(- 1)].rsplit(' - ', 1)
                    extinf['title'] = parts[(- 1)]
                    if (len(parts) > 1):
                        extinf['artist'] = parts[0]
                elif line.startswith('#'):
                    # Other comments/directives are ignored.
                    continue
                else:
                    # A plain line is a track location.
                    track = trax.Track(self.get_track_import_path(path, line))
                    if extinf:
                        for (tag, value) in extinf.items():
                            if (track.get_tag_raw(tag) is None):
                                try:
                                    track.set_tag_raw(tag, value)
                                except Exception as e:
                                    raise UnknownPlaylistTrackError(('line %s: %s' % (lineno, e)))
                    playlist.append(track)
                    extinf = {}
        return playlist
class FileEntry(FSEntry):
    """File-picker entry widget wrapping a tkinter save/open file dialog."""

    def __init__(self, *args, filetypes=None, defaultextension='', action, **kwargs):
        """:param filetypes: sequence of (label, pattern) pairs (default: none)
        :param defaultextension: extension appended by the dialog
        :param action: 'save' or 'open' (keyword-only, required)
        :raises TypeError: for any other action value
        """
        super().__init__(*args, **kwargs)
        self.defaultextension = defaultextension
        # Fix: avoid the shared-mutable-default-argument pitfall (was filetypes=[]).
        self.filetypes = [] if filetypes is None else filetypes
        if action == 'save':
            self.ask_func = filedialog.asksaveasfilename
        elif action == 'open':
            self.ask_func = filedialog.askopenfilename
        else:
            raise TypeError('Unknown action type: {}'.format(action))

    def ask(self):
        """Open the dialog and store the chosen filename in self.var."""
        filename = self.ask_func(initialdir=self.initialdir, filetypes=self.filetypes, defaultextension=self.defaultextension)
        self.var.set(filename)
# NOTE(review): the decorator lines below had lost their "@pytest.mark."
# prefix (a bare ".skipif(" line is a syntax error); restored here.
@pytest.mark.skipif((not has_hf_transformers), reason='requires huggingface transformers')
@pytest.mark.parametrize('torch_device', TORCH_DEVICES)
@pytest.mark.parametrize('with_torch_sdp', [False, True])
def test_encoder(torch_device, with_torch_sdp):
    """RoBERTa encoder output must match the HF reference implementation."""
    assert_encoder_output_equals_hf(RoBERTaEncoder, 'explosion-testing/roberta-test', torch_device, with_torch_sdp=with_torch_sdp)
def test_handle_cancel_timer_failed(decider, mock_decision: DecisionStateMachine):
    """A cancel-timer failure is handled and forwarded to the decision state machine."""
    event = HistoryEvent()
    event.event_id = DECISION_EVENT_ID
    assert decider.handle_cancel_timer_failed(event) is True
    mock_decision.handle_cancellation_failure_event.assert_called_once()
    first_call = mock_decision.handle_cancellation_failure_event.call_args_list[0]
    positional = first_call[0]
    assert positional[0] is event
class AddResponseHeaders(OptionTest):
    """Option test for the ``add_response_headers`` mapping setting.

    Each VALUES entry maps a header name to its expected config dict; check()
    verifies the headers came back on every result (case-insensitively).
    """

    parent: Test
    VALUES: ClassVar[Sequence[Dict[(str, Dict[(str, Union[(str, bool)])])]]] = [{'foo': {'value': 'bar'}}, {'moo': {'value': 'arf'}}, {'zoo': {'append': True, 'value': 'bar'}}, {'xoo': {'append': False, 'value': 'dwe'}}, {'aoo': {'value': 'tyu'}}]

    def config(self) -> Generator[(Union[(str, Tuple[(Node, str)])], None, None)]:
        # Emit the option as a JSON-serialized mapping line.
        yield ('add_response_headers: %s' % json.dumps(self.value))

    def check(self):
        for result in self.parent.results:
            # Header names are matched case-insensitively.
            headers = {name.lower(): value for (name, value) in result.headers.items()}
            for (name, expected) in self.value.items():
                got = headers.get(name.lower())
                if not isinstance(expected, dict):
                    assert got == [expected], ('expected %s: %s but got %s' % (name, expected, headers))
                else:
                    assert got == [expected['value']], ('expected %s: %s but got %s' % (name, expected['value'], headers))
def filter_firewall_internet_service_sld_data(json):
    """Return only the supported, non-None option fields from *json*.

    The payload is first cleaned via remove_invalid_fields(), then reduced
    to the whitelisted keys ('id', 'name').
    """
    cleaned = remove_invalid_fields(json)
    return {
        field: cleaned[field]
        for field in ('id', 'name')
        if field in cleaned and cleaned[field] is not None
    }
def check_source(source, allowed_sources):
    """Validate *source* against *allowed_sources*.

    *allowed_sources* may be falsy (everything allowed), a single string, or
    a list/tuple.  The wildcard entries '*', 'all' and 'auto' allow any
    source.  Raises ValueError when the source is not permitted.
    """
    # Normalize allowed_sources into a list.
    if not allowed_sources:
        allowed_sources = ['*']
    elif type_util.is_string(allowed_sources):
        allowed_sources = [allowed_sources]
    elif type_util.is_list_or_tuple(allowed_sources):
        allowed_sources = list(allowed_sources)
    # Any wildcard entry short-circuits the check.
    if any(wildcard in allowed_sources for wildcard in ('*', 'all', 'auto')):
        return
    if source not in allowed_sources:
        raise ValueError(f"Invalid source: '{source}' (source not allowed).")
def create_items_if_not_exist(order):
    """Ensure every line item of *order* has a synced Shopify product.

    For each line item, build a ShopifyProduct and trigger a sync when the
    product is not already synced.
    """
    for line_item in order.get('line_items', []):
        product = ShopifyProduct(
            line_item['product_id'],
            variant_id=line_item.get('variant_id'),
            sku=line_item.get('sku'),
        )
        if not product.is_synced():
            product.sync_product()
class Lab(Labish, Space):
    """CIE Lab color space: converts to/from XYZ relative to its white point."""

    CHANNELS = (Channel('l', 0.0, 1.0), Channel('a', 1.0, 1.0, flags=FLG_MIRROR_PERCENT), Channel('b', 1.0, 1.0, flags=FLG_MIRROR_PERCENT))
    CHANNEL_ALIASES = {'lightness': 'l'}

    def is_achromatic(self, coords: Vector) -> bool:
        """A color is achromatic when its chroma (magnitude of a/b) is tiny."""
        chroma = alg.rect_to_polar(coords[1], coords[2])[0]
        return chroma < ACHROMATIC_THRESHOLD

    def to_base(self, coords: Vector) -> Vector:
        """Convert Lab coordinates to the base (XYZ) space."""
        white_xyz = util.xy_to_xyz(self.white())
        return lab_to_xyz(coords, white_xyz)

    def from_base(self, coords: Vector) -> Vector:
        """Convert base (XYZ) coordinates into Lab."""
        white_xyz = util.xy_to_xyz(self.white())
        return xyz_to_lab(coords, white_xyz)
class OptionSeriesGaugeLabel(Options):
    """Generated Highcharts config wrapper for gauge-series label options.

    Each option appears as a getter/setter pair sharing one name.
    NOTE(review): as plain Python the second def of each pair shadows the
    first; in the generated codebase these are presumably joined by
    @property/@setter decorators that were stripped here — TODO confirm.
    """

    # boxesToAvoid: areas the label layout should avoid (no documented default).
    def boxesToAvoid(self):
        return self._config_get(None)
    def boxesToAvoid(self, value: Any):
        self._config(value, js_type=False)

    # connectorAllowed: whether a connector may link label and area.
    def connectorAllowed(self):
        return self._config_get(False)
    def connectorAllowed(self, flag: bool):
        self._config(flag, js_type=False)

    # connectorNeighbourDistance: pixel distance used by the connector layout.
    def connectorNeighbourDistance(self):
        return self._config_get(24)
    def connectorNeighbourDistance(self, num: float):
        self._config(num, js_type=False)

    # enabled: toggle the series label.
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    # format: label format string (Highcharts default is undefined).
    def format(self):
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)

    # formatter: callback overriding `format`.
    def formatter(self):
        return self._config_get('undefined')
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    # maxFontSize / minFontSize: font-size bounds for auto-sized labels.
    def maxFontSize(self):
        return self._config_get(None)
    def maxFontSize(self, num: float):
        self._config(num, js_type=False)
    def minFontSize(self):
        return self._config_get(None)
    def minFontSize(self, num: float):
        self._config(num, js_type=False)

    # onArea: draw the label on top of the area (None = auto).
    def onArea(self):
        return self._config_get(None)
    def onArea(self, flag: bool):
        self._config(flag, js_type=False)

    # style: nested CSS-style sub-options object.
    def style(self) -> 'OptionSeriesGaugeLabelStyle':
        return self._config_sub_data('style', OptionSeriesGaugeLabelStyle)

    # useHTML: render the label with HTML instead of SVG.
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
class SupportAction(UserAction):
    """Play a Support card: reveal its associated cards to the target and
    move the card there, tracking a per-player counter that triggers a Heal.
    """

    def apply_action(self):
        sk = self.associated_card
        assert isinstance(sk, Support)
        cl = sk.associated_cards
        src = self.source
        tgt = self.target
        # Running count of cards contributed so far by this source player.
        lst = src.tags.get('daiyousei_spnum', 0)
        n = len(cl)
        # Heal exactly once, when the running count crosses the threshold of 3.
        if (lst < 3 <= (lst + n)):
            g = self.game
            g.process_action(Heal(src, src))
        src.tags['daiyousei_spnum'] = (lst + n)
        # Show the associated cards to the target, then transfer the support
        # card into the target's card area (unwrap=True strips the wrapper).
        tgt.reveal(cl)
        migrate_cards([self.associated_card], tgt.cards, unwrap=True)
        self.cards = cl
        return True
def test_scheduler_device(test_device: Device):
    """DDIM.add_noise must keep its output on the requested device."""
    # Skipped on CPU; the warning text is kept verbatim.
    if test_device.type == 'cpu':
        warn('not running on CPU, skipping')
        pytest.skip()
    scheduler = DDIM(num_inference_steps=30, device=test_device)
    sample = randn(1, 4, 32, 32, device=test_device)
    noise = randn(1, 4, 32, 32, device=test_device)
    noised_sample = scheduler.add_noise(sample, noise, scheduler.steps[0])
    assert noised_sample.device == test_device
def test_proj_data(tmp_path):
    """Build the three field-projection monitor/data types, attach them to a
    SimulationData, and round-trip through HDF5.
    """
    # NOTE(review): linspace(.0, .0, 10) yields ten identical 0.0 frequencies —
    # presumably intentional for this smoke test, but worth confirming.
    f = np.linspace(.0, .0, 10)
    # --- Angle-based projection (r, theta, phi) ---
    r = np.atleast_1d(5)
    theta = np.linspace(0, np.pi, 10)
    phi = np.linspace(0, (2 * np.pi), 20)
    coords_tp = dict(r=r, theta=theta, phi=phi, f=f)
    values_tp = ((1 + 1j) * np.random.random((len(r), len(theta), len(phi), len(f))))
    scalar_field_tp = td.FieldProjectionAngleDataArray(values_tp, coords=coords_tp)
    monitor_tp = td.FieldProjectionAngleMonitor(center=(1, 2, 3), size=(2, 2, 2), freqs=f, name='n2f_monitor_tp', phi=phi, theta=theta)
    data_tp = td.FieldProjectionAngleData(monitor=monitor_tp, projection_surfaces=monitor_tp.projection_surfaces, Er=scalar_field_tp, Etheta=scalar_field_tp, Ephi=scalar_field_tp, Hr=scalar_field_tp, Htheta=scalar_field_tp, Hphi=scalar_field_tp)
    # --- Cartesian projection (x, y at fixed z / proj_distance) ---
    x = np.linspace(0, 5, 10)
    y = np.linspace(0, 10, 20)
    z = np.atleast_1d(5)
    coords_xy = dict(x=x, y=y, z=z, f=f)
    values_xy = ((1 + 1j) * np.random.random((len(x), len(y), len(z), len(f))))
    scalar_field_xy = td.FieldProjectionCartesianDataArray(values_xy, coords=coords_xy)
    monitor_xy = td.FieldProjectionCartesianMonitor(center=(1, 2, 3), size=(2, 2, 2), freqs=f, name='n2f_monitor_xy', x=x, y=y, proj_axis=2, proj_distance=50)
    data_xy = td.FieldProjectionCartesianData(monitor=monitor_xy, projection_surfaces=monitor_xy.projection_surfaces, Er=scalar_field_xy, Etheta=scalar_field_xy, Ephi=scalar_field_xy, Hr=scalar_field_xy, Htheta=scalar_field_xy, Hphi=scalar_field_xy)
    # --- K-space projection (ux, uy direction cosines) ---
    ux = np.linspace(0, 0.4, 10)
    uy = np.linspace(0, 0.6, 20)
    r = np.atleast_1d(5)
    coords_u = dict(ux=ux, uy=uy, r=r, f=f)
    values_u = ((1 + 1j) * np.random.random((len(ux), len(uy), len(r), len(f))))
    scalar_field_u = td.FieldProjectionKSpaceDataArray(values_u, coords=coords_u)
    monitor_u = td.FieldProjectionKSpaceMonitor(center=(1, 2, 3), size=(2, 2, 2), freqs=f, name='n2f_monitor_u', ux=ux, uy=uy, proj_axis=2)
    data_u = td.FieldProjectionKSpaceData(monitor=monitor_u, projection_surfaces=monitor_u.projection_surfaces, Er=scalar_field_u, Etheta=scalar_field_u, Ephi=scalar_field_u, Hr=scalar_field_u, Htheta=scalar_field_u, Hphi=scalar_field_u)
    # Wrap all three datasets in a simulation and round-trip via HDF5.
    sim = td.Simulation(size=(7, 7, 9), grid_spec=td.GridSpec.auto(wavelength=5.0), monitors=[monitor_xy, monitor_u, monitor_tp], run_time=1e-12)
    sim_data = td.SimulationData(simulation=sim, data=(data_xy, data_u, data_tp))
    sim_data[monitor_xy.name]
    sim_data.to_file(str((tmp_path / 'sim_data_n2f.hdf5')))
    sim_data = td.SimulationData.from_file(str((tmp_path / 'sim_data_n2f.hdf5')))
    # Exact (non-far-field-approximated) Cartesian monitor/data: constructed
    # only to exercise validation; results are intentionally discarded.
    x = np.linspace(0, 5, 10)
    y = np.linspace(0, 10, 20)
    z = np.atleast_1d(5)
    coords_xy = dict(x=x, y=y, z=z, f=f)
    values_xy = ((1 + 1j) * np.random.random((len(x), len(y), len(z), len(f))))
    scalar_field_xy = td.FieldProjectionCartesianDataArray(values_xy, coords=coords_xy)
    _ = td.FieldProjectionCartesianMonitor(center=(1, 2, 3), size=(2, 2, 2), freqs=f, name='exact_monitor_xy', x=x, y=y, proj_axis=2, proj_distance=50, far_field_approx=False)
    _ = td.FieldProjectionCartesianData(monitor=monitor_xy, projection_surfaces=monitor_xy.projection_surfaces, Er=scalar_field_xy, Etheta=scalar_field_xy, Ephi=scalar_field_xy, Hr=scalar_field_xy, Htheta=scalar_field_xy, Hphi=scalar_field_xy)
class Comment(AbstractCrudObject):
    """Generated Graph API wrapper for a Comment node.

    Each api_*/get_*/create_*/delete_* method builds a FacebookRequest and
    either queues it on a batch, returns it pending, or executes it
    immediately — the shared pattern throughout this generated SDK.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isComment = True
        super(Comment, self).__init__(fbid, parent_id, api)

    # Graph API field names available on a Comment node.
    class Field(AbstractObject.Field):
        admin_creator = 'admin_creator'
        application = 'application'
        attachment = 'attachment'
        can_comment = 'can_comment'
        can_hide = 'can_hide'
        can_like = 'can_like'
        can_remove = 'can_remove'
        can_reply_privately = 'can_reply_privately'
        comment_count = 'comment_count'
        created_time = 'created_time'
        # 'from' is a Python keyword, hence the field_from alias.
        field_from = 'from'
        id = 'id'
        is_hidden = 'is_hidden'
        is_private = 'is_private'
        like_count = 'like_count'
        live_broadcast_timestamp = 'live_broadcast_timestamp'
        message = 'message'
        message_tags = 'message_tags'
        object = 'object'
        parent = 'parent'
        permalink_url = 'permalink_url'
        private_reply_conversation = 'private_reply_conversation'
        user_likes = 'user_likes'

    # Enum of privacy values accepted by create_comment.
    class CommentPrivacyValue():
        declined_by_admin_assistant = 'DECLINED_BY_ADMIN_ASSISTANT'
        default_privacy = 'DEFAULT_PRIVACY'
        friends_and_post_owner = 'FRIENDS_AND_POST_OWNER'
        friends_only = 'FRIENDS_ONLY'
        graphql_multiple_value_hack_do_not_use = 'GRAPHQL_MULTIPLE_VALUE_HACK_DO_NOT_USE'
        owner_or_commenter = 'OWNER_OR_COMMENTER'
        pending_approval = 'PENDING_APPROVAL'
        removed_by_admin_assistant = 'REMOVED_BY_ADMIN_ASSISTANT'
        side_conversation = 'SIDE_CONVERSATION'
        side_conversation_and_post_owner = 'SIDE_CONVERSATION_AND_POST_OWNER'
        spotlight_tab = 'SPOTLIGHT_TAB'

    # Enums for the /comments edge parameters.
    class Filter():
        stream = 'stream'
        toplevel = 'toplevel'

    class LiveFilter():
        filter_low_quality = 'filter_low_quality'
        no_filter = 'no_filter'

    class Order():
        chronological = 'chronological'
        reverse_chronological = 'reverse_chronological'

    # DELETE /<comment-id> — delete this comment.
    def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # GET /<comment-id> — reload this comment's fields in place.
    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Comment, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # POST /<comment-id> — update message/attachment/visibility.
    def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'attachment_id': 'string', 'attachment_share_url': 'string', 'attachment_url': 'string', 'is_hidden': 'bool', 'message': 'string'}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Comment, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # GET /<comment-id>/comments — fetch replies to this comment.
    def get_comments(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'filter': 'filter_enum', 'live_filter': 'live_filter_enum', 'order': 'order_enum', 'since': 'datetime'}
        enums = {'filter_enum': Comment.Filter.__dict__.values(), 'live_filter_enum': Comment.LiveFilter.__dict__.values(), 'order_enum': Comment.Order.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/comments', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Comment, api_type='EDGE', response_parser=ObjectParser(target_class=Comment, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # POST /<comment-id>/comments — create a reply to this comment.
    def create_comment(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'attachment_id': 'string', 'attachment_share_url': 'string', 'attachment_url': 'string', 'comment_privacy_value': 'comment_privacy_value_enum', 'facepile_mentioned_ids': 'list<string>', 'feedback_source': 'string', 'is_offline': 'bool', 'message': 'string', 'nectar_module': 'string', 'object_id': 'string', 'parent_comment_id': 'Object', 'text': 'string', 'tracking': 'string'}
        enums = {'comment_privacy_value_enum': Comment.CommentPrivacyValue.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/comments', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Comment, api_type='EDGE', response_parser=ObjectParser(target_class=Comment, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # DELETE /<comment-id>/likes — unlike this comment.
    def delete_likes(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'feedback_source': 'string', 'nectar_module': 'string', 'tracking': 'string'}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/likes', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # GET /<comment-id>/likes — profiles that liked this comment.
    def get_likes(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.profile import Profile
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/likes', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Profile, api_type='EDGE', response_parser=ObjectParser(target_class=Profile, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # POST /<comment-id>/likes — like this comment.
    def create_like(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'feedback_source': 'string', 'nectar_module': 'string', 'tracking': 'string'}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/likes', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Comment, api_type='EDGE', response_parser=ObjectParser(target_class=Comment, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # GET /<comment-id>/reactions — profiles that reacted, filterable by type.
    def get_reactions(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.profile import Profile
        param_types = {'type': 'type_enum'}
        enums = {'type_enum': Profile.Type.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/reactions', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Profile, api_type='EDGE', response_parser=ObjectParser(target_class=Profile, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Graph API type of each field, used by the SDK's response parsing.
    _field_types = {'admin_creator': 'User', 'application': 'Application', 'attachment': 'Object', 'can_comment': 'bool', 'can_hide': 'bool', 'can_like': 'bool', 'can_remove': 'bool', 'can_reply_privately': 'bool', 'comment_count': 'unsigned int', 'created_time': 'datetime', 'from': 'Object', 'id': 'string', 'is_hidden': 'bool', 'is_private': 'bool', 'like_count': 'unsigned int', 'live_broadcast_timestamp': 'unsigned int', 'message': 'string', 'message_tags': 'list<EntityAtTextRange>', 'object': 'Object', 'parent': 'Comment', 'permalink_url': 'string', 'private_reply_conversation': 'Object', 'user_likes': 'bool'}

    # NOTE(review): takes `cls` — presumably a @classmethod whose decorator
    # was lost in this copy; confirm against the generated SDK source.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        field_enum_info['CommentPrivacyValue'] = Comment.CommentPrivacyValue.__dict__.values()
        field_enum_info['Filter'] = Comment.Filter.__dict__.values()
        field_enum_info['LiveFilter'] = Comment.LiveFilter.__dict__.values()
        field_enum_info['Order'] = Comment.Order.__dict__.values()
        return field_enum_info
class StoreCatalogSettings(AbstractCrudObject):
    """Generated Graph API wrapper for a store-catalog-settings node.

    Follows the same batch/pending/execute request pattern as the other
    generated SDK objects.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isStoreCatalogSettings = True
        super(StoreCatalogSettings, self).__init__(fbid, parent_id, api)

    # Graph API field names on this node.
    class Field(AbstractObject.Field):
        id = 'id'
        page = 'page'

    # NOTE(review): takes `cls` — presumably a @classmethod whose decorator
    # was lost in this copy; confirm against the generated SDK source.
    def get_endpoint(cls):
        return 'catalog_store'

    # POST /<catalog-id>/catalog_store — create via the parent ProductCatalog.
    def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.adobjects.productcatalog import ProductCatalog
        return ProductCatalog(api=self._api, fbid=parent_id).create_catalog_store(fields, params, batch, success, failure, pending)

    # DELETE /<id> — delete this node.
    def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # GET /<id> — reload this node's fields in place.
    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=StoreCatalogSettings, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Graph API type of each field.
    _field_types = {'id': 'string', 'page': 'Page'}

    # NOTE(review): takes `cls` — presumably a stripped @classmethod; confirm.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
class TestPackagesLogic(CoprsTestCase):
    """Tests for PackagesLogic against the fixture-populated Copr database."""

    def test_last_successful_build_chroots(self, f_users, f_fork_prepare, f_build_few_chroots):
        # Maps each package's last successful build to its build-chroots.
        builds_p4 = PackagesLogic.last_successful_build_chroots(self.p4)
        builds_p5 = PackagesLogic.last_successful_build_chroots(self.p5)
        assert (builds_p4 == {self.b6: self.b6_bc})
        # p5 has two builds, each contributing one chroot.
        assert (builds_p5 == {self.b10: [self.b10_bc[0]], self.b11: [self.b11_bc[1]]})
# NOTE(review): the decorator below had lost its "@pytest.mark." prefix
# (a bare ".parametrize(" line is a syntax error); restored here.
@pytest.mark.parametrize('ref, copr_pkg_name, result', [pytest.param('copr-cli-1-1alpha', 'copr-cli', True), pytest.param('copr-cli-1', 'copr-cli', True), pytest.param('copr-cli-1', 'copr', False), pytest.param('copr_cli-1.1-1', 'copr_cli', True), pytest.param('copr_cli-1.1', 'copr_cli', True), pytest.param('copr-frontend-a1', 'copr', False), pytest.param('copr-frontend-a1', 'copr-frontend', True), pytest.param('copr_frontend_a1', 'copr-frontend', True), pytest.param('copr-frontend-a1', 'copr_frontend', True), pytest.param('copr_frontend_a1', 'copr_frontend', True), pytest.param('copr_frontend-a1', 'copr-frontend', False), pytest.param('copr-1.1alpha-1', 'copr', True), pytest.param('copr-1.1alpha-1beta', 'copr', True), pytest.param('copr-1.1-1', 'copr', True), pytest.param('copr-1', 'copr', True)])
def test_ref_matches_copr_pkgname(ref, copr_pkg_name, result):
    """A git ref like 'name-version-release' must match its package name."""
    assert (PackagesLogic._ref_matches_copr_pkgname(ref, copr_pkg_name) == result)
class bsn_flow_checksum_bucket_stats_request(bsn_stats_request):
    """Generated OpenFlow 1.3 BSN experimenter stats request
    (flow-checksum-bucket, subtype 10).

    NOTE(review): pack() appends '\\x00' * 4 and joins str fragments — this
    generated module appears to target Python 2 byte strings; confirm before
    running under Python 3.
    """

    # Wire-format constants for this message type.
    version = 4              # OpenFlow 1.3
    type = 18                # OFPT_MULTIPART_REQUEST
    stats_type = 65535       # OFPMP_EXPERIMENTER
    experimenter = 6035143   # Big Switch Networks experimenter ID
    subtype = 10

    def __init__(self, xid=None, flags=None, table_id=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (table_id != None):
            self.table_id = table_id
        else:
            self.table_id = 0
        return

    def pack(self):
        """Serialize to the wire format; the length field (index 2) is
        backfilled after all parts are packed."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # pad
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(struct.pack('!B', self.table_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    # NOTE(review): no `self` — presumably a @staticmethod whose decorator
    # was lost in this copy; confirm against the generated loxi source.
    def unpack(reader):
        """Parse a message from *reader*, asserting every fixed field."""
        obj = bsn_flow_checksum_bucket_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict the reader to this message's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 10)
        obj.table_id = reader.read('!B')[0]
        return obj

    def __eq__(self, other):
        # Only the variable fields participate in equality.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.table_id != other.table_id):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable dump of the variable fields to printer *q*."""
        q.text('bsn_flow_checksum_bucket_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REQ_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('table_id = ')
                q.text(('%#x' % self.table_id))
        q.breakable()
        q.text('}')
class King(Piece):
    """King piece: drags one square along an axis locked to the initial pull
    direction (horizontal, vertical, or either diagonal)."""

    def __init__(self, x, y, c):
        super().__init__(x, y, c)
        self.set_letter('K')

    def drag(self, new_p, pieces):
        """Handle a mouse drag: lock a direction once the pointer moves far
        enough, then slide at most one square along that direction.

        Directions: 'h' horizontal, 'v' vertical, 'du' diagonal up
        (x and y move together), 'dd' diagonal down (x and y move oppositely).
        """
        if self.grabbed:
            # Pointer offset from the grab point.
            x = (new_p[0] - self.start_x)
            y = (new_p[1] - self.start_y)
            if (self.direction == False):
                # Lock in a direction once the drag exceeds the dead zone
                # ((20 / 640) * 8 == 0.25 board units).
                if (math.sqrt(((x ** 2) + (y ** 2))) > ((20 / 640) * 8)):
                    # Normalize the drag angle into [0, 2*pi) and snap it to
                    # one of 8 sectors (each pi/4 wide, centered on the axes).
                    angle = math.atan2(y, x)
                    if (angle < 0):
                        angle = ((2 * math.pi) + angle)
                    if (angle < (math.pi / 8)):
                        self.direction = 'h'
                    elif (angle < ((3 * math.pi) / 8)):
                        self.direction = 'du'
                    elif (angle < ((5 * math.pi) / 8)):
                        self.direction = 'v'
                    elif (angle < ((7 * math.pi) / 8)):
                        self.direction = 'dd'
                    elif (angle < ((9 * math.pi) / 8)):
                        self.direction = 'h'
                    elif (angle < ((11 * math.pi) / 8)):
                        self.direction = 'du'
                    elif (angle < ((13 * math.pi) / 8)):
                        self.direction = 'v'
                    elif (angle < ((15 * math.pi) / 8)):
                        self.direction = 'dd'
                    else:
                        self.direction = 'h'
            # Slide clamped to one square along the locked direction.
            if (self.direction == 'v'):
                self.slide(0, clamp((new_p[1] - self.start_y), (- 1), 1), pieces)
            if (self.direction == 'h'):
                self.slide(clamp((new_p[0] - self.start_x), (- 1), 1), 0, pieces)
            if (self.direction == 'du'):
                # Average the components so the piece tracks the diagonal.
                self.slide(clamp(((x + y) / 2), (- 1), 1), clamp(((x + y) / 2), (- 1), 1), pieces)
            elif (self.direction == 'dd'):
                self.slide(clamp(((x - y) / 2), (- 1), 1), clamp((((- x) + y) / 2), (- 1), 1), pieces)
def extractBlogMillenniumsnowCom(item):
    """Parser for release feed items from blog.millenniumsnow.com.

    Returns a release message for recognized tags, None for previews or
    items without a chapter/volume, and False when no tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (chp or vol):
        return None
    for tagname, name, tl_type in [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extractThesolsticewarCom(item):
    """Parser for release feed items from thesolsticewar.com.

    Returns a release message for recognized tags, None for previews or
    items without a chapter/volume, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_preview = 'preview' in item['title'].lower()
    if is_preview or not (chp or vol):
        return None
    tagmap = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tagmap.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class TestHTMLCompress(unittest.TestCase):
    """Tests for the HTMLCompress Jinja2 extension (whitespace stripping)."""

    # Extension under test; subclasses may override.
    extension = browsepy.transform.htmlcompress.HTMLCompress

    def setUp(self):
        self.env = jinja2.Environment(autoescape=True, extensions=[self.extension])

    def render(self, html, **kwargs):
        # Render *html* through an environment with the extension enabled.
        return self.env.from_string(html).render(**kwargs)

    def test_compress(self):
        # Inter-tag whitespace is collapsed; significant in-text spacing kept.
        # (href is passed but unused by this template.)
        html = self.render('\n <html>\n <head>\n <title>{{ title }}</title>\n </head>\n <body\n class="{{css}} prop"\n >\n <em><b>a</b> <i> b</i></em>\n {% if a %}b{% endif %}\n </body>\n </html>\n ', title=42, href='index.html', css='t', a=True)
        self.assertEqual(html, '<html><head><title>42</title></head><body class="t prop"><em><b>a</b><i> b</i></em>b</body></html>')

    def test_ignored_content(self):
        # Content inside <script> is preserved verbatim.
        html = self.render('<script\ndata-a >\n <a> <a> asdf </script>\n<br> <br>')
        self.assertEqual(html, '<script data-a>\n <a> <a> asdf </script><br><br>')

    def test_cdata(self):
        # CDATA sections are preserved verbatim; surrounding markup compressed.
        html = self.render('<![CDATA[\n<a> <br>\n]]>\n<a> <br>\n')
        self.assertEqual(html, '<![CDATA[\n<a> <br>\n]]><a><br>')

    def test_broken(self):
        # An unterminated <script> leaves the remainder untouched.
        html = self.render('<script>\n <a> <a> asdf ')
        self.assertEqual(html, '<script>\n <a> <a> asdf ')
def _test_correct_response_for_recipient_location_country_with_geo_filters(client):
    """spending_by_geography, recipient_location/country scope with
    geo_layer_filters: prime awards and subawards crossed with the
    recipient_scope settings (foreign / domestic / unset).

    Refactored: the six previously copy-pasted request/assert groups now go
    through the nested helpers below; payloads and expectations unchanged.
    """

    def _query(recipient_scope=None, subawards=False, end_date='2020-09-30'):
        # Build and POST one spending_by_geography request.
        filters = {'time_period': [{'start_date': '2018-10-01', 'end_date': end_date}]}
        if recipient_scope is not None:
            filters['recipient_scope'] = recipient_scope
        payload = {'scope': 'recipient_location', 'geo_layer': 'country', 'geo_layer_filters': ['JPN', 'USA'], 'filters': filters}
        if subawards:
            payload['subawards'] = True
        return client.post('/api/v2/search/spending_by_geography', content_type='application/json', data=json.dumps(payload))

    def _result(shape_code, display_name, amount):
        # One expected country entry in the response's `results` list.
        return {'aggregated_amount': amount, 'display_name': display_name, 'per_capita': None, 'population': None, 'shape_code': shape_code}

    def _check(resp, results):
        # Assert a 200 and the exact expected body (results sorted for
        # deterministic comparison).
        expected_response = {'scope': 'recipient_location', 'geo_layer': 'country', 'results': results, 'messages': [get_time_period_message()]}
        assert (resp.status_code == status.HTTP_200_OK), 'Failed to return 200 Response'
        resp_json = resp.json()
        resp_json['results'].sort(key=_get_shape_code_for_sort)
        assert (resp_json == expected_response)

    jpn_prime = _result('JPN', 'Japan', 5000000.0)
    usa_prime = _result('USA', 'United States', 5555550.0)
    jpn_sub = _result('JPN', 'Japan', 678910.0)
    usa_sub = _result('USA', 'United States', 66666.0)

    # Prime awards: foreign-only, domestic-only, and unscoped.
    _check(_query(recipient_scope='foreign'), [jpn_prime])
    _check(_query(recipient_scope='domestic'), [usa_prime])
    _check(_query(), [jpn_prime, usa_prime])
    # Subawards: same three scopes (foreign case uses the wider time period).
    _check(_query(recipient_scope='foreign', subawards=True, end_date='2022-09-30'), [jpn_sub])
    _check(_query(recipient_scope='domestic', subawards=True), [usa_sub])
    _check(_query(subawards=True), [jpn_sub, usa_sub])
class FrontierComputation(BaseComputation):
    """Frontier-rules EVM computation: applies message calls and contract
    creation against a state, with snapshot/revert semantics."""

    opcodes = FRONTIER_OPCODES
    _precompiles = FRONTIER_PRECOMPILES

    # Bug fix: both methods take ``cls`` and are invoked as
    # ``cls.apply_message(state, message, transaction_context)`` below, so
    # they must be classmethods; without the decorator the first positional
    # argument (``state``) would be bound to ``cls``.
    @classmethod
    def apply_message(cls, state: StateAPI, message: MessageAPI, transaction_context: TransactionContextAPI) -> ComputationAPI:
        """Run *message* against *state*.

        Transfers value from sender to storage address when requested,
        executes the code, and reverts the snapshot if the computation
        errored (commits otherwise).

        Raises:
            StackDepthLimit: when the call depth exceeds the limit.
            InsufficientFunds: when the sender cannot cover the value.
        """
        snapshot = state.snapshot()
        if message.depth > STACK_DEPTH_LIMIT:
            raise StackDepthLimit('Stack depth limit reached')
        if message.should_transfer_value and message.value:
            sender_balance = state.get_balance(message.sender)
            if sender_balance < message.value:
                raise InsufficientFunds(f'Insufficient funds: {sender_balance} < {message.value}')
            state.delta_balance(message.sender, -1 * message.value)
            state.delta_balance(message.storage_address, message.value)
            cls.logger.debug2('TRANSFERRED: %s from %s -> %s', message.value, encode_hex(message.sender), encode_hex(message.storage_address))
        state.touch_account(message.storage_address)
        computation = cls.apply_computation(state, message, transaction_context)
        if computation.is_error:
            state.revert(snapshot)
        else:
            state.commit(snapshot)
        return computation

    @classmethod
    def apply_create_message(cls, state: StateAPI, message: MessageAPI, transaction_context: TransactionContextAPI) -> ComputationAPI:
        """Run a CREATE *message* and, on success, charge the code-deposit
        gas and store the returned contract code.

        Under Frontier rules, running out of gas while paying the deposit
        silently drops the code (empty output) instead of failing.
        """
        computation = cls.apply_message(state, message, transaction_context)
        if computation.is_error:
            return computation
        else:
            contract_code = computation.output
            if contract_code:
                contract_code_gas_fee = len(contract_code) * GAS_CODEDEPOSIT
                try:
                    computation.consume_gas(contract_code_gas_fee, reason='Write contract code for CREATE')
                except OutOfGas:
                    # Frontier behaviour: deposit failure leaves an empty account
                    computation.output = b''
                else:
                    cls.logger.debug2('SETTING CODE: %s -> length: %s | hash: %s', encode_hex(message.storage_address), len(contract_code), encode_hex(keccak(contract_code)))
                    state.set_code(message.storage_address, contract_code)
        return computation
# NOTE(review): the bare name below looks like a stripped decorator
# (e.g. ``@register_track_metadata_processor``) — confirm against the
# original plugin source.
_track_metadata_processor
def track_metadata_processor(album, metadata, track_node, release_node):
    """Handle a track-metadata event: create a LastFM tagger for the album,
    register its track-tag processor to run before finalize, and kick off
    the top-tag requests for the track and the artist."""
    log.info('received track metadata trigger')
    lfmws = LastFMTagger(album, metadata, release_node)
    # process the collected tags once all requests have completed
    lfmws.before_finalize.append(lfmws.process_track_tags)
    lfmws.request_track_toptags()
    lfmws.request_artist_toptags()
# NOTE(review): the bare parenthesised name below looks like a stripped
# decorator (e.g. ``@provides(IFontDialog)``) — confirm against the
# original source.
(IFontDialog)
class FontDialog(Dialog):
    """Qt font-selection dialog that exposes the chosen font as a trait."""
    # the font shown in, and ultimately selected by, the dialog
    font = PyfaceFont()
    def _create_contents(self, parent):
        # the QFontDialog provides its own contents; nothing to build here
        pass
    def close(self):
        """Capture the selected font when the dialog was accepted, then close."""
        if (self.control.result() == QtGui.QDialog.DialogCode.Accepted):
            qfont = self.control.selectedFont()
            self.font = Font.from_toolkit(qfont)
        return super(FontDialog, self).close()
    def _create_control(self, parent):
        """Create the underlying QFontDialog initialised with ``self.font``."""
        qfont = self.font.to_toolkit()
        dialog = QtGui.QFontDialog(qfont, parent)
        return dialog
class TestCritpathEndpoint(base.BasePyTestCase):
    """Tests for the ``/get_critpath_components`` endpoint."""
    # NOTE(review): the bare string below looks like a stripped
    # ``@mock.patch('bodhi.server.util.get_grouped_critpath_components')``
    # decorator for test_defaults — confirm against the original source.
    ('bodhi.server.util.get_grouped_critpath_components')
    def test_defaults(self, mocked_get_critpath):
        """Without query params the endpoint queries rawhide/rpm, no filter."""
        mocked_get_critpath.return_value = {}
        self.app.get('/get_critpath_components')
        mocked_get_critpath.assert_called_once_with('rawhide', 'rpm', None)
    def test_collection(self, critpath_json_config):
        """With a collection param the JSON critpath data is returned."""
        (tempdir, testdata) = critpath_json_config
        config.update({'critpath.type': 'json', 'critpath.jsonpath': tempdir})
        res = self.app.get('/get_critpath_components', {'collection': 'f36'})
        body = res.json_body
        assert (body == testdata['rpm'])
    def test_component_list(self, critpath_json_config):
        """A comma-separated components param restricts the response."""
        (tempdir, testdata) = critpath_json_config
        config.update({'critpath.type': 'json', 'critpath.jsonpath': tempdir})
        res = self.app.get('/get_critpath_components', {'collection': 'f36', 'components': 'ModemManager-glib,NetworkManager'})
        body = res.json_body
        assert (body == {'core': ['ModemManager-glib', 'NetworkManager']})
    def test_server_error(self):
        """The default critpath type does not support groups -> HTTP 500."""
        res = self.app.get('/get_critpath_components', status=500)
        assert (res.json_body == {'status': 'error', 'errors': [{'location': 'body', 'name': 'ValueError', 'description': 'critpath.type (default) does not support groups'}]})
class Disamparsulator():
    """Rule-based disambiguator.

    Parses evidence rules from an XML description and applies them to
    token analyses, adjusting analysis weights and keeping a single best
    dependency root per sentence.
    """
    def __init__(self):
        # evidence rules accumulated by parse_evidences()
        self.rules = list()
    def frobblesnizz(self, f):
        """Parse the rule XML file-like object *f*.

        NOTE(review): the local ``rules`` list returned here is never
        appended to — parsed evidences are stored on ``self.rules`` by
        ``parse_evidences`` — so the return value is always an empty
        list.  Confirm whether any caller relies on the return value.
        """
        xmltree = xml.etree.ElementTree.parse(f)
        root = xmltree.getroot()
        if (root.get('version') != '0.0.0'):
            # unsupported version is reported but parsing continues
            print('Unsupported version', root.get('version'))
        rules = list()
        for child in root:
            if (child.tag == 'evidences'):
                self.parse_evidences(child)
            else:
                print('Unknown element disamparsulations:', child.tag)
                exit(2)
        return rules
    def parse_evidences(self, evidences: Element):
        """Parse every <evidence> child and append it to ``self.rules``."""
        for child in evidences:
            if (child.tag == 'evidence'):
                e = self.parse_evidence(child)
                self.rules.append(e)
            else:
                print('Unknown element in evidences:', child.tag)
                exit(2)
    def parse_evidence(self, evidence: Element):
        """Build an Evidence object from an <evidence> element."""
        e = Evidence()
        e.name = evidence.get('name')
        for child in evidence:
            if (child.tag == 'target'):
                e.target = self.parse_target(child)
            elif (child.tag == 'likelihood'):
                # stored negated: a high likelihood means a low "unlikelihood"
                e.unlikelihood = (- self.parse_likelihood(child))
            elif (child.tag == 'depname'):
                e.depname = self.parse_depname(child)
            elif (child.tag == 'context'):
                e.context = self.parse_context(child)
            elif (child.tag == 'documentation'):
                # documentation elements are intentionally ignored
                pass
            else:
                print('Unknown element under evidence:', child.tag)
                exit(2)
        return e
    def parse_target(self, target: Element):
        """Parse a <target>; returns its (single) <match> as a Matcher."""
        m = Matcher()
        for child in target:
            if (child.tag == 'match'):
                m = self.parse_match(child)
            else:
                print('Unknown element under target:', child.tag)
                exit(2)
        return m
    def parse_match(self, match: Element):
        """Parse a <match> element into a Matcher (uposes/lemmas/ufeatses)."""
        m = Matcher()
        for child in match:
            if (child.tag == 'upos'):
                upos = self.parse_upos(child)
                m.uposes.append(upos)
            elif (child.tag == 'lemma'):
                ufeats = self.parse_lemma(child)
                m.lemmas.append(ufeats)
            elif (child.tag == 'ufeats'):
                ufeats = self.parse_ufeats(child)
                m.ufeatses.append(ufeats)
            else:
                # unknown children are reported but, unlike elsewhere, not fatal
                print('Unknown element under target:', xml.etree.ElementTree.tostring(child))
        return m
    def parse_likelihood(self, likelihood: Element):
        """Map a likelihood word to its numeric weight."""
        if (likelihood.text == 'usually'):
            return 16.0
        elif (likelihood.text == 'probably'):
            return 8.0
        elif (likelihood.text == 'possibly'):
            return 2.0
        elif (likelihood.text == 'meh'):
            return 0.001
        elif (likelihood.text == 'unlikely'):
            return (- 16.0)
        elif (likelihood.text == 'uncommonly'):
            return (- 4.0)
        else:
            print('Unknown likelihood:', likelihood.text)
            exit(2)
    def parse_depname(self, depname: Element):
        """Return the dependency-name text of a <depname> element."""
        return depname.text
    def parse_context(self, context: Element):
        """Parse a <context> element into a dict with optional
        'negated', 'location', 'matcher' and 'barrier' entries."""
        c = dict()
        if context.get('negated'):
            c['negated'] = True
        for child in context:
            if (child.tag == 'location'):
                c['location'] = self.parse_location(child)
            elif (child.tag == 'match'):
                c['matcher'] = self.parse_match(child)
            elif (child.tag == 'barrier'):
                c['barrier'] = self.parse_barrier(child)
            else:
                print('Unknown child for context', child)
                exit(2)
        return c
    def parse_upos(self, upos: Element):
        """Return the UPOS text; invalid tags are reported but still returned."""
        if (upos.text not in ['NOUN', 'VERB', 'ADV', 'ADJ', 'ADP', 'INTJ', 'PUNCT', 'SYM', 'CCONJ', 'SCONJ', 'PRON', 'NUM', 'PROPN', 'DET', 'AUX']):
            print('invalid upos in', xml.etree.ElementTree.tostring(upos))
        return upos.text
    def parse_lemma(self, lemma: Element):
        """Return the lemma text of a <lemma> element."""
        return lemma.text
    def parse_location(self, location: Element):
        """Return the location text of a <location> element."""
        return location.text
    def parse_ufeats(self, ufeats: Element):
        """Parse <ufeats>; valueless features become the '*AGREEMENT*' marker."""
        ufs = dict()
        for child in ufeats:
            if (child.tag == 'ufeat'):
                (name, value) = self.parse_ufeat(child)
                if (not value):
                    ufs[name] = '*AGREEMENT*'
                else:
                    ufs[name] = value
            else:
                print('Unknown child for ufeats', child)
                exit(2)
        return ufs
    def parse_ufeat(self, ufeat: Element):
        """Return (feature name, feature value) for a <ufeat> element."""
        return (ufeat.get('name'), ufeat.text)
    def parse_barrier(self, barrier: Element):
        """Parse a <barrier>; returns its <match> (or an empty Matcher)."""
        m = Matcher()
        for child in barrier:
            if (child.tag == 'match'):
                m = self.parse_match(child)
        return m
    def linguisticate(self, sentence: list):
        """Apply all rules to *sentence* and adjust analysis weights.

        Analyses without a dependency name are penalised and, where a
        named alternative with identical upos and ufeats exists, removed.
        Of all candidate 'root' analyses only the lightest keeps its
        weight; every other root is penalised.
        """
        for token in sentence:
            if token.nontoken:
                continue
            for rule in self.rules:
                rule.apply(token, sentence)
            cleanups = list()
            for analysis in token.analyses:
                if (not analysis.udepname):
                    # no dependency assigned: penalise and mark for cleanup
                    analysis.weight += 500
                    cleanups.append(analysis)
            if (len(cleanups) < len(token.analyses)):
                # remove a penalised analysis only when a udepname-bearing
                # analysis with the same upos and ufeats remains
                for cleanup in cleanups:
                    for analysis in token.analyses:
                        if ((analysis.upos == cleanup.upos) and (analysis.ufeats == cleanup.ufeats) and analysis.udepname):
                            token.analyses.remove(cleanup)
                            break
        toproot = None
        minweight = float('inf')
        for token in sentence:
            if token.nontoken:
                continue
            analysis = token.get_best()
            if (analysis.udepname == 'root'):
                if (analysis.weight < minweight):
                    toproot = analysis
                    minweight = analysis.weight
        for token in sentence:
            if token.nontoken:
                continue
            analysis = token.get_best()
            if (analysis.udepname == 'root'):
                if (analysis != toproot):
                    # only one root per sentence: penalise the others
                    analysis.weight += 500
class PlaylistsPanelPlaylistMenu(menu.MultiProviderMenu):
    """Context menu shown for playlist entries in the playlists panel."""

    def __init__(self, parent):
        providers = ['playlist-panel-menu', 'track-panel-menu', 'playlist-panel-context-menu']
        menu.MultiProviderMenu.__init__(self, providers, parent)

    def get_context(self):
        """Return a lazily-evaluated context backed by the panel's tree."""
        tree_accessors = {
            'needs-computing': lambda name, parent: parent.tree.get_selection_is_computed(),
            'selected-playlist': lambda name, parent: parent.tree.get_selected_page(raw=True),
            'selected-tracks': lambda name, parent: parent.tree.get_selected_tracks(),
            'selection-empty': lambda name, parent: parent.tree.get_selection_empty(),
        }
        context = common.LazyDict(self._parent)
        for key, accessor in tree_accessors.items():
            context[key] = accessor
        return context
class CharacterForm(ObjectForm):
    """Web form for creating/editing a player character."""
    class Meta():
        # resolve the character typeclass from settings, with a fallback
        model = class_from_module(settings.BASE_CHARACTER_TYPECLASS, fallback=settings.FALLBACK_CHARACTER_TYPECLASS)
        fields = ('db_key',)
        # present the db_key field to users as "Name"
        labels = {'db_key': 'Name'}
    # extra form field not listed in Meta.fields; free-form description text
    desc = forms.CharField(label='Description', max_length=2048, required=False, widget=forms.Textarea(attrs={'rows': 3}), help_text='A brief description of your character.')
class Args():
    """Plain argument bundle mimicking a parsed CLI namespace.

    Only ``architectures`` varies (taken from *arch*); every other
    attribute is a fixed default.
    """

    def __init__(self, arch):
        defaults = {
            'operations': 'all',
            'build_dir': '',
            'curr_build_dir': '',
            'rocm_version': '5.0.2',
            'generator_target': '',
            'architectures': arch,
            'kernels': 'all',
            'ignore_kernels': '',
            'kernel_filter_file': None,
            'selected_kernel_list': None,
            'interface_dir': None,
            'filter_by_cc': True,
        }
        for attr_name, attr_value in defaults.items():
            setattr(self, attr_name, attr_value)
class DataSchemaProperties():
    """Read-only accessors over a schema-description ``context`` dict.

    ``context`` must provide the keys ``'keys'``, ``'values'``, ``'fncs'``
    and ``'containers'``; ``'keys'`` and ``'values'`` must support the
    ``|`` union operator (e.g. sets) for :meth:`columns`.

    NOTE(review): the accessors may originally have been ``@property``
    methods (decorators look stripped elsewhere in this file); they are
    kept as plain methods here and :meth:`columns` fixed accordingly.
    """

    def __init__(self, context: dict):
        self._context = context

    def keys(self):
        """Return the schema's key columns."""
        return self._context['keys']

    def values(self):
        """Return the schema's value columns."""
        return self._context['values']

    def columns(self):
        """Return key and value columns combined into one list.

        Bug fix: the original expression ``self.keys | self.values``
        OR-ed the bound methods themselves and raised ``TypeError``; the
        methods must be called before taking the union.
        """
        return list(self.keys() | self.values())

    def funcs(self):
        """Return the schema's function entries (stored under 'fncs')."""
        return self._context['fncs']

    def containers(self):
        """Return the schema's container entries."""
        return self._context['containers']
def test_encode_categories_in_k_minus_1_binary_plus_list_of_variables(df_enc_big):
    """OneHotEncoder over an explicit variable list with drop_last=True."""
    encoder = OneHotEncoder(top_categories=None, variables=['var_A', 'var_B'], drop_last=True)
    X = encoder.fit_transform(df_enc_big)

    # parameters and attributes recorded at fit time
    assert encoder.top_categories is None
    assert encoder.variables == ['var_A', 'var_B']
    assert encoder.drop_last is True
    expected_sums = {'var_A_A': 6, 'var_A_B': 10, 'var_A_C': 4, 'var_A_D': 10, 'var_A_E': 2, 'var_A_F': 2, 'var_B_A': 10, 'var_B_B': 6, 'var_B_C': 4, 'var_B_D': 10, 'var_B_E': 2, 'var_B_F': 2}
    assert encoder.variables_ == ['var_A', 'var_B']
    assert encoder.variables_binary_ == []
    assert encoder.n_features_in_ == 3
    categories = ['A', 'B', 'C', 'D', 'E', 'F']
    assert encoder.encoder_dict_ == {'var_A': categories, 'var_B': categories}

    # transformed dummy columns carry the expected counts
    for column, expected_sum in expected_sums.items():
        assert X[column].sum() == expected_sum
    # original column and dropped last category are gone; untouched column stays
    assert 'var_B' not in X.columns
    assert 'var_B_G' not in X.columns
    assert 'var_C' in X.columns
def init_baseline_json():
    """Write an empty baseline-accuracy skeleton to ``baseline_file``.

    The skeleton is nested dataset -> model -> method -> prompt, each
    leaf holding the prompt instruction and an empty accuracy grid of
    metric -> difficulty level.
    """
    datasets = ['spider']
    models = ['llama2-7b-chat', 'llama2-13b-chat', 'codellama-7b-instruct', 'codellama-13b-instruct', 'baichuan2-7b-chat', 'baichuan2-13b-chat', 'qwen-7b-chat', 'qwen-14b-chat', 'chatglm3-6b']
    methods = ['base', 'lora', 'qlora']
    prompts = ['alpaca']
    metrics = ['ex', 'em']
    levels = ['easy', 'medium', 'hard', 'extra', 'all']
    json_data = {}
    for dataset in datasets:
        json_data[dataset] = {}
        for model in models:
            json_data[dataset][model] = {}
            for method in methods:
                json_data[dataset][model][method] = {}
                for prompt in prompts:
                    instruction = ALPACA if prompt == 'alpaca' else OPENAI
                    acc = {metric: {level: '' for level in levels} for metric in metrics}
                    json_data[dataset][model][method][prompt] = {'instruction': instruction, 'acc': acc}
    with open(baseline_file, 'w') as file:
        json.dump(json_data, file, indent=4)
def test_ssh_connection_error(f_build_rpm_case, caplog):
    """The first mock-config check over SSH fails; the worker must retry
    on another host and still succeed."""

    class _SideEffect():
        # fail only the very first invocation, succeed afterwards
        counter = 0

        def __call__(self):
            self.counter += 1
            first_call = self.counter == 1
            return (1, 'err stdout', 'err stderr') if first_call else (0, '', '')

    config = f_build_rpm_case
    config.ssh.set_command('/usr/bin/test -f /etc/mock/fedora-30-x86_64.cfg', 0, '', '', return_action=_SideEffect())
    config.bw.process()
    assert_logs_exist(['Retry #1 (on other host)', 'Worker succeeded build'], caplog)
class DefinitionsCollector(EnvironmentCollector):
    """Collect definition items (one storage per kind) in the build env."""

    def clear_doc(self, app, env, docname):
        """Drop stored items that originated from *docname*."""
        for kind in KINDS:
            storage = get_storage(env, kind)
            stale_ids = [item.id for item in storage.values() if item.document == docname]
            for item_id in stale_ids:
                del storage[item_id]

    def merge_other(self, app, env, docnames, other):
        """Copy items belonging to *docnames* from another environment."""
        for kind in KINDS:
            storage = get_storage(env, kind)
            for item in get_storage(other, kind).values():
                if item.document in docnames:
                    storage[item.id] = item

    def process_doc(self, app, document):
        """Collect definition-id nodes of each kind from *document*."""
        for kind in KINDS:
            storage = get_storage(app.env, kind)
            kind_nodes = (node for node in document.findall(DefIdNode) if node['def_kind'] == kind.NAME)
            for item in kind.collect_items_in_document(app, kind_nodes):
                storage[item.id] = item
class Quantifier(atom):
    """AST node for a quantified expression (operator, bound vars, place,
    child expression, distinctness flag, plus source location)."""

    _fields = ('op', 'vars', 'place', 'child', 'distinct')
    _attributes = ('lineno', 'col_offset')

    def __init__(self, op, place, child, distinct, vars=(), lineno=0, col_offset=0, **ARGS):
        # ``vars`` default changed from the mutable ``[]`` to ``()``;
        # ``list(vars)`` below still produces a fresh empty list, so
        # behaviour for callers is unchanged.
        atom.__init__(self, **ARGS)
        self.op = op
        self.vars = list(vars)
        self.place = place
        self.child = child
        self.distinct = distinct
        self.lineno = int(lineno)
        self.col_offset = int(col_offset)
class Compressed(amp.String):
    """AMP String argument that zlib-compresses its value and spreads it
    over several AMP keys (``name``, ``name.2``, ``name.3``, ...) so that
    values larger than a single AMP chunk can be carried."""
    def fromBox(self, name, strings, objects, proto):
        """Reassemble the chunked, compressed value into ``objects[name]``."""
        value = BytesIO()
        value.write(self.fromStringProto(strings.get(name), proto))
        # continuation chunks live under b"name.2", b"name.3", ... until
        # the first missing key
        for counter in count(2):
            chunk = strings.get((b'%s.%d' % (name, counter)))
            if (chunk is None):
                break
            value.write(self.fromStringProto(chunk, proto))
        objects[str(name, 'utf-8')] = value.getvalue()
    def toBox(self, name, strings, objects, proto):
        """Split ``objects[name]`` into AMP_MAXLEN-sized chunks in *strings*."""
        value = BytesIO(objects[str(name, 'utf-8')])
        strings[name] = self.toStringProto(value.read(AMP_MAXLEN), proto)
        # remaining data goes to numbered continuation keys
        for counter in count(2):
            chunk = value.read(AMP_MAXLEN)
            if (not chunk):
                break
            strings[(b'%s.%d' % (name, counter))] = self.toStringProto(chunk, proto)
    def toString(self, inObject):
        """Serialize via the parent class, then compress at max level."""
        return zlib.compress(super().toString(inObject), 9)
    def fromString(self, inString):
        """Decompress, then deserialize via the parent class."""
        return super().fromString(zlib.decompress(inString))
class MacOSEnableNewServiceDisrupter(Disrupter):
    """Disrupter that disables the primary macOS network service during
    setup and re-enables it as the disruption (restoring it on teardown
    if needed)."""
    def __init__(self, device, parameters):
        super().__init__(device, parameters)
        # this disrupter must disrupt, and does not need to restore
        self._restrict_parameters(must_disrupt=True, must_restore=False)
        # highest-priority active service; assigned in setup()
        self._primary_service = None
    def setup(self):
        """Verify two active services exist, then disable the primary one."""
        L.describe('Ensure there are two active network services')
        services = self._device['network_tool'].network_services_in_priority_order()
        active_services = [service for service in services if service.active()]
        self.assertGreaterEqual(len(active_services), 2, 'Need two active network services to run this test. Only the following are active: {}'.format(active_services))
        L.describe('Disable the primary network service')
        self._primary_service = active_services[0]
        self._primary_service.disable()
        L.info('Disabled service {}'.format(self._primary_service.name()))
    def disrupt(self):
        """The disruption: re-enable the service disabled in setup()."""
        L.describe('Re-enable primary network service')
        self._primary_service.enable()
    def teardown(self):
        # defensive: make sure the service is enabled even if disrupt()
        # never ran
        if self._primary_service:
            self._primary_service.enable()
        super().teardown()
class ReimuExterminateHandler(THBEventHandler):
    """Event handler for Reimu's Exterminate skill.

    Tracks whether the current turn player dealt damage, and triggers
    ReimuExterminateAction when a skill holder is damaged by the turn
    player or when a turn player who dealt damage finalizes their stage.
    """
    interested = ['action_apply', 'action_after']
    execute_after = ['DyingHandler', 'CheatingHandler', 'IbukiGourdHandler', 'DisarmHandler', 'FreakingPowerHandler', 'LunaticHandler', 'AyaRoundfanHandler', 'NenshaPhoneHandler']
    def handle(self, evt_type, act):
        if ((evt_type == 'action_apply') and isinstance(act, Damage)):
            # mark that the current turn player damaged someone else
            if (not act.source):
                return act
            (src, tgt) = (act.source, act.target)
            g = self.game
            try:
                current = PlayerTurn.get_current(g).target
            except IndexError:
                # no active turn (e.g. outside a player turn)
                return act
            if (src is not current):
                return act
            if (src is tgt):
                # self-damage does not count
                return act
            ttags(src)['did_damage'] = True
        elif ((evt_type == 'action_after') and isinstance(act, Damage)):
            # skill holder damaged by the turn player: fire the skill
            if (not act.source):
                return act
            (src, tgt) = (act.source, act.target)
            g = self.game
            try:
                cur = PlayerTurn.get_current(g).target
            except IndexError:
                return act
            if (not cur):
                return act
            if (not tgt.has_skill(ReimuExterminate)):
                return act
            if cur.dead:
                return act
            if (cur is tgt):
                return act
            g.process_action(ReimuExterminateAction(tgt, cur, 'damage'))
        elif ((evt_type == 'action_apply') and isinstance(act, FinalizeStage)):
            # turn player who dealt damage this turn: every other living
            # skill holder gets to respond at finalize
            tgt = act.target
            if (not ttags(tgt)['did_damage']):
                return act
            if tgt.dead:
                return act
            g = self.game
            current = PlayerTurn.get_current(g).target
            for actor in g.players.rotate_to(current):
                if (tgt is actor):
                    continue
                if (not actor.has_skill(ReimuExterminate)):
                    continue
                g.process_action(ReimuExterminateAction(actor, tgt, 'finalize'))
        return act
def _create_feature():
    """Assemble the merged feature set: quarterly + base company features
    merged on ticker, then daily and commodity aggregates merged on
    (ticker, date)."""
    quarterly = QuarterlyFeatures(data_key='quarterly', columns=QUARTER_COLUMNS, quarter_counts=QUARTER_COUNTS, max_back_quarter=MAX_BACK_QUARTER, min_back_quarter=MIN_BACK_QUARTER, verbose=VERBOSE)
    base = BaseCompanyFeatures(data_key='base', cat_columns=CAT_COLUMNS, verbose=VERBOSE)
    daily = DailyAggQuarterFeatures(daily_data_key='daily', quarterly_data_key='quarterly', columns=DAILY_AGG_COLUMNS, agg_day_counts=AGG_DAY_COUNTS, max_back_quarter=MAX_BACK_QUARTER, min_back_quarter=MIN_BACK_QUARTER, verbose=VERBOSE)
    commodities = DailyAggQuarterFeatures(daily_data_key='commodities', quarterly_data_key='quarterly', columns=['price'], agg_day_counts=AGG_DAY_COUNTS, max_back_quarter=MAX_BACK_QUARTER, min_back_quarter=MIN_BACK_QUARTER, daily_index=COMMODITIES_CODES, verbose=VERBOSE)
    feature = FeatureMerger(quarterly, base, on='ticker')
    for dated_feature in (daily, commodities):
        feature = FeatureMerger(feature, dated_feature, on=['ticker', 'date'])
    return feature
class TestTotalProduction(unittest.TestCase):
    """Validation behaviour of the TotalProduction event model."""

    def test_create_generation(self):
        """A well-formed event keeps every field it was built with."""
        event_time = datetime(2023, 1, 1, tzinfo=timezone.utc)
        generation = TotalProduction(zoneKey=ZoneKey('DE'), datetime=event_time, source='trust.me', value=1)
        assert generation.zoneKey == ZoneKey('DE')
        assert generation.datetime == event_time
        assert generation.source == 'trust.me'
        assert generation.value == 1

    def test_static_create_logs_error(self):
        """The static factory reports an invalid value via the logger."""
        logger = logging.Logger('test')
        with patch.object(logger, 'error') as mock_error:
            TotalProduction.create(logger=logger, zoneKey=ZoneKey('DE'), datetime=datetime(2023, 1, 1, tzinfo=timezone.utc), value=(- 1), source='trust.me')
            mock_error.assert_called_once()

    def test_raises_if_invalid_generation(self):
        """None value, naive datetime, bad zone key and negative value all raise."""
        aware = datetime(2023, 1, 1, tzinfo=timezone.utc)
        invalid_cases = [
            dict(zoneKey=ZoneKey('AT'), datetime=aware, value=None, source='trust.me'),
            dict(zoneKey=ZoneKey('AT'), datetime=datetime(2023, 1, 1), value=1, source='trust.me'),
            dict(zoneKey=ZoneKey('ATT'), datetime=aware, value=1, source='trust.me'),
            dict(zoneKey=ZoneKey('AT'), datetime=aware, value=(- 1), source='trust.me'),
        ]
        for kwargs in invalid_cases:
            with self.assertRaises(ValueError):
                TotalProduction(**kwargs)
class SequencerExtension(object):
    """Extension methods for Maya ``sequencer`` nodes (shot management,
    playblasting, EDL/metafuze export).

    NOTE(review): the bare ``(pm.nodetypes.Sequencer)`` expressions before
    each method look like stripped ``@extends(pm.nodetypes.Sequencer)``
    decorators — confirm against the original source.
    """
    (pm.nodetypes.Sequencer)
    def manager(self):
        """Return the connected sequenceManager node."""
        return pm.ls(self.connections(), type='sequenceManager')[0]
    (pm.nodetypes.Sequencer)
    def get_sequence_name(self):
        """Return the stored sequence name, creating an empty one if absent."""
        if (not self.hasAttr('sequence_name')):
            self.set_sequence_name('')
        return self.sequence_name.get()
    (pm.nodetypes.Sequencer)
    def set_sequence_name(self, name):
        """Store *name* on a (lazily created) string attribute."""
        if (not self.hasAttr('sequence_name')):
            self.addAttr('sequence_name', dt='string')
        self.sequence_name.set(name)
    (pm.nodetypes.Sequencer)
    def all_shots(self):
        """Return all shot nodes connected to this sequencer."""
        return pm.ls(self.shots.get(), typ='shot')
    (pm.nodetypes.Sequencer)
    def add_shot(self, shot):
        """Attach *shot* to this sequencer, detaching it from any other one."""
        if (not shot.hasAttr('handle')):
            shot.set_handle(handle=default_handle_count)
        # disconnect the shot from any sequencer it is already attached to
        for attr in shot.message.outputs(p=1):
            if isinstance(attr.node(), pm.nodetypes.Sequencer):
                (shot.message // attr)
        (shot.message >> self.shots.next_available)
    (pm.nodetypes.Sequencer)
    def set_shot_handles(self, handle=default_handle_count):
        """Set the handle length on every shot of this sequencer."""
        for shot in self.all_shots:
            shot.set_handle(handle)
    (pm.nodetypes.Sequencer)
    def mute_shots(self):
        """Mute every shot of this sequencer."""
        for shot in self.all_shots:
            shot.mute()
    (pm.nodetypes.Sequencer)
    def unmute_shots(self):
        """Unmute every shot of this sequencer."""
        for shot in self.all_shots:
            shot.unmute()
    def create_sequencer_attributes(self):
        # not implemented yet
        raise NotImplementedError()
    def set_shot_names(self, sequence_name, padding=4, increment=10, template='%(sequence_name)s_%(shot_name)_%(version_number)03d'):
        # not implemented yet
        raise NotImplementedError()
    (pm.nodetypes.Sequencer)
    def create_shot(self, name='', handle=default_handle_count):
        """Create a new shot node, configure it and attach it; return it."""
        shot = pm.createNode('shot')
        shot.shotName.set(name)
        shot.set_handle(handle=handle)
        shot.set_output('')
        (shot.message >> self.shots.next_available)
        return shot
    (pm.nodetypes.Sequencer)
    def create_shot_playblasts(self, output_path, show_ornaments=True):
        """Playblast every shot; return the list of movie file results."""
        movie_files = []
        for shot in self.all_shots:
            movie_files.append(shot.playblast(options={'showOrnaments': show_ornaments}))
        return movie_files
    (pm.nodetypes.Sequencer)
    def to_edl(self, seq):
        """Return the EDL representation of *seq*."""
        return seq.to_edl()
    (pm.nodetypes.Sequencer)
    def metafuze(self):
        """Generate metafuze XMLs for the sequence and run metafuze on each.

        Generator: yields the index of each XML before it is processed.
        """
        sm = pm.PyNode('sequenceManager1')
        seq = sm.generate_sequence_structure()
        xmls = seq.to_metafuze_xml()
        for (i, xml) in enumerate(xmls):
            temp_file_path = tempfile.mktemp(suffix='.xml')
            (yield i)
            with open(temp_file_path, 'w') as f:
                f.write(xml)
            subprocess.call(['metafuze', '-debug', temp_file_path], shell=True)
    (pm.nodetypes.Sequencer)
    def convert_to_mxf(self, path):
        # not implemented yet
        raise NotImplementedError()
    (pm.nodetypes.Sequencer)
    def duration(self):
        """Return the inclusive frame length of this sequencer."""
        return ((self.maxFrame.get() - self.minFrame.get()) + 1)
class custom_build_ext(build_ext):
    """build_ext variant that enables parallel builds and strips PIE flags
    (``-Wl,-pie`` / ``-fPIE``) from the compiler and linker command lines,
    since some default toolchain configurations inject them."""

    def build_extensions(self):
        self.parallel = True
        # Remove each flag independently: the original single bare
        # ``try/except: pass`` aborted at the first flag that was absent,
        # silently skipping the remaining removals.
        for attr_name, flag in (
            ('linker_so', '-Wl,-pie'),
            ('compiler_so', '-fPIE'),
            ('linker_so', '-fPIE'),
            ('compiler', '-fPIE'),
        ):
            flag_list = getattr(self.compiler, attr_name, None)
            try:
                flag_list.remove(flag)
            except (AttributeError, ValueError):
                # attribute missing on this compiler class, or flag not present
                pass
        build_ext.build_extensions(self)
class OptionPlotoptionsWindbarbSonificationContexttracks(Options):
    """Generated option wrapper for Highcharts
    ``plotOptions.windbarb.sonification.contextTracks``.

    Getters return the configured value (via ``_config_get`` with its
    default) and setters write it (via ``_config``).

    NOTE(review): each getter/setter pair shares a name, so as written the
    setter definition shadows the getter — this matches generated code
    whose ``@property`` / ``@<name>.setter`` decorators appear to have
    been stripped.  Confirm against the code generator.
    """
    def activeWhen(self) -> 'OptionPlotoptionsWindbarbSonificationContexttracksActivewhen':
        return self._config_sub_data('activeWhen', OptionPlotoptionsWindbarbSonificationContexttracksActivewhen)
    def instrument(self):
        return self._config_get('piano')
    def instrument(self, text: str):
        self._config(text, js_type=False)
    def mapping(self) -> 'OptionPlotoptionsWindbarbSonificationContexttracksMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsWindbarbSonificationContexttracksMapping)
    def midiName(self):
        return self._config_get(None)
    def midiName(self, text: str):
        self._config(text, js_type=False)
    def pointGrouping(self) -> 'OptionPlotoptionsWindbarbSonificationContexttracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsWindbarbSonificationContexttracksPointgrouping)
    def roundToMusicalNotes(self):
        return self._config_get(True)
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)
    def showPlayMarker(self):
        return self._config_get(True)
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)
    def timeInterval(self):
        return self._config_get(None)
    def timeInterval(self, num: float):
        self._config(num, js_type=False)
    def type(self):
        return self._config_get('instrument')
    def type(self, text: str):
        self._config(text, js_type=False)
    def valueInterval(self):
        return self._config_get(None)
    def valueInterval(self, num: float):
        self._config(num, js_type=False)
    def valueMapFunction(self):
        return self._config_get('linear')
    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)
    def valueProp(self):
        return self._config_get('"x"')
    def valueProp(self, text: str):
        self._config(text, js_type=False)
_toolkit([ToolkitName.qt, ToolkitName.wx])
class TestSimpleSetEditor(BaseTestMixin, unittest.TestCase):
def setUp(self):
BaseTestMixin.setUp(self)
def tearDown(self):
BaseTestMixin.tearDown(self)
def setup_gui(self, model, view):
with create_ui(model, dict(view=view)) as ui:
(yield ui.get_editors('value')[0])
def test_simple_set_editor_use_button(self):
model = ListModel(value=['two', 'one'])
with reraise_exceptions(), self.setup_gui(model, get_view()) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
click_on_item(editor, 1, in_used=False)
process_cascade_events()
self.assertTrue(is_control_enabled(editor._use))
self.assertFalse(is_control_enabled(editor._unuse))
click_button(editor._use)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['four'])
self.assertEqual(get_list_items(editor._used), ['three', 'one', 'two'])
self.assertEqual(editor._get_selected_strings(editor._used), [])
def test_simple_set_editor_unuse_button(self):
with reraise_exceptions(), self.setup_gui(ListModel(), get_view()) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
click_on_item(editor, 0, in_used=True)
process_cascade_events()
self.assertFalse(is_control_enabled(editor._use))
self.assertTrue(is_control_enabled(editor._unuse))
click_button(editor._unuse)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['one', 'four', 'three'])
self.assertEqual(get_list_items(editor._used), ['two'])
def test_simple_set_editor_use_dclick(self):
with reraise_exceptions(), self.setup_gui(ListModel(), get_view()) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
double_click_on_item(editor, 1, in_used=False)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['four'])
self.assertEqual(get_list_items(editor._used), ['three', 'one', 'two'])
self.assertEqual(editor._get_selected_strings(editor._used), [])
def test_simple_set_editor_unuse_dclick(self):
with reraise_exceptions(), self.setup_gui(ListModel(), get_view()) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
double_click_on_item(editor, 0, in_used=True)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['one', 'four', 'three'])
self.assertEqual(get_list_items(editor._used), ['two'])
def test_simple_set_editor_use_all(self):
with reraise_exceptions(), self.setup_gui(ListModel(), get_view()) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
click_on_item(editor, 1, in_used=False)
process_cascade_events()
self.assertTrue(is_control_enabled(editor._use_all))
self.assertFalse(is_control_enabled(editor._unuse_all))
click_button(editor._use_all)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), [])
self.assertEqual(get_list_items(editor._used), ['one', 'two', 'four', 'three'])
def test_simple_set_editor_unuse_all(self):
with reraise_exceptions(), self.setup_gui(ListModel(), get_view()) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
click_on_item(editor, 0, in_used=True)
process_cascade_events()
self.assertFalse(is_control_enabled(editor._use_all))
self.assertTrue(is_control_enabled(editor._unuse_all))
click_button(editor._unuse_all)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['four', 'three', 'one', 'two'])
self.assertEqual(get_list_items(editor._used), [])
def test_simple_set_editor_move_up(self):
with reraise_exceptions(), self.setup_gui(ListModel(), get_view(ordered=True)) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
click_on_item(editor, 1, in_used=True)
process_cascade_events()
self.assertTrue(is_control_enabled(editor._up))
self.assertFalse(is_control_enabled(editor._down))
click_button(editor._up)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['two', 'one'])
def test_simple_set_editor_move_down(self):
with reraise_exceptions(), self.setup_gui(ListModel(), get_view(ordered=True)) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
click_on_item(editor, 0, in_used=True)
process_cascade_events()
self.assertFalse(is_control_enabled(editor._up))
self.assertTrue(is_control_enabled(editor._down))
click_button(editor._down)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['two', 'one'])
def test_simple_set_editor_use_all_button(self):
with reraise_exceptions(), self.setup_gui(ListModel(), get_view()) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
click_on_item(editor, 1, in_used=False)
process_cascade_events()
self.assertTrue(is_control_enabled(editor._use_all))
self.assertFalse(is_control_enabled(editor._unuse_all))
click_button(editor._use_all)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), [])
self.assertEqual(get_list_items(editor._used), ['one', 'two', 'four', 'three'])
def test_simple_set_editor_unuse_all_button(self):
with reraise_exceptions(), self.setup_gui(ListModel(), get_view()) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
click_on_item(editor, 0, in_used=True)
process_cascade_events()
self.assertFalse(is_control_enabled(editor._use_all))
self.assertTrue(is_control_enabled(editor._unuse_all))
click_button(editor._unuse_all)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['four', 'three', 'one', 'two'])
self.assertEqual(get_list_items(editor._used), [])
def test_simple_set_editor_default_selection_unused(self):
with reraise_exceptions(), self.setup_gui(ListModel(), get_view()) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
click_button(editor._use)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['three'])
self.assertEqual(get_list_items(editor._used), ['four', 'one', 'two'])
def test_simple_set_editor_default_selection_used(self):
list_edit = ListModel(value=['one', 'two', 'three', 'four'])
with reraise_exceptions(), self.setup_gui(list_edit, get_view()) as editor:
self.assertEqual(get_list_items(editor._unused), [])
self.assertEqual(get_list_items(editor._used), ['four', 'one', 'three', 'two'])
click_button(editor._unuse)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['four'])
self.assertEqual(get_list_items(editor._used), ['one', 'three', 'two'])
def test_simple_set_editor_deleted_valid_values(self):
editor_factory = SetEditor(values=['one', 'two', 'three', 'four'])
view = View(UItem('value', editor=editor_factory, style='simple'))
list_edit = ListModel()
with reraise_exceptions(), self.setup_gui(list_edit, view) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
editor_factory.values = ['two', 'three', 'four']
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
if is_wx():
with self.assertRaises(AssertionError):
self.assertEqual(get_list_items(editor._used), ['two'])
self.assertEqual(get_list_items(editor._used), ['one', 'two'])
else:
self.assertEqual(get_list_items(editor._used), ['two'])
self.assertEqual(list_edit.value, ['two'])
def test_simple_set_editor_use_ordered_selected(self):
model = ListModel(value=['two', 'one'])
with reraise_exceptions(), self.setup_gui(model, get_view(ordered=True)) as editor:
self.assertEqual(get_list_items(editor._unused), ['four', 'three'])
self.assertEqual(get_list_items(editor._used), ['two', 'one'])
click_on_item(editor, 1, in_used=False)
process_cascade_events()
self.assertTrue(is_control_enabled(editor._use))
self.assertFalse(is_control_enabled(editor._unuse))
click_button(editor._use)
process_cascade_events()
self.assertEqual(get_list_items(editor._unused), ['four'])
self.assertEqual(get_list_items(editor._used), ['three', 'two', 'one'])
self.assertEqual(editor._get_selected_strings(editor._used), ['three'])
def test_simple_set_editor_unordeder_button_existence(self):
    """An unordered set editor creates no up/down reordering buttons."""
    with reraise_exceptions(), self.setup_gui(ListModel(), get_view()) as ed:
        self.assertIsNone(ed._down)
        self.assertIsNone(ed._up)
def test_simple_set_editor_cant_move_all_button_existence(self):
    """With can_move_all=False the use-all/unuse-all buttons are absent."""
    restricted_view = get_view(can_move_all=False)
    with reraise_exceptions(), self.setup_gui(ListModel(), restricted_view) as ed:
        self.assertIsNone(ed._unuse_all)
        self.assertIsNone(ed._use_all)
class OptionSeriesSplineZones(Options):
    """Accessors for the Highcharts ``series.spline.zones`` options.

    NOTE(review): every option appears as a getter/setter pair sharing one
    name; the decorators (presumably ``@property`` / ``@<name>.setter``)
    look stripped from this extract -- as written each later ``def``
    shadows the earlier one. Confirm against the original generated code.
    """

    def className(self):
        # CSS class name for the zone; default None.
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        # Zone color; default None (inherit series color).
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def dashStyle(self):
        # Dash style of the graph within this zone; default None.
        return self._config_get(None)

    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    def fillColor(self):
        # Fill color of the area within this zone; default None.
        return self._config_get(None)

    def fillColor(self, text: str):
        self._config(text, js_type=False)
def extractLuminelletranslationsBlogspotCom(item):
    """Parse a luminelletranslations.blogspot.com feed item.

    Returns a release message when the title parses to a chapter/volume
    and a known tag is present, None for previews/unparseable titles, and
    False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    mappings = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, tl_type in mappings:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
            )
    return False
class SchedulerPopen():
    """Schedules transonic backend compilations as subprocesses.

    Only MPI rank 0 launches and tracks worker processes; other ranks
    merely participate in the barriers so all ranks stay in sync.
    """

    # Polling period (seconds) while waiting on worker processes.
    deltat = 0.2

    def __init__(self, parallel=True):
        self.progress = Progress(redirect_stdout=False, redirect_stderr=False)
        if mpi.rank > 0:
            # Non-root ranks never own processes.
            return
        self.processes = []
        if parallel:
            # Use half the cores, but always at least one slot.
            self.limit_nb_processes = max(1, multiprocessing.cpu_count() // 2)
        else:
            self.limit_nb_processes = 1

    def block_until_avail(self, parallel=True):
        """Block (on rank 0) until a worker slot is free, then barrier."""
        if mpi.rank == 0:
            limit = self.limit_nb_processes if parallel else 1
            while len(self.processes) >= limit:
                time.sleep(self.deltat)
                # Drop processes that have finished.
                self.processes = [
                    process for process in self.processes if process.is_alive_root()
                ]
        mpi.barrier(timeout=None)

    def wait_for_all_extensions(self):
        """Wait until every scheduled compilation process has finished."""
        if mpi.rank == 0:
            # BUG FIX: previously read ``scheduler.processes`` (a module
            # global) instead of this instance's own list.
            total = len(self.processes)
            task = self.progress.add_task('Wait for all extensions', total=total)
            while self.processes:
                time.sleep(self.deltat)
                self.processes = [
                    process for process in self.processes if process.is_alive_root()
                ]
                self.progress.update(task, completed=(total - len(self.processes)))
        mpi.barrier(timeout=None)

    def compile_extension(self, path: Path, backend: str, name_ext_file: str, native=False, xsimd=False, openmp=False, str_accelerator_flags: Optional[str]=None, parallel=True, force=True):
        """Launch ``transonic_cl.run_backend`` on *path* in a subprocess.

        Returns the (MPI-wrapped) process handle, or None early when
        *force* is False and the extension appears up-to-date.
        """
        if not force:
            path_out = path.with_name(name_ext_file)
            if not has_to_build(path_out, path):
                logger.warning(f'Do not {backend}ize {path} because it seems up-to-date (but the compilation options may have changed). You can force the compilation with the option -f.')
                return
        if mpi.rank == 0:
            task = self.progress.add_task(f'Schedule {backend}ization: {path.name}')

        def advance(value):
            # Progress updates are rank-0 only; ``task`` exists only there.
            if mpi.rank == 0:
                self.progress.update(task, advance=value)

        if str_accelerator_flags is not None:
            flags = str_accelerator_flags.strip().split()
        else:
            flags = []

        def update_flags(flag):
            # Append without duplicating user-provided flags.
            if flag not in flags:
                flags.append(flag)

        if native and os.name != 'nt':
            # MSVC does not understand -march=native.
            update_flags('-march=native')
        if xsimd:
            update_flags('-DUSE_XSIMD')
        if openmp:
            update_flags('-fopenmp')
        if logger.is_enable_for('debug'):
            update_flags('-v')
        if logger.getEffectiveLevel() < logging.INFO:
            env = dict(os.environ, TRANSONIC_DEBUG='1')
        else:
            env = None
        words_command = [sys.executable, '-m', 'transonic_cl.run_backend', path.name, '-b', backend]
        words_command.extend(('-o', name_ext_file))
        words_command.extend(flags)
        cwd = path.parent
        advance(10)
        self.block_until_avail(parallel)
        advance(20)
        process = None
        if mpi.rank == 0:
            # Hide worker output unless logging at INFO or finer.
            if logger.getEffectiveLevel() <= logging.INFO:
                stdout = stderr = None
            else:
                stdout = stderr = subprocess.PIPE
            process = subprocess.Popen(words_command, cwd=cwd, stdout=stdout, stderr=stderr, universal_newlines=True, env=env)
        # Wrap on all ranks (process is None on non-root ranks).
        process = mpi.ShellProcessMPI(process)
        if mpi.rank == 0:
            self.processes.append(process)
        advance(70)
        return process
class MapNode():
    """One node (room position) on an XY map grid.

    A node lives at grid coordinates ``x``/``y``, which map to in-game
    coordinates ``X``/``Y`` (half the grid, since links occupy the grid
    positions between nodes) on map plane ``Z``. After ``build_links`` the
    node knows its outgoing links, per-target weights and shortest routes,
    and can spawn/update/delete its in-game room and exits.
    """

    # Character representing this node in the map string.
    symbol = '#'
    # Display override; falls back to ``symbol`` when None.
    display_symbol = None
    interrupt_path = False
    # Prototype dict used to spawn the in-game room; None disables spawning.
    prototype = None
    node_index = None
    multilink = True
    # Default exit key + aliases per compass/vertical direction.
    direction_spawn_defaults = {'n': ('north', 'n'), 'ne': ('northeast', 'ne', 'north-east'), 'e': ('east', 'e'), 'se': ('southeast', 'se', 'south-east'), 's': ('south', 's'), 'sw': ('southwest', 'sw', 'south-west'), 'w': ('west', 'w'), 'nw': ('northwest', 'nw', 'north-west'), 'd': ('down', 'd', 'do'), 'u': ('up', 'u')}

    def __init__(self, x, y, Z, node_index=0, symbol=None, xymap=None):
        self.x = x
        self.y = y
        self.xymap = xymap
        # In-game coordinates are half the grid coordinates.
        self.X = (x // 2)
        self.Y = (y // 2)
        self.Z = Z
        self.node_index = node_index
        if symbol is not None:
            self.symbol = symbol
        # direction -> end node reached in that direction.
        self.links = {}
        # direction -> first link object out of this node.
        self.first_links = {}
        # target node_index -> traversal weight.
        self.weights = {}
        # target node_index -> (first step name, steps, weight).
        self.shortest_route_to_node = {}
        # direction -> list of link steps to the end node.
        self.xy_steps_to_node = {}
        # exit-name -> direction, used to resolve player movement input.
        self.closest_neighbor_names = {}

    def __str__(self):
        # BUG FIX: the closing '>' was missing from the repr string.
        return f"<MapNode '{self.symbol}' {self.node_index} XY=({self.X},{self.Y})>"

    def __repr__(self):
        return str(self)

    def log(self, msg):
        # Delegate logging to the owning map.
        self.xymap.log(msg)

    def generate_prototype_key(self):
        """Deterministic prototype key derived from the XYZ coordinate."""
        return str(uuid.uuid5(UUID_XYZ_NAMESPACE, str((self.X, self.Y, self.Z))))

    def build_links(self):
        """Trace all links out of this node and record routing data.

        Raises:
            MapParserError: if two links would leave the node under the
                same exit name.
        """
        xygrid = self.xymap.xygrid
        (x, y) = (self.x, self.y)
        # Scan all grid neighbors for link glyphs.
        for (direction, (dx, dy)) in MAPSCAN.items():
            (lx, ly) = ((x + dx), (y + dy))
            if ((lx in xygrid) and (ly in xygrid[lx])):
                link = xygrid[lx][ly]
                # Traverse from the link back toward us, following it to
                # whichever node it ends at.
                (end_node, weight, steps) = link.traverse(REVERSE_DIRECTIONS[direction])
                if end_node:
                    self.first_links[direction] = link
                    first_step_name = steps[0].direction_aliases.get(direction, direction)
                    if (first_step_name in self.closest_neighbor_names):
                        raise MapParserError(f"has more than one outgoing direction '{first_step_name}'. All directions out of a node must be unique.", self)
                    self.closest_neighbor_names[first_step_name] = direction
                    node_index = end_node.node_index
                    self.weights[node_index] = weight
                    self.links[direction] = end_node
                    self.xy_steps_to_node[direction] = steps
                    # Keep only the cheapest route per target node.
                    shortest_route = self.shortest_route_to_node.get(node_index, ('', [], BIGVAL))[2]
                    if (weight < shortest_route):
                        self.shortest_route_to_node[node_index] = (first_step_name, steps, weight)

    def linkweights(self, nnodes):
        """Return this node's row of the weighted adjacency matrix."""
        link_graph = zeros(nnodes)
        for (node_index, weight) in self.weights.items():
            link_graph[node_index] = weight
        return link_graph

    def get_display_symbol(self):
        return (self.symbol if (self.display_symbol is None) else self.display_symbol)

    def get_spawn_xyz(self):
        """In-game (X, Y, Z) coordinate where the room spawns."""
        return (self.X, self.Y, self.Z)

    def get_exit_spawn_name(self, direction, return_aliases=True):
        """Exit key (and optionally aliases) to spawn for *direction*."""
        (key, *aliases) = self.first_links[direction].spawn_aliases.get(direction, self.direction_spawn_defaults.get(direction, ('unknown',)))
        if return_aliases:
            return (key, *aliases)
        return key

    def spawn(self):
        """Create or update the in-game room for this node."""
        global NodeTypeclass
        if (not NodeTypeclass):
            from .xyzroom import XYZRoom as NodeTypeclass
        if (not self.prototype):
            # Without a prototype this node spawns nothing.
            return
        xyz = self.get_spawn_xyz()
        try:
            nodeobj = NodeTypeclass.objects.get_xyz(xyz=xyz)
        except django_exceptions.ObjectDoesNotExist:
            # No room here yet -- create it from the prototype's typeclass.
            typeclass = self.prototype.get('typeclass')
            if (typeclass is None):
                raise MapError(f"The prototype {self.prototype} for this node has no 'typeclass' key.", self)
            self.log(f'  spawning room at xyz={xyz} ({typeclass})')
            Typeclass = class_from_module(typeclass)
            (nodeobj, err) = Typeclass.create(self.prototype.get('key', 'An empty room'), xyz=xyz)
            if err:
                raise RuntimeError(err)
        else:
            self.log(f'  updating existing room (if changed) at xyz={xyz}')
        if (not self.prototype.get('prototype_key')):
            self.prototype['prototype_key'] = self.generate_prototype_key()
        # Apply the (possibly changed) prototype to the room.
        spawner.batch_update_objects_with_prototype(self.prototype, objects=[nodeobj], exact=False)

    def spawn_links(self, directions=None):
        """Create/update/delete the in-game exits out of this node.

        Args:
            directions: optional iterable restricting which directions
                may be spawned; existing exits are still pruned.
        """
        if (not self.prototype):
            return
        xyz = (self.X, self.Y, self.Z)
        direction_limits = directions
        global ExitTypeclass
        if (not ExitTypeclass):
            from .xyzroom import XYZExit as ExitTypeclass
        # Exit key (lowercased) -> (key, aliases, direction, link).
        maplinks = {}
        for (direction, link) in self.first_links.items():
            (key, *aliases) = self.get_exit_spawn_name(direction)
            if (not link.prototype.get('prototype_key')):
                link.prototype['prototype_key'] = self.generate_prototype_key()
            maplinks[key.lower()] = (key, aliases, direction, link)
        # Remove duplicate exits sharing the same key at this location.
        linkobjs = defaultdict(list)
        for exitobj in ExitTypeclass.objects.filter_xyz(xyz=xyz):
            linkobjs[exitobj.key].append(exitobj)
        for (exitkey, exitobjs) in linkobjs.items():
            for exitobj in exitobjs[1:]:
                self.log(f'  deleting duplicate {exitkey}')
                exitobj.delete()
        linkobjs = {exi.db_key.lower(): exi for exi in ExitTypeclass.objects.filter_xyz(xyz=xyz)}
        # Reconcile map vs database: delete orphans, spawn missing exits.
        differing_keys = set(maplinks.keys()).symmetric_difference(set(linkobjs.keys()))
        for differing_key in differing_keys:
            if (differing_key not in maplinks):
                self.log(f'  deleting exit at xyz={xyz}, direction={differing_key}')
                linkobjs.pop(differing_key).delete()
            else:
                (key, aliases, direction, link) = maplinks[differing_key]
                if (direction_limits and (direction not in direction_limits)):
                    continue
                exitnode = self.links[direction]
                prot = maplinks[key.lower()][3].prototype
                typeclass = prot.get('typeclass')
                if (typeclass is None):
                    raise MapError(f"The prototype {prot} for this node has no 'typeclass' key.", self)
                self.log(f'  spawning/updating exit xyz={xyz}, direction={key} ({typeclass})')
                Typeclass = class_from_module(typeclass)
                (exi, err) = Typeclass.create(key, xyz=xyz, xyz_destination=exitnode.get_spawn_xyz(), aliases=aliases)
                if err:
                    raise RuntimeError(err)
                linkobjs[key.lower()] = exi
        # Apply the link prototypes to all (new and existing) exits.
        for (key, linkobj) in linkobjs.items():
            spawner.batch_update_objects_with_prototype(maplinks[key.lower()][3].prototype, objects=[linkobj], exact=False)

    def unspawn(self):
        """Delete the spawned in-game room at this node's coordinates."""
        global NodeTypeclass
        if (not NodeTypeclass):
            # BUG FIX: was ``from .room import XYZRoom`` -- inconsistent
            # with ``spawn`` above, which imports from ``.xyzroom``.
            from .xyzroom import XYZRoom as NodeTypeclass
        xyz = (self.X, self.Y, self.Z)
        try:
            nodeobj = NodeTypeclass.objects.get_xyz(xyz=xyz)
        except django_exceptions.ObjectDoesNotExist:
            # Nothing spawned here -- nothing to delete.
            pass
        else:
            nodeobj.delete()
class ButterworthFilter(eagerx.Node):
    """EAGERx node that applies a Butterworth (SOS) low/high/band filter to
    its ``signal`` input and publishes the result as ``filtered``.

    NOTE(review): several decorators appear stripped from this extract --
    ``make`` has no ``self``/``cls`` (presumably ``@staticmethod``), the
    bare ``()`` after ``initialize`` and the ``(signal=...)`` /
    ``(filtered=...)`` lines before ``callback`` look like orphaned
    decorator argument lists (e.g. ``eagerx.register.*``). As written
    those remnants are not valid Python; confirm against the original.
    """

    def make(name: str, rate: float, index: int=0, N: int=2, Wn: float=1, btype: str='lowpass', process: Optional[int]=eagerx.NEW_PROCESS, color: Optional[str]='grey'):
        """Build a node spec: N = filter order, Wn = critical frequency,
        btype = 'lowpass'/'highpass'/'bandpass'."""
        spec = ButterworthFilter.get_specification()
        spec.config.update(name=name, rate=rate, process=process, color=color, inputs=['signal'], outputs=['filtered'])
        spec.config.update(N=N, Wn=Wn, btype=btype)
        from tests.test.processors import GetIndex
        # Window must hold at least N samples for the filter (checked in
        # initialize); the processor extracts element ``index``.
        spec.inputs.signal.window = '$(config N)'
        spec.inputs.signal.processor = GetIndex.make(index=index)
        return spec

    def initialize(self, spec):
        for (cname, i) in self.inputs.items():
            if (cname == 'signal'):
                assert (int(i['window']) >= spec.config.N), 'The window size of the signal {} is too small to create a filter with order {}.'.format(i['window'], spec.config.N)
        # Design the filter once, as second-order sections.
        self.filter = butter(spec.config.N, spec.config.Wn, spec.config.btype, output='sos', fs=self.rate)
        self.N = spec.config.N

    ()

    def reset(self):
        # Stateless between episodes; nothing to reset.
        pass

    (signal=eagerx.Space(low=(- 3), high=3, shape=(1,), dtype='float32'))
    (filtered=eagerx.Space(low=(- 3), high=3, shape=(1,), dtype='float32'))
    def callback(self, t_n: float, signal: Optional[Msg]=None):
        """Filter the last N samples; pass through until N are buffered."""
        if (len(signal.msgs) >= self.N):
            unfiltered = [signal.msgs[i][0] for i in range((- self.N), 0)]
            filtered = np.array([sosfilt(self.filter, unfiltered)[(- 1)]], dtype='float32')
        else:
            # Not enough history yet -- emit the raw latest sample.
            filtered = signal.msgs[(- 1)]
        return dict(filtered=filtered)
def delete_dir(path: str) -> None:
    """Recursively remove the directory *path* and everything below it.

    A no-op when *path* does not exist. Files are unlinked first, then the
    now-empty directories are removed bottom-up, and finally *path* itself.
    """
    if not os.path.exists(path):
        return
    # topdown=False yields leaves first, so every directory is empty by
    # the time os.rmdir reaches it.
    for root, subdirs, filenames in os.walk(path, topdown=False):
        for filename in filenames:
            os.remove(os.path.join(root, filename))
        for subdir in subdirs:
            os.rmdir(os.path.join(root, subdir))
    os.rmdir(path)
class LexerReflect():
    """Introspects a lexer-definition namespace (``ldict``) and collects
    and validates the ``tokens`` list, ``literals``, ``states`` and the
    ``t_*`` rules. Problems are logged and accumulated in ``self.error``
    rather than raised, so all issues can be reported in one pass.
    """

    def __init__(self, ldict, log=None, reflags=0):
        self.ldict = ldict                    # namespace holding the t_* definitions
        self.error_func = None
        self.tokens = []
        self.reflags = reflags                # extra flags OR'd with re.VERBOSE
        self.stateinfo = {'INITIAL': 'inclusive'}   # statename -> 'inclusive'|'exclusive'
        self.modules = set()                  # modules contributing rules (for validate_module)
        self.error = False
        self.log = (PlyLogger(sys.stderr) if (log is None) else log)

    def get_all(self):
        """Collect tokens, literals, states and rules from ldict."""
        self.get_tokens()
        self.get_literals()
        self.get_states()
        self.get_rules()

    def validate_all(self):
        """Validate everything collected; returns True if any error."""
        self.validate_tokens()
        self.validate_literals()
        self.validate_rules()
        return self.error

    def get_tokens(self):
        """Fetch the ``tokens`` list; must be a non-empty list/tuple."""
        tokens = self.ldict.get('tokens', None)
        if (not tokens):
            self.log.error('No token list is defined')
            self.error = True
            return
        if (not isinstance(tokens, (list, tuple))):
            self.log.error('tokens must be a list or tuple')
            self.error = True
            return
        if (not tokens):
            self.log.error('tokens is empty')
            self.error = True
            return
        self.tokens = tokens

    def validate_tokens(self):
        """Check token names are identifiers and warn on duplicates."""
        terminals = {}
        for n in self.tokens:
            if (not _is_identifier.match(n)):
                self.log.error("Bad token name '%s'", n)
                self.error = True
            if (n in terminals):
                self.log.warning("Token '%s' multiply defined", n)
            terminals[n] = 1

    def get_literals(self):
        # ``literals`` is optional; normalize a falsy value to ''.
        self.literals = self.ldict.get('literals', '')
        if (not self.literals):
            self.literals = ''

    def validate_literals(self):
        """Each literal must be a single character."""
        try:
            for c in self.literals:
                if ((not isinstance(c, StringTypes)) or (len(c) > 1)):
                    self.log.error('Invalid literal %s. Must be a single character', repr(c))
                    self.error = True
        except TypeError:
            # ``literals`` was not iterable at all.
            self.log.error('Invalid literals specification. literals must be a sequence of characters')
            self.error = True

    def get_states(self):
        """Read the optional ``states`` spec: (name, 'inclusive'|'exclusive') pairs."""
        self.states = self.ldict.get('states', None)
        if self.states:
            if (not isinstance(self.states, (tuple, list))):
                self.log.error('states must be defined as a tuple or list')
                self.error = True
            else:
                for s in self.states:
                    if ((not isinstance(s, tuple)) or (len(s) != 2)):
                        self.log.error("Invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')", repr(s))
                        self.error = True
                        continue
                    (name, statetype) = s
                    if (not isinstance(name, StringTypes)):
                        self.log.error('State name %s must be a string', repr(name))
                        self.error = True
                        continue
                    if (not ((statetype == 'inclusive') or (statetype == 'exclusive'))):
                        self.log.error("State type for state %s must be 'inclusive' or 'exclusive'", name)
                        self.error = True
                        continue
                    if (name in self.stateinfo):
                        self.log.error("State '%s' already defined", name)
                        self.error = True
                        continue
                    self.stateinfo[name] = statetype

    def get_rules(self):
        """Partition all ``t_*`` symbols into function rules, string rules
        and the special ignore/error/eof handlers, per lexer state.
        """
        tsymbols = [f for f in self.ldict if (f[:2] == 't_')]
        self.toknames = {}      # rule symbol -> token name
        self.funcsym = {}       # state -> [(symbol, function)]
        self.strsym = {}        # state -> [(symbol, string)]
        self.ignore = {}        # state -> ignore characters
        self.errorf = {}        # state -> error handler function
        self.eoff = {}          # state -> eof handler function
        for s in self.stateinfo:
            self.funcsym[s] = []
            self.strsym[s] = []
        if (len(tsymbols) == 0):
            self.log.error('No rules of the form t_rulename are defined')
            self.error = True
            return
        for f in tsymbols:
            t = self.ldict[f]
            # Split 't_state1_state2_NAME' into its states and token name.
            (states, tokname) = _statetoken(f, self.stateinfo)
            self.toknames[f] = tokname
            if hasattr(t, '__call__'):
                if (tokname == 'error'):
                    for s in states:
                        self.errorf[s] = t
                elif (tokname == 'eof'):
                    for s in states:
                        self.eoff[s] = t
                elif (tokname == 'ignore'):
                    # t_ignore must be a string, not a function.
                    line = t.__code__.co_firstlineno
                    file = t.__code__.co_filename
                    self.log.error("%s:%d: Rule '%s' must be defined as a string", file, line, t.__name__)
                    self.error = True
                else:
                    for s in states:
                        self.funcsym[s].append((f, t))
            elif isinstance(t, StringTypes):
                if (tokname == 'ignore'):
                    for s in states:
                        self.ignore[s] = t
                    if ('\\' in t):
                        self.log.warning("%s contains a literal backslash '\\'", f)
                elif (tokname == 'error'):
                    # t_error must be a function, not a string.
                    self.log.error("Rule '%s' must be defined as a function", f)
                    self.error = True
                else:
                    for s in states:
                        self.strsym[s].append((f, t))
            else:
                self.log.error('%s not defined as a function or string', f)
                self.error = True
        # Function rules keep source order; string rules are applied
        # longest-pattern-first.
        for f in self.funcsym.values():
            f.sort(key=(lambda x: x[1].__code__.co_firstlineno))
        for s in self.strsym.values():
            s.sort(key=(lambda x: len(x[1])), reverse=True)

    def validate_rules(self):
        """Validate every rule per state: arity, regex validity, no
        empty-string matches, and that string rules name known tokens.
        """
        for state in self.stateinfo:
            for (fname, f) in self.funcsym[state]:
                line = f.__code__.co_firstlineno
                file = f.__code__.co_filename
                module = inspect.getmodule(f)
                self.modules.add(module)
                # Bound methods carry an implicit self argument.
                if isinstance(f, types.MethodType):
                    reqargs = 2
                else:
                    reqargs = 1
                nargs = f.__code__.co_argcount
                if (nargs > reqargs):
                    self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
                    self.error = True
                    continue
                if (nargs < reqargs):
                    self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
                    self.error = True
                    continue
                if (not _get_regex(f)):
                    self.log.error("%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__)
                    self.error = True
                    continue
                try:
                    # Compile each rule in isolation to pinpoint errors.
                    c = re.compile(f'(?P<{fname}>{_get_regex(f)})', (re.VERBOSE | self.reflags))
                    if c.match(''):
                        self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__)
                        self.error = True
                except re.error as e:
                    self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e)
                    if ('#' in _get_regex(f)):
                        # '#' starts a comment under re.VERBOSE unless escaped.
                        self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__)
                    self.error = True
            for (name, r) in self.strsym[state]:
                tokname = self.toknames[name]
                if (tokname == 'error'):
                    self.log.error("Rule '%s' must be defined as a function", name)
                    self.error = True
                    continue
                if ((tokname not in self.tokens) and (tokname.find('ignore_') < 0)):
                    self.log.error("Rule '%s' defined for an unspecified token %s", name, tokname)
                    self.error = True
                    continue
                try:
                    c = re.compile(('(?P<%s>%s)' % (name, r)), (re.VERBOSE | self.reflags))
                    if c.match(''):
                        self.log.error("Regular expression for rule '%s' matches empty string", name)
                        self.error = True
                except re.error as e:
                    self.log.error("Invalid regular expression for rule '%s'. %s", name, e)
                    if ('#' in r):
                        self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'", name)
                    self.error = True
            if ((not self.funcsym[state]) and (not self.strsym[state])):
                self.log.error("No rules defined for state '%s'", state)
                self.error = True
            # The error handler (if any) must have the right arity too.
            efunc = self.errorf.get(state, None)
            if efunc:
                f = efunc
                line = f.__code__.co_firstlineno
                file = f.__code__.co_filename
                module = inspect.getmodule(f)
                self.modules.add(module)
                if isinstance(f, types.MethodType):
                    reqargs = 2
                else:
                    reqargs = 1
                nargs = f.__code__.co_argcount
                if (nargs > reqargs):
                    self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
                    self.error = True
                if (nargs < reqargs):
                    self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
                    self.error = True
        for module in self.modules:
            self.validate_module(module)

    def validate_module(self, module):
        """Scan *module*'s source for ``t_*`` names defined twice, which
        would silently shadow an earlier rule.
        """
        try:
            (lines, linen) = inspect.getsourcelines(module)
        except OSError:
            # Source unavailable (e.g. interactive/frozen); skip check.
            return
        fre = re.compile('\\s*def\\s+(t_[a-zA-Z_0-9]*)\\(')
        sre = re.compile('\\s*(t_[a-zA-Z_0-9]*)\\s*=')
        counthash = {}
        linen += 1
        for line in lines:
            m = fre.match(line)
            if (not m):
                m = sre.match(line)
            if m:
                name = m.group(1)
                prev = counthash.get(name)
                if (not prev):
                    counthash[name] = linen
                else:
                    filename = inspect.getsourcefile(module)
                    self.log.error('%s:%d: Rule %s redefined. Previously defined on line %d', filename, linen, name, prev)
                    self.error = True
            linen += 1
class WafExclusion(ModelNormal):
    """Generated OpenAPI model wrapping a single ``data`` attribute of type
    ``WafExclusionData``.

    NOTE(review): the bare ``_property`` / ``_js_args_to_python_args``
    lines below look like stripped decorators (``@cached_property`` and
    ``@convert_js_args_to_python_args`` in the usual generated client);
    ``_from_openapi_data`` also reads like a ``@classmethod``. As written
    those remnants are not valid decorator syntax; confirm against the
    original generated source.
    """

    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        # Lazy import avoids a circular dependency at module load.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        # Attribute name -> allowed type(s).
        lazy_import()
        return {'data': (WafExclusionData,)}

    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None

    # Python attribute name -> JSON key name.
    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from raw API data; accepts read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop keys the schema does not know about.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Keyword-only constructor; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class Task():
    """A single shell command tracked through its subprocess lifecycle.

    Instances start idle; ``__call__`` launches the command on a device,
    ``poll`` updates completion/timeout state, and ``pull`` drains the
    captured output into a user callback.
    """

    def __init__(self, idx: typing.Union[(int, str)], cmd: str, name: str, **kwargs) -> None:
        self._idx = idx
        self._cmd = cmd
        self._name = name
        self._kwargs = kwargs
        # Lifecycle flags.
        self._finished = False
        self._is_timeout = False
        self._failed = False
        # Runtime state, populated once the task is launched.
        self._ret = None
        self._assigned_dev = None
        self._proc = None
        self._timestamp = 0
        self._stdout = ''
        self._stderr = ''

    def __call__(self, dev_id: int) -> None:
        """Launch the command, exporting *dev_id* via the device env flag."""
        self._assigned_dev = dev_id
        use_shell = self._kwargs.get('shell', False)
        env = os.environ.copy()
        if 'dev_flag' in self._kwargs:
            env[self._kwargs['dev_flag']] = str(dev_id)
        self._proc = subprocess.Popen(self._cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, shell=use_shell)
        self._timestamp = time.time()

    def is_running(self) -> bool:
        # "Running" here means "has been launched".
        return self._proc is not None

    def is_finished(self) -> bool:
        return self._finished

    def is_timeout(self) -> bool:
        return self._is_timeout

    def poll(self, current_time, timeout) -> bool:
        """Refresh completion state; kills the process once *timeout*
        seconds have elapsed since launch. Returns the finished flag."""
        if not self.is_running():
            return False
        elapsed = current_time - self._timestamp
        if elapsed > timeout:
            self._proc.kill()
            self._finished = True
            self._is_timeout = True
            self._failed = True
            return True
        if self._proc.poll() is not None:
            self._finished = True
        return self._finished

    def pull(self, fproc: typing.Callable) -> None:
        """Read captured stdout/stderr and hand this task to *fproc*.
        Skipped entirely for failed (e.g. timed-out) tasks."""
        if self._failed:
            return None
        self._stdout = self._proc.stdout.read().decode('utf-8')
        self._stderr = self._proc.stderr.read().decode('utf-8')
        fproc(self)

    def is_failed(self) -> bool:
        return self._failed

    def assigned_dev(self) -> int:
        return self._assigned_dev

    def __del__(self) -> None:
        # Close the pipes explicitly to avoid ResourceWarnings.
        if self._proc:
            if self._proc.stdout:
                self._proc.stdout.close()
            if self._proc.stderr:
                self._proc.stderr.close()
class OptionSeriesDependencywheelSonificationContexttracks(Options):
    """Accessors for Highcharts ``series.dependencywheel.sonification.
    contextTracks`` options.

    NOTE(review): scalar options appear as getter/setter pairs sharing one
    name; the decorators (presumably ``@property`` / ``@<name>.setter``)
    look stripped from this extract -- as written each later ``def``
    shadows the earlier one. Confirm against the original generated code.
    """

    def activeWhen(self) -> 'OptionSeriesDependencywheelSonificationContexttracksActivewhen':
        # Sub-options controlling when this track is active.
        return self._config_sub_data('activeWhen', OptionSeriesDependencywheelSonificationContexttracksActivewhen)

    def instrument(self):
        # Default instrument: 'piano'.
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionSeriesDependencywheelSonificationContexttracksMapping':
        return self._config_sub_data('mapping', OptionSeriesDependencywheelSonificationContexttracksMapping)

    def midiName(self):
        # MIDI track name; default None.
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionSeriesDependencywheelSonificationContexttracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionSeriesDependencywheelSonificationContexttracksPointgrouping)

    def roundToMusicalNotes(self):
        # Whether to snap pitches to notes; default True.
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        # Whether the play marker is shown; default True.
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def timeInterval(self):
        # Fixed time interval between plays (ms); default None.
        return self._config_get(None)

    def timeInterval(self, num: float):
        self._config(num, js_type=False)

    def type(self):
        # Track type; default 'instrument'.
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)

    def valueInterval(self):
        # Fixed value interval between plays; default None.
        return self._config_get(None)

    def valueInterval(self, num: float):
        self._config(num, js_type=False)

    def valueMapFunction(self):
        # Mapping curve for values; default 'linear'.
        return self._config_get('linear')

    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)

    def valueProp(self):
        # Point property driving the track; default '"x"'.
        return self._config_get('"x"')

    def valueProp(self, text: str):
        self._config(text, js_type=False)
def observations_for_obs_keys(res: LibresFacade, obs_keys: List[str]) -> List[Dict[(str, Any)]]:
    """Collect observation values/errors (and an x axis) for *obs_keys*.

    Each entry holds the flattened observation values, their std errors,
    and an x axis built from the 'time' coordinate when present, otherwise
    from the 'index' coordinate.
    """
    result = []
    for obs_key in obs_keys:
        dataset = res.config.observations[obs_key]
        if 'time' in dataset.coords:
            x_axis = _prepare_x_axis(dataset.time.values.flatten())
        else:
            x_axis = _prepare_x_axis(dataset['index'].values.flatten())
        result.append({
            'name': obs_key,
            'values': list(dataset.observations.values.flatten()),
            'errors': list(dataset['std'].values.flatten()),
            'x_axis': x_axis,
        })
    return result
def test_nonce_manual(BrownieTester, accounts):
    """Deploying with explicit nonces increments the account nonce."""
    deployer = accounts[0]
    assert deployer.nonce == 0
    contract = deployer.deploy(BrownieTester, True, nonce=0)
    assert type(contract) == ProjectContract
    assert deployer.nonce == 1
    contract = deployer.deploy(BrownieTester, True, nonce=1)
    assert type(contract) == ProjectContract
class AbstractStorage(Storage):
    """Fully symbolic contract storage backed by a z3 array.

    Reads and writes go through z3's Select/Store, so the storage stays a
    symbolic expression rather than concrete values.
    """

    def __init__(self, label='abstract_storage'):
        word = z3.BitVecSort(svm_utils.VECTOR_LEN)
        self.array = z3.Array(label, word, word)

    def store(self, index, value):
        # Functional update: each write produces a new array expression.
        self.array = z3.Store(self.array, index, value)

    def load(self, index):
        return z3.Select(self.array, index)

    def __copy__(self):
        # Share the (immutable) array expression; future stores on the
        # clone rebind its own attribute only.
        duplicate = AbstractStorage()
        duplicate.array = self.array
        return duplicate
class OptionSeriesDependencywheelSonificationDefaultspeechoptionsActivewhen(Options):
    """Accessors for the ``activeWhen`` condition of a default speech track.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the decorators (presumably ``@property`` / ``@<name>.setter``)
    look stripped from this extract. Confirm against the generated code.
    """

    def crossingDown(self):
        # Activate when the prop crosses this value going down; default None.
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        # Activate when the prop crosses this value going up; default None.
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        # Upper bound for the active range; default None.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Lower bound for the active range; default None.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property the condition is evaluated on; default None.
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the bare argument list below looks like a stripped
# decorator (e.g. ``@publisher(['animation', 'previs', 'shot previs'],
# publisher_type=POST_PUBLISHER_TYPE)``); as written it is not valid
# Python. Confirm against the original source.
(['animation', 'previs', 'shot previs'], publisher_type=POST_PUBLISHER_TYPE)
def generate_playblast(progress_controller=None):
    """Playblast every shot in the current scene and upload the videos.

    Zeroes each shot's ``handle`` attribute first, then runs the
    Playblaster in batch mode; upload errors are deliberately swallowed.
    Progress is reported through *progress_controller* (a no-op default
    controller is created when None).
    """
    if (progress_controller is None):
        progress_controller = ProgressControllerBase()
    shots = pm.ls(type='shot')
    # One step per shot + one for the playblast + one for the upload.
    progress_controller.maximum = (len(shots) + 2)
    for shot in shots:
        # Remove handles so the playblast matches the shot range exactly.
        if shot.hasAttr('handle'):
            shot.handle.set(0)
        progress_controller.increment()
    sp = auxiliary.Playblaster()
    sp.batch_mode = True
    try:
        video_files = sp.playblast()
        progress_controller.increment()
        sp.upload_outputs(sp.version, video_files)
    except RuntimeError:
        # Best-effort: a failed playblast/upload must not abort the batch.
        pass
    progress_controller.increment()
    progress_controller.complete()
class TestSuperFencesCustom(util.MdCase):
    """Exercises pymdownx.superfences custom fences: a fence named 'test'
    with a custom formatter and option validator.
    """

    extension = ['pymdownx.superfences']
    extension_configs = {'pymdownx.superfences': {'custom_fences': [{'name': 'test', 'class': 'test', 'format': custom_format, 'validator': custom_validator}]}}

    def test_failure(self):
        # Formatter raising -> falls back to default fence rendering.
        self.check_markdown('\n    ```test\n    test\n    ```\n    ', '\n    <div class="highlight"><pre><span></span><code>test\n    </code></pre></div>\n    ', True)

    def test_bad_options(self):
        # Unknown option rejected by the validator -> fence left as code span.
        self.check_markdown('\n    ```test bad="bad"\n    test\n    ```\n    ', '\n    <p><code>test bad="bad"\n    test</code></p>\n    ', True)

    def test_bad_option_value(self):
        # Known option with a disallowed value also fails validation.
        self.check_markdown('\n    ```test opt="B"\n    test\n    ```\n    ', '\n    <p><code>test opt="B"\n    test</code></p>\n    ', True)

    def test_bad_option_no_value(self):
        # Boolean-style option without a value is rejected too.
        self.check_markdown('\n    ```test opt\n    test\n    ```\n    ', '\n    <p><code>test opt\n    test</code></p>\n    ', True)

    def test_custom_options(self):
        # Valid option value flows through to the custom formatter output.
        self.check_markdown('\n    ```test opt="A"\n    test\n    ```\n    ', '\n    <div lang="test" class_name="class-test", option="A">test</div>\n    ', True)
def test_encode_something(simple_attribute):
    """Round-trip a container-heavy class through pyrallis encode/decode.

    ``simple_attribute`` is a fixture triple of (attribute type, attribute
    name, example value).

    NOTE(review): ``SomeClass`` uses ``field(default_factory=...)`` but no
    ``@dataclass`` decorator is visible here -- presumably stripped from
    this extract; confirm against the original test module.
    """
    (some_type, _, expected_value) = simple_attribute

    class SomeClass():
        # dict, list-of-tuples and optional-valued dict of the fixture type.
        d: Dict[(str, some_type)] = field(default_factory=dict)
        l: List[Tuple[(some_type, some_type)]] = field(default_factory=list)
        t: Dict[(str, Optional[some_type])] = field(default_factory=dict)

    b = SomeClass()
    b.d.update({'hey': expected_value})
    b.l.append((expected_value, expected_value))
    b.t.update({'hey': None, 'hey2': expected_value})
    # encode -> decode must reproduce an equal instance.
    assert (pyrallis.decode(SomeClass, pyrallis.encode(b)) == b)
def select(what, key):
    """Return the sub-key of *key* selected by *what*.

    Behavior (identical to the original 17-branch if/elif chain):
      * ``what == 1``: the first 16 elements of *key*.
      * ``2 <= what <= 17``: ``key[what-1:] + key[:what-2]`` -- i.e. the
        tail starting at index ``what-1`` followed by the head up to (but
        excluding) index ``what-2``, so element ``key[what-2]`` is dropped.
        For ``what == 2`` the second slice is empty, matching the original
        branch that appended nothing.
      * anything else: logs an error and returns None.

    The result length must equal ``Ar_KEY_LEN`` (asserted below, as in the
    original).
    """
    if what == 1:
        selected_key = key[0:16]
    elif 2 <= what <= 17:
        # Generalizes the original hand-unrolled branches: each branch n
        # was ``key[n-1:]`` extended with ``key[:n-2]``.
        selected_key = key[what - 1:] + key[:what - 2]
    else:
        log.error('select what: {} is not supported'.format(what))
        return None
    emsg = 'select selected_key len is {}, it should be {}'.format(len(selected_key), Ar_KEY_LEN)
    assert (len(selected_key) == Ar_KEY_LEN), emsg
    return selected_key
def run_cmd(cmd, cwd='.', raise_on_error=True):
    """Run *cmd* in *cwd* and return a Munch with cmd/stdout/stderr/returncode.

    Raises:
        RunCommandException: if the process cannot be launched, or -- when
            *raise_on_error* is True -- if it exits non-zero.
    """
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, encoding='utf-8')
    try:
        (stdout, stderr) = process.communicate()
    except OSError as e:
        raise RunCommandException(str(e))
    result = munch.Munch(cmd=cmd, stdout=stdout.strip(), stderr=stderr.strip(), returncode=process.returncode)
    log.debug(result)
    # BUG FIX: the raise_on_error parameter was previously ignored and a
    # non-zero exit always raised; honor it so callers can opt out and
    # inspect the returned result instead.
    if (result.returncode != 0) and raise_on_error:
        raise RunCommandException(result.stderr)
    return result
class LiteEthPHYMII(LiteXModule):
    """MII Ethernet PHY wrapper: clocking (CRG), TX/RX datapaths and an
    optional MDIO management interface.
    """

    # Datapath width in bits.
    dw = 8
    # NOTE(review): clock frequencies are initialized to 0.0 here --
    # presumably set elsewhere (e.g. by the CRG or platform); confirm.
    tx_clk_freq = .0
    rx_clk_freq = .0

    def __init__(self, clock_pads, pads, with_hw_init_reset=True):
        # Clock/reset generation for the eth_tx/eth_rx clock domains.
        self.crg = LiteEthPHYMIICRG(clock_pads, pads, with_hw_init_reset)
        # Datapaths, re-clocked into their respective ethernet domains.
        self.tx = ClockDomainsRenamer('eth_tx')(LiteEthPHYMIITX(pads))
        self.rx = ClockDomainsRenamer('eth_rx')(LiteEthPHYMIIRX(pads))
        (self.sink, self.source) = (self.tx.sink, self.rx.source)
        # MDIO management bus only when the pads expose an mdc signal.
        if hasattr(pads, 'mdc'):
            self.mdio = LiteEthPHYMDIO(pads)
class OptionSeriesAreasplinerangeSonificationContexttracksMappingHighpass(Options):
    """Accessors for the highpass-filter mapping of an areasplinerange
    sonification context track (frequency and resonance sub-options).
    """

    def frequency(self) -> 'OptionSeriesAreasplinerangeSonificationContexttracksMappingHighpassFrequency':
        # Sub-options mapping the filter cutoff frequency.
        return self._config_sub_data('frequency', OptionSeriesAreasplinerangeSonificationContexttracksMappingHighpassFrequency)

    def resonance(self) -> 'OptionSeriesAreasplinerangeSonificationContexttracksMappingHighpassResonance':
        # Sub-options mapping the filter resonance.
        return self._config_sub_data('resonance', OptionSeriesAreasplinerangeSonificationContexttracksMappingHighpassResonance)
class Context():
    """Thread-local, stack-based attribute namespace.

    Each thread sees its own stack of ``(variables_dict, state_id)`` frames
    pushed via ``set()``; attribute lookup walks the stack top-down.
    ``set()`` returns a RestoreState object whose restoration pops the frame,
    enforcing strictly nested (LIFO) usage via state ids from
    ``_restore_ids``.  An optional ``default`` is returned for unset
    attributes instead of raising AttributeError.
    """

    def __init__(self, default=NoDefault):
        # Assign through __dict__ because __setattr__ is blocked below.
        self.__dict__['_local'] = threading.local()
        self.__dict__['_default'] = default

    def __getattr__(self, attr):
        """Look *attr* up in the current thread's frames, newest first."""
        if attr.startswith('_'):
            # Refuse private names so internal lookups (e.g. _local before
            # __init__ runs in a new thread) fail fast instead of recursing.
            raise AttributeError
        try:
            stack = self._local.stack
        except AttributeError:
            # No frames have been pushed in this thread yet.
            stack = []
        for i in range((len(stack) - 1), (- 1), (- 1)):
            if (attr in stack[i][0]):
                return stack[i][0][attr]
        if (self._default is NoDefault):
            raise AttributeError(('The attribute %s has not been set on %r' % (attr, self)))
        return self._default

    def __setattr__(self, attr, value):
        """Direct assignment is forbidden; use set() so scoping is explicit."""
        raise AttributeError('You can only set attributes on context objects with the .set() method')

    def set(self, **kw):
        """Push a new frame of variables; returns a restorer that pops it.

        The bottom sentinel frame carries state id -1 and holds the defaults
        installed by set_default().
        """
        state_id = next(_restore_ids)
        try:
            stack = self._local.stack
        except AttributeError:
            # First use in this thread: seed the stack with the sentinel frame.
            stack = self._local.stack = [({}, (- 1))]
        restorer = RestoreState(self, state_id)
        stack.append((kw, state_id))
        return restorer

    def _restore(self, state_id):
        """Pop the frame tagged *state_id*; raise if restoration is out of order."""
        try:
            stack = self._local.stack
        except AttributeError:
            raise ContextRestoreError(('Tried to restore context %r (to state ID %s) but no variables have been set in context' % (self, state_id)))
        if (stack[(- 1)][1] == (- 1)):
            # Only the sentinel/default frame remains; nothing to pop.
            raise ContextRestoreError(('Out of order restoration of context %r (to state ID %s); the stack state is empty' % (self, state_id)))
        if (stack[(- 1)][1] != state_id):
            # Frames must be restored strictly LIFO.
            raise ContextRestoreError(('Out of order restoration of context %r (to state ID %s) when last state is %s' % (self, state_id, stack[(- 1)][1])))
        stack.pop()

    def set_default(self, **kw):
        """Install fallback values into this thread's bottom sentinel frame."""
        try:
            stack = self._local.stack
        except AttributeError:
            stack = self._local.stack = [({}, (- 1))]
        stack[0][0].update(kw)

    def __repr__(self):
        """Show the merged view of all frames, truncating long value reprs."""
        try:
            stack = self._local.stack
        except AttributeError:
            stack = []
        myid = hex(abs(id(self)))[2:]
        if (not stack):
            return ('<%s %s (empty)>' % (self.__class__.__name__, myid))
        cur = {}
        for (kw, _state_id) in stack:
            cur.update(kw)
        keys = sorted(cur)
        varlist = []
        for key in keys:
            rep = repr(cur[key])
            if (len(rep) > 10):
                # Keep the last char (usually the closing quote/bracket).
                rep = ((rep[:9] + '...') + rep[(- 1)])
            varlist.append(('%s=%s' % (key, rep)))
        return ('<%s %s %s>' % (self.__class__.__name__, myid, ' '.join(varlist)))
def test_mac_address_parsing():
    """find_iocs must extract dash-, colon-, and dot-delimited MAC addresses in either case."""
    cases = [
        ('AA-F2-C9-A6-B3-4F AB:F2:C9:A6:B3:4F ACF2.C9A6.B34F',
         ['AA-F2-C9-A6-B3-4F', 'AB:F2:C9:A6:B3:4F', 'ACF2.C9A6.B34F']),
        ('aa-f2-c9-a6-b3-4f ab:f2:c9:a6:b3:4f acf2.c9a6.b34f',
         ['aa-f2-c9-a6-b3-4f', 'ab:f2:c9:a6:b3:4f', 'acf2.c9a6.b34f']),
    ]
    for text, expected_macs in cases:
        found = find_iocs(text)['mac_addresses']
        assert (len(found) == 3)
        for mac in expected_macs:
            assert (mac in found)
def _pbr_mode_needs_color_space(bake_mode, preferences):
    """Return True when a PBR preset should use the color (sRGB-style) slot for *bake_mode*.

    True when the mode has an explicit material override, when it is a
    transmission bake without clean-transmission, or when it is one of the
    inherently color-carrying passes.
    """
    return ((op_bake.modes[bake_mode].material != '')
            or ((bake_mode == 'transmission') and (not preferences.bool_clean_transmission))
            or (bake_mode in {'diffuse', 'base_color', 'sss_color', 'emission', 'environment', 'combined'}))


def set_bake_color_space_int(bake_mode):
    """Map *bake_mode* and the add-on's color-space preference to an int code.

    Codes 0-3 index the color-space choices used elsewhere by the add-on
    (presumably 0/1 = standard pair, 2/3 = ACES "A*" pair — TODO confirm
    against the enum they feed).  Normal maps always get the non-color slot
    of the selected pair.  Returns None implicitly for an unrecognized
    ``bake_color_space_def`` on a non-normal mode (original behavior,
    preserved).
    """
    preferences = bpy.context.preferences.addons[__package__].preferences
    if ('normal_' in bake_mode):
        # Normal maps are data, never color: pick the non-color slot of the
        # ACES pair (3) or the standard pair (1).
        if ((preferences.bake_color_space_def == 'ASTANDARD') or (preferences.bake_color_space_def == 'APBR')):
            return 3
        else:
            return 1
    elif (preferences.bake_color_space_def == 'STANDARD'):
        return 0
    elif (preferences.bake_color_space_def == 'PBR'):
        # Same predicate as APBR below, offset into the standard pair (0/1).
        if _pbr_mode_needs_color_space(bake_mode, preferences):
            return 0
        return 1
    elif (preferences.bake_color_space_def == 'ASTANDARD'):
        return 2
    elif (preferences.bake_color_space_def == 'APBR'):
        # Same predicate as PBR above, offset into the ACES pair (2/3).
        if _pbr_mode_needs_color_space(bake_mode, preferences):
            return 2
        return 3
def upgrade():
    """Recreate the email_notification flag columns as nullable Integer columns.

    Drops all four columns first, then re-adds them, so any previous type
    or constraints are replaced.
    """
    flag_columns = ('next_event', 'new_paper', 'session_accept_reject', 'session_schedule')
    for column_name in flag_columns:
        op.drop_column('email_notification', column_name)
    for column_name in flag_columns:
        op.add_column('email_notification', sa.Column(column_name, sa.Integer(), nullable=True))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.