code stringlengths 281 23.7M |
|---|
def are_files_equal(file1, file2, delta=1, skip=0):
    """Compare two text files line by line.

    The first ``skip`` line pairs are ignored. When ``delta`` is truthy, up
    to ``delta`` differing lines are tolerated before the files count as
    unequal; when ``delta`` is falsy, the first difference makes them
    unequal. Comparison stops at the end of the shorter file.
    """
    mismatch_count = 0
    with open(file1) as left, open(file2) as right:
        for line_no, (left_line, right_line) in enumerate(zip(left, right)):
            if line_no < skip:
                continue
            if left_line == right_line:
                continue
            if not delta:
                return False
            mismatch_count += 1
            if mismatch_count > delta:
                return False
    return True
class Grammar(OrderedDict):
    """An ordered mapping of rule names to parsing expressions.

    Built from a rule-definition DSL string plus optional keyword-supplied
    custom rules. The first rule defined becomes the default entry point
    used by :meth:`parse` and :meth:`match`.
    """

    def __init__(self, rules='', **more_rules):
        """Compile ``rules`` (a DSL string), merging in ``more_rules``."""
        # Callable custom rules are wrapped as expressions; ready-made
        # expression objects pass through untouched.
        decorated_custom_rules = {
            k: (expression(v, k, self) if is_callable(v) else v)
            for (k, v) in more_rules.items()
        }
        exprs, first = self._expressions_from_rules(rules, decorated_custom_rules)
        super().__init__(exprs.items())
        self.default_rule = first

    def default(self, rule_name):
        """Return a shallow copy of this grammar defaulting to ``rule_name``."""
        clone = self._copy()
        clone.default_rule = clone[rule_name]
        return clone

    def _copy(self):
        """Shallow-copy the grammar, keeping the same default rule."""
        clone = Grammar.__new__(Grammar)
        super(Grammar, clone).__init__(self.items())
        clone.default_rule = self.default_rule
        return clone

    def _expressions_from_rules(self, rules, custom_rules):
        """Parse the rule DSL and build expressions via a RuleVisitor."""
        tree = rule_grammar.parse(rules)
        return RuleVisitor(custom_rules).visit(tree)

    def parse(self, text, pos=0):
        """Parse ``text`` with the default rule, anchored at ``pos``."""
        self._check_default_rule()
        return self.default_rule.parse(text, pos=pos)

    def match(self, text, pos=0):
        """Match ``text`` with the default rule, anchored at ``pos``."""
        self._check_default_rule()
        return self.default_rule.match(text, pos=pos)

    def _check_default_rule(self):
        """Raise if the grammar has no default rule to dispatch to."""
        if not self.default_rule:
            raise RuntimeError("Can't call parse() on a Grammar that has no default rule. Choose a specific rule instead, like some_grammar['some_rule'].parse(...).")

    def __str__(self):
        """Render the grammar back into rule syntax, default rule first."""
        ordered = [self.default_rule] if self.default_rule else []
        for expr in self.values():
            if expr is not self.default_rule:
                ordered.append(expr)
        return '\n'.join(expr.as_rule() for expr in ordered)

    def __repr__(self):
        return 'Grammar({!r})'.format(str(self))
@_group.command('upload-job')
@click.argument('job-file', type=click.Path(exists=True, dir_okay=False))
@click.option('--overwrite', '-o', is_flag=True, help='Overwrite job if exists by name')
@click.pass_context
def upload_job(ctx: click.Context, job_file, overwrite):
    """Upload an ML job definition (JSON file) to Elasticsearch.

    BUG FIX: the four decorator lines had lost their ``@click...`` prefixes
    (leaving bare tuples, a syntax error); restored here.
    """
    es_client: Elasticsearch = ctx.obj['es']
    ml_client = MlClient(es_client)
    with open(job_file, 'r') as f:
        job = json.load(f)

    def safe_upload(func):
        # Uses the closure variables name/body/job_type assigned below,
        # before any call to safe_upload.
        try:
            func(name, body)
        except (elasticsearch.ConflictError, elasticsearch.RequestError) as err:
            # Any RequestError other than "already exists" is a hard failure.
            if isinstance(err, elasticsearch.RequestError) and (err.error != 'resource_already_exists_exception'):
                client_error(str(err), err, ctx=ctx)
            if overwrite:
                ctx.invoke(delete_job, job_name=name, job_type=job_type)
                func(name, body)
            else:
                client_error(str(err), err, ctx=ctx)

    try:
        job_type = job['type']
        name = job['name']
        body = job['body']
        if job_type == 'anomaly_detection':
            safe_upload(ml_client.put_job)
        elif job_type == 'data_frame_analytic':
            safe_upload(ml_client.put_data_frame_analytics)
        elif job_type == 'datafeed':
            safe_upload(ml_client.put_datafeed)
        else:
            client_error(f'Unknown ML job type: {job_type}')
        click.echo(f'Uploaded {job_type} job: {name}')
    except KeyError as e:
        client_error(f'{job_file} missing required info: {e}')
@pytest.fixture(scope='function')
def system_provide_service_operations_support_optimization(db: Session) -> System:
    """Fixture: a Service-type System carrying one privacy declaration.

    The declaration records 'essential.service.operations.improve' use of
    customer cookie IDs. BUG FIX: the bare ``(scope='function')`` line was
    a ``@pytest.fixture(...)`` decorator that lost its prefix (a syntax
    error as written); restored here.
    """
    system_provide_service_operations_support_optimization = System.create(db=db, data={'fides_key': f'system_key-f{uuid4()}', 'name': f'system-{uuid4()}', 'description': 'fixture-made-system', 'organization_fides_key': 'default_organization', 'system_type': 'Service'})
    PrivacyDeclaration.create(db=db, data={'name': 'Optimize and improve support operations in order to provide the service', 'system_id': system_provide_service_operations_support_optimization.id, 'data_categories': ['user.device.cookie_id'], 'data_use': 'essential.service.operations.improve', 'data_subjects': ['customer'], 'dataset_references': None, 'egress': None, 'ingress': None})
    # Reload so the freshly created declaration is visible on the returned object.
    db.refresh(system_provide_service_operations_support_optimization)
    return system_provide_service_operations_support_optimization
class OptionPlotoptionsArearangeSonificationDefaultspeechoptions(Options):
    """Config wrapper for arearange sonification ``defaultSpeechOptions``.

    Each accessor is a property: getters report the library default,
    setters store the user value in the underlying config. BUG FIX: the
    ``@property`` / ``@<name>.setter`` decorators had been lost, so each
    setter ``def`` silently shadowed its getter of the same name.
    """

    @property
    def activeWhen(self) -> 'OptionPlotoptionsArearangeSonificationDefaultspeechoptionsActivewhen':
        return self._config_sub_data('activeWhen', OptionPlotoptionsArearangeSonificationDefaultspeechoptionsActivewhen)

    @property
    def language(self):
        # Default speech language.
        return self._config_get('en-US')

    @language.setter
    def language(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionPlotoptionsArearangeSonificationDefaultspeechoptionsMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsArearangeSonificationDefaultspeechoptionsMapping)

    @property
    def pointGrouping(self) -> 'OptionPlotoptionsArearangeSonificationDefaultspeechoptionsPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsArearangeSonificationDefaultspeechoptionsPointgrouping)

    @property
    def preferredVoice(self):
        return self._config_get(None)

    @preferredVoice.setter
    def preferredVoice(self, text: str):
        self._config(text, js_type=False)

    @property
    def showPlayMarker(self):
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        return self._config_get('speech')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
def fix_and_parse_json(data):
    """Best-effort JSON parsing: repair the raw string first.

    Returns the decoded object, or None for empty input or on any
    decode/repair failure (the error is printed, not raised).
    """
    if not data:
        return None
    result = None
    try:
        data = bytes2str(data)
        result = json.loads(fix_json_str(data))
    except Exception as e:
        print(f'[ERROR]: cannot parse json string: {data}, error: {e}')
    return result
def load_profiles_list(pq_both=False):
    """Return the list of known load-profile names.

    When ``pq_both`` is true, each profile is expanded into a ``_pload``
    and a ``_qload`` variant (all ``_pload`` names first, then all
    ``_qload`` names). Fixed: removed the dead ``profiles = []``
    assignment that was immediately overwritten.
    """
    profiles = ['G0-A', 'G0-M', 'G1-A', 'G1-B', 'G1-C', 'G2-A', 'G3-A', 'G3-M', 'G3-H', 'G4-A', 'G4-B', 'G4-M', 'G4-H', 'G5-A', 'G6-A', 'H0-A', 'H0-B', 'H0-C', 'H0-H', 'H0-G', 'H0-L', 'L0-A', 'L1-A', 'L2-A', 'L2-M', 'BL-H', 'WB-H']
    if not pq_both:
        return profiles
    # Expand: for each suffix, append it to every profile name.
    return [prof + pq for pq in ['_pload', '_qload'] for prof in profiles]
class TestInet(unittest.TestCase):
    """Sanity checks for the exported IP protocol number constants."""

    def test_ip_proto(self):
        # (constant, expected IANA protocol number) pairs, checked in order.
        cases = (
            (IPPROTO_IP, 0),
            (IPPROTO_HOPOPTS, 0),
            (IPPROTO_ICMP, 1),
            (IPPROTO_TCP, 6),
            (IPPROTO_UDP, 17),
            (IPPROTO_ROUTING, 43),
            (IPPROTO_FRAGMENT, 44),
            (IPPROTO_AH, 51),
            (IPPROTO_ICMPV6, 58),
            (IPPROTO_NONE, 59),
            (IPPROTO_DSTOPTS, 60),
            (IPPROTO_SCTP, 132),
        )
        for value, expected in cases:
            eq_(value, expected)
def CreateGemmRCROperator(manifest):
    """Register f16 row/col/row GEMM (CShuffle) operations in ``manifest``.

    For each tile description, a block-transfer and c-block-transfer
    configuration is derived from the block size; every (specialization,
    tile) combination is then instantiated, appended to ``manifest``, and
    returned as a list.
    """
    operation_kind = library.GemmKind.Gemm
    a_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.RowMajor)
    b_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.ColumnMajor)
    c_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.RowMajor)
    element_op = library.TensorOperation.PassThrough
    tile_descriptions = [gemm.TileDesc(256, 256, 128, 32, 8, 8, 32, 32, 4, 2), gemm.TileDesc(256, 128, 256, 32, 8, 8, 32, 32, 2, 4), gemm.TileDesc(128, 128, 128, 32, 8, 8, 32, 32, 4, 2), gemm.TileDesc(256, 128, 128, 32, 8, 8, 32, 32, 2, 2), gemm.TileDesc(128, 128, 64, 32, 8, 8, 32, 32, 2, 2), gemm.TileDesc(128, 64, 128, 32, 8, 8, 32, 32, 2, 2), gemm.TileDesc(64, 64, 64, 32, 8, 8, 32, 32, 2, 2), gemm.TileDesc(256, 128, 64, 32, 8, 8, 32, 32, 2, 1), gemm.TileDesc(256, 64, 128, 32, 8, 8, 32, 32, 1, 2), gemm.TileDesc(128, 128, 32, 32, 8, 8, 32, 32, 2, 1), gemm.TileDesc(128, 32, 128, 32, 8, 8, 32, 32, 1, 2), gemm.TileDesc(64, 64, 32, 32, 8, 8, 32, 32, 2, 1), gemm.TileDesc(64, 32, 64, 32, 8, 8, 32, 32, 1, 2)]
    block_descriptions = []
    c_block_descriptions = []
    for t in tile_descriptions:
        # -1 marks "not yet determined" until a block size matches below.
        block_transfer = -1
        c_block_transfer = -1
        if t.block_size == 256:
            block_transfer = [4, 64, 1]
            c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 8)
        if t.block_size == 128:
            block_transfer = [4, 32, 1]
            if t.n_per_block == 128:
                c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 16, 1, 8], 8)
            else:
                c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 32, 1, 4], 8)
        if t.block_size == 64:
            block_transfer = [4, 16, 1]
            c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 16, 1, 4], 8)
        # BUG FIX: the message used to be and-ed INTO the assert condition
        # (a truthy string, so it never acted as a message); it is now the
        # assert's failure message.
        assert block_transfer != -1 and c_block_transfer != -1, \
            'Cannot determine block_transfer_size with block_size ' + str(t.block_size)
        block_descriptions.append(gemm.BlockTransferDesc(block_transfer, [1, 0, 2], [1, 0, 2], 2, 8, 8, 1))
        c_block_descriptions.append(c_block_transfer)
    gemm_specialization = [gemm.GemmSpecialization.GemmDefault, gemm.GemmSpecialization.MNKPadding]
    operations = []
    for gemm_spec in gemm_specialization:
        for tile_desc, block_desc, c_block_desc in zip(tile_descriptions, block_descriptions, c_block_descriptions):
            new_operation = gemm.GemmOperation(operation_kind=operation_kind, extra_kind=element_op, xdl_op_type=gemm.XdlOpType.DeviceGemmXdl_CShuffle, A=a_element_desc, B=b_element_desc, C=c_element_desc, a_elem_op=element_op, b_elem_op=element_op, epilogue_functor=element_op, gemm_specialization=gemm_spec, tile_desc=tile_desc, a_block_transfer=block_desc, b_block_transfer=block_desc, c_block_transfer=c_block_desc)
            manifest.append(new_operation)
            operations.append(new_operation)
    return operations
class OptionPlotoptionsColumnrangeLabelStyle(Options):
    """Columnrange series label style options (fontSize / fontWeight).

    BUG FIX: the ``@property`` / ``@<name>.setter`` decorators had been
    lost, so each setter ``def`` silently shadowed its getter.
    """

    @property
    def fontSize(self):
        return self._config_get('0.8em')

    @fontSize.setter
    def fontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def fontWeight(self):
        return self._config_get('bold')

    @fontWeight.setter
    def fontWeight(self, text: str):
        self._config(text, js_type=False)
class Telegram(plugins.Plugin):
    """Pwnagotchi plugin that posts a session summary (and screenshot) to Telegram."""
    __author__ = ''
    __version__ = '1.0.0'
    __license__ = 'GPL3'
    __description__ = 'Periodically sent messages to Telegram about the recent activity of pwnagotchi'

    def on_loaded(self):
        # Called once when the plugin is registered.
        logging.info('telegram plugin loaded.')

    def on_internet_available(self, agent):
        """Send a notification when a new session with handshakes is detected.

        Requires the python-telegram-bot package and the plugin options
        'bot_token', 'chat_id', 'send_picture', 'send_message'.
        """
        config = agent.config()
        display = agent.view()
        last_session = agent.last_session
        if (last_session.is_new() and (last_session.handshakes > 0)):
            # Import lazily so the plugin degrades gracefully when the
            # telegram package is not installed.
            try:
                import telegram
            except ImportError:
                logging.error("Couldn't import telegram")
                return
            logging.info('Detected new activity and internet, time to send a message!')
            # Render the session screen to a PNG for the photo upload.
            picture = '/root/pwnagotchi.png'
            display.on_manual_mode(last_session)
            display.image().save(picture, 'png')
            display.update(force=True)
            try:
                logging.info('Connecting to Telegram...')
                message = Voice(lang=config['main']['lang']).on_last_session_tweet(last_session)
                bot = telegram.Bot(self.options['bot_token'])
                if (self.options['send_picture'] is True):
                    bot.sendPhoto(chat_id=self.options['chat_id'], photo=open(picture, 'rb'))
                    logging.info('telegram: picture sent')
                if (self.options['send_message'] is True):
                    bot.sendMessage(chat_id=self.options['chat_id'], text=message, disable_web_page_preview=True)
                    logging.info(('telegram: message sent: %s' % message))
                # Remember this session so it is not reported again.
                last_session.save_session_id()
                display.set('status', 'Telegram notification sent!')
                display.update(force=True)
            except Exception:
                # Best-effort notification: log the failure, never crash the agent.
                logging.exception('Error while sending on Telegram')
def test_cli_ethpm(cli_tester, testproject):
    """'ethpm list' dispatches to _list exactly once; an unknown subcommand does not."""
    cli_tester.monkeypatch.setattr('brownie._cli.ethpm._list', cli_tester.mock_subroutines)
    expected_parameters = ((testproject._path,), {})
    cli_tester.run_and_test_parameters('ethpm list', expected_parameters)
    cli_tester.run_and_test_parameters('ethpm foo', expected_parameters)
    assert cli_tester.mock_subroutines.called is True
    assert cli_tester.mock_subroutines.call_count == 1
def combined_mongo_postgresql_graph(postgres_config: ConnectionConfig, mongo_config: ConnectionConfig) -> Tuple[(GraphDataset, GraphDataset)]:
    """Build a (mongo, postgres) pair of GraphDatasets for cross-store traversal tests.

    The mongo collections carry references into postgres fields (and into
    each other), so a traversal seeded from an identity can hop between
    the two stores.
    """
    postgres_dataset = integration_db_dataset('postgres_example', postgres_config.key)
    # Collections joined to postgres customer rows via field references.
    mongo_addresses = Collection(name='address', fields=[ScalarField(name='_id', primary_key=True), ScalarField(name='id', references=[(FieldAddress('postgres_example', 'customer', 'address_id'), 'from')]), ScalarField(name='street', data_type_converter=str_converter), ScalarField(name='city', data_type_converter=str_converter), ScalarField(name='state', data_type_converter=str_converter), ScalarField(name='zip', data_type_converter=str_converter)])
    mongo_orders = Collection(name='orders', fields=[ScalarField(name='_id', primary_key=True), ScalarField(name='customer_id', references=[(FieldAddress('postgres_example', 'customer', 'id'), 'from')]), ScalarField(name='payment_card_id', data_type_converter=str_converter)])
    # Mongo-only collections, chained to each other through 'mongo_test' references.
    aircraft = Collection(name='aircraft', fields=[ScalarField(name='_id', data_type_converter=ObjectIdTypeConverter(), is_array=False, primary_key=True), ScalarField(name='model', data_type_converter=StringTypeConverter(), is_array=False), ScalarField(name='planes', data_type_converter=StringTypeConverter(), is_array=True, references=[(FieldAddress('mongo_test', 'flights', 'plane'), 'from')])], after=set())
    conversations = Collection(name='conversations', fields=[ScalarField(name='_id', data_type_converter=ObjectIdTypeConverter(), is_array=False, primary_key=True), ObjectField(name='thread', data_type_converter=ObjectTypeConverter(), is_array=False, fields={'comment': ScalarField(name='comment', data_type_converter=StringTypeConverter(), is_array=False), 'message': ScalarField(name='message', data_type_converter=StringTypeConverter(), is_array=False), 'chat_name': ScalarField(name='chat_name', data_type_converter=StringTypeConverter(), is_array=False), 'ccn': ScalarField(name='ccn', data_type_converter=StringTypeConverter(), is_array=False)})], after=set())
    customer_details = Collection(name='customer_details', fields=[ScalarField(name='_id', data_type_converter=NoOpTypeConverter(), is_array=False, primary_key=True), ScalarField(name='birthday', data_type_converter=StringTypeConverter(), is_array=False), ScalarField(name='children', data_type_converter=StringTypeConverter(), is_array=True), ObjectField(name='comments', data_type_converter=ObjectTypeConverter(), is_array=True, fields={'name': ScalarField(name='comment_id', data_type_converter=StringTypeConverter(), is_array=False, references=[(FieldAddress('mongo_test', 'conversations', 'thread', 'comment'), 'to')])}), ScalarField(name='customer_id', data_type_converter=NoOpTypeConverter(), is_array=False, references=[(FieldAddress('postgres_example', 'customer', 'id'), 'from')]), ObjectField(name='emergency_contacts', data_type_converter=ObjectTypeConverter(), is_array=True, fields={'name': ScalarField(name='name', data_type_converter=StringTypeConverter(), is_array=False), 'relationship': ScalarField(name='relationship', data_type_converter=StringTypeConverter(), is_array=False), 'phone': ScalarField(name='phone', data_type_converter=StringTypeConverter(), is_array=False)}), ScalarField(name='gender', data_type_converter=StringTypeConverter(), is_array=False), ScalarField(name='travel_identifiers', data_type_converter=StringTypeConverter(), is_array=True), ObjectField(name='workplace_info', data_type_converter=ObjectTypeConverter(), is_array=False, fields={'employer': ScalarField(name='employer', data_type_converter=StringTypeConverter(), is_array=False), 'position': ScalarField(name='position', data_type_converter=StringTypeConverter(), is_array=False), 'direct_reports': ScalarField(name='direct_reports', data_type_converter=StringTypeConverter(), is_array=True)})], after=set())
    # 'email' identity fields seed the traversal directly from user-provided identity data.
    customer_feedback = Collection(name='customer_feedback', fields=[ScalarField(name='_id', data_type_converter=ObjectIdTypeConverter(), is_array=False, primary_key=True), ObjectField(name='customer_information', data_type_converter=ObjectTypeConverter(), is_array=False, fields={'email': ScalarField(name='email', data_type_converter=StringTypeConverter(), is_array=False, identity='email'), 'phone': ScalarField(name='phone', data_type_converter=StringTypeConverter(), is_array=False), 'internal_customer_id': ScalarField(name='internal_customer_id', data_type_converter=StringTypeConverter(), is_array=False)}), ScalarField(name='date', data_type_converter=StringTypeConverter(), is_array=False), ScalarField(name='message', data_type_converter=StringTypeConverter(), is_array=False), ScalarField(name='rating', data_type_converter=IntTypeConverter(), is_array=False)], after=set())
    employee = Collection(name='employee', fields=[ScalarField(name='_id', data_type_converter=ObjectIdTypeConverter(), is_array=False, primary_key=True), ScalarField(name='email', data_type_converter=StringTypeConverter(), is_array=False, identity='email'), ScalarField(name='id', data_type_converter=NoOpTypeConverter(), is_array=False, references=[(FieldAddress('mongo_test', 'flights', 'pilots'), 'from')], primary_key=True), ScalarField(name='name', data_type_converter=StringTypeConverter(), is_array=False)], after=set())
    flights = Collection(name='flights', fields=[ScalarField(name='_id', data_type_converter=ObjectIdTypeConverter(), is_array=False, primary_key=True), ScalarField(name='date', data_type_converter=NoOpTypeConverter(), is_array=False), ScalarField(name='flight_no', data_type_converter=NoOpTypeConverter(), is_array=False), ObjectField(name='passenger_information', data_type_converter=ObjectTypeConverter(), is_array=False, fields={'passenger_ids': ScalarField(name='passenger_ids', data_type_converter=StringTypeConverter(), is_array=True, references=[(FieldAddress('mongo_test', 'customer_details', 'travel_identifiers'), 'from')]), 'full_name': ScalarField(name='full_name', data_type_converter=StringTypeConverter(), is_array=False)}), ScalarField(name='pilots', data_type_converter=StringTypeConverter(), is_array=True), ScalarField(name='plane', data_type_converter=IntTypeConverter(), is_array=False)], after=set())
    internal_customer_profile = Collection(name='internal_customer_profile', fields=[ScalarField(name='_id', data_type_converter=ObjectIdTypeConverter(), is_array=False, primary_key=True), ObjectField(name='customer_identifiers', data_type_converter=ObjectTypeConverter(), is_array=False, fields={'internal_id': ScalarField(name='internal_id', data_type_converter=StringTypeConverter(), is_array=False, references=[(FieldAddress('mongo_test', 'customer_feedback', 'customer_information', 'internal_customer_id'), 'from')]), 'derived_emails': ScalarField(name='derived_emails', data_type_converter=StringTypeConverter(), is_array=True, identity='email'), 'derived_phone': ScalarField(name='derived_phone', data_type_converter=StringTypeConverter(), is_array=True, identity='phone_number', return_all_elements=True)}), ScalarField(name='derived_interests', data_type_converter=StringTypeConverter(), is_array=True)], after=set())
    rewards = Collection(name='rewards', fields=[ScalarField(name='_id', data_type_converter=ObjectIdTypeConverter(), is_array=False, primary_key=True), ObjectField(name='owner', data_type_converter=StringTypeConverter(), is_array=True, identity='email', return_all_elements=True, fields={'phone': ScalarField(return_all_elements=True, name='phone', data_type_converter=StringTypeConverter(), is_array=False, references=[(FieldAddress('mongo_test', 'internal_customer_profile', 'customer_identifiers', 'derived_phone'), 'from')]), 'shopper_name': ScalarField(return_all_elements=True, name='shopper_name', data_type_converter=NoOpTypeConverter(), is_array=False)}), ScalarField(name='points', data_type_converter=StringTypeConverter(), is_array=False), ScalarField(name='expiration_date', data_type_converter=NoOpTypeConverter(), is_array=False)], after=set())
    mongo_dataset = GraphDataset(name='mongo_test', collections=[mongo_addresses, mongo_orders, aircraft, conversations, customer_details, customer_feedback, employee, flights, internal_customer_profile, rewards], connection_key=mongo_config.key)
    return (mongo_dataset, postgres_dataset)
def check_if_arguments_can_be_encoded(function_abi: ABIFunction, abi_codec: codec.ABIEncoder, args: Sequence[Any], kwargs: Dict[str, Any]) -> bool:
    """Return True when ``args``/``kwargs`` can be ABI-encoded for ``function_abi``."""
    try:
        arguments = merge_args_and_kwargs(function_abi, args, kwargs)
    except TypeError:
        return False
    # The merged argument count must match the ABI's declared inputs.
    if len(function_abi.get('inputs', [])) != len(arguments):
        return False
    try:
        types, aligned = get_aligned_abi_inputs(function_abi, arguments)
    except TypeError:
        return False
    return all(abi_codec.is_encodable(abi_type, value) for abi_type, value in zip(types, aligned))
class OptionSeriesStreamgraphSonificationTracks(Options):
    """Config wrapper for streamgraph sonification ``tracks`` options.

    BUG FIX: the ``@property`` / ``@<name>.setter`` decorators had been
    lost, so each setter ``def`` silently shadowed its getter of the same
    name; they are restored here.
    """

    @property
    def activeWhen(self) -> 'OptionSeriesStreamgraphSonificationTracksActivewhen':
        return self._config_sub_data('activeWhen', OptionSeriesStreamgraphSonificationTracksActivewhen)

    @property
    def instrument(self):
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionSeriesStreamgraphSonificationTracksMapping':
        return self._config_sub_data('mapping', OptionSeriesStreamgraphSonificationTracksMapping)

    @property
    def midiName(self):
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionSeriesStreamgraphSonificationTracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionSeriesStreamgraphSonificationTracksPointgrouping)

    @property
    def roundToMusicalNotes(self):
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def type(self):
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)
class OrderingFilter(BaseFilterBackend):
    """Filter backend that orders a queryset via an ``?ordering=`` query parameter.

    Valid field names come from ``ordering_fields`` on the view (or this
    backend), falling back to the view's serializer fields; invalid terms
    in the query string are silently dropped.
    """
    # The URL query parameter used for the ordering (e.g. ?ordering=-created).
    ordering_param = api_settings.ORDERING_PARAM
    ordering_fields = None
    ordering_title = _('Ordering')
    ordering_description = _('Which field to use when ordering the results.')
    template = 'rest_framework/filters/ordering.html'

    def get_ordering(self, request, queryset, view):
        """Return the list of ordering terms from the request, or the view default.

        The parameter value is a comma-separated list of field names,
        optionally prefixed with '-' for descending order.
        """
        params = request.query_params.get(self.ordering_param)
        if params:
            fields = [param.strip() for param in params.split(',')]
            ordering = self.remove_invalid_fields(queryset, fields, view, request)
            if ordering:
                return ordering
        # No (valid) ordering supplied: fall back to the view's default.
        return self.get_default_ordering(view)

    def get_default_ordering(self, view):
        """Return the view's `ordering` attribute, normalized to a tuple."""
        ordering = getattr(view, 'ordering', None)
        if isinstance(ordering, str):
            return (ordering,)
        return ordering

    def get_default_valid_fields(self, queryset, view, context={}):
        """Derive orderable (field, label) pairs from the view's serializer.

        Excludes write-only fields, ``source='*'`` fields, and model
        properties (which cannot be ordered at the database level).
        The ``context={}`` mutable default is only read, never mutated.
        """
        if hasattr(view, 'get_serializer_class'):
            try:
                serializer_class = view.get_serializer_class()
            except AssertionError:
                # The default get_serializer_class raises when no
                # serializer_class is configured.
                serializer_class = None
        else:
            serializer_class = getattr(view, 'serializer_class', None)
        if (serializer_class is None):
            msg = "Cannot use %s on a view which does not have either a 'serializer_class', an overriding 'get_serializer_class' or 'ordering_fields' attribute."
            raise ImproperlyConfigured((msg % self.__class__.__name__))
        model_class = queryset.model
        model_property_names = [attr for attr in dir(model_class) if (isinstance(getattr(model_class, attr), property) and (attr != 'pk'))]
        return [((field.source.replace('.', '__') or field_name), field.label) for (field_name, field) in serializer_class(context=context).fields.items() if ((not getattr(field, 'write_only', False)) and (not (field.source == '*')) and (field.source not in model_property_names))]

    def get_valid_fields(self, queryset, view, context={}):
        """Return the (field, label) pairs the client is allowed to order by."""
        valid_fields = getattr(view, 'ordering_fields', self.ordering_fields)
        if (valid_fields is None):
            # No explicit whitelist: derive from the serializer.
            return self.get_default_valid_fields(queryset, view, context)
        elif (valid_fields == '__all__'):
            # View explicitly allows ordering on any model field or annotation.
            valid_fields = [(field.name, field.verbose_name) for field in queryset.model._meta.fields]
            valid_fields += [(key, key.title().split('__')) for key in queryset.query.annotations]
        else:
            valid_fields = [((item, item) if isinstance(item, str) else item) for item in valid_fields]
        return valid_fields

    def remove_invalid_fields(self, queryset, fields, view, request):
        """Drop ordering terms that are not in the valid-field whitelist."""
        valid_fields = [item[0] for item in self.get_valid_fields(queryset, view, {'request': request})]

        def term_valid(term):
            # A leading '-' means descending; strip it before validating.
            if term.startswith('-'):
                term = term[1:]
            return (term in valid_fields)
        return [term for term in fields if term_valid(term)]

    def filter_queryset(self, request, queryset, view):
        """Apply the requested (or default) ordering to the queryset."""
        ordering = self.get_ordering(request, queryset, view)
        if ordering:
            return queryset.order_by(*ordering)
        return queryset

    def get_template_context(self, request, queryset, view):
        """Build the render context for the browsable-API ordering control."""
        current = self.get_ordering(request, queryset, view)
        current = (None if (not current) else current[0])
        options = []
        context = {'request': request, 'current': current, 'param': self.ordering_param}
        # Offer both an ascending and a descending choice for every valid field.
        for (key, label) in self.get_valid_fields(queryset, view, context):
            options.append((key, ('%s - %s' % (label, _('ascending')))))
            options.append((('-' + key), ('%s - %s' % (label, _('descending')))))
        context['options'] = options
        return context

    def to_html(self, request, queryset, view):
        """Render the ordering control used by the browsable API."""
        template = loader.get_template(self.template)
        context = self.get_template_context(request, queryset, view)
        return template.render(context)

    def get_schema_fields(self, view):
        """Deprecated CoreAPI schema description of the ordering parameter."""
        assert (coreapi is not None), 'coreapi must be installed to use `get_schema_fields()`'
        if (coreapi is not None):
            warnings.warn('CoreAPI compatibility is deprecated and will be removed in DRF 3.17', RemovedInDRF317Warning)
        assert (coreschema is not None), 'coreschema must be installed to use `get_schema_fields()`'
        return [coreapi.Field(name=self.ordering_param, required=False, location='query', schema=coreschema.String(title=force_str(self.ordering_title), description=force_str(self.ordering_description)))]

    def get_schema_operation_parameters(self, view):
        """OpenAPI description of the ordering query parameter."""
        return [{'name': self.ordering_param, 'required': False, 'in': 'query', 'description': force_str(self.ordering_description), 'schema': {'type': 'string'}}]
def sys_exit_after(func):
    """Decorator: run ``func`` and terminate the process via ``sys.exit``.

    Known runner errors exit with their own exit code, unexpected
    exceptions exit with ERROR. BUG FIX: the bare ``(func)`` line was a
    lost ``@functools.wraps(func)`` decorator (a no-op expression as
    written); restored so the wrapper keeps ``func``'s metadata.
    """
    import functools

    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except OneCommandRunnerBaseException as e:
            logging.exception(e)
            sys.exit(e.exit_code.value)
        except Exception as e:
            logging.exception(e)
            sys.exit(OneCommandRunnerExitCode.ERROR.value)
        else:
            # NOTE(review): unreachable — the try block returns on success,
            # so the SUCCESS exit never fires; preserved as in the original.
            sys.exit(OneCommandRunnerExitCode.SUCCESS.value)
    return wrapped
def get_subreddit_undone(submissions: list, subreddit, times_checked=0, similarity_scores=None):
    """Return the first submission not yet turned into a video.

    Applies the configured filters (NSFW, stickied, minimum comments,
    story-mode text length); on exhaustion, recurses with the next
    time_filter from VALID_TIME_FILTERS. When ``similarity_scores`` is
    given, returns a (submission, score) tuple instead.
    """
    if (times_checked and settings.config['ai']['ai_similarity_enabled']):
        print('Sorting based on similarity for a different date filter and thread limit..')
        submissions = sort_by_similarity(submissions, keywords=settings.config['ai']['ai_similarity_enabled'])
    # Ensure the done-videos ledger exists before reading it.
    if (not exists('./video_creation/data/videos.json')):
        with open('./video_creation/data/videos.json', 'w+') as f:
            json.dump([], f)
    with open('./video_creation/data/videos.json', 'r', encoding='utf-8') as done_vids_raw:
        done_videos = json.load(done_vids_raw)
    for (i, submission) in enumerate(submissions):
        if already_done(done_videos, submission):
            continue
        if submission.over_18:
            try:
                if (not settings.config['settings']['allow_nsfw']):
                    print_substep('NSFW Post Detected. Skipping...')
                    continue
            except AttributeError:
                print_substep('NSFW settings not defined. Skipping NSFW post...')
        if submission.stickied:
            print_substep('This post was pinned by moderators. Skipping...')
            continue
        if ((submission.num_comments <= int(settings.config['reddit']['thread']['min_comments'])) and (not settings.config['settings']['storymode'])):
            print_substep(f"This post has under the specified minimum of comments ({settings.config['reddit']['thread']['min_comments']}). Skipping...")
            continue
        if settings.config['settings']['storymode']:
            if (not submission.selftext):
                print_substep('You are trying to use story mode on post with no post text')
                continue
            elif (len(submission.selftext) > (settings.config['settings']['storymode_max_length'] or 2000)):
                print_substep(f"Post is too long ({len(submission.selftext)}), try with a different post. ({settings.config['settings']['storymode_max_length']} character limit)")
                continue
            elif (len(submission.selftext) < 30):
                continue
        if (settings.config['settings']['storymode'] and (not submission.is_self)):
            continue
        if (similarity_scores is not None):
            return (submission, similarity_scores[i].item())
        return submission
    # Nothing usable in this batch: advance to the next time filter.
    print('all submissions have been done going by top submission order')
    VALID_TIME_FILTERS = ['day', 'hour', 'month', 'week', 'year', 'all']
    index = (times_checked + 1)
    if (index == len(VALID_TIME_FILTERS)):
        print('All submissions have been done.')
    # NOTE(review): when index == len(VALID_TIME_FILTERS) the recursive call
    # below indexes out of range (IndexError) — confirm intended behavior.
    # NOTE(review): 'index + (1 * 50)' looks like it was meant to be
    # '(index + 1) * 50'; left as-is to preserve behavior.
    return get_subreddit_undone(subreddit.top(time_filter=VALID_TIME_FILTERS[index], limit=(50 if (int(index) == 0) else (index + (1 * 50)))), subreddit, times_checked=index)
class BadgeFormFactory(BaseFactory):
    """factory_boy factory producing BadgeForms rows populated with placeholder strings."""
    class Meta():
        # ORM model this factory instantiates.
        model = BadgeForms
    # Creates an associated basic event; event_id is hard-coded to 1
    # (presumably matching the related event's id — TODO confirm).
    event = factory.RelatedFactory(EventFactoryBasic)
    event_id = 1
    badge_id = common.string_
    badge_size = common.string_
    badge_color = common.string_
    badge_image_url = common.string_
    badge_orientation = common.string_
class meter(action):
    """OpenFlow 'meter' action (type 29): sends packets through a meter entry."""
    type = 29

    def __init__(self, meter_id=None):
        # Idiom fix: identity comparison with None ('is not' rather than '!=').
        if meter_id is not None:
            self.meter_id = meter_id
        else:
            self.meter_id = 0

    def pack(self):
        """Serialize to wire format: type (u16), length (u16), meter_id (u32)."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.meter_id))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        # BUG FIX: struct.pack returns bytes on Python 3, so joining with a
        # str separator raised TypeError; join with a bytes separator.
        return b''.join(packed)

    @staticmethod  # fix: unpack takes only a reader, never an instance
    def unpack(reader):
        """Deserialize a meter action from ``reader``; asserts the type field."""
        obj = meter()
        _type = reader.read('!H')[0]
        assert (_type == 29)
        _len = reader.read('!H')[0]
        orig_reader = reader
        # Restrict the reader to this action's payload (past the 4 header bytes).
        reader = orig_reader.slice(_len, 4)
        obj.meter_id = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        if self.meter_id != other.meter_id:
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to the pretty-printer ``q``."""
        q.text('meter {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('meter_id = ')
                q.text(('%#x' % self.meter_id))
            q.breakable()
        q.text('}')
def extractKuronochandesuyoWordpressCom(item):
    """Map a parsed feed item to a release message.

    Returns None for previews or items without chapter/volume info,
    a release message for known series tags, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if 'preview' in title.lower() or not (chp or vol):
        return None
    if 'Since I reincarnated' in item['tags']:
        return buildReleaseMessageWithType(item, 'Since I reincarnated', vol, chp, frag=frag, postfix=postfix)
    return False
class Basic(Handler):
__slots__ = []
def qos(self, prefetch_count=0, prefetch_size=0, global_=False):
if (not compatibility.is_integer(prefetch_count)):
raise AMQPInvalidArgument('prefetch_count should be an integer')
elif (not compatibility.is_integer(prefetch_size)):
raise AMQPInvalidArgument('prefetch_size should be an integer')
elif (not isinstance(global_, bool)):
raise AMQPInvalidArgument('global_ should be a boolean')
qos_frame = specification.Basic.Qos(prefetch_count=prefetch_count, prefetch_size=prefetch_size, global_=global_)
return self._channel.rpc_request(qos_frame)
def get(self, queue='', no_ack=False, to_dict=False, auto_decode=False):
if (not compatibility.is_string(queue)):
raise AMQPInvalidArgument('queue should be a string')
elif (not isinstance(no_ack, bool)):
raise AMQPInvalidArgument('no_ack should be a boolean')
elif self._channel.consumer_tags:
raise AMQPChannelError("Cannot call 'get' when channel is set to consume")
get_frame = specification.Basic.Get(queue=queue, no_ack=no_ack)
with (self._channel.lock and self._channel.rpc.lock):
message = self._get_message(get_frame, auto_decode=auto_decode)
if (message and to_dict):
return message.to_dict()
return message
def recover(self, requeue=False):
if (not isinstance(requeue, bool)):
raise AMQPInvalidArgument('requeue should be a boolean')
recover_frame = specification.Basic.Recover(requeue=requeue)
return self._channel.rpc_request(recover_frame)
def consume(self, callback=None, queue='', consumer_tag='', exclusive=False, no_ack=False, no_local=False, arguments=None):
if (not compatibility.is_string(queue)):
raise AMQPInvalidArgument('queue should be a string')
elif (not compatibility.is_string(consumer_tag)):
raise AMQPInvalidArgument('consumer_tag should be a string')
elif (not isinstance(exclusive, bool)):
raise AMQPInvalidArgument('exclusive should be a boolean')
elif (not isinstance(no_ack, bool)):
raise AMQPInvalidArgument('no_ack should be a boolean')
elif (not isinstance(no_local, bool)):
raise AMQPInvalidArgument('no_local should be a boolean')
elif ((arguments is not None) and (not isinstance(arguments, dict))):
raise AMQPInvalidArgument('arguments should be a dict or None')
self._channel.consumer_callback = callback
consume_rpc_result = self._consume_rpc_request(arguments, consumer_tag, exclusive, no_ack, no_local, queue)
return self._consume_add_and_get_tag(consume_rpc_result)
def cancel(self, consumer_tag=''):
if (not compatibility.is_string(consumer_tag)):
raise AMQPInvalidArgument('consumer_tag should be a string')
cancel_frame = specification.Basic.Cancel(consumer_tag=consumer_tag)
result = self._channel.rpc_request(cancel_frame)
self._channel.remove_consumer_tag(consumer_tag)
return result
def publish(self, body, routing_key, exchange='', properties=None, mandatory=False, immediate=False):
    """Publish `body` to `exchange` with `routing_key`.

    When the channel is in confirm mode, blocks until the broker
    acknowledges (or rejects) the publish and returns the outcome.

    :param body: message payload (unicode payloads are encoded)
    :param str routing_key: routing key for the message
    :param str exchange: target exchange ('' = default exchange)
    :param dict properties: optional message properties
    :param bool mandatory: return unroutable messages to the client
    :param bool immediate: require immediate delivery
    """
    self._validate_publish_parameters(body, exchange, immediate, mandatory, properties, routing_key)
    properties = properties or {}
    body = self._handle_utf8_payload(body, properties)
    frames_out = [
        specification.Basic.Publish(exchange=exchange, routing_key=routing_key, mandatory=mandatory, immediate=immediate),
        pamqp_header.ContentHeader(body_size=len(body), properties=specification.Basic.Properties(**properties)),
    ]
    # Payload follows the header, split into FRAME_MAX-sized body frames.
    frames_out.extend(self._create_content_body(body))
    if self._channel.confirming_deliveries:
        with self._channel.rpc.lock:
            return self._publish_confirm(frames_out)
    self._channel.write_frames(frames_out)
def ack(self, delivery_tag=None, multiple=False):
    """Acknowledge a delivered message.

    :param delivery_tag: tag of the message to acknowledge (or None)
    :param bool multiple: acknowledge all messages up to this tag
    :raises AMQPInvalidArgument: on invalid arguments
    """
    if delivery_tag is not None and not compatibility.is_integer(delivery_tag):
        raise AMQPInvalidArgument('delivery_tag should be an integer or None')
    if not isinstance(multiple, bool):
        raise AMQPInvalidArgument('multiple should be a boolean')
    self._channel.write_frame(specification.Basic.Ack(delivery_tag=delivery_tag, multiple=multiple))
def nack(self, delivery_tag=None, multiple=False, requeue=True):
    """Negatively acknowledge a delivered message.

    :param delivery_tag: tag of the message to nack (or None)
    :param bool multiple: nack all messages up to this tag
    :param bool requeue: requeue the message(s) instead of discarding
    :raises AMQPInvalidArgument: on invalid arguments
    """
    if delivery_tag is not None and not compatibility.is_integer(delivery_tag):
        raise AMQPInvalidArgument('delivery_tag should be an integer or None')
    if not isinstance(multiple, bool):
        raise AMQPInvalidArgument('multiple should be a boolean')
    if not isinstance(requeue, bool):
        raise AMQPInvalidArgument('requeue should be a boolean')
    self._channel.write_frame(specification.Basic.Nack(delivery_tag=delivery_tag, multiple=multiple, requeue=requeue))
def reject(self, delivery_tag=None, requeue=True):
    """Reject a single delivered message.

    :param delivery_tag: tag of the message to reject (or None)
    :param bool requeue: requeue the message instead of discarding
    :raises AMQPInvalidArgument: on invalid arguments
    """
    if delivery_tag is not None and not compatibility.is_integer(delivery_tag):
        raise AMQPInvalidArgument('delivery_tag should be an integer or None')
    if not isinstance(requeue, bool):
        raise AMQPInvalidArgument('requeue should be a boolean')
    self._channel.write_frame(specification.Basic.Reject(delivery_tag=delivery_tag, requeue=requeue))
def _consume_add_and_get_tag(self, consume_rpc_result):
    """Record the consumer tag returned by Basic.ConsumeOk and return it."""
    tag = consume_rpc_result['consumer_tag']
    self._channel.add_consumer_tag(tag)
    return tag
def _consume_rpc_request(self, arguments, consumer_tag, exclusive, no_ack, no_local, queue):
    """Issue the Basic.Consume RPC and return the broker's response."""
    frame = specification.Basic.Consume(queue=queue, consumer_tag=consumer_tag, exclusive=exclusive, no_local=no_local, no_ack=no_ack, arguments=arguments)
    return self._channel.rpc_request(frame)
def _validate_publish_parameters(body, exchange, immediate, mandatory, properties, routing_key):
    """Validate publish() arguments, raising on the first invalid one.

    NOTE(review): takes no `self`; presumably decorated @staticmethod at
    the (not visible) decorator site — confirm.

    :raises AMQPInvalidArgument: describing the offending argument
    """
    if not compatibility.is_string(body):
        raise AMQPInvalidArgument('body should be a string')
    if not compatibility.is_string(routing_key):
        raise AMQPInvalidArgument('routing_key should be a string')
    if not compatibility.is_string(exchange):
        raise AMQPInvalidArgument('exchange should be a string')
    if properties is not None and not isinstance(properties, dict):
        raise AMQPInvalidArgument('properties should be a dict or None')
    if not isinstance(mandatory, bool):
        raise AMQPInvalidArgument('mandatory should be a boolean')
    if not isinstance(immediate, bool):
        raise AMQPInvalidArgument('immediate should be a boolean')
def _handle_utf8_payload(body, properties):
    """Encode a text payload using the message's content encoding.

    Defaults properties['content_encoding'] to 'utf-8' when unset, then
    encodes unicode/str bodies with that encoding. Byte payloads pass
    through unchanged.
    """
    # setdefault both installs the default and returns the effective encoding.
    encoding = properties.setdefault('content_encoding', 'utf-8')
    if compatibility.is_unicode(body):
        body = body.encode(encoding)
    elif compatibility.PYTHON3 and isinstance(body, str):
        body = bytes(body, encoding=encoding)
    return body
def _get_message(self, get_frame, auto_decode):
    """Fetch a single message via Basic.Get.

    Registers an RPC request that also accepts ContentHeader/ContentBody
    frames, sends the Get frame, then collects the GetOk, the header and
    the body frames in order. Returns None when the queue is empty
    (Basic.GetEmpty), otherwise a Message.
    """
    message_uuid = self._channel.rpc.register_request((get_frame.valid_responses + ['ContentHeader', 'ContentBody']))
    try:
        self._channel.write_frame(get_frame)
        # First response is either GetOk or GetEmpty.
        get_ok_frame = self._channel.rpc.get_request(message_uuid, raw=True, multiple=True)
        if isinstance(get_ok_frame, specification.Basic.GetEmpty):
            return None
        content_header = self._channel.rpc.get_request(message_uuid, raw=True, multiple=True)
        body = self._get_content_body(message_uuid, content_header.body_size)
    finally:
        # Always release the RPC slot, even on the empty/exception paths.
        self._channel.rpc.remove(message_uuid)
    return Message(channel=self._channel, body=body, method=dict(get_ok_frame), properties=dict(content_header.properties), auto_decode=auto_decode)
def _publish_confirm(self, frames_out):
    """Send the publish frames and wait for a broker confirm.

    :return: True when the broker answered Basic.Ack, False on Basic.Nack
    """
    confirm_uuid = self._channel.rpc.register_request(['Basic.Ack', 'Basic.Nack'])
    self._channel.write_frames(frames_out)
    response = self._channel.rpc.get_request(confirm_uuid, raw=True)
    self._channel.check_for_errors()
    return isinstance(response, specification.Basic.Ack)
def _create_content_body(body):
    """Yield ContentBody frames covering `body` in FRAME_MAX-sized chunks.

    NOTE(review): takes no `self`; presumably decorated @staticmethod at
    the (not visible) decorator site — confirm.
    """
    total = len(body)
    frame_count = int(math.ceil(total / float(FRAME_MAX)))
    for index in compatibility.RANGE(0, frame_count):
        start = FRAME_MAX * index
        end = min(start + FRAME_MAX, total)
        yield pamqp_body.ContentBody(body[start:end])
def _get_content_body(self, message_uuid, body_size):
    """Collect ContentBody frames until `body_size` bytes are gathered.

    Stops early if an empty body frame arrives (connection trouble).
    """
    body = bytes()
    while len(body) < body_size:
        piece = self._channel.rpc.get_request(message_uuid, raw=True, multiple=True)
        if not piece.value:
            break
        body += piece.value
    return body
# NOTE(review): '.django_db' reads like a stripped '@pytest.mark' prefix — confirm.
.django_db
def test_award_no_transactions(client, awards_transaction_data):
    """Awards without transactions report a transaction count of 0,
    both by generated award id and by surrogate numeric id."""
    resp = client.get('/api/v2/awards/count/transaction/ASST_NON_bbb_abc123/')
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.data['transactions'] == 0)
    resp = client.get('/api/v2/awards/count/transaction/3/')
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.data['transactions'] == 0)
class Nonlocal(stmt):
    """AST statement node for a `nonlocal` declaration.

    Attributes:
        names: list of identifier strings declared nonlocal
        lineno / col_offset: source position of the statement
    """
    _fields = ('names',)
    _attributes = ('lineno', 'col_offset')

    def __init__(self, names=(), lineno=0, col_offset=0, **ARGS):
        # Default changed from a mutable [] to an immutable (); the value is
        # copied via list() below either way, so behavior is unchanged.
        stmt.__init__(self, **ARGS)
        self.names = list(names)
        self.lineno = int(lineno)
        self.col_offset = int(col_offset)
class EvenniaLogFile(logfile.LogFile):
    """A twisted LogFile that re-appends the tail of the old log after rotation."""
    # Resolve the configured tail size once, at class-definition time.
    global _CHANNEL_LOG_NUM_TAIL_LINES
    if (_CHANNEL_LOG_NUM_TAIL_LINES is None):
        from django.conf import settings
        _CHANNEL_LOG_NUM_TAIL_LINES = settings.CHANNEL_LOG_NUM_TAIL_LINES
    num_lines_to_append = max(1, _CHANNEL_LOG_NUM_TAIL_LINES)

    def rotate(self, num_lines_to_append=None):
        """Rotate the log file, carrying the last lines into the new file.

        :param num_lines_to_append: per-call override of the tail size;
            falls back to the class default when None. 0 disables tailing.
        """
        append_tail = (num_lines_to_append if (num_lines_to_append is not None) else self.num_lines_to_append)
        if (not append_tail):
            logfile.LogFile.rotate(self)
            return
        # Bug fix: honor the per-call override; previously this always
        # tailed self.num_lines_to_append, ignoring the argument.
        lines = tail_log_file(self.path, 0, append_tail)
        super().rotate()
        for line in lines:
            self.write(line)

    def seek(self, *args, **kwargs):
        """Delegate seeking to the underlying file object."""
        return self._file.seek(*args, **kwargs)

    def readlines(self, *args, **kwargs):
        """Read lines from the underlying file, decoding as UTF-8.

        Undecodable lines fall back to str(); anything else becomes ''.
        """
        lines = []
        for line in self._file.readlines(*args, **kwargs):
            try:
                lines.append(line.decode('utf-8'))
            except UnicodeDecodeError:
                try:
                    lines.append(str(line))
                except Exception:
                    lines.append('')
        return lines
def test_get_transaction_receipt_serialization():
    """Round-trip a GET_TRANSACTION_RECEIPT message through Envelope encode/decode."""
    msg = LedgerApiMessage(message_id=2, target=1, performative=LedgerApiMessage.Performative.GET_TRANSACTION_RECEIPT, transaction_digest=LedgerApiMessage.TransactionDigest('some_ledger_id', 'some_body'))
    msg.to = 'receiver'
    original_envelope = Envelope(to=msg.to, sender='sender', message=msg)
    decoded_envelope = Envelope.decode(original_envelope.encode())
    assert original_envelope.to == decoded_envelope.to
    assert original_envelope.sender == decoded_envelope.sender
    assert original_envelope.protocol_specification_id == decoded_envelope.protocol_specification_id
    # The decoded envelope still carries the raw serialized payload here,
    # so the message objects compare unequal until explicitly decoded.
    assert original_envelope.message != decoded_envelope.message
    decoded_msg = LedgerApiMessage.serializer.decode(decoded_envelope.message)
    decoded_msg.to = decoded_envelope.to
    decoded_msg.sender = decoded_envelope.sender
    assert msg == decoded_msg
class CustomAudienceGroup(AbstractObject):
    """Marketing-API object wrapper for a custom audience group.

    Generated-SDK style: field names and their string types are declared
    declaratively; instances only mark their type and hold the api handle.
    """
    def __init__(self, api=None):
        super(CustomAudienceGroup, self).__init__()
        self._isCustomAudienceGroup = True
        self._api = api
    class Field(AbstractObject.Field):
        # Field name constants map 1:1 to API field keys.
        audience_type_param_name = 'audience_type_param_name'
        existing_customer_tag = 'existing_customer_tag'
        new_customer_tag = 'new_customer_tag'
    _field_types = {'audience_type_param_name': 'string', 'existing_customer_tag': 'string', 'new_customer_tag': 'string'}
    # NOTE(review): takes `cls`; presumably @classmethod was stripped — confirm.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
def generate_sleep_stages_chart(date):
    """Build a Dash Gantt chart of Oura 5-minute sleep stages for `date`.

    Queries ouraSleepSamples for the report date (rows with a hypnogram
    description), derives Gantt columns and renders a color-coded,
    transparent timeline with Awake/Light/REM/Deep hover labels.
    """
    # Samples indexed by local timestamp, newest first.
    df = pd.read_sql(sql=app.session.query(ouraSleepSamples).filter((ouraSleepSamples.report_date == date), (ouraSleepSamples.hypnogram_5min_desc != None)).statement, con=engine, index_col='timestamp_local').sort_index(ascending=False)
    app.session.remove()
    df['Task'] = df['hypnogram_5min_desc']
    df['Start'] = df.index
    # With descending order, shift(1) pairs each row with the next-later
    # sample; each interval ends one second before that sample starts.
    df['Finish'] = (df['Start'].shift(1) - timedelta(seconds=1))
    df['Resource'] = df['hypnogram_5min']
    # Drop the latest row, whose Finish is NaT after the shift.
    df = df[df['Finish'].notnull()]
    df = df.drop(columns=['summary_date', 'report_date', 'rmssd_5min', 'hr_5min', 'hypnogram_5min', 'hypnogram_5min_desc'])
    # Stage code -> bar color (legend text assigned per color below).
    colors = {4: white, 2: light_blue, 1: dark_blue, 3: teal}
    fig = ff.create_gantt(df, colors=colors, index_col='Resource', bar_width=0.5, show_colorbar=False, showgrid_x=False, showgrid_y=False, group_tasks=True)
    fig['layout'].update(paper_bgcolor='rgba(0,0,0,0)', plot_bgcolor='rgba(0,0,0,0)', showlegend=False, height=150, xaxis={'showgrid': False, 'zeroline': False, 'showline': True, 'showticklabels': True, 'tickformat': '%I:%M %p', 'range': [df.index.min(), df.index.max()], 'color': white, 'type': 'date', 'rangeselector': None}, yaxis={'autorange': False, 'range': [(- 1), 5], 'showgrid': False, 'tickvals': [0, 1, 2, 3], 'zeroline': False}, margin={'l': 40, 'b': 30, 't': 0, 'r': 40}, font={'color': white, 'size': 10}, hovermode='x')
    # NOTE(review): if a trace's legendgroup matches none of the four colors,
    # `text` would be unbound (NameError) — confirm the colors are exhaustive.
    for i in range(len(fig['data'])):
        chunk = fig['data'][i]
        if (chunk['legendgroup'] == white):
            text = 'Awake'
        elif (chunk['legendgroup'] == light_blue):
            text = 'Light'
        elif (chunk['legendgroup'] == teal):
            text = 'REM'
        elif (chunk['legendgroup'] == dark_blue):
            text = 'Deep'
        fig['data'][i].update(hoverinfo='text+x', text=text)
    return dcc.Graph(id='gantt', style={'height': '100%'}, figure=fig, config={'displayModeBar': False})
def ad_from_string(s, slots=(30, 30, 30, 90, 90, 15, 15), sep=None, capitalize=False):
str_words = s.split(sep=sep)
text_ad = ['' for x in range((len(slots) + 1))]
counter = 0
for (i, slot) in enumerate(slots):
while (counter <= (len(str_words) - 1)):
if ((len((text_ad[i] + str_words[counter])) + 1) > slot):
break
text_ad[i] += ((' ' + str_words[counter]) if text_ad[i] else str_words[counter])
counter += 1
text_ad[(- 1)] = (sep.join(str_words[counter:]) if (sep is not None) else ' '.join(str_words[counter:]))
return [(string.capwords(x) if capitalize else x) for x in text_ad] |
def extractNomekunTumblrCom(item):
    """Parse a nomekun.tumblr.com feed item into a release message.

    :return: None for previews/unparseable titles, a release message when a
        known tag matches, False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    known_tags = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Accessors for the scatter3d sonification tremolo mapping sub-options."""
    def depth(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        # Lazily create/return the 'depth' sub-configuration object.
        return self._config_sub_data('depth', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingTremoloDepth)
    def speed(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        # Lazily create/return the 'speed' sub-configuration object.
        return self._config_sub_data('speed', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
def calc_series_stats(series: Union[List[float], np.ndarray], stats: Union[Dict[str, Callable], None] = None, name_prefix: str = '', norm: bool = False) -> Dict[str, float]:
    """Compute named summary statistics over a numeric series.

    NaNs are dropped first; an empty/all-NaN series yields NaN statistics.
    With `norm`, every statistic is divided by abs() of the first
    remaining element.

    :param series: numeric values (list or ndarray); NaNs are ignored
    :param stats: mapping of stat-name suffix -> aggregation callable;
        defaults to mean/median/max/min/std. (Fix: was a mutable dict
        default argument; also the annotation said `np.array`, which is a
        function, not a type.)
    :param name_prefix: prefix for each result key ('{prefix}_{stat}')
    :param norm: normalize results by abs(first element)
    :return: dict of '{name_prefix}_{stat}' -> value
    """
    if stats is None:
        stats = {'mean': np.mean, 'median': np.median, 'max': np.max, 'min': np.min, 'std': np.std}
    values = np.array(series).astype('float')
    values = values[~np.isnan(values)]
    # Preserve original behavior: callables receive a plain list when data
    # remains, or a single-NaN array when nothing is left.
    data = list(values)
    if len(data) == 0:
        data = np.array([np.nan])
    result = {'{}_{}'.format(name_prefix, key): func(data) for key, func in stats.items()}
    if norm:
        result = {key: result[key] / np.abs(data[0]) for key in result}
    return result
class OutputShapeBaseInformationError(ErsiliaError):
    """Raised when a model's output shape is not one of the allowed values."""
    def __init__(self):
        allowed = ', '.join(_read_default_fields('Output Shape'))
        self.message = 'Wrong Ersilia output shape'
        self.hints = 'Only one of the following output shapes is allowed: {}'.format(allowed)
        ErsiliaError.__init__(self, self.message, self.hints)
class TrialHandler(THBEventHandler):
    """On a fatetell event, lets a living player with the Trial skill
    substitute one of their cards for the fatetell card."""
    interested = ['fatetell']
    arbiter = FatetellMalleateHandler
    card_usage = 'use'
    def handle(self, p, act):
        """Offer `p` the chance to play a card into fatetell action `act`."""
        if p.dead:
            return act
        if (not p.has_skill(Trial)):
            return act
        self.act = act
        g = self.game
        # Player may decline the substitution outright.
        if (not g.user_input([p], ChooseOptionInputlet(self, (False, True)))):
            return act
        cards = user_choose_cards(self, p, ('cards', 'showncards', 'equips'))
        if cards:
            c = cards[0]
            g.process_action(TrialAction(p, act.target, act, c))
        return act
    def cond(self, cards):
        # Exactly one card, and it must not be a skill-derived card.
        return ((len(cards) == 1) and (not cards[0].is_card(Skill)))
    def ask_for_action_verify(self, p, cl, tl):
        """Check the prospective TrialAction could actually fire."""
        act = self.act
        return TrialAction(p, act.target, act, cl[0]).can_fire()
class _MagicRecipe(CraftingRecipe):
    """Base recipe for casting spells as a crafting action.

    Subclasses define `name`, `skill_requirements` as a list of
    (skill_name, min_value) pairs, `skill_roll` (the attribute rolled
    against 1d18), and the desired/failure effect lists.
    """
    name = ''
    tool_tags = ['spellbook', 'wand']
    error_tool_missing_message = 'Cannot cast spells without {missing}.'
    success_message = 'You successfully cast the spell!'
    # Bug fix: this default was declared as `skill_requirement` while
    # pre_craft reads `self.skill_requirements`, so subclasses that didn't
    # define the plural name crashed with AttributeError.
    skill_requirements = []
    skill_roll = ''
    desired_effects = []
    failure_effects = []
    error_too_low_skill_level = 'Your skill {skill_name} is too low to cast {spell}.'
    error_no_skill_roll = 'You must have the skill {skill_name} to cast the spell {spell}.'

    def pre_craft(self, **kwargs):
        """Validate the crafter's skills; raise CraftingValidationError on failure."""
        super().pre_craft(**kwargs)
        crafter = self.crafter
        for (skill_name, min_value) in self.skill_requirements:
            skill_value = crafter.attributes.get(skill_name)
            if ((skill_value is None) or (skill_value < min_value)):
                self.msg(self.error_too_low_skill_level.format(skill_name=skill_name, spell=self.name))
                raise CraftingValidationError
        # The roll target is fetched here so do_craft can use it directly.
        self.skill_roll_value = self.crafter.attributes.get(self.skill_roll)
        if (self.skill_roll_value is None):
            self.msg(self.error_no_skill_roll.format(skill_name=self.skill_roll, spell=self.name))
            raise CraftingValidationError

    def do_craft(self, **kwargs):
        """Roll 1d18 against the skill value; return (success, effects)."""
        if (randint(1, 18) <= self.skill_roll_value):
            return (True, self.desired_effects)
        return (False, self.failure_effects)

    def post_craft(self, craft_result, **kwargs):
        """Report only the success flag upstream, then pass the result through."""
        success = False
        if craft_result:
            (success, _) = craft_result
        super().post_craft(success, **kwargs)
        return craft_result
class RemoteReplayBuffer(remote.Remote):
    """Client-side proxy for a replay buffer living on a remote server,
    with optional prefetching of future sample() results."""
    def __init__(self, target: ReplayBuffer, server_name: str, server_addr: str, name: Optional[str]=None, prefetch: int=0, timeout: float=60) -> None:
        super().__init__(target, server_name, server_addr, name, timeout)
        self._prefetch = prefetch
        # Queue of in-flight async sample requests (FIFO).
        self._futures = collections.deque()
        self._server_name = server_name
        self._server_addr = server_addr
    def __repr__(self):
        return (f'RemoteReplayBuffer(server_name={self._server_name}, ' + f'server_addr={self._server_addr})')
    # NOTE(review): `prefetch` is read as a plain value (`self.prefetch`)
    # in sample()/async_sample() below, which only works if a @property
    # decorator was stripped from this definition — confirm.
    def prefetch(self) -> Optional[int]:
        return self._prefetch
    def sample(self, num_samples: int, replacement: bool=False) -> Union[(NestedTensor, Tuple[(NestedTensor, torch.Tensor, torch.Tensor, torch.Tensor)])]:
        """Fetch a batch, preferring an already-prefetched result, then top
        the prefetch queue back up with new async requests."""
        if (len(self._futures) > 0):
            ret = self._futures.popleft().result()
        else:
            ret = self.client.sync(self.server_name, self.remote_method_name('sample'), num_samples, replacement)
        while (len(self._futures) < self.prefetch):
            fut = self.client.async_(self.server_name, self.remote_method_name('sample'), num_samples, replacement)
            self._futures.append(fut)
        return ret
    async def async_sample(self, num_samples: int, replacement: bool=False) -> Union[(NestedTensor, Tuple[(NestedTensor, torch.Tensor, torch.Tensor, torch.Tensor)])]:
        """Asynchronous variant of sample() with the same prefetch policy."""
        if (len(self._futures) > 0):
            ret = (await self._futures.popleft())
        else:
            ret = (await self.client.async_(self.server_name, self.remote_method_name('sample'), num_samples, replacement))
        while (len(self._futures) < self.prefetch):
            fut = self.client.async_(self.server_name, self.remote_method_name('sample'), num_samples, replacement)
            self._futures.append(fut)
        return ret
    def warm_up(self, learning_starts: Optional[int]=None) -> None:
        """Block (polling once per second) until the remote buffer holds at
        least `learning_starts` items (or is full when None)."""
        (size, capacity) = self.info()
        target_size = capacity
        if (learning_starts is not None):
            target_size = min(target_size, learning_starts)
        # Width pads the progress log so the counter lines up.
        width = (len(str(capacity)) + 1)
        while (size < target_size):
            time.sleep(1)
            (size, capacity) = self.info()
            console.log(('Warming up replay buffer: ' + f'[{size: {width}d} / {capacity} ]'))
def __getattr__(name: str) -> int:
    """Module-level fallback serving deprecated WS status-code names.

    Returns the replacement code with a DeprecationWarning pointing at the
    renamed constant; unknown names raise AttributeError as usual.
    """
    renames = {'WS_1004_NO_STATUS_RCVD': 'WS_1005_NO_STATUS_RCVD', 'WS_1005_ABNORMAL_CLOSURE': 'WS_1006_ABNORMAL_CLOSURE'}
    value = __deprecated__.get(name)
    if value:
        warnings.warn(f"'{name}' is deprecated. Use '{renames[name]}' instead.", category=DeprecationWarning, stacklevel=3)
        return value
    raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
class DynamicUrl():
    """Build REST URLs by attribute chaining.

    Example: DynamicUrl(url='http://api').users.info.create_url()
    -> 'http://api/v1/users/info'.
    """

    def __init__(self, url='', version='v1', cache=None):
        # Bug fix: the original signature was garbled
        # ("url=' version='v1'"), a syntax error merging the two
        # parameters. TODO(review): confirm the intended default for `url`.
        self._cache = (cache or [])
        self._baseurl = url
        self._version = version

    def __getattr__(self, name):
        # Any unknown attribute extends the URL path by one segment.
        return self._(name)

    def __del__(self):
        pass

    def _(self, name):
        """Return a new DynamicUrl with `name` appended to the path."""
        return DynamicUrl(url=self._baseurl, version=self._version, cache=(self._cache + [name]))

    def method(self):
        """Return the accumulated path segments."""
        return self._cache

    def create_url(self):
        """Assemble '<base>/<version>/<segment>/...' into a URL string."""
        url_str = '{0}/{1}'.format(self._baseurl, self._version)
        for obj in self.method():
            url_str = '{0}/{1}'.format(url_str, obj)
        return url_str

    def get_url(self, url, params=None, json=None, timeout=30):
        """GET `url`; raises on HTTP errors; returns the parsed JSON body."""
        r = requests.get(url, params=params, json=json, timeout=timeout)
        r.raise_for_status()
        return r.json()

    def post_url(self, url, params=None, json=None, data=None, timeout=30):
        """POST to `url`; wraps HTTP errors with the server's JSON body."""
        r = requests.post(url, params=params, json=json, data=data, timeout=timeout)
        try:
            r.raise_for_status()
        except requests.exceptions.HTTPError:
            # Narrowed from a bare `except:`; re-raise with the response
            # payload for easier debugging.
            raise requests.exceptions.HTTPError('Error: {}'.format(r.json()))
        return r.json()
def render_rx_rates(rx_rates: ReactionRates) -> str:
    """Render reaction rates through the RX_RATES_TPL jinja2 template.

    Registers float-formatting filters (f2/f4/f6/e8) and a `rate_line`
    helper that prints a name, kappa, per-second and per-hour rate.
    """
    def rate_line(name, kappa, rate, comment=''):
        per_hour = rate * 3600
        return f'{name: <12s} {kappa: >8.4f} {rate: >12.8e} {per_hour: >12.8e} {comment: >18s}'
    env = jinja2.Environment()
    for suffix, fmt in (('f2', '{:.2f}'), ('f4', '{:.4f}'), ('f6', '{:.6f}')):
        env.filters[suffix] = fmt.format
    env.filters['e8'] = (lambda v: f'{v: >16.8e}')
    template = env.from_string(RX_RATES_TPL)
    return template.render(rxr=rx_rates, rate_line=rate_line)
class PrivateComputationMrPidPCF2LiftStageFlow(PrivateComputationBaseStageFlow):
    """Stage flow for MR-PID + PCF2 Lift private computation runs.

    `_order_` fixes the execution order; each member maps a stage to its
    initialized/started/completed/failed statuses, whether both parties
    must run it jointly, and optional timeouts.
    """
    _order_ = 'CREATED PC_PRE_VALIDATION UNION_PID_MR_MULTIKEY ID_SPINE_COMBINER RESHARD PCF2_LIFT AGGREGATE POST_PROCESSING_HANDLERS'
    CREATED = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.CREATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.CREATION_STARTED, completed_status=PrivateComputationInstanceStatus.CREATED, failed_status=PrivateComputationInstanceStatus.CREATION_FAILED, is_joint_stage=False)
    PC_PRE_VALIDATION = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_STARTED, completed_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_COMPLETED, failed_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_FAILED, is_joint_stage=False)
    UNION_PID_MR_MULTIKEY = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PID_MR_INITIALIZED, started_status=PrivateComputationInstanceStatus.PID_MR_STARTED, completed_status=PrivateComputationInstanceStatus.PID_MR_COMPLETED, failed_status=PrivateComputationInstanceStatus.PID_MR_FAILED, is_joint_stage=False)
    ID_SPINE_COMBINER = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_INITIALIZED, started_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_STARTED, completed_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_COMPLETED, failed_status=PrivateComputationInstanceStatus.ID_SPINE_COMBINER_FAILED, is_joint_stage=False)
    RESHARD = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.RESHARD_INITIALIZED, started_status=PrivateComputationInstanceStatus.RESHARD_STARTED, completed_status=PrivateComputationInstanceStatus.RESHARD_COMPLETED, failed_status=PrivateComputationInstanceStatus.RESHARD_FAILED, is_joint_stage=False)
    PCF2_LIFT = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PCF2_LIFT_INITIALIZED, started_status=PrivateComputationInstanceStatus.PCF2_LIFT_STARTED, completed_status=PrivateComputationInstanceStatus.PCF2_LIFT_COMPLETED, failed_status=PrivateComputationInstanceStatus.PCF2_LIFT_FAILED, is_joint_stage=True, timeout=DEFAULT_CONTAINER_TIMEOUT_IN_SEC)
    AGGREGATE = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.AGGREGATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.AGGREGATION_STARTED, completed_status=PrivateComputationInstanceStatus.AGGREGATION_COMPLETED, failed_status=PrivateComputationInstanceStatus.AGGREGATION_FAILED, is_joint_stage=True, timeout=DEFAULT_AGGREGATE_TIMEOUT_IN_SEC)
    POST_PROCESSING_HANDLERS = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_INITIALIZED, started_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_STARTED, completed_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_COMPLETED, failed_status=PrivateComputationInstanceStatus.POST_PROCESSING_HANDLERS_FAILED, is_joint_stage=False)
    def get_stage_service(self, args: PrivateComputationStageServiceArgs) -> PrivateComputationStageService:
        """Map this stage to its concrete service implementation.

        Falls back to the default stage service for stages without a
        flow-specific override.
        """
        if (self is self.UNION_PID_MR_MULTIKEY):
            if (args.workflow_svc is None):
                raise NotImplementedError('workflow_svc is None')
            return PIDMRStageService(args.workflow_svc)
        elif (self is self.ID_SPINE_COMBINER):
            return IdSpineCombinerStageService(args.storage_svc, args.onedocker_svc, args.onedocker_binary_config_map, protocol_type=Protocol.MR_PID_PROTOCOL.value)
        elif (self is self.PCF2_LIFT):
            return PCF2LiftStageService(args.onedocker_binary_config_map, args.mpc_svc)
        else:
            return self.get_default_stage_service(args)
def geth_process(geth_command_arguments):
    """Generator that runs a geth subprocess and guarantees shutdown.

    Yields the Popen handle; on exit (normal or exceptional) kills the
    process gracefully and logs its captured stdout/stderr.

    NOTE(review): written as a try/finally generator — presumably wrapped
    by @contextmanager at the (not visible) decorator site; confirm.
    """
    # bufsize=1 requests line buffering, which only applies in text mode;
    # with these binary pipes it behaves like default buffering.
    proc = subprocess.Popen(geth_command_arguments, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1)
    logging.warning(('start geth: %r' % (geth_command_arguments,)))
    try:
        (yield proc)
    finally:
        logging.warning('shutting down geth')
        kill_popen_gracefully(proc, logging.getLogger('tests.integration.sync'))
        (output, errors) = proc.communicate()
        logging.warning('Geth Process Exited:\nstdout:{0}\n\nstderr:{1}\n\n'.format(to_text(output), to_text(errors)))
class OptionSeriesArearangeSonificationTracksActivewhen(Options):
    """Accessors for series.arearange.sonification.tracks.activeWhen options.

    NOTE(review): each option appears as a same-named getter/setter pair;
    the decorators distinguishing them (e.g. @property/@x.setter or a
    config-accessor scheme) appear to have been stripped — confirm against
    the generator before restructuring.
    """
    def crossingDown(self):
        return self._config_get(None)
    def crossingDown(self, num: float):
        self._config(num, js_type=False)
    def crossingUp(self):
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
def double_escape(tok: tokenize.TokenInfo):
    """Double-escape newline sequences inside lmql triple-quoted strings.

    Non-string tokens and strings not starting with an lmql triple quote
    pass through untouched.
    """
    is_lmql_string = (tok.type == tokenize.STRING) and (tok.string.startswith('"""lmql') or tok.string.startswith("'''lmql"))
    if not is_lmql_string:
        return tok
    escaped = tok_str(tok).replace('\\n', '\\\\n')
    return tokenize.TokenInfo(tok.type, escaped, tok.start, tok.end, tok.line)
# NOTE(review): '.django_db' reads like a stripped '@pytest.mark' prefix — confirm.
.django_db
def test_correct_response_with_award_type_codes(client, monkeypatch, helpers, elasticsearch_award_index, awards_and_transactions):
    """Spending-by-recipient endpoint: filtering by award type codes returns
    the expected totals/results for loan types '07' and '08'."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    # Type '07': two recipients, sorted by obligation.
    resp = helpers.post_for_spending_endpoint(client, url, award_type_codes=['07'], def_codes=['L', 'M'], sort='obligation')
    expected_results = {'totals': {'award_count': 2, 'face_value_of_loan': 33.0, 'obligation': 22.0, 'outlay': 11.0}, 'results': [{'code': '', 'award_count': 1, 'description': 'RECIPIENT 2', 'face_value_of_loan': 30.0, 'id': ['3c92491a-f2cd-ec7d-294b-7daf-R'], 'obligation': 20.0, 'outlay': 10.0}, {'code': 'DUNS Number not provided', 'award_count': 1, 'description': 'RECIPIENT 1', 'face_value_of_loan': 3.0, 'id': ['5f572ec9-8b49-e5eb-22c7-f6ef316f7689-R'], 'obligation': 2.0, 'outlay': 1.0}], 'page_metadata': {'hasNext': False, 'hasPrevious': False, 'limit': 10, 'next': None, 'page': 1, 'previous': None, 'total': 2}}
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json() == expected_results)
    # Type '08': a single recipient, sorted by description.
    resp = helpers.post_for_spending_endpoint(client, url, award_type_codes=['08'], def_codes=['L', 'M'], sort='description')
    expected_results = {'totals': {'award_count': 1, 'face_value_of_loan': 300.0, 'obligation': 200.0, 'outlay': 100.0}, 'results': [{'code': '', 'award_count': 1, 'description': 'RECIPIENT, 3', 'face_value_of_loan': 300.0, 'id': ['bf05f751-6841-efd6-8f1b-0144163eceae-C', 'bf05f751-6841-efd6-8f1b-0144163eceae-R'], 'obligation': 200.0, 'outlay': 100.0}], 'page_metadata': {'hasNext': False, 'hasPrevious': False, 'limit': 10, 'next': None, 'page': 1, 'previous': None, 'total': 1}}
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json() == expected_results)
class SplitMultipleSubstTest():
    """Tests for otTables.splitMultipleSubst overflow handling."""
    def overflow(self, itemName, itemRecord):
        """Run a split for the given overflow record; return (old, new) mappings."""
        from fontTools.otlLib.builder import buildMultipleSubstSubtable
        from fontTools.ttLib.tables.otBase import OverflowErrorRecord
        original = buildMultipleSubstSubtable({'e': 1, 'a': 2, 'b': 3, 'c': 4, 'd': 5})
        fresh = otTables.MultipleSubst()
        record = OverflowErrorRecord((None, None, None, itemName, itemRecord))
        assert otTables.splitMultipleSubst(original, fresh, record)
        return (original.mapping, fresh.mapping)
    def test_Coverage(self):
        old, new = self.overflow('Coverage', None)
        assert old == {'a': 2, 'b': 3}
        assert new == {'c': 4, 'd': 5, 'e': 1}
    def test_RangeRecord(self):
        old, new = self.overflow('RangeRecord', None)
        assert old == {'a': 2, 'b': 3}
        assert new == {'c': 4, 'd': 5, 'e': 1}
    def test_Sequence(self):
        old, new = self.overflow('Sequence', 4)
        assert old == {'a': 2, 'b': 3, 'c': 4}
        assert new == {'d': 5, 'e': 1}
def test_event_counting():
    """Aggregating two records of the same event yields one stat with count 2."""
    class _EventInterface(ABC):
        # NOTE(review): '_step_stats(len)' reads like a stripped decorator
        # (e.g. '@define_step_stats(len)') on event1 — confirm.
        _step_stats(len)
        def event1(self, attr1, attr2):
            pass
    agg = LogStatsAggregator(LogStatsLevel.STEP)
    agg.add_event(EventRecord(_EventInterface, _EventInterface.event1, dict(attr1=1, attr2=2)))
    agg.add_event(EventRecord(_EventInterface, _EventInterface.event1, dict(attr1=1, attr2=2)))
    stats = agg.reduce()
    assert (len(stats) == 1)
    (key, value) = list(stats.items())[0]
    assert (value == 2)
    assert (key == (_EventInterface.event1, None, None))
class BaseCommand():
    """Base class for CLI commands holding the parsed command line and context.

    NOTE(review): custom_help/load_parser take `cls`; presumably
    @classmethod decorators were stripped — confirm.
    """

    def __init__(self, command: str = None, args: List[str] = None, obj: SWAT = None) -> None:
        self.command = command
        # Bug fix: the default was the `list` type itself (args=list), so
        # omitting `args` stored the class `list` rather than an empty list.
        self.args = args if args is not None else []
        self.logger = logging.getLogger(__name__)
        self.obj = obj

    def execute(self) -> None:
        """Run the command; every command class must override this."""
        raise NotImplementedError('The "execute" method must be implemented in each command class.')

    def custom_help(cls) -> str:
        """Return command-specific help text; must be overridden."""
        raise NotImplementedError('The "custom_help" method must be implemented in each command class.')

    def load_parser(cls, *args, **kwargs) -> argparse.ArgumentParser:
        """Build an argparse parser using the project's custom formatter."""
        return get_custom_argparse_formatter(*args, **kwargs)
def test_lazy_loading():
    """LazyEntity defers the fetch until .entity is read, and fetches once."""
    fetched = False
    def fetch_once():
        nonlocal fetched
        if fetched:
            raise ValueError('Should be called once only')
        fetched = True
        return dummy_task
    lazy = LazyEntity('x', fetch_once)
    assert repr(lazy) == 'Promise for entity [x]'
    assert lazy.name == 'x'
    # Nothing fetched yet.
    assert lazy._entity is None
    assert not lazy.entity_fetched()
    first = lazy.entity
    # Fetched exactly once; later reads reuse the cached entity.
    assert lazy._entity is not None
    assert first == dummy_task
    assert lazy.entity == dummy_task
    assert lazy.entity_fetched()
def assert_wrapper_clone_from(make_env: Callable[([], MazeEnv)], assert_member_list: List[str]=None):
    """Assert that clone_from() produces an env that stays in lockstep.

    Clones a stepped env, then for 10 steps verifies rewards, dones,
    observations and the given member attributes match between the
    original and the clone (with periodic resets).

    :param make_env: factory producing the env under test
    :param assert_member_list: attribute names to compare after cloning
        and after each step (defaults to no attributes)
    """
    if (not assert_member_list):
        assert_member_list = list()
    main_env = make_env()
    cloned_env = make_env()
    # Advance the main env one step so the clone copies non-trivial state.
    main_env.reset()
    main_env.step(main_env.action_space.sample())
    cloned_env.reset()
    cloned_env.clone_from(main_env)
    for member in assert_member_list:
        assert (getattr(main_env, member) == getattr(cloned_env, member))
    for j in range(10):
        # Same sampled action applied to both envs must yield identical results.
        action = main_env.action_space.sample()
        (obs, rew, done, info) = main_env.step(action)
        (obs_sim, rew_sim, done_sim, info_sim) = cloned_env.step(action)
        assert (rew == rew_sim)
        assert (done == done_sim)
        assert np.all((obs['observation'] == obs_sim['observation']))
        for member in assert_member_list:
            assert (getattr(main_env, member) == getattr(cloned_env, member))
        # Reset both on episode end and every third step to cover resets too.
        if (done or (np.mod(j, 3) == 0)):
            main_env.reset()
            cloned_env.reset()
def get_baseline_and_optimization_config(enable_filtering, enable_optimization):
    """Build baseline configs plus an OptimizationConfig toggled by the flag.

    :return: (algorithm_name, time_alignment_config, hand_eye_config,
        optimization_config) where the name gets an '_opt'/'_no_opt' suffix
    """
    time_alignment_config, hand_eye_config = get_baseline_config(enable_filtering)
    optimization_config = OptimizationConfig()
    optimization_config.enable_optimization = enable_optimization
    optimization_config.optimization_only = False
    suffix = '_opt' if enable_optimization else '_no_opt'
    algorithm_name = hand_eye_config.algorithm_name + suffix
    return (algorithm_name, time_alignment_config, hand_eye_config, optimization_config)
class OptionSeriesLineSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Accessors for series.line sonification tremolo-depth mapping options.

    NOTE(review): each option appears as a same-named getter/setter pair;
    the decorators distinguishing them appear to have been stripped —
    confirm against the generator before restructuring.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsBellcurveSonificationDefaultspeechoptionsActivewhen(Options):
    """Accessors for bellcurve sonification default-speech activeWhen options.

    NOTE(review): each option appears as a same-named getter/setter pair;
    the decorators distinguishing them appear to have been stripped —
    confirm against the generator before restructuring.
    """
    def crossingDown(self):
        return self._config_get(None)
    def crossingDown(self, num: float):
        self._config(num, js_type=False)
    def crossingUp(self):
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
class Challenge(GraphObject):
    """py2neo graph model for a Challenge node keyed by `name`."""
    __primarykey__ = 'name'
    name = Property()

    def __init__(self, name):
        self.name = name

    def find(self):
        """Return the matching Challenge node, or None when absent."""
        # Bug fix: the original matched on the nonexistent attribute
        # `self.challenge`, which raised AttributeError; match on the
        # primary-key value instead. TODO(review): confirm against callers.
        challenge = self.match(graph, self.name).first()
        return challenge

    def register(self):
        """Create the Challenge node when missing; True iff created."""
        if (not self.find()):
            challenge = Node('Challenge', name=self.name)
            graph.create(challenge)
            return True
        else:
            return False
class LightSensor(Sensor):
    """LEGO NXT light sensor (driver 'lego-nxt-light')."""
    SYSTEM_CLASS_NAME = Sensor.SYSTEM_CLASS_NAME
    SYSTEM_DEVICE_NAME_CONVENTION = Sensor.SYSTEM_DEVICE_NAME_CONVENTION
    # Reflected light is measured with the LED on, ambient with it off.
    MODE_REFLECT = 'REFLECT'
    MODE_AMBIENT = 'AMBIENT'
    MODES = (MODE_REFLECT, MODE_AMBIENT)

    def __init__(self, address=None, name_pattern=SYSTEM_DEVICE_NAME_CONVENTION, name_exact=False, **kwargs):
        super(LightSensor, self).__init__(address, name_pattern, name_exact, driver_name='lego-nxt-light', **kwargs)

    def _read_scaled(self, mode):
        # Switch into `mode` and return value(0) scaled for that mode.
        self._ensure_mode(mode)
        return self.value(0) * self._scale(mode)

    def reflected_light_intensity(self):
        """Reflected light intensity (LED on)."""
        return self._read_scaled(self.MODE_REFLECT)

    def ambient_light_intensity(self):
        """Ambient light intensity (LED off)."""
        return self._read_scaled(self.MODE_AMBIENT)
def find_r_Index(label, c):
    """Return the index of the piece with `label` in the color's piece list.

    :param label: piece label to look for
    :param c: 'W' searches globVar.r_w_pieces, anything else r_b_pieces
    :raises IndexError: when no piece carries the label (list exhausted),
        matching the original behavior
    """
    pieces = globVar.r_w_pieces if (c == 'W') else globVar.r_b_pieces
    idx = 0
    while True:
        if pieces[idx].label == label:
            return idx
        idx += 1
def save_file_description(working_dir, file_name, file_description):
    """Write *file_description* into ``working_dir/file_name`` using CRLF
    line endings and return the destination path.

    Any mix of ``\\n`` / ``\\r\\n`` in the input is normalised to CRLF first.
    """
    file_description = '\r\n'.join(file_description.splitlines())
    destination_file_path = os.path.join(working_dir, file_name)
    # newline='' disables text-mode newline translation; without it Windows
    # would rewrite the explicit '\r\n' into '\r\r\n'.
    with open(destination_file_path, 'w', newline='') as f:
        f.write(file_description)
    return destination_file_path
def swap_aliases(client, config):
    """Point the query/load aliases at the new index and retire old indexes.

    Steps: strip any aliases already on the new index, find and strip aliases
    matching the configured prefix from the old indexes, recreate the
    read/load (or award-type) aliases, then delete the old indexes --
    waiting out an in-progress snapshot for up to 15 minutes first.
    """
    # If the freshly built index already carries aliases, drop them so they
    # can be re-pointed cleanly below.
    if client.indices.get_alias(config['index_name'], '*'):
        logger.info(format_log(f"Removing old aliases for index '{config['index_name']}'", action='ES Alias'))
        client.indices.delete_alias(config['index_name'], '_all')
    alias_patterns = (config['query_alias_prefix'] + '*')
    old_indexes = []
    try:
        # Every index currently serving the prefix-matched aliases is "old".
        old_indexes = list(client.indices.get_alias('*', alias_patterns).keys())
        for old_index in old_indexes:
            client.indices.delete_alias(old_index, '_all')
            logger.info(format_log(f"Removing aliases from '{old_index}'", action='ES Alias'))
    except Exception:
        # Best-effort: first deployment has no aliases to remove.
        logger.exception(format_log(f'No aliases found for {alias_patterns}', action='ES Alias'))
    if config['create_award_type_aliases']:
        create_award_type_aliases(client, config)
    else:
        create_read_alias(client, config)
    create_load_alias(client, config)
    try:
        if old_indexes:
            max_wait_time = 15  # minutes to wait for a snapshot to finish
            start_wait_time = time.time()
            is_snapshot_conflict = is_snapshot_running(client, old_indexes)
            if is_snapshot_conflict:
                logger.info(format_log(f'Snapshot in-progress prevents delete; waiting up to {max_wait_time} minutes', action='ES Alias'))
                # Poll every 90s until the snapshot completes or we time out.
                while (((time.time() - start_wait_time) < (max_wait_time * 60)) and is_snapshot_conflict):
                    logger.info(format_log('Waiting while snapshot is in-progress', action='ES Alias'))
                    time.sleep(90)
                    is_snapshot_conflict = is_snapshot_running(client, old_indexes)
            if is_snapshot_conflict:
                # Give up but signal the caller via the config flag.
                config['raise_status_code_3'] = True
                logger.error(format_log(f"Unable to delete index(es) '{old_indexes}' due to in-progress snapshot", action='ES Alias'))
            else:
                client.indices.delete(index=old_indexes, ignore_unavailable=False)
                logger.info(format_log(f"Deleted index(es) '{old_indexes}'", action='ES Alias'))
    except Exception:
        logger.exception(format_log(f'Unable to delete indexes: {old_indexes}', action='ES Alias'))
.usefixtures('use_tmpdir')
.parametrize('qstat_output, jobnr, expected_status', [(None, '', JobStatus.STATUS_FAILURE), ('', '1234', JobStatus.STATUS_FAILURE), ('Job Id: 1\njob_state = R', '1', JobStatus.RUNNING), ('Job Id: 1\n job_state = R', '1', JobStatus.RUNNING), ('Job Id:\t1\n\tjob_state = R', '1', JobStatus.RUNNING), ('Job Id: 1.namespace\njob_state = R', '1', JobStatus.RUNNING), ('Job Id: 11\njob_state = R', '1', JobStatus.STATUS_FAILURE), ('Job Id: 1', '1', JobStatus.STATUS_FAILURE), ('Job Id: 1\njob_state = E', '1', JobStatus.DONE), ('Job Id: 1\njob_state = C', '1', JobStatus.DONE), ('Job Id: 1\njob_state = H', '1', JobStatus.PENDING), ('Job Id: 1\njob_state = Q', '1', JobStatus.PENDING), ('Job Id: 1\njob_state = ', '1', JobStatus.STATUS_FAILURE), ('Job Id: 1\njob_state = E\nExit_status = 1', '1', JobStatus.EXIT), ('Job Id: 1\njob_state = C\nExit_status = 1', '1', JobStatus.EXIT), ('Job Id: 1\njob_state = C\nJob Id: 2\njob_state = R', '2', JobStatus.RUNNING)])
def test_parse_status(qstat_output: Optional[str], jobnr: str, expected_status: JobStatus):
    """Drive torque_driver.parse_status over representative qstat outputs.

    NOTE(review): the two leading lines read like @pytest.mark.usefixtures /
    @pytest.mark.parametrize decorators whose '@pytest.mark' prefix was lost
    in extraction -- confirm against the original test module.
    """
    # qstat_output=None means "no qstat.out file at all".
    if (qstat_output is not None):
        Path('qstat.out').write_text(qstat_output, encoding='utf-8')
    assert (_clib.torque_driver.parse_status('qstat.out', jobnr) == expected_status)
class ChartJsOptions(OptChart.OptionsChart):
    """Top-level Chart.js options wrapper: data, axis/responsiveness flags,
    and the sub-option trees (elements, scales, layout, title, legend,
    plugins, tooltips, size).

    NOTE(review): the paired same-named methods look like @property getters
    and setters with decorators stripped during extraction, and the bare
    ``({'chart.js': '3.0.0'})`` line looks like the argument of a stripped
    package-version decorator -- confirm against the original source.
    """
    _struct__schema = {'elements': {}, 'scales': {}, 'layout': {}, 'title': {}, 'legend': {}, 'plugins': {}, 'size': {}}

    def data(self):
        return self.component._data_attrs

    def data(self, values: dict):
        # Re-register every dataset on the owning component.
        self.component._data_attrs = values
        for d in values.get('datasets', []):
            self.component.add_dataset(**d)

    def indexAxis(self):
        return self._config_get()
    ({'chart.js': '3.0.0'})

    def indexAxis(self, flag: bool):
        self._config(flag)

    def responsive(self):
        return self._config_get()

    def responsive(self, flag: bool):
        self._config(flag)

    def maintainAspectRatio(self):
        return self._config_get(True)

    def maintainAspectRatio(self, flag: bool):
        self._config(flag)

    def elements(self) -> OptionElements:
        return self._config_sub_data('elements', OptionElements)

    def scales(self) -> OptionScales:
        return self._config_sub_data('scales', OptionScales)

    def layout(self) -> OptionLayout:
        return self._config_sub_data('layout', OptionLayout)

    def title(self) -> OptionTitle:
        return self._config_sub_data('title', OptionTitle)

    def legend(self) -> OptionLegend:
        return self._config_sub_data('legend', OptionLegend)

    def plugins(self) -> OptionChartJsPlugins:
        return self._config_sub_data('plugins', OptionChartJsPlugins)

    def tooltips(self):
        return self._config_sub_data('tooltips', OptionChartJsTooltips)

    def add_title(self, text: str, color: str=None):
        """Convenience: enable and set the chart title (optionally coloured);
        returns self for chaining."""
        self.title.display = True
        self.title.text = text
        if (color is not None):
            self.title.fontColor = color
        return self

    def size(self) -> OptionChartJsSize:
        return self._config_sub_data('size', OptionChartJsSize)
class UserPass(object):
    """Plain-text username/password credential object usable as a git
    credential callback: calling it returns itself for every request."""

    def __init__(self, username, password):
        # Stored privately; exposed only through credential_tuple().
        self._user = username
        self._secret = password

    def credential_type(self):
        """The libgit2 credential type constant for plain user/pass."""
        return GIT_CREDTYPE_USERPASS_PLAINTEXT

    def credential_tuple(self):
        """Credentials as a ``(username, password)`` pair."""
        return (self._user, self._secret)

    def __call__(self, _url, _username, _allowed):
        # Credential-callback signature; the arguments are intentionally
        # ignored -- the same credentials serve every URL.
        return self
def example():
    """Demo column: a red 200x200 container that bounces between two sizes
    and colours (1000 ms 'bounceOut' animation) when the button is clicked."""
    box = ft.Container(width=200, height=200, bgcolor='red', animate=ft.animation.Animation(1000, 'bounceOut'))

    async def animate_box(e):
        # Toggle each animated property between its two states.
        if box.width == 200:
            box.width = 100
        else:
            box.width = 200
        if box.height == 200:
            box.height = 100
        else:
            box.height = 200
        if box.bgcolor == 'red':
            box.bgcolor = 'blue'
        else:
            box.bgcolor = 'red'
        await box.update_async()

    return ft.Column(controls=[box, ft.ElevatedButton('Animate container', on_click=animate_box)])
def load(n):
    """Parse the XML magics definition in file *n* and register the class it
    declares in the global DEFS table (duplicate names are rejected)."""
    with open(n) as fh:
        try:
            parsed = tidy(xmltodict.parse(fh.read()))
        except Exception as exc:
            # Re-raise with the offending filename attached for diagnostics.
            raise Exception(n, exc)
    klass = parsed['magics']['class']
    klass['PATH'] = n
    # One definition per class name; the assert payload names both files.
    assert klass['name'] not in DEFS, (klass['name'], n, DEFS[klass['name']])
    DEFS[klass['name']] = Klass(klass)
.django_db
def test_recipient_overview(client, monkeypatch, elasticsearch_transaction_index):
    """End-to-end check of the recipient overview endpoint for a child ('-C')
    recipient: seeds profiles/lookups/DUNS, reindexes ES, and compares the
    full response payload.

    NOTE(review): the leading ``.django_db`` line reads like a
    @pytest.mark.django_db decorator whose prefix was lost in extraction --
    confirm against the original test module.
    """
    r_id = 'a52a7544-829b-c925-e1ba-d04d3171c09a-C'
    create_transaction_test_data()
    for (recipient_id, recipient_profile) in TEST_RECIPIENT_PROFILES.items():
        # Copy so the shared fixture dict is not mutated across tests.
        recipient_profile_copy = recipient_profile.copy()
        if (recipient_id == r_id):
            # Give only the recipient under test trailing-12-month activity.
            recipient_profile_copy['last_12_months'] = 100
            recipient_profile_copy['last_12_months_count'] = 1
        baker.make('recipient.RecipientProfile', **recipient_profile_copy)
    create_recipient_lookup_test_data(*TEST_RECIPIENT_LOOKUPS.values())
    for (duns, duns_dict) in TEST_DUNS.items():
        test_duns_model = duns_dict.copy()
        country_code = test_duns_model['country_code']
        baker.make('recipient.DUNS', **test_duns_model)
        baker.make('references.RefCountryCode', **TEST_REF_COUNTRY_CODE[country_code])
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    resp = client.get(recipient_overview_endpoint(r_id))
    assert (resp.status_code == status.HTTP_200_OK)
    expected = {'name': 'PARENT RECIPIENT', 'alternate_names': [], 'duns': '', 'uei': 'AAAAAAAAAAAA', 'recipient_id': 'a52a7544-829b-c925-e1ba-d04d3171c09a-C', 'recipient_level': 'C', 'parent_name': 'PARENT RECIPIENT', 'parent_duns': '', 'parent_id': 'a52a7544-829b-c925-e1ba-d04d3171c09a-P', 'parent_uei': 'AAAAAAAAAAAA', 'parents': [{'parent_duns': '', 'parent_id': 'a52a7544-829b-c925-e1ba-d04d3171c09a-P', 'parent_name': 'PARENT RECIPIENT', 'parent_uei': 'AAAAAAAAAAAA'}], 'business_types': sorted((['expected', 'business', 'cat'] + ['category_business'])), 'location': {'address_line1': 'PARENT ADDRESS LINE 1', 'address_line2': 'PARENT ADDRESS LINE 2', 'address_line3': None, 'county_name': None, 'city_name': 'PARENT CITY', 'congressional_code': 'PARENT CONGRESSIONAL DISTRICT', 'country_code': 'PARENT COUNTRY CODE', 'country_name': 'PARENT COUNTRY NAME', 'state_code': 'PARENT STATE', 'zip': 'PARENT ZIP', 'zip4': 'PARENT ZIP4', 'foreign_province': None, 'foreign_postal_code': None}, 'total_transaction_amount': 100, 'total_transactions': 2, 'total_face_value_loan_amount': 1000, 'total_face_value_loan_transactions': 1}
    # Sort for order-independent comparison of the business types list.
    resp.data['business_types'] = sorted(resp.data['business_types'])
    assert (resp.data == expected)
def _has_scope_via_role(token_data: Dict[(str, Any)], client: ClientDetail, endpoint_scopes: SecurityScopes) -> bool:
    """Return True when the roles carried by the token grant every scope the
    endpoint requires AND the client is still permitted to hold all of those
    roles; otherwise False."""
    roles_on_token: List[str] = token_data.get(JWE_PAYLOAD_ROLES, [])
    derived_scopes: List[str] = get_scopes_from_roles(roles_on_token)
    scopes_ok = has_scope_subset(user_scopes=derived_scopes, endpoint_scopes=endpoint_scopes)
    if not scopes_ok:
        return False
    permitted_roles = set(client.roles or [])
    if set(roles_on_token) - permitted_roles:
        # Roles may be revoked from a client after a token was minted.
        logger.debug('Client no longer allowed to issue these roles.')
        return False
    return True
class AclEntryResponseAllOf(ModelNormal):
    """Generated OpenAPI model for the ACL-entry response composition
    (acl_id / id / service_id, all read-only strings).

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    lines read like @cached_property and @convert_js_args_to_python_args
    decorators (plus a missing @classmethod on _from_openapi_data) whose
    prefixes were lost in extraction -- confirm against the generated
    original before editing.
    """
    allowed_values = {}
    validations = {}
    _property

    def additional_properties_type():
        # Any JSON-compatible type is accepted for undeclared properties.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property

    def openapi_types():
        # Declared property name -> (accepted types,) mapping.
        return {'acl_id': (str,), 'id': (str,), 'service_id': (str,)}
    _property

    def discriminator():
        return None
    attribute_map = {'acl_id': 'acl_id', 'id': 'id', 'service_id': 'service_id'}
    read_only_vars = {'acl_id', 'id', 'service_id'}
    _composed_schemas = {}
    _js_args_to_python_args

    def _from_openapi_data(cls, *args, **kwargs):
        """Deserialisation constructor: unlike __init__ it tolerates
        read-only attributes coming from the server payload."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so and
            # no additional-properties schema is defined.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args

    def __init__(self, *args, **kwargs):
        """Client-side constructor; rejects positional args and any attempt
        to set a read-only attribute."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class OptionSeriesTreegraphTooltip(Options):
    """Tooltip options for a Highcharts treegraph series: format strings,
    pointer-follow behaviour, value decimal/prefix/suffix settings, and the
    date-time label sub-options.

    NOTE(review): each pair of same-named methods looks like a @property
    getter plus setter with decorators stripped during extraction -- confirm
    against the original generated source. Defaults passed to
    _config_get(...) mirror the Highcharts documented defaults.
    """

    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')

    def clusterFormat(self, text: str):
        self._config(text, js_type=False)

    def dateTimeLabelFormats(self) -> 'OptionSeriesTreegraphTooltipDatetimelabelformats':
        # Lazily-created sub-option tree.
        return self._config_sub_data('dateTimeLabelFormats', OptionSeriesTreegraphTooltipDatetimelabelformats)

    def distance(self):
        return self._config_get(16)

    def distance(self, num: float):
        self._config(num, js_type=False)

    def followPointer(self):
        return self._config_get(False)

    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)

    def followTouchMove(self):
        return self._config_get(True)

    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)

    def footerFormat(self):
        return self._config_get('')

    def footerFormat(self, text: str):
        self._config(text, js_type=False)

    def format(self):
        return self._config_get('undefined')

    def format(self, text: str):
        self._config(text, js_type=False)

    def headerFormat(self):
        return self._config_get('')

    def headerFormat(self, text: str):
        self._config(text, js_type=False)

    def linkFormat(self):
        return self._config_get('{point.fromNode.id} \u2192 {point.toNode.id}') if False else self._config_get('{point.fromNode.id} {point.toNode.id}')

    def linkFormat(self, text: str):
        self._config(text, js_type=False)

    def nullFormat(self):
        return self._config_get(None)

    def nullFormat(self, text: str):
        self._config(text, js_type=False)

    def nullFormatter(self):
        return self._config_get(None)

    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    def pointFormat(self):
        return self._config_get('{point.id}')

    def pointFormat(self, text: str):
        self._config(text, js_type=False)

    def pointFormatter(self):
        return self._config_get(None)

    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)

    def valueDecimals(self):
        return self._config_get(None)

    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    def valuePrefix(self):
        return self._config_get(None)

    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    def valueSuffix(self):
        return self._config_get(None)

    def valueSuffix(self, text: str):
        self._config(text, js_type=False)

    def xDateFormat(self):
        return self._config_get(None)

    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
def check_relayed_addr(sender: AddressAPI, addr: AddressAPI) -> bool:
    """Decide whether a relayed address is acceptable coming from *sender*.

    Rejects unspecified or reserved addresses outright, and rejects private
    or loopback addresses unless the sender itself is private/loopback
    (a public peer must not advertise internal addresses).
    """
    unacceptable = (
        addr.is_unspecified
        or addr.is_reserved
        or (addr.is_private and not sender.is_private)
        or (addr.is_loopback and not sender.is_loopback)
    )
    return not unacceptable
def test_async_from_web3_does_not_set_w3_object_reference(local_async_w3):
    """AsyncENS.from_web3 must hold its own copy of the web3 instance:
    mutating the original's strict_bytes_type_checking flag afterwards must
    not leak into ``ns.w3``."""
    assert local_async_w3.strict_bytes_type_checking
    ns = AsyncENS.from_web3(local_async_w3)
    assert ns.w3.strict_bytes_type_checking
    # Distinct objects, not the same reference.
    assert (local_async_w3 != ns.w3)
    local_async_w3.strict_bytes_type_checking = False
    assert (not local_async_w3.strict_bytes_type_checking)
    # The ENS-held instance keeps its original setting.
    assert ns.w3.strict_bytes_type_checking
def init(model: Model[(InT, OutT)], X: Optional[InT]=None, Y: Optional[OutT]=None) -> None:
    """Initialise a two-layer chained model.

    Layer 0 gets its input width from X[1] and its weights from X[0]; its
    predictions over both elements of X then feed layer 1's initialisation.
    Finally the outer model mirrors layer 0's nI and layer 1's nO.
    """
    encoder = model.layers[0]
    head = model.layers[1]
    if X is not None:
        encoder.set_dim('nI', get_width(X[1]))
        encoder.initialize(X=X[0])
        X = (encoder.predict(X[0]), encoder.predict(X[1]))
    head.initialize(X=X, Y=Y)
    model.set_dim('nI', encoder.get_dim('nI'))
    model.set_dim('nO', head.get_dim('nO'))
class OptionPlotoptionsLineSonificationContexttracksActivewhen(Options):
    """Highcharts ``activeWhen`` config for line-series sonification context
    tracks: crossingDown / crossingUp / max / min / prop.

    NOTE(review): paired same-named methods look like @property getter/setter
    pairs with decorators stripped during extraction -- confirm against the
    original generated source.
    """

    def crossingDown(self):
        return self._config_get(None)

    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
_with_ttl(ttl_seconds=3600)
def _get_palantir_oauth_token(foundry_url: str, client_id: str, client_secret: (str | None)=None, scopes: (list | None)=None) -> str:
    """Obtain an OAuth user token for the given Foundry host via the
    palantir_oauth_client browser-less flow and return the raw token string.

    NOTE(review): the leading ``_with_ttl(ttl_seconds=3600)`` line reads like
    a caching decorator whose '@' was lost in extraction (token cached for
    one hour) -- confirm against the original source.
    """
    if (scopes is None):
        # Fall back to the module-wide default third-party-app scopes.
        scopes = DEFAULT_TPA_SCOPES
    credentials = palantir_oauth_client.get_user_credentials(scopes=scopes, hostname=foundry_url, client_id=client_id, client_secret=client_secret, use_local_webserver=False)
    return credentials.token
def test_unsupported_complex_literals():
    """Annotated types carrying a FlyteAnnotation must be rejected for a
    typed dict, an enum and a dataclass alike."""
    for base in (typing.Dict[(int, str)], Color, Result):
        annotated = typing_extensions.Annotated[(base, FlyteAnnotation({'foo': 'bar'}))]
        with pytest.raises(ValueError):
            TypeEngine.to_literal_type(annotated)
class WorkerSupportedModel():
    """A worker's network location (host/port) plus the models it serves.

    NOTE(review): reads like a @dataclass whose decorator (and a @classmethod
    on from_dict) was stripped during extraction -- as literally written,
    ``from_dict``'s ``cls`` parameter would receive the instance. Confirm
    against the original source.
    """
    # Worker endpoint and its served models (see from_dict for construction).
    host: str
    port: int
    models: List[SupportedModel]

    def from_dict(cls, worker_data: Dict) -> 'WorkerSupportedModel':
        """Build from a raw dict, converting each model dict in place first."""
        models = [SupportedModel.from_dict(model_data) for model_data in worker_data['models']]
        # NOTE: mutates the caller's dict before unpacking it.
        worker_data['models'] = models
        return cls(**worker_data)
def test_type_regex_for_redos():
    """Regression test for a ReDoS-prone type-name regex: a crafted type
    string with a long digit run must fail fast (< 1 s) rather than
    backtrack catastrophically.

    NOTE(review): the triple-quoted literal's internal indentation may have
    been normalised by extraction -- confirm against the original; the JSON
    payload content is what matters, not its layout.
    """
    start = time.time()
    long = ('1' * 30)
    invalid_structured_data_string = f'''{{
"types": {{
"EIP712Domain": [
{{"name": "aaaa", "type": "$[{long}0"}},
{{"name": "version", "type": "string"}},
{{"name": "chainId", "type": "uint256"}},
{{"name": "verifyingContract", "type": "address"}}
]
}}
}}'''
    # The malformed '$[...' type is interpolated into a pattern, so the regex
    # itself errors; the inner context pins the intended validation message.
    with pytest.raises(re.error, match='unterminated character set at position 15'):
        with pytest.raises(ValidationError, match=f'Invalid Type `$[{long}0` in `EIP712Domain`'):
            load_and_validate_structured_message(invalid_structured_data_string)
    done = (time.time() - start)
    # Fast failure is the point of the test.
    assert (done < 1)
class LoaderSimple(Unpacker):
    """Unpacker that brute-forces trivially (single-byte XOR) encrypted
    payloads hidden among an APK's files."""

    def __init__(self, apk_obj: APK, dvms, output_dir):
        super().__init__('loader.simple', 'Simple methods to unpack', apk_obj, dvms, output_dir)

    def start_decrypt(self, native_lib: str=''):
        """Entry point: scan the APK's files for a single-byte-XOR payload.

        Sets ``self.decrypted_payload_path`` as a side effect on success.
        """
        self.logger.info('Starting to decrypt')
        package_name = self.apk_object.get_package()
        self.decrypted_payload_path = None
        # Fixed: identity comparison instead of `!= None`.
        if package_name is not None:
            if self.brute_assets(package_name):
                return

    def brute_assets(self, key: str):
        """Try every file in the APK; return the decrypted payload path on
        success, else None. (*key* is unused but kept for interface
        compatibility.)"""
        self.logger.info('Starting brute-force')
        for filepath in self.apk_object.get_files():
            file_data = self.apk_object.get_file(filepath)
            if self.try_one_byte_xor(file_data):
                return self.decrypted_payload_path
        return None

    def try_one_byte_xor(self, file_data):
        """Probe all 255 non-zero single-byte XOR keys against *file_data*.

        A cheap 16-byte header check gates the full-file decryption; returns
        True once a decrypted payload passes check_and_write_file.
        """
        for k in range(1, 256):
            key_byte = k.to_bytes(1, 'little')
            # Only XOR the first 16 bytes for the header pre-check.
            if not self.check_header(xor(file_data[:16], key_byte)):
                continue
            self.logger.info(f'Found single byte xor key : {k}')
            if self.check_and_write_file(xor(file_data, key_byte)):
                return True
        return False
class SearchBookmark(models.Model):
    """A user's saved search: a display name plus the URL fragment that
    restores it on the analyse dashboard."""
    name = TruncatingCharField(max_length=200)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    url = models.CharField(max_length=1200)
    created_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return ('Bookmark: ' + self.name)

    def topic(self):
        # A bookmark's topic is simply its display name.
        return self.name

    def dashboard_url(self):
        # Fragment-style deep link: <analyse-page>#<saved-url>.
        return ('%s#%s' % (reverse('analyse'), self.url))
class Formatter(metaclass=ABCMeta):
    """Abstract base for lexc output formatters.

    Provides the shared boilerplate sections (copyright, multichar symbols,
    root lexicon); subclasses implement per-wordmap and per-continuation
    emission.
    """

    def __init__(self, verbosity=False, **kwargs):
        self._verbosity = verbosity

    def copyright_lexc(self):
        """Lexc comment line carrying the copyright notice."""
        return '! Copyright 2015 Omorfi Contributors, GNU GPLv3'

    def multichars_lexc(self):
        """Multichar-symbols section listing every shared special symbol."""
        header = '!! Following specials exist in all versions of omorfi\n'
        return header + ''.join(symbol + '\n' for symbol in sorted(common_multichars))

    def root_lexicon_lexc(self):
        """Root lexicon dispatching into one sub-lexicon per word class."""
        sections = [
            'LEXICON Root\n',
            '!! LEXICONS per class\n 0 NOUN ;\n 0 ADJ ;\n 0 VERB ;\n 0 NUM ;\n 0 DIGITS ;\n 0 PRON ;\n 0 ADP ;\n 0 ADV ;\n 0 INTJ ;\n 0 PUNCT ;\n 0 SYM ;\n 0 CCONJ ;\n 0 SCONJ ;\n 0 CCONJ|VERB ;\n 0 PROPN ;\n 0 ACRONYM ;\n 0 ABBREVIATION ;\n 0 AUX ;\n 0 DET ;\n 0 X ;\n ',
            version_id_easter_egg + ':__omorfi # ;\n',
        ]
        return ''.join(sections)

    def wordmap2lexc(self, wordmap):
        """Emit the lexc entry for one wordmap (subclass responsibility)."""
        pass

    def continuation2lexc(self, fields):
        """Emit the lexc continuation for one field tuple (subclass responsibility)."""
        pass
class port_mod_failed_error_msg(error_msg):
    """OpenFlow 1.3 (wire version 4) OFPT_ERROR message whose error type is
    OFPET_PORT_MOD_FAILED (7); ``code`` carries the OFPPMFC_* reason and
    ``data`` the offending request bytes.

    NOTE(review): loxigen-style generated serializer. pack() joins str
    fragments from struct.pack, which only works on Python 2 byte strings;
    ``unpack`` is presumably a stripped @staticmethod. Confirm before
    modernising.
    """
    version = 4   # wire protocol version (OpenFlow 1.3)
    type = 1      # OFPT_ERROR
    err_type = 7  # OFPET_PORT_MOD_FAILED

    def __init__(self, xid=None, code=None, data=None):
        # xid defaults to None (assigned later), code to 0, data to ''.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (code != None):
            self.code = code
        else:
            self.code = 0
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize to wire format, backpatching the 16-bit length field."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder (index 2)
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.err_type))
        packed.append(struct.pack('!H', self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialize from a loxi reader, asserting the fixed header fields."""
        obj = port_mod_failed_error_msg()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _err_type = reader.read('!H')[0]
        assert (_err_type == 7)
        obj.code = reader.read('!H')[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        # Field-wise equality on the variable fields only.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.code != other.code):
            return False
        if (self.data != other.data):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form via a pretty-printer ``q``."""
        q.text('port_mod_failed_error_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('code = ')
                # Show the symbolic OFPPMFC_* name when the code is known.
                value_name_map = {0: 'OFPPMFC_BAD_PORT', 1: 'OFPPMFC_BAD_HW_ADDR', 2: 'OFPPMFC_BAD_CONFIG', 3: 'OFPPMFC_BAD_ADVERTISE', 4: 'OFPPMFC_EPERM'}
                if (self.code in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.code], self.code)))
                else:
                    q.text(('%#x' % self.code))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
def _getAssetPath():
    """Resolve the shared example-assets directory that mirrors the current
    working directory's position under the PYH examples tree, rooted at
    $Q37_EPEIOS (with 'h:' rewritten for Cygwin).

    Only meaningful in the DEV source layout.
    """
    if (not XDHqSHRD.isDev()):
        # NOTE(review): `throw` is not a Python builtin; unless it is brought
        # in by a star-import elsewhere in this module, this line raises
        # NameError rather than the intended error -- confirm, and consider
        # `raise RuntimeError(...)`.
        throw('Should only be called in DEV context!!!')
    return os.path.join(os.path.realpath(os.path.join(os.environ['Q37_EPEIOS'].replace('h:', '/cygdrive/h'), 'tools/xdhq/examples/common/')), os.path.relpath(os.getcwd(), os.path.realpath(os.path.join(os.environ['Q37_EPEIOS'].replace('h:', '/cygdrive/h'), 'tools/xdhq/examples/PYH/'))))
class OptionSeriesNetworkgraphSonificationContexttracksMappingPan(Options):
    """Highcharts pan-mapping config for networkgraph sonification context
    tracks: mapFunction / mapTo / max / min / within.

    NOTE(review): paired same-named methods look like @property getter/setter
    pairs with decorators stripped during extraction -- confirm against the
    original generated source.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class TrickyRadialGradientTest():
    """Regression tests for rounding radial-gradient start circles so that
    integer rounding never flips the start circle across the end circle's
    boundary.

    NOTE(review): ``circle_inside_circle`` has no ``self`` parameter yet is
    called as ``self.circle_inside_circle(...)`` -- almost certainly a
    @staticmethod whose decorator was stripped in extraction (as literally
    written, ``self`` would land in ``c0``). The bare ``.parametrize(...)``
    line likewise reads like a stripped @pytest.mark.parametrize. Confirm
    against the original test module.
    """

    def circle_inside_circle(c0, r0, c1, r1, rounded=False):
        # Containment test, optionally after integer-rounding both circles.
        if rounded:
            return Circle(c0, r0).round().inside(Circle(c1, r1).round())
        else:
            return Circle(c0, r0).inside(Circle(c1, r1))

    def round_start_circle(self, c0, r0, c1, r1, inside=True):
        """Round the start circle while preserving containment.

        Precondition: naive rounding flips the containment relation; the
        stable rounding must restore it.
        """
        assert (self.circle_inside_circle(c0, r0, c1, r1) is inside)
        assert (self.circle_inside_circle(c0, r0, c1, r1, rounded=True) is not inside)
        r = round_start_circle_stable_containment(c0, r0, c1, r1)
        assert (self.circle_inside_circle(r.centre, r.radius, c1, r1, rounded=True) is inside)
        return (r.centre, r.radius)

    def test_noto_emoji_mosquito_u1f99f(self):
        # Real-world case from the Noto mosquito emoji.
        c0 = (385.23508, 70.)
        r0 = 0
        c1 = (642.99108, 104.)
        r1 = 260.0072
        assert (self.round_start_circle(c0, r0, c1, r1, inside=True) == ((386, 71), 0))
    def test_noto_emoji_horns_sign_u1f918_1f3fc(self):
        # Containment already stable under rounding here; nothing to nudge.
        c0 = ((- 437.), (- 2116.))
        r0 = 0.0
        c1 = ((- 488.), (- 1876.))
        r1 = 245.
        assert self.circle_inside_circle(c0, r0, c1, r1)
        assert self.circle_inside_circle(c0, r0, c1, r1, rounded=True)
    .parametrize('c0, r0, c1, r1, inside, expected', [((1.4, 0), 0, (2.6, 0), 1.3, True, ((2, 0), 0)), ((1, 0), 0.6, (2.8, 0), 2.45, True, ((2, 0), 1)), ((6.49, 6.49), 0, (0.49, 0.49), 8.49, True, ((5, 5), 0)), ((0, 0), 0, (2, 0), 1.5, False, (((- 1), 0), 0)), ((0, (- 0.5)), 0, (0, (- 2.5)), 1.5, False, ((0, 1), 0)), ((0.5, 0), 0, (9.4, 0), 8.8, False, (((- 1), 0), 0)), ((1.5, 1.5), 0, (0.49, 0.49), 1.49, True, ((0, 0), 0)), ((0.5000001, 0), 0.5000001, (0.499999, 0), 0.4999999, True, ((0, 0), 0)), ((0, 0), 1.49, (0, 0), 1, False, ((0, 0), 2))])
    def test_nudge_start_circle_position(self, c0, r0, c1, r1, inside, expected):
        assert (self.round_start_circle(c0, r0, c1, r1, inside) == expected)
class SimpleEntity(Base):
__auto_name__ = True
__strictly_typed__ = False
__name_formatter__ = None
__tablename__ = 'SimpleEntities'
id = Column('id', Integer, primary_key=True)
entity_type = Column(String(128), nullable=False)
__mapper_args__ = {'polymorphic_on': entity_type, 'polymorphic_identity': 'SimpleEntity'}
name = Column(String(256), nullable=False, doc='Name of this object')
description = Column('description', Text, doc='Description of this object.')
created_by_id = Column('created_by_id', Integer, ForeignKey('Users.id', use_alter=True, name='xc'), doc='The id of the :class:`.User` who has created this entity.')
created_by = relationship('User', backref='entities_created', primaryjoin='SimpleEntity.created_by_id==User.user_id', post_update=True, doc='The :class:`.User` who has created this object.')
updated_by_id = Column('updated_by_id', Integer, ForeignKey('Users.id', use_alter=True, name='xu'), doc='The id of the :class:`.User` who has updated this entity.')
updated_by = relationship('User', backref='entities_updated', primaryjoin='SimpleEntity.updated_by_id==User.user_id', post_update=True, doc='The :class:`.User` who has updated this object.')
date_created = Column(GenericDateTime, default=functools.partial(datetime.datetime.now, pytz.utc), doc='A :class:`datetime.datetime` instance showing the creation date\n and time of this object.')
date_updated = Column(GenericDateTime, default=functools.partial(datetime.datetime.now, pytz.utc), doc='A :class:`datetime.datetime` instance showing the update date\n and time of this object.')
type_id = Column('type_id', Integer, ForeignKey('Types.id', use_alter=True, name='y'), doc='The id of the :class:`.Type` of this entity. Mainly used by\n SQLAlchemy to create a Many-to-One relates between SimpleEntities and\n Types.\n ')
type = relationship('Type', primaryjoin='SimpleEntities.c.type_id==Types.c.id', post_update=True, doc='The type of the object.\n\n It is a :class:`.Type` instance with a proper\n :attr:`.Type.target_entity_type`.\n ')
generic_data = relationship('SimpleEntity', secondary='SimpleEntity_GenericData', primaryjoin='SimpleEntities.c.id==SimpleEntity_GenericData.c.simple_entity_id', secondaryjoin='SimpleEntity_GenericData.c.other_simple_entity_id==SimpleEntities.c.id', post_update=True, doc='This attribute can hold any kind of data which exists in SOM.\n ')
generic_text = Column('generic_text', Text, doc='This attribute can hold any text.')
thumbnail_id = Column('thumbnail_id', Integer, ForeignKey('Links.id', use_alter=True, name='z'))
thumbnail = relationship('Link', primaryjoin='SimpleEntities.c.thumbnail_id==Links.c.id', post_update=True)
html_style = Column(String(64), nullable=True, default='')
html_class = Column(String(64), nullable=True, default='')
__stalker_version__ = Column('stalker_version', String(256))
def __init__(self, name=None, description='', generic_text='', type=None, created_by=None, updated_by=None, date_created=None, date_updated=None, thumbnail=None, html_style='', html_class='', **kwargs):
self._nice_name = ''
self.name = name
self.description = description
self.created_by = created_by
self.updated_by = updated_by
if (date_created is None):
date_created = datetime.datetime.now(pytz.utc)
if (date_updated is None):
date_updated = date_created
self.date_created = date_created
self.date_updated = date_updated
self.type = type
self.thumbnail = thumbnail
self.generic_text = generic_text
self.html_style = html_style
self.html_class = html_class
import stalker
self.__stalker_version__ = stalker.__version__
def __repr__(self):
return ('<%s (%s)>' % (self.name, self.entity_type))
def __eq__(self, other):
from stalker.db.session import DBSession
with DBSession.no_autoflush:
return (isinstance(other, SimpleEntity) and (self.name == other.name))
def __ne__(self, other):
return (not self.__eq__(other))
def __hash__(self):
return ((hash(self.id) + (2 * hash(self.name))) + (3 * hash(self.entity_type)))
('description')
def _validate_description(self, key, description):
if (description is None):
description = ''
from stalker import __string_types__
if (not isinstance(description, __string_types__)):
raise TypeError(('%s.description should be a string, not %s' % (self.__class__.__name__, description.__class__.__name__)))
return description
('generic_text')
def _validate_generic_text(self, key, generic_text):
if (generic_text is None):
generic_text = ''
from stalker import __string_types__
if (not isinstance(generic_text, __string_types__)):
raise TypeError(('%s.generic_text should be a string, not %s' % (self.__class__.__name__, generic_text.__class__.__name__)))
return generic_text
('name')
def _validate_name(self, key, name):
if self.__auto_name__:
if ((name is None) or (name == '')):
name = ('%s_%s' % (self.__class__.__name__, uuid.uuid4().urn.split(':')[2]))
if (name is None):
raise TypeError(('%s.name can not be None' % self.__class__.__name__))
from stalker import __string_types__
if (not isinstance(name, __string_types__)):
raise TypeError(('%s.name should be a string not %s' % (self.__class__.__name__, name.__class__.__name__)))
name = self._format_name(name)
if (name == ''):
raise ValueError(('%s.name can not be an empty string' % self.__class__.__name__))
self._nice_name = self._format_nice_name(name)
return name
def _format_name(cls, name_in):
name_in = name_in.strip()
name_in = re.sub('[\\s]+', ' ', name_in)
return name_in
def _format_nice_name(cls, nice_name_in):
nice_name_in = nice_name_in.strip()
nice_name_in = re.sub('([^a-zA-Z0-9\\s_\\-]+)', '', nice_name_in).strip()
nice_name_in = re.sub('(^[^a-zA-Z0-9]+)', '', nice_name_in)
nice_name_in = re.sub('[\\s]+', ' ', nice_name_in)
nice_name_in = re.sub('([ -])+', '_', nice_name_in)
nice_name_in = re.sub('([_]+)', '_', nice_name_in)
return nice_name_in
def nice_name(self):
self._nice_name = self._format_nice_name(self.name)
return self._nice_name
('created_by')
def _validate_created_by(self, key, created_by_in):
from stalker.models.auth import User
if (created_by_in is not None):
if (not isinstance(created_by_in, User)):
raise TypeError(('%s.created_by should be a stalker.models.auth.User instance, not %s' % (self.__class__.__name__, created_by_in.__class__.__name__)))
return created_by_in
('updated_by')
def _validate_updated_by(self, key, updated_by_in):
from stalker.models.auth import User
if (updated_by_in is None):
updated_by_in = self.created_by
if (updated_by_in is not None):
if (not isinstance(updated_by_in, User)):
raise TypeError(('%s.updated_by should be a stalker.models.auth.User instance, not %s' % (self.__class__.__name__, updated_by_in.__class__.__name__)))
return updated_by_in
('date_created')
def _validate_date_created(self, key, date_created_in):
if (date_created_in is None):
raise TypeError(('%s.date_created can not be None' % self.__class__.__name__))
if (not isinstance(date_created_in, datetime.datetime)):
raise TypeError(('%s.date_created should be a datetime.datetime instance' % self.__class__.__name__))
return date_created_in
('date_updated')
def _validate_date_updated(self, key, date_updated_in):
if (date_updated_in is None):
raise TypeError(('%s.date_updated can not be None' % self.__class__.__name__))
if (not isinstance(date_updated_in, datetime.datetime)):
raise TypeError(('%s.date_updated should be a datetime.datetime instance' % self.__class__.__name__))
if (date_updated_in < self.date_created):
raise ValueError(('%(class)s.date_updated could not be set to a date before %(class)s.date_created, try setting the ``date_created`` first.' % {'class': self.__class__.__name__}))
return date_updated_in
('type')
# NOTE(review): stripped @validates('type') decorator residue — confirm.
def _validate_type(self, key, type_in):
    """Validate ``type``: only non-strictly-typed instances may leave it None."""
    # Skip the class check only when the instance is not strictly typed
    # AND no type was supplied.
    needs_check = not (not self.__strictly_typed__ and type_in is None)
    if needs_check:
        from stalker.models.type import Type
        if not isinstance(type_in, Type):
            raise TypeError(
                '%s.type must be a stalker.models.type.Type instance, not %s'
                % (self.__class__.__name__, type_in)
            )
    return type_in
('thumbnail')
# NOTE(review): stripped @validates('thumbnail') decorator residue — confirm.
def _validate_thumbnail(self, key, thumb):
    """Validate ``thumbnail``: must be ``None`` or a ``Link`` instance."""
    if thumb is None:
        return thumb
    from stalker import Link
    if not isinstance(thumb, Link):
        raise TypeError(
            '%s.thumbnail should be a stalker.models.link.Link instance, not %s'
            % (self.__class__.__name__, thumb.__class__.__name__)
        )
    return thumb
def tjp_id(self):
    """Return the TaskJuggler-compatible id: ``<ClassName>_<id>``."""
    return '{}_{}'.format(self.__class__.__name__, self.id)
def to_tjp(self):
    """Abstract hook: subclasses must render their TaskJuggler representation."""
    raise NotImplementedError(
        'This property is not implemented in %s' % self.__class__.__name__
    )
('html_style')
# NOTE(review): stripped @validates('html_style') decorator residue — confirm.
def _validate_html_style(self, key, html_style):
    """Validate ``html_style``: ``None`` becomes ``''``; must be a string."""
    from stalker import __string_types__
    if html_style is None:
        html_style = ''
    if not isinstance(html_style, __string_types__):
        raise TypeError(
            '%s.html_style should be a basestring instance, not %s'
            % (self.__class__.__name__, html_style.__class__.__name__)
        )
    return html_style
('html_class')
# NOTE(review): stripped @validates('html_class') decorator residue — confirm.
def _validate_html_class(self, key, html_class):
    """Validate ``html_class``: ``None`` becomes ``''``; must be a string."""
    from stalker import __string_types__
    if html_class is None:
        html_class = ''
    if not isinstance(html_class, __string_types__):
        raise TypeError(
            '%s.html_class should be a basestring instance, not %s'
            % (self.__class__.__name__, html_class.__class__.__name__)
        )
    return html_class
class OptionSeriesPolygonSonificationTracksMappingTremolo(Options):
    """Accessors for the polygon-series sonification ``tremolo`` mapping.

    NOTE(review): generated-looking wrapper; each accessor builds (or reuses)
    a nested sub-configuration object via ``_config_sub_data`` — the exact
    semantics of that helper live in the ``Options`` base class, not visible
    here.
    """
    def depth(self) -> 'OptionSeriesPolygonSonificationTracksMappingTremoloDepth':
        # Nested sub-config for the tremolo depth mapping.
        return self._config_sub_data('depth', OptionSeriesPolygonSonificationTracksMappingTremoloDepth)
    def speed(self) -> 'OptionSeriesPolygonSonificationTracksMappingTremoloSpeed':
        # Nested sub-config for the tremolo speed mapping.
        return self._config_sub_data('speed', OptionSeriesPolygonSonificationTracksMappingTremoloSpeed)
class OptionSeriesStreamgraphSonificationDefaultspeechoptions(Options):
    """Default speech-track options for streamgraph series sonification.

    NOTE(review): each getter/setter pair below shares one name; in the
    upstream generated package these are almost certainly ``@property`` /
    ``@x.setter`` pairs whose decorators were lost in extraction — as
    written, the later ``def`` shadows the earlier one. Confirm against the
    original source.
    """
    def activeWhen(self) -> 'OptionSeriesStreamgraphSonificationDefaultspeechoptionsActivewhen':
        # Nested sub-config controlling when this speech track is active.
        return self._config_sub_data('activeWhen', OptionSeriesStreamgraphSonificationDefaultspeechoptionsActivewhen)
    def language(self):
        # Getter; default speech language is 'en-US'.
        return self._config_get('en-US')
    def language(self, text: str):
        # Setter for the speech synthesis language.
        self._config(text, js_type=False)
    def mapping(self) -> 'OptionSeriesStreamgraphSonificationDefaultspeechoptionsMapping':
        # Nested sub-config mapping data values to speech parameters.
        return self._config_sub_data('mapping', OptionSeriesStreamgraphSonificationDefaultspeechoptionsMapping)
    def pointGrouping(self) -> 'OptionSeriesStreamgraphSonificationDefaultspeechoptionsPointgrouping':
        # Nested sub-config for grouping points before speaking them.
        return self._config_sub_data('pointGrouping', OptionSeriesStreamgraphSonificationDefaultspeechoptionsPointgrouping)
    def preferredVoice(self):
        # Getter; no preferred voice by default.
        return self._config_get(None)
    def preferredVoice(self, text: str):
        # Setter for the preferred synthesis voice name.
        self._config(text, js_type=False)
    def showPlayMarker(self):
        # Getter; the play marker is shown by default.
        return self._config_get(True)
    def showPlayMarker(self, flag: bool):
        # Setter toggling the play marker.
        self._config(flag, js_type=False)
    def type(self):
        # Getter; this track type defaults to 'speech'.
        return self._config_get('speech')
    def type(self, text: str):
        # Setter for the track type.
        self._config(text, js_type=False)
def test_can_not_combine_not_same_expression(task):
    """Restructuring must NOT merge switches over *different* expressions.

    The CFG contains two switch-like regions: one dispatching on the original
    ``arg1`` SSA value (cases 1, 3, 5) and one on the redefined value
    ``arg1 = arg1 + 2`` (cases 2, 4, 6). Because the switch expressions
    differ, the restructurer has to emit two separate SwitchNodes.
    """
    # Two SSA incarnations of arg1: the incoming parameter and the value
    # redefined in block 3.
    arg_1_0 = Variable('arg1', Integer(32, True), None, False, Variable('arg1', Integer(32, True), 0, False, None))
    arg_1_1 = Variable('arg1', Integer(32, True), None, False, Variable('c0', Integer(32, True), 0, False, None))
    vertices = [
        BasicBlock(0, [Branch(Condition(OperationType.equal, [arg_1_0, Constant(5, Integer(32, True))], CustomType('bool', 1)))]),
        BasicBlock(1, [Assignment(ListOperation([]), print_call('Wednesday', 1))]),
        BasicBlock(2, [Branch(Condition(OperationType.greater, [arg_1_0, Constant(5, Integer(32, True))], CustomType('bool', 1)))]),
        BasicBlock(3, [
            # Redefinition point: arg1 = arg1 + 2 — everything after this
            # dispatches on the new value.
            Assignment(arg_1_1, BinaryOperation(OperationType.plus, [arg_1_0, Constant(2, Integer(32, True))], Integer(32, True))),
            Branch(Condition(OperationType.equal, [arg_1_1, Constant(6, Integer(32, True))], CustomType('bool', 1))),
        ]),
        BasicBlock(4, [Branch(Condition(OperationType.equal, [arg_1_0, Constant(1, Integer(32, True))], CustomType('bool', 1)))]),
        BasicBlock(5, [Assignment(ListOperation([]), print_call('Saturday', 5))]),
        BasicBlock(6, [Branch(Condition(OperationType.greater, [arg_1_1, Constant(6, Integer(32, True))], CustomType('bool', 1)))]),
        BasicBlock(7, [Assignment(ListOperation([]), print_call('Monday', 2))]),
        BasicBlock(8, [Branch(Condition(OperationType.equal, [arg_1_0, Constant(3, Integer(32, True))], CustomType('bool', 1)))]),
        BasicBlock(9, [Return(ListOperation([Constant(0, Integer(32, True))]))]),
        BasicBlock(10, [Branch(Condition(OperationType.equal, [arg_1_1, Constant(2, Integer(32, True))], CustomType('bool', 1)))]),
        BasicBlock(11, [Assignment(ListOperation([]), print_call('Tuesday', 3))]),
        BasicBlock(12, [Assignment(ListOperation([]), print_call('Thursday', 6))]),
        BasicBlock(13, [Branch(Condition(OperationType.equal, [arg_1_1, Constant(4, Integer(32, True))], CustomType('bool', 1)))]),
        BasicBlock(14, [Assignment(ListOperation([]), print_call('Friday', 7))]),
    ]
    task.graph.add_nodes_from(vertices)
    task.graph.add_edges_from([
        TrueCase(vertices[0], vertices[1]),
        FalseCase(vertices[0], vertices[2]),
        UnconditionalEdge(vertices[1], vertices[3]),
        FalseCase(vertices[2], vertices[4]),
        TrueCase(vertices[2], vertices[3]),
        TrueCase(vertices[3], vertices[5]),
        FalseCase(vertices[3], vertices[6]),
        TrueCase(vertices[4], vertices[7]),
        FalseCase(vertices[4], vertices[8]),
        UnconditionalEdge(vertices[5], vertices[9]),
        FalseCase(vertices[6], vertices[10]),
        TrueCase(vertices[6], vertices[9]),
        UnconditionalEdge(vertices[7], vertices[3]),
        TrueCase(vertices[8], vertices[11]),
        FalseCase(vertices[8], vertices[3]),
        TrueCase(vertices[10], vertices[12]),
        FalseCase(vertices[10], vertices[13]),
        UnconditionalEdge(vertices[11], vertices[3]),
        UnconditionalEdge(vertices[12], vertices[9]),
        TrueCase(vertices[13], vertices[14]),
        FalseCase(vertices[13], vertices[9]),
        UnconditionalEdge(vertices[14], vertices[9]),
    ])
    PatternIndependentRestructuring().run(task)
    # Top level: switch on arg_1_0, the shared redefinition from block 3,
    # switch on arg_1_1, and the final return.
    assert isinstance((seq_node := task._ast.root), SeqNode) and (len(seq_node.children) == 4)
    assert isinstance((switch1 := seq_node.children[0]), SwitchNode)
    assert isinstance(seq_node.children[1], CodeNode) and (seq_node.children[1].instructions == vertices[3].instructions[:(- 1)])
    assert isinstance((switch2 := seq_node.children[2]), SwitchNode)
    assert isinstance(seq_node.children[3], CodeNode) and (seq_node.children[3].instructions == vertices[9].instructions)
    assert (switch1.expression == arg_1_0) and (len(switch1.children) == 3)
    assert isinstance((case1 := switch1.cases[0]), CaseNode) and (case1.constant == Constant(1, Integer(32, True))) and case1.break_case
    assert isinstance((case2 := switch1.cases[1]), CaseNode) and (case2.constant == Constant(3, Integer(32, True))) and case2.break_case
    assert isinstance((case3 := switch1.cases[2]), CaseNode) and (case3.constant == Constant(5, Integer(32, True))) and case3.break_case
    assert isinstance(case1.child, CodeNode) and (case1.child.instructions == vertices[7].instructions)
    assert isinstance(case2.child, CodeNode) and (case2.child.instructions == vertices[11].instructions)
    assert isinstance(case3.child, CodeNode) and (case3.child.instructions == vertices[1].instructions)
    # BUG FIX: this assertion previously re-checked len(switch1.children) —
    # a copy/paste from the switch1 block above; switch2 is under test here.
    assert (switch2.expression == arg_1_1) and (len(switch2.children) == 3)
    assert isinstance((case1 := switch2.cases[0]), CaseNode) and (case1.constant == Constant(2, Integer(32, True))) and case1.break_case
    assert isinstance((case2 := switch2.cases[1]), CaseNode) and (case2.constant == Constant(4, Integer(32, True))) and case2.break_case
    assert isinstance((case3 := switch2.cases[2]), CaseNode) and (case3.constant == Constant(6, Integer(32, True))) and case3.break_case
    assert isinstance(case1.child, CodeNode) and (case1.child.instructions == vertices[12].instructions)
    assert isinstance(case2.child, CodeNode) and (case2.child.instructions == vertices[14].instructions)
    assert isinstance(case3.child, CodeNode) and (case3.child.instructions == vertices[5].instructions)
class DirectoryEditorDemo(HasTraits):
    """TraitsUI demo: one Directory trait rendered in four editor styles."""
    # The directory path being edited by every item below.
    dir_name = Directory()
    # The same trait shown in simple/custom/text/readonly editor styles,
    # separated by horizontal rules (the '_' items).
    dir_group = Group(Item('dir_name', style='simple', label='Simple'), Item('_'), Item('dir_name', style='custom', label='Custom'), Item('_'), Item('dir_name', style='text', label='Text'), Item('_'), Item('dir_name', style='readonly', label='ReadOnly'))
    # Top-level resizable dialog with an OK button.
    traits_view = View(dir_group, title='DirectoryEditor', width=400, height=600, buttons=['OK'], resizable=True)
def get_bend_inds(coords3d, bond_inds, min_deg, max_deg, logger=None):
    """Derive bend (angle) index triples from pairs of bonds.

    Two bonds form a bend candidate only when they share exactly one atom
    (their index union has size 3). Candidates whose angle lies outside
    [min_deg, max_deg] are logged and dropped.
    """
    bond_sets = {frozenset(pair) for pair in bond_inds}
    bends = []
    for set_a, set_b in it.combinations(bond_sets, 2):
        # Skip pairs that do not share exactly one atom.
        if len(set_a | set_b) != 3:
            continue
        indices, _ = sort_by_central(set_a, set_b)
        if bend_valid(coords3d, indices, min_deg, max_deg):
            bends.append(indices)
        else:
            log(logger, f"Bend {indices} is not valid!")
    return bends
class TlsBulkCertificatesResponseAllOf(ModelNormal):
    """Generated OpenAPI model holding the ``data`` list of a TLS bulk
    certificates response.

    NOTE(review): the bare ``_property`` and ``_js_args_to_python_args``
    lines look like stripped decorators (``@cached_property`` /
    ``@convert_js_args_to_python_args`` in generated fastly clients) — as
    plain name expressions they would raise NameError when the class body
    executes; confirm against the generated source.
    """
    # No enum value restrictions or extra validations for this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not declared in ``openapi_types``."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    # The model itself may not be serialized as null.
    _nullable = False
    _property
    def openapi_types():
        """Declared property name -> (type,) mapping.

        Imports lazily to avoid circular imports between generated models.
        """
        lazy_import()
        return {'data': ([TlsBulkCertificateResponseData],)}
    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None
    # JSON attribute name mapping (identity here).
    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing server responses."""
        # Framework bookkeeping kwargs, popped before the remaining kwargs
        # are treated as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so
            # and no additional-properties type is declared.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    # Attributes the framework always sets on a live instance.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Keyword-argument constructor; mirrors ``_from_openapi_data`` but
        additionally rejects attempts to set read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only arrive via _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class OptionSeriesSplineSonificationContexttracksMappingTremolo(Options):
    """Accessors for the spline context-tracks sonification ``tremolo`` mapping.

    NOTE(review): generated-looking wrapper; ``_config_sub_data`` semantics
    live in the ``Options`` base class, not visible here.
    """
    def depth(self) -> 'OptionSeriesSplineSonificationContexttracksMappingTremoloDepth':
        # Nested sub-config for the tremolo depth mapping.
        return self._config_sub_data('depth', OptionSeriesSplineSonificationContexttracksMappingTremoloDepth)
    def speed(self) -> 'OptionSeriesSplineSonificationContexttracksMappingTremoloSpeed':
        # Nested sub-config for the tremolo speed mapping.
        return self._config_sub_data('speed', OptionSeriesSplineSonificationContexttracksMappingTremoloSpeed)
class TestIlluminaRunInfo(unittest.TestCase):
    """Tests for IlluminaRunInfo parsing of a synthetic RunInfo.xml."""

    def setUp(self):
        # Fresh scratch directory for each test.
        self.tmpdir = tempfile.mkdtemp()

    def tearDown(self):
        import shutil
        # BUG FIX: the previous cleanup used os.rmdir inside try/except —
        # os.rmdir only removes *empty* directories, and the RunInfo.xml
        # written by the test made it fail silently, leaking the tmpdir on
        # every run. rmtree removes the directory and its contents reliably.
        shutil.rmtree(self.tmpdir, ignore_errors=True)

    def test_illuminaruninfo(self):
        """Parse a generated NextSeq RunInfo.xml and verify every field."""
        run_info_xml = os.path.join(self.tmpdir, 'RunInfo.xml')
        with open(run_info_xml, 'wt') as fp:
            fp.write(RunInfoXml.create(run_name='151125_NB500968_0003_-ABCDE1XX', bases_mask='y101,I8,I8,y101', nlanes=8, tilecount=16))
        run_info = IlluminaRunInfo(run_info_xml)
        # Identifiers decoded from the run name.
        self.assertEqual(run_info.run_id, '151125_NB500968_0003_-ABCDE1XX')
        self.assertEqual(run_info.date, '151125')
        self.assertEqual(run_info.instrument, 'NB500968')
        self.assertEqual(run_info.run_number, '0003')
        self.assertEqual(run_info.flowcell, '-ABCDE1XX')
        self.assertEqual(run_info.lane_count, '8')
        self.assertEqual(run_info.bases_mask, 'y101,I8,I8,y101')
        # Four reads: data / index / index / data (paired-end dual index).
        self.assertEqual(len(run_info.reads), 4)
        expected_reads = [('1', '101', 'N'), ('2', '8', 'Y'), ('3', '8', 'Y'), ('4', '101', 'N')]
        for read, (number, num_cycles, is_indexed) in zip(run_info.reads, expected_reads):
            self.assertEqual(read['number'], number)
            self.assertEqual(read['num_cycles'], num_cycles)
            self.assertEqual(read['is_indexed_read'], is_indexed)
class ETHPeerFactory(BaseChainPeerFactory):
    """Peer factory for the ETH protocol family; builds the handshakers
    executed against each newly connected peer."""
    peer_class = ETHPeer
    async def get_handshakers(self) -> Tuple[(HandshakerAPI[Any], ...)]:
        """Assemble status handshakers for ETH v63 and v65, plus witness.

        Reads the canonical head, its total difficulty, and the genesis
        hash from the header DB, and derives a fork id for the newer
        protocol's status payload.
        """
        headerdb = self.context.headerdb
        head = (await headerdb.coro_get_canonical_head())
        total_difficulty = (await headerdb.coro_get_score(head.hash))
        genesis_hash = (await headerdb.coro_get_canonical_block_hash(BlockNumber(GENESIS_BLOCK_NUMBER)))
        # Legacy (v63) status payload carries no fork id.
        handshake_v63_params = StatusV63Payload(head_hash=head.hash, total_difficulty=total_difficulty, genesis_hash=genesis_hash, network_id=self.context.network_id, version=ETHProtocolV63.version)
        # Fork id derived from our VM configuration (EIP-2124 style —
        # confirm against the forkid module).
        fork_blocks = forkid.extract_fork_blocks(self.context.vm_configuration)
        our_forkid = forkid.make_forkid(genesis_hash, head.block_number, fork_blocks)
        handshake_params = StatusPayload(head_hash=head.hash, total_difficulty=total_difficulty, genesis_hash=genesis_hash, network_id=self.context.network_id, version=ETHProtocolV65.version, fork_id=our_forkid)
        # Advertise the newest sub-protocol version we support.
        highest_eth_protocol = max(self.peer_class.supported_sub_protocols, key=operator.attrgetter('version'))
        eth_handshakers = (ETHV63Handshaker(handshake_v63_params), ETHHandshaker(handshake_params, head.block_number, fork_blocks, highest_eth_protocol))
        return (eth_handshakers + self._get_wit_handshakers())
    def _get_wit_handshakers(self) -> Tuple[(HandshakerAPI[Any], ...)]:
        # The witness protocol handshaker is always offered.
        return (WitnessHandshaker(),)
class OptionSeriesStreamgraphSonificationTracksMappingTremoloDepth(Options):
    """Tremolo-depth mapping options for streamgraph series sonification.

    NOTE(review): each getter/setter pair below shares one name; these are
    almost certainly ``@property`` / setter pairs whose decorators were lost
    in extraction — as written, the later ``def`` shadows the earlier one.
    Confirm against the original package.
    """
    def mapFunction(self):
        # Getter; no mapping function by default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter for the custom mapping function.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter; no mapped data property by default.
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter for the data property the depth maps to.
        self._config(text, js_type=False)
    def max(self):
        # Getter; no maximum by default.
        return self._config_get(None)
    def max(self, num: float):
        # Setter for the maximum mapped value.
        self._config(num, js_type=False)
    def min(self):
        # Getter; no minimum by default.
        return self._config_get(None)
    def min(self, num: float):
        # Setter for the minimum mapped value.
        self._config(num, js_type=False)
    def within(self):
        # Getter; no "within" constraint by default.
        return self._config_get(None)
    def within(self, value: Any):
        # Setter for the "within" mapping constraint.
        self._config(value, js_type=False)
def app_no_client():
    """Build the tornado test Application (handlers only, no HTTP client).

    Each handler opens a dummy traced span so APM instrumentation is
    exercised; BoomHandler deliberately raises so error capture can be
    tested.
    """
    class HelloHandler(tornado.web.RequestHandler):
        def get(self):
            # Open (and immediately close) a span purely for tracing.
            with async_capture_span('test'):
                pass
            return self.write('Hello, world')
        post = get  # POST is handled identically to GET
    class RenderHandler(tornado.web.RequestHandler):
        def get(self):
            with async_capture_span('test'):
                pass
            items = ['Item 1', 'Item 2', 'Item 3']
            # Renders templates/test.html from the template_path below.
            return self.render('test.html', title='Testing so hard', items=items)
    class BoomHandler(tornado.web.RequestHandler):
        def get(self):
            # Raise an HTTP error on purpose (500 by default).
            raise tornado.web.HTTPError()
        post = get  # POST fails the same way
    app = tornado.web.Application([('/', HelloHandler), ('/boom', BoomHandler), ('/render', RenderHandler)], template_path=os.path.join(os.path.dirname(__file__), 'templates'))
    return app
class EstimatorMixin(object):
    """Mixin giving a scikit-learn style front end (fit / predict /
    transform) that delegates all real work to the backend ``run`` call."""

    def _maybe_build(self):
        # Lazily construct the underlying graph when the host defines it.
        if hasattr(self, '_build'):
            self._build()

    def _ensure_fitted(self):
        # Guard: raise when the host tracks fitted state and it is False.
        if hasattr(self, '__fitted__') and not self.__fitted__:
            raise NotFittedError('Instance not fitted (with current params).')

    def fit(self, X, y, proba=False, refit=True):
        """Fit on (X, y) and return self (no predictions collected)."""
        self._maybe_build()
        run(get_backend(self), 'fit', X, y, proba=proba, refit=refit, return_preds=False)
        return self

    def fit_transform(self, X, y, proba=False, refit=True):
        """Fit on (X, y) and return the training-set predictions."""
        self._maybe_build()
        return run(get_backend(self), 'fit', X, y, proba=proba, refit=refit, return_preds=True)

    def predict(self, X, proba=False):
        """Predict on X; requires the instance to be fitted."""
        self._ensure_fitted()
        return run(get_backend(self), 'predict', X, proba=proba, return_preds=True)

    def transform(self, X, proba=False):
        """Transform X; requires the instance to be fitted."""
        self._ensure_fitted()
        return run(get_backend(self), 'transform', X, proba=proba, return_preds=True)
def lazy_import():
    """Deferred import of the TLS relationship models.

    Called by the generated model machinery to break circular imports:
    pulls the related model classes in at first use and publishes them
    into this module's global namespace.
    """
    from fastly.model.relationship_tls_certificate import RelationshipTlsCertificate
    from fastly.model.relationship_tls_certificate_tls_certificate import RelationshipTlsCertificateTlsCertificate
    from fastly.model.relationship_tls_configuration import RelationshipTlsConfiguration
    from fastly.model.relationship_tls_configuration_tls_configuration import RelationshipTlsConfigurationTlsConfiguration
    from fastly.model.relationship_tls_domain import RelationshipTlsDomain
    from fastly.model.relationship_tls_domain_tls_domain import RelationshipTlsDomainTlsDomain
    # Publish every freshly imported class so the rest of the module can
    # reference them by bare name.
    globals().update(
        RelationshipTlsCertificate=RelationshipTlsCertificate,
        RelationshipTlsCertificateTlsCertificate=RelationshipTlsCertificateTlsCertificate,
        RelationshipTlsConfiguration=RelationshipTlsConfiguration,
        RelationshipTlsConfigurationTlsConfiguration=RelationshipTlsConfigurationTlsConfiguration,
        RelationshipTlsDomain=RelationshipTlsDomain,
        RelationshipTlsDomainTlsDomain=RelationshipTlsDomainTlsDomain,
    )
def test_get_manifest_from_ipfs():
    """Manifest fetch works from a cold cache and again from the warm one."""
    # Start cold: wipe any previously downloaded manifest for this package.
    path = _get_data_folder().joinpath('ethpm/zeppelin.snakecharmers.eth')
    if path.exists():
        shutil.rmtree(path)
    # NOTE(review): the version segment ':1/.0.0' looks garbled (a missing
    # major digit, e.g. '3.0.0'?) — confirm against the ethpm URI scheme.
    ethpm.get_manifest('ethpm://zeppelin.snakecharmers.eth:1/.0.0')
    assert _get_data_folder().joinpath('ethpm/zeppelin.snakecharmers.eth').exists()
    # The second fetch should succeed from the on-disk cache just created.
    ethpm.get_manifest('ethpm://zeppelin.snakecharmers.eth:1/.0.0')
    assert _get_data_folder().joinpath('ethpm/zeppelin.snakecharmers.eth').exists()
class SDBlock2MemDMA(LiteXModule):
    """Gateware DMA path copying an SD-card block byte stream into memory.

    NOTE(review): Migen/LiteX module — comments describe the visible wiring;
    cycle-accurate behaviour depends on the stream/DMA primitives.
    """
    def __init__(self, bus, endianness, fifo_depth=512):
        self.bus = bus
        # 8-bit input stream carrying the SD block payload.
        self.sink = stream.Endpoint([('data', 8)])
        self.irq = Signal()
        # Buffer the byte stream, then widen it to the bus data width.
        fifo = stream.SyncFIFO([('data', 8)], fifo_depth, buffered=True)
        converter = stream.Converter(8, bus.data_width, reverse=True)
        self.submodules += (fifo, converter)
        self.dma = WishboneDMAWriter(bus, with_csr=True, endianness=endianness)
        start = Signal()
        connect = Signal()
        # A new block begins on the stream's "first" beat.
        self.comb += start.eq((self.sink.valid & self.sink.first))
        # Latch "connect" on start; clear it whenever the DMA is disabled.
        self.sync += [If((~ self.dma._enable.storage), connect.eq(0)).Elif(start, connect.eq(1))]
        # While enabled and a block is in flight: sink -> fifo -> converter
        # -> dma; otherwise drain (ack) the incoming stream.
        self.comb += [If((self.dma._enable.storage & (start | connect)), self.sink.connect(fifo.sink)).Else(self.sink.ready.eq(1)), fifo.source.connect(converter.sink), converter.source.connect(self.dma.sink)]
        # Rising-edge detector on the DMA done flag: irq pulses for one
        # cycle when done goes high.
        done_d = Signal()
        self.sync += done_d.eq(self.dma._done.status)
        self.sync += self.irq.eq((self.dma._done.status & (~ done_d)))
def test_affine_index_range():
    """index_range_analysis of the affine bound ``i + 2`` with i in [0, 5]."""
    # NOTE(review): ``bar`` is presumably an Exo @proc (decorator not
    # visible here) — the .find()/._impl access below relies on that.
    def bar():
        for i in seq(0, 6):
            for j in seq(0, (i + 2)):
                pass
    # The inner loop's high-bound expression node: i + 2.
    e = bar.find('for j in _:_').hi()._impl._node
    i_sym = bar.find('for i in _:_')._impl._node.iter
    # With i constrained to [0, 5], i + 2 must span [2, 7].
    e_range = index_range_analysis(e, {i_sym: (0, 5)})
    assert (e_range == (2, 7))
(scope='function')
# NOTE(review): the bare "(scope='function')" above looks like a stripped
# @pytest.fixture(scope='function') decorator — confirm upstream.
def mailchimp_transactional_connection_config_no_secrets(db: session, mailchimp_transactional_config) -> Generator:
    """Yield a SaaS ConnectionConfig with a placeholder API key.

    The config is created in the DB for the test and deleted on teardown.
    """
    fides_key = mailchimp_transactional_config['fides_key']
    # Placeholder secret ('api_key': 'test') — deliberately not a real key.
    connection_config = ConnectionConfig.create(db=db, data={'key': fides_key, 'name': fides_key, 'connection_type': ConnectionType.saas, 'access': AccessLevel.write, 'secrets': {'api_key': 'test'}, 'saas_config': mailchimp_transactional_config})
    (yield connection_config)
    # Teardown: remove the config so tests stay isolated.
    connection_config.delete(db)
def constrained_array(dtype: type=None, ndim: int=None, shape: Tuple[(pydantic.NonNegativeInt, ...)]=None) -> type:
    """Create an ``ArrayLike`` subclass constrained by dtype/ndim/shape.

    Each parameter mirrors a class attribute stored on the returned type;
    any constraint left as ``None`` is simply not recorded.

    Returns:
        A new subclass of ``ArrayLike`` whose name reflects its
        constraints, e.g. ``ArrayLike[dtype=float64, ndim=2]``.
    """
    meta_args = []
    if dtype is not None:
        meta_args.append(f'dtype={dtype.__name__}')
    if ndim is not None:
        meta_args.append(f'ndim={ndim}')
    if shape is not None:
        meta_args.append(f'shape={shape}')
    type_name = 'ArrayLike'
    if meta_args:
        # BUG FIX: only attach the bracketed constraint list when there is
        # at least one constraint — the old code unconditionally appended
        # brackets, naming a fully unconstrained type 'ArrayLike[]'.
        type_name += '[' + ', '.join(meta_args) + ']'
    return type(type_name, (ArrayLike,), dict(dtype=dtype, ndim=ndim, shape=shape))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.