code stringlengths 281 23.7M |
|---|
class DefaultGuest(DefaultAccount):
    """
    Account typeclass for anonymous guest logins.

    Guests are handed out from a fixed name list (settings.GUEST_LIST) and are
    torn down — characters deleted, then the account itself — on disconnect or
    server shutdown, freeing the slot for the next visitor.

    NOTE(review): `create` and `authenticate` take `cls` as first argument, so
    they were presumably decorated with @classmethod upstream; the decorators
    appear to have been stripped in this copy — confirm against the original.
    """

    def create(cls, **kwargs):
        # Guest "creation" is just authentication: claim a free guest slot.
        return cls.authenticate(**kwargs)

    def authenticate(cls, **kwargs):
        """
        Find a free guest slot and create/log in a guest account for it.

        Keyword Args:
            ip (str): Connecting IP address, used only for login throttling.

        Returns:
            tuple: (account_or_None, list_of_error_strings).
        """
        errors = []
        account = None
        username = None
        ip = kwargs.get('ip', '').strip()
        if (not settings.GUEST_ENABLED):
            errors.append(_('Guest accounts are not enabled on this server.'))
            return (None, errors)
        try:
            # First name in GUEST_LIST with no existing account is the free slot.
            for name in settings.GUEST_LIST:
                if (not AccountDB.objects.filter(username__iexact=name).exists()):
                    username = name
                    break
            if (not username):
                errors.append(_('All guest accounts are in use. Please try again later.'))
                if ip:
                    # Count the failed attempt against the IP so repeated
                    # polling for a free slot gets throttled.
                    LOGIN_THROTTLE.update(ip, 'Too many requests for Guest access.')
                return (None, errors)
            else:
                # Throwaway random password; a guest slot is never re-entered
                # with the same credentials.
                password = ('%016x' % getrandbits(64))
                home = settings.GUEST_HOME
                permissions = settings.PERMISSION_GUEST_DEFAULT
                typeclass = settings.BASE_GUEST_TYPECLASS
                (account, errs) = super(DefaultGuest, cls).create(guest=True, username=username, password=password, permissions=permissions, typeclass=typeclass, home=home, ip=ip)
                errors.extend(errs)
                # Ensure the guest has at least one character to puppet.
                if (not account.characters):
                    (character, errs) = account.create_character()
                    if errs:
                        errors.extend(errs)
                return (account, errors)
        except Exception:
            # Catch-all: guest login must never traceback at the player.
            errors.append(_('An error occurred. Please e-mail an admin if the problem persists.'))
            logger.log_trace()
            return (None, errors)
        # NOTE(review): unreachable — every path above has already returned.
        return (account, errors)

    def at_post_login(self, session=None, **kwargs):
        # Announce the connection and re-puppet the guest's last character.
        self._send_to_connect_channel(_('|G{key} connected|n').format(key=self.key))
        self.puppet_object(session, self.db._last_puppet)

    def at_server_shutdown(self):
        """Delete this guest's characters at shutdown so the slot is clean."""
        super().at_server_shutdown()
        for character in self.characters:
            character.delete()

    def at_post_disconnect(self, **kwargs):
        """Guests are one-shot: delete characters, then the account itself."""
        super().at_post_disconnect()
        for character in self.characters:
            character.delete()
        self.delete()
class OptionPlotoptionsParetoSonificationDefaultinstrumentoptionsActivewhen(Options):
    """
    Option wrapper (apparently generator-produced) for the
    plotOptions.pareto.sonification.defaultInstrumentOptions.activeWhen
    configuration subtree.

    NOTE(review): every option appears twice — a getter returning
    `self._config_get(None)` and a setter calling `self._config(...)`.
    The @property / @<name>.setter decorators appear to have been stripped
    in this copy; as written, each second definition simply shadows the
    first. Confirm against the generator's original output.
    """

    def crossingDown(self):
        # Getter: configured crossingDown threshold (default None).
        return self._config_get(None)

    def crossingDown(self, num: float):
        # Setter: store the threshold as a plain (non-JS) value.
        self._config(num, js_type=False)

    def crossingUp(self):
        # Getter: configured crossingUp threshold (default None).
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        # Getter: configured max bound (default None).
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: configured min bound (default None).
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Getter: name of the point property the bounds apply to.
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
@pytest.mark.django_db
def test_date_range(award_data_fixture, elasticsearch_award_index):
    """An award must satisfy both the action_date and date_signed bounds."""
    elasticsearch_award_index.update_index()
    es_client = elasticsearch_award_index.client

    def hit_count(action_gte, signed_lte):
        # Both range clauses must match (minimum_should_match=2).
        clause = {
            'bool': {
                'should': [
                    {'range': {'action_date': {'gte': action_gte}}},
                    {'range': {'date_signed': {'lte': signed_lte}}},
                ],
                'minimum_should_match': 2,
            }
        }
        body = create_query(clause)
        result = es_client.search(index=elasticsearch_award_index.index_name, body=body)
        return result['hits']['total']['value']

    # FY2011 window contains exactly one award; FY2012 window contains none.
    assert hit_count('2010-10-01', '2011-09-30') == 1
    assert hit_count('2011-10-01', '2012-09-30') == 0
def get_geoms(images=KWARGS['images']):
    """Build IDPP-interpolated HCN -> HNC geometries and a copy of the run kwargs.

    Returns:
        tuple: (list of interpolated Geometry objects, shallow copy of KWARGS).
    """
    # Endpoints and transition-state guess, in path order.
    filenames = ('xyz_files/hcn.xyz', 'xyz_files/hcn_iso_ts.xyz', 'xyz_files/nhc.xyz')
    geoms = [
        Geometry(atoms, coords.flatten())
        for atoms, coords in (parse_xyz_file(fn) for fn in filenames)
    ]
    geoms = idpp_interpolate(geoms, images_between=images)
    return (geoms, copy.copy(KWARGS))
@pytest.mark.django_db
class BaseConversationTagsTestCase(object):
    """
    Shared fixtures for forum_conversation template-tag tests: two regular
    users in one group, a moderator group, a superuser, two forums under a
    top-level category, three topics with one post each, and the permission
    grants the tags rely on.
    """

    # Fix: the bare `(autouse=True)` residue was a stripped (and syntax-broken)
    # pytest fixture decorator; restored so setup runs before every test.
    @pytest.fixture(autouse=True)
    def setup(self):
        self.loadstatement = '{% load forum_conversation_tags %}'
        self.request_factory = RequestFactory()
        # Regular users u1/u2 share group g1.
        self.g1 = GroupFactory.create()
        self.u1 = UserFactory.create()
        self.u2 = UserFactory.create()
        self.u1.groups.add(self.g1)
        self.u2.groups.add(self.g1)
        # Moderator group with a single member, plus a superuser.
        self.moderators = GroupFactory.create()
        self.moderator = UserFactory.create()
        self.moderator.groups.add(self.moderators)
        self.superuser = UserFactory.create(is_superuser=True)
        self.perm_handler = PermissionHandler()
        # Forum tree: one category with two forums; three topics, one post each.
        self.top_level_cat = create_category_forum()
        self.forum_1 = create_forum(parent=self.top_level_cat)
        self.forum_2 = create_forum(parent=self.top_level_cat)
        self.forum_1_topic = create_topic(forum=self.forum_1, poster=self.u1)
        self.forum_2_topic = create_topic(forum=self.forum_2, poster=self.u2)
        self.forum_3_topic = create_topic(forum=self.forum_2, poster=self.u2)
        self.post_1 = PostFactory.create(topic=self.forum_1_topic, poster=self.u1)
        self.post_2 = PostFactory.create(topic=self.forum_2_topic, poster=self.u2)
        self.post_3 = PostFactory.create(topic=self.forum_3_topic, poster=self.u2)
        # Regular-user permissions on forum_1 only.
        assign_perm('can_see_forum', self.g1, self.forum_1)
        assign_perm('can_read_forum', self.g1, self.forum_1)
        assign_perm('can_edit_own_posts', self.g1, self.forum_1)
        assign_perm('can_delete_own_posts', self.g1, self.forum_1)
        assign_perm('can_reply_to_topics', self.g1, self.forum_1)
        # Moderators additionally get edit/delete over others' posts.
        assign_perm('can_see_forum', self.moderators, self.forum_1)
        assign_perm('can_read_forum', self.moderators, self.forum_1)
        assign_perm('can_edit_own_posts', self.moderators, self.forum_1)
        assign_perm('can_delete_own_posts', self.moderators, self.forum_1)
        assign_perm('can_edit_posts', self.moderators, self.forum_1)
        assign_perm('can_delete_posts', self.moderators, self.forum_1)

    def get_request(self, url='/'):
        """Build a GET request for `url` with a working session attached."""
        # Fix: `url` was previously ignored (hard-coded '/'); default behavior
        # is unchanged.
        request = self.request_factory.get(url)
        middleware = SessionMiddleware((lambda r: HttpResponse('Response')))
        middleware.process_request(request)
        request.session.save()
        return request
def extractUnderworldersBlogspotCom(item):
    """
    Parse a feed item from underworlders.blogspot.com.

    Returns None for previews or items without a chapter/volume, a release
    message for recognized tags, or False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # tag -> (series name, translation type); checked in declaration order.
    known_tags = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in known_tags.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_task_set_system_controlsts_exclusively(task_definition):
    """exclusive=True must replace, not extend, the container's systemControls."""
    before = task_definition.containers[0]['systemControls']
    assert len(before) == 1
    assert before[0]['namespace'] == 'net.core.somaxconn'

    task_definition.set_system_controls(((u'webserver', u'net.ipv4.ip_forward', u'1'),), exclusive=True)

    after = task_definition.containers[0]['systemControls']
    assert len(after) == 1
    assert after[0]['namespace'] == 'net.ipv4.ip_forward'
    assert {'namespace': 'net.ipv4.ip_forward', 'value': '1'} in after
class ObjectClassFinancialSpendingViewSet(CachedDetailViewSet):
    """
    Obligated amounts per major object class for one agency and fiscal year,
    taken from that agency's latest final-balances submission.
    """
    endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/financial_spending/major_object_class.md'
    serializer_class = ObjectClassFinancialSpendingSerializer

    def get_queryset(self):
        """
        Build the queryset for the requested fiscal_year / funding_agency_id.

        Raises:
            InvalidParameterException: if either required query param is missing.
        """
        json_request = self.request.query_params
        fiscal_year = json_request.get('fiscal_year', None)
        funding_agency_id = json_request.get('funding_agency_id', None)
        if (not (fiscal_year and funding_agency_id)):
            raise InvalidParameterException('Missing one or more required query parameters: fiscal_year, funding_agency_id')
        # Unknown agency id -> empty result rather than an error.
        toptier_agency = Agency.objects.filter(id=funding_agency_id).first()
        if (toptier_agency is None):
            return FinancialAccountsByProgramActivityObjectClass.objects.none()
        toptier_agency = toptier_agency.toptier_agency
        # Latest submission for this agency/year decides which quarter's
        # figures are reported.
        submission_queryset = SubmissionAttributes.objects.all()
        submission_queryset = submission_queryset.filter(toptier_code=toptier_agency.toptier_code, reporting_fiscal_year=fiscal_year).order_by('-reporting_fiscal_year', '-reporting_fiscal_quarter').annotate(fiscal_year=F('reporting_fiscal_year'), fiscal_quarter=F('reporting_fiscal_quarter'))
        submission = submission_queryset.first()
        if (submission is None):
            return FinancialAccountsByProgramActivityObjectClass.objects.none()
        active_fiscal_year = submission.reporting_fiscal_year
        active_fiscal_quarter = submission.fiscal_quarter
        # Sum obligations by major object class; '00' is relabeled
        # 'Unknown Object Type'. Only final-balances submissions count.
        queryset = FinancialAccountsByProgramActivityObjectClass.objects.filter(submission__reporting_fiscal_year=active_fiscal_year, submission__reporting_fiscal_quarter=active_fiscal_quarter, treasury_account__funding_toptier_agency=toptier_agency, submission__is_final_balances_for_fy=True).annotate(major_object_class_name=Case(When(object_class__major_object_class='00', then=Value('Unknown Object Type')), default='object_class__major_object_class_name', output_field=TextField()), major_object_class_code=F('object_class__major_object_class')).values('major_object_class_name', 'major_object_class_code').annotate(obligated_amount=Sum('obligations_incurred_by_program_object_class_cpe')).order_by('major_object_class_code')
        return queryset
def metrics_store(cfg, read_only=True, track=None, challenge=None, car=None, meta_info=None):
    """Instantiate and open the configured metrics store for the current race."""
    store_cls = metrics_store_class(cfg)
    store = store_cls(cfg=cfg, meta_info=meta_info)
    logging.getLogger(__name__).info('Creating %s', str(store))
    race_id = cfg.opts('system', 'race.id')
    race_timestamp = cfg.opts('system', 'time.start')
    # Fall back to the mechanic's configured car names unless a car is given.
    selected_car = car if car is not None else cfg.opts('mechanic', 'car.names')
    store.open(race_id, race_timestamp, track, challenge, selected_car, create=(not read_only))
    return store
class FullTextProcessorConfig(NamedTuple):
    """
    Immutable feature switches for the full-text processing pipeline.

    NOTE(review): `from_app_config` takes no self/cls, so it was presumably
    decorated with @staticmethod upstream; the decorator appears stripped
    in this copy — confirm.
    """
    extract_front: bool = True
    extract_authors: bool = True
    extract_affiliations: bool = True
    extract_body_sections: bool = True
    extract_acknowledgements: bool = True
    extract_back_sections: bool = True
    extract_references: bool = True
    extract_citation_fields: bool = True
    extract_citation_authors: bool = True
    extract_citation_editors: bool = False
    extract_figure_fields: bool = True
    extract_table_fields: bool = True
    merge_raw_authors: bool = False
    extract_graphic_bounding_boxes: bool = True
    extract_graphic_assets: bool = False
    use_cv_model: bool = False  # computer-vision model for graphics
    cv_render_dpi: float = DEFAULT_PDF_RENDER_DPI
    use_ocr_model: bool = False
    replace_text_by_cv_graphic: bool = False
    max_graphic_distance: float = DEFAULT_MAX_GRAPHIC_DISTANCE

    def from_app_config(app_config: AppConfig) -> 'FullTextProcessorConfig':
        # Overlay processors.fulltext settings from the app config onto defaults.
        return FullTextProcessorConfig()._replace(**app_config.get('processors', {}).get('fulltext', {}))

    def get_for_requested_field_names(self, request_field_names: Set[str]) -> 'FullTextProcessorConfig':
        """
        Narrow the config to only what the requested fields need; if any
        field outside front-matter/references is requested (or none are),
        return the full config unchanged.
        """
        if (not request_field_names):
            return self
        remaining_field_names = ((request_field_names - FRONT_FIELDS) - {RequestFieldNames.REFERENCES})
        if remaining_field_names:
            # Something beyond front/references was asked for: keep everything.
            return self
        extract_front = bool((FRONT_FIELDS & request_field_names))
        extract_authors = (RequestFieldNames.AUTHORS in request_field_names)
        extract_affiliations = (RequestFieldNames.AFFILIATIONS in request_field_names)
        extract_references = (RequestFieldNames.REFERENCES in request_field_names)
        return self._replace(extract_front=extract_front, extract_authors=extract_authors, extract_affiliations=extract_affiliations, extract_body_sections=False, extract_acknowledgements=False, extract_back_sections=False, extract_references=extract_references, extract_graphic_bounding_boxes=False)

    def get_for_header_document(self) -> 'FullTextProcessorConfig':
        # Header-only processing is the front-fields-only configuration.
        return self.get_for_requested_field_names(FRONT_FIELDS)
class Tokenizer(object):
    """
    Thin wrapper around the stdlib `tokenize` module that maps operator text
    to token kinds and supports user-defined extra tokens.

    NOTE(review): the `''` key at the end of `_pyopmap` (mapping to
    tokenize.AT) was presumably `'@'` and looks garbled in this copy — confirm.
    """
    _pyopmap = {'(': tokenize.LPAR, ')': tokenize.RPAR, '[': tokenize.LSQB, ']': tokenize.RSQB, ':': tokenize.COLON, ',': tokenize.COMMA, ';': tokenize.SEMI, '+': tokenize.PLUS, '+=': tokenize.PLUSEQUAL, '-': tokenize.MINUS, '-=': tokenize.MINEQUAL, '*': tokenize.STAR, '**': tokenize.DOUBLESTAR, '**=': tokenize.DOUBLESTAREQUAL, '*=': tokenize.STAREQUAL, '/': tokenize.SLASH, '//': tokenize.DOUBLESLASH, '//=': tokenize.DOUBLESLASHEQUAL, '/=': tokenize.SLASHEQUAL, '|': tokenize.VBAR, '|=': tokenize.VBAREQUAL, '&': tokenize.AMPER, '&=': tokenize.AMPEREQUAL, '<': tokenize.LESS, '<=': tokenize.LESSEQUAL, '<<': tokenize.LEFTSHIFT, '<<=': tokenize.LEFTSHIFTEQUAL, '>': tokenize.GREATER, '>=': tokenize.GREATEREQUAL, '>>': tokenize.RIGHTSHIFT, '>>=': tokenize.RIGHTSHIFTEQUAL, '=': tokenize.EQUAL, '==': tokenize.EQEQUAL, '.': tokenize.DOT, '%': tokenize.PERCENT, '%=': tokenize.PERCENTEQUAL, '{': tokenize.LBRACE, '}': tokenize.RBRACE, '^': tokenize.CIRCUMFLEX, '^=': tokenize.CIRCUMFLEXEQUAL, '~': tokenize.TILDE, '!=': tokenize.NOTEQUAL, '<>': tokenize.NOTEQUAL, '': tokenize.AT}

    def __init__(self, python=True, opmap={}, skip=None, **extra):
        """
        Args:
            python: start from the standard Python operator map.
            opmap: extra/overriding operator-text -> kind entries.
                   (Mutable default is safe here: it is always copied.)
            skip: token kinds to drop during tokenize() (default COMMENT, NL).
            **extra: name=text pairs defining brand-new token kinds.
        """
        self._python = python
        self._opmap = opmap.copy()
        if python:
            self.opmap = self._pyopmap.copy()
            self.opmap.update(opmap)
        else:
            self.opmap = opmap.copy()
        self.tok_name = {}
        self._extra = {}
        if python:
            # Mirror the stdlib token names as attributes (self.NAME, self.OP, ...).
            for (kind, name) in tokenize.tok_name.items():
                self.tok_name[kind] = name
                setattr(self, name, kind)
        if (not hasattr(self, 'NT_OFFSET')):
            self.NT_OFFSET = 256
        # Allocate kinds for the user-defined tokens below NT_OFFSET.
        # NOTE(review): new kinds start at `last + 0`, which reuses the
        # highest existing kind number — possibly intended to be last + 1.
        last = max((n for n in self.tok_name if (n != self.NT_OFFSET)))
        for (shift, (name, txt)) in enumerate(sorted(extra.items())):
            kind = (last + shift)
            if (kind >= self.NT_OFFSET):
                raise TypeError('too many new tokens')
            self.tok_name[kind] = name
            setattr(self, name, kind)
            self._extra[txt] = kind
        self.opmap.update(self._extra)
        if (skip is None):
            skip = [self.COMMENT, self.NL]
        self._skip = set(skip)

    def __repr__(self):
        # Reconstruct a constructor-like representation from non-default state.
        args = []
        if (not self._python):
            args.append(('python=%s' % self._python))
        if self._opmap:
            args.append(('opmap=%r' % self._opmap))
        if (self._skip != set([self.COMMENT, self.NL])):
            args.append(('skip=%r' % list(self._skip)))
        args.extend((('%s=%r' % (self.tok_name[kind], txt)) for (txt, kind) in self._extra.items()))
        return ('%s(%s)' % (self.__class__.__name__, ', '.join(args)))

    def tokenize(self, stream):
        """
        Yield Token objects from `stream`, translating OP tokens through
        `opmap`, recovering extra tokens from ERRORTOKEN, and dropping
        kinds listed in `skip`.
        """
        self.infile = stream
        self.last = None
        self.lines = []

        def readline():
            # Feed generate_tokens while recording every raw line read.
            self.lines.append(stream.readline())
            return self.lines[(- 1)]
        err = self.ERRORTOKEN
        for token in tokenize.generate_tokens(readline):
            if (token[0] == err):
                # An ERRORTOKEN may actually be one of our extra tokens.
                try:
                    token = ((self._extra[token[1]],) + token[1:])
                except:
                    raise ParseError(Token(token, self))
            elif (token[0] in self._skip):
                # Skipped kinds still get a hook call (errors ignored).
                try:
                    self.skip_token(Token(token, self))
                except:
                    pass
                continue
            elif (token[0] == self.OP):
                token = ((self.opmap[token[1]],) + token[1:])
            self.last = Token(token, self)
            (yield self.last)

    def skip_token(self, token):
        # Hook for subclasses; called for each skipped token.
        pass
def test_cube_attr_mean_two_surfaces_multiattr(tmpdir, load_cube_rsgy1, generate_plot):
    """Multi-attribute window slice must match the single-'rms' attribute run."""
    top = xtgeo.surface_from_file(RTOP1)
    base = xtgeo.surface_from_file(RBAS1)
    cube = load_cube_rsgy1

    # Reference run: rms only.
    rms_only = top.copy()
    rms_only.slice_cube_window(cube, other=base, other_position='below', attribute='rms', sampling='trilinear', showprogress=True)

    # Same window, several attributes at once.
    multi = top.copy()
    attrs = multi.slice_cube_window(cube, other=base, other_position='below', attribute=['max', 'mean', 'min', 'rms'], sampling='trilinear', showprogress=True)

    assert rms_only.values.mean() == attrs['rms'].values.mean()
    assert rms_only.values.std() == attrs['rms'].values.std()

    for name in attrs:
        surf = attrs[name]
        surf.to_file(join(tmpdir, (('surf_slice_cube_2surf_' + name) + 'multi.gri')))
        minmax = ((- 0.1), 0.1) if (name == 'mean') else (None, None)
        if generate_plot:
            surf.quickplot(filename=join(tmpdir, (('surf_slice_cube_2surf_' + name) + 'multi.png')), colormap='jet', minmax=minmax, title=('Reek two surfs mean multiattr: ' + name), infotext=('Method: trilinear, 2 surfs multiattr ' + name))
class aggregate_stats_reply(stats_reply):
    """
    OFPT_STATS_REPLY / aggregate stats (generated-style OpenFlow message
    wrapper; wire format: header, stats_type, flags, pad, packet/byte/flow
    counts, pad).

    NOTE(review): `unpack` takes `reader` with no self/cls — presumably a
    stripped @staticmethod. Padding is appended as str ('\\x00' * 4) and
    pack() returns ''.join(...), which is Python-2-era byte handling —
    confirm target interpreter.
    """
    version = 5
    type = 19
    stats_type = 2

    def __init__(self, xid=None, flags=None, packet_count=None, byte_count=None, flow_count=None):
        # Each field falls back to a zero-ish default when not supplied.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (packet_count != None):
            self.packet_count = packet_count
        else:
            self.packet_count = 0
        if (byte_count != None):
            self.byte_count = byte_count
        else:
            self.byte_count = 0
        if (flow_count != None):
            self.flow_count = flow_count
        else:
            self.flow_count = 0
        return

    def pack(self):
        """Serialize to the wire format; the length field is backpatched."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder (index 2)
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # pad
        packed.append(struct.pack('!Q', self.packet_count))
        packed.append(struct.pack('!Q', self.byte_count))
        packed.append(struct.pack('!L', self.flow_count))
        packed.append(('\x00' * 4))  # pad
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse one aggregate_stats_reply from `reader`; asserts the header."""
        obj = aggregate_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Re-slice so reads are bounded by the declared message length
        # (4 bytes of the header already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 2)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        obj.packet_count = reader.read('!Q')[0]
        obj.byte_count = reader.read('!Q')[0]
        obj.flow_count = reader.read('!L')[0]
        reader.skip(4)
        return obj

    def __eq__(self, other):
        # Field-by-field equality; padding and length are not state.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.packet_count != other.packet_count):
            return False
        if (self.byte_count != other.byte_count):
            return False
        if (self.flow_count != other.flow_count):
            return False
        return True

    def pretty_print(self, q):
        """Render the message onto pretty-printer `q`."""
        q.text('aggregate_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('packet_count = ')
                q.text(('%#x' % self.packet_count))
                q.text(',')
                q.breakable()
                q.text('byte_count = ')
                q.text(('%#x' % self.byte_count))
                q.text(',')
                q.breakable()
                q.text('flow_count = ')
                q.text(('%#x' % self.flow_count))
                q.breakable()
        q.text('}')
@pytest.mark.skipif((sys.platform in {'win32', 'linux'}), reason='macOS specific test')
def test_macos_open_privacy_settings_logs_exception(monkeypatch, caplog):
    """A failing `open` subprocess call must be logged, not propagated."""
    def failing_run(*_args, **_kwargs):
        raise ValueError("Mocked exception on 'open' call")

    monkeypatch.setattr(subprocess, 'run', failing_run)
    with caplog.at_level(logging.ERROR):
        # Must swallow the ValueError and emit an error log instead.
        permissions._macos_open_privacy_settings()
    assert "couldn't open" in caplog.text.lower()
def upload_csv(resource_name, data, gcs_upload_path):
    """Write `data` as a CSV and upload it to `gcs_upload_path` (best effort)."""
    try:
        with csv_writer.write_csv(resource_name, data, True) as csv_file:
            LOGGER.info('CSV filename: %s', csv_file.name)
            StorageClient({}).put_text_file(csv_file.name, gcs_upload_path)
    except Exception:
        # Deliberately non-fatal: log the failure and keep the caller running.
        LOGGER.exception('Unable to upload csv document to bucket %s:\n%s\n%s', gcs_upload_path, data, resource_name)
def create_class_type(rid, type_desc):
    """
    Factory: map a textual class-type description to a ClassType instance.

    Raises:
        RMTException: (code 95) when `type_desc` is not a known type.
    """
    factories = {
        'implementable': ClassTypeImplementable,
        'detailable': ClassTypeDetailable,
        'selected': ClassTypeSelected,
    }
    factory = factories.get(type_desc)
    if factory is None:
        raise RMTException(95, ("%s:class type invalid '%s'" % (rid, type_desc)))
    return factory()
@patch('ecs_deploy.cli.get_client')
def test_deploy(get_client, runner):
    """Happy-path deploy: revision 2 created, revision 1 deregistered, no in-place update."""
    get_client.return_value = EcsTestClient('acces_key', 'secret_key')
    result = runner.invoke(cli.deploy, (CLUSTER_NAME, SERVICE_NAME))
    assert result.exit_code == 0
    assert not result.exception
    expected_messages = (
        u'Deploying based on task definition: test-task:1',
        u'Successfully created revision: 2',
        u'Successfully deregistered revision: 1',
        u'Successfully changed task definition to: test-task:2',
        u'Deployment successful',
    )
    for message in expected_messages:
        assert message in result.output
    assert u'Updating task definition' not in result.output
@pytest.mark.parametrize('threshold', [0.3, 0.5, 0.8])
@pytest.mark.parametrize('y_true, y_pred', [(binary_raw_inputs.target, binary_raw_inputs.preds), (binary_prob_inputs.target, binary_prob_inputs.preds)])
def test_accuracy(y_true, y_pred, threshold):
    """The Accuracy metric must agree with sklearn's accuracy_score."""
    # Binarize predictions at the threshold, flattened to 1-D numpy.
    preds_np = (y_pred.view((- 1)).numpy() >= threshold).astype(np.uint8)
    target_np = y_true.view((- 1)).numpy()
    reference = accuracy_score(y_true=target_np, y_pred=preds_np)
    metric = Accuracy(threshold=threshold)
    score = metric(y_pred, y_true)
    assert_allclose(reference, score)
def test_matcher_regex_shape(en_vocab):
    """SHAPE+REGEX: match tokens whose shape contains no 'x' (i.e. no lowercase)."""
    matcher = Matcher(en_vocab)
    matcher.add('NON_ALPHA', [[{'SHAPE': {'REGEX': '^[^x]+$'}}]])
    # '99' (shape 'dd') and '!' match; 'problems' (lowercase) does not.
    assert len(matcher(Doc(en_vocab, words=['99', 'problems', '!']))) == 2
    # A purely lowercase doc yields no matches.
    assert len(matcher(Doc(en_vocab, words=['bye']))) == 0
class Association(Base):
    """
    Many-to-many link between a CVE and a CPE, carrying optional version
    range bounds for the affected products.
    """
    __tablename__ = 'association'
    # Composite primary key: one row per (CVE, CPE) pair.
    cve_id = Column(String(), ForeignKey('cves.cve_id'), primary_key=True)
    cpe_id = Column(String(), ForeignKey('cpes.cpe_id'), primary_key=True)
    # Version range qualifiers; any may be NULL when that bound is absent.
    version_start_including = Column(String())
    version_start_excluding = Column(String())
    version_end_including = Column(String())
    version_end_excluding = Column(String())
    # Two-way ORM navigation to the linked Cpe / Cve rows.
    cpe = relationship('Cpe', back_populates='cves')
    cve = relationship('Cve', back_populates='cpes')

    def __repr__(self) -> str:
        return f'Association({(self.cve_id, self.cpe_id)})'
class TestDemoDocs():
    """Verify the code blocks in generic-skills-step-by-step.md stay in sync
    with the packaged generic_seller / generic_buyer skill sources."""

    def setup_class(cls):
        # Pull every python code block from the tutorial; the first 11 belong
        # to the seller walk-through, the remainder to the buyer one.
        md_path = os.path.join(ROOT_DIR, 'docs', 'generic-skills-step-by-step.md')
        code_blocks = extract_code_blocks(filepath=md_path, filter_='python')
        cls.generic_seller = code_blocks[0:11]
        cls.generic_buyer = code_blocks[11:len(code_blocks)]

    def _skill_source(self, skill, module):
        # Read one module of a packaged fetchai skill as text.
        path = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', skill, module)
        with open(path, 'r') as file:
            return file.read()

    def test_generic_seller_skill_behaviour(self):
        source = self._skill_source('generic_seller', 'behaviours.py')
        assert (self.generic_seller[0] in source), 'Code is not identical.'

    def test_generic_seller_skill_handler(self):
        source = self._skill_source('generic_seller', 'handlers.py')
        for code_block in self.generic_seller[1:8]:
            assert (code_block in source), 'Code is not identical.'

    def test_generic_seller_skill_strategy(self):
        source = self._skill_source('generic_seller', 'strategy.py')
        for code_block in self.generic_seller[8:10]:
            assert (code_block in source), 'Code is not identical.'

    def test_generic_seller_skill_dialogues(self):
        source = self._skill_source('generic_seller', 'dialogues.py')
        assert (self.generic_seller[10] in source), 'Code is not identical.'

    def test_generic_buyer_skill_behaviour(self):
        source = self._skill_source('generic_buyer', 'behaviours.py')
        assert (self.generic_buyer[0] in source), 'Code is not identical.'

    def test_generic_buyer_skill_handler(self):
        source = self._skill_source('generic_buyer', 'handlers.py')
        for code_block in self.generic_buyer[1:9]:
            assert (code_block in source), 'Code is not identical.'

    def test_generic_buyer_skill_strategy(self):
        source = self._skill_source('generic_buyer', 'strategy.py')
        for code_block in self.generic_buyer[9:13]:
            assert (code_block in source), 'Code is not identical.'

    def test_generic_buyer_skill_dialogues(self):
        source = self._skill_source('generic_buyer', 'dialogues.py')
        assert (self.generic_buyer[13] in source), 'Code is not identical.'
@_register_parser
@_set_msg_type(ofproto.OFPT_ERROR)
class OFPErrorMsg(MsgBase):
    """
    OpenFlow OFPT_ERROR message, including the experimenter-error variant.

    NOTE(review): `parser`, `parse_body` and `parse_experimenter_body` take
    `cls`/`buf` without self — presumably @classmethod decorators were
    stripped in this copy; confirm against the original source.
    """

    def __init__(self, datapath, type_=None, code=None, data=None, **kwargs):
        super(OFPErrorMsg, self).__init__(datapath)
        self.type = type_
        self.code = code
        # The wire payload must be bytes; accept str for convenience.
        if isinstance(data, six.string_types):
            data = data.encode('ascii')
        self.data = data
        # Experimenter errors carry exp_type/experimenter instead of code.
        if (self.type == ofproto.OFPET_EXPERIMENTER):
            self.exp_type = kwargs.get('exp_type', None)
            self.experimenter = kwargs.get('experimenter', None)

    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        """Parse an error message, dispatching on the error type field."""
        # Peek the type before full parsing to choose the body format.
        (type_,) = struct.unpack_from('!H', six.binary_type(buf), ofproto.OFP_HEADER_SIZE)
        msg = super(OFPErrorMsg, cls).parser(datapath, version, msg_type, msg_len, xid, buf)
        if (type_ == ofproto.OFPET_EXPERIMENTER):
            (msg.type, msg.exp_type, msg.experimenter, msg.data) = cls.parse_experimenter_body(buf)
        else:
            (msg.type, msg.code, msg.data) = cls.parse_body(buf)
        return msg

    def parse_body(cls, buf):
        # Standard error body: (type, code) header followed by raw data.
        (type_, code) = struct.unpack_from(ofproto.OFP_ERROR_MSG_PACK_STR, buf, ofproto.OFP_HEADER_SIZE)
        data = buf[ofproto.OFP_ERROR_MSG_SIZE:]
        return (type_, code, data)

    def parse_experimenter_body(cls, buf):
        # Experimenter body: (type, exp_type, experimenter) then raw data.
        (type_, exp_type, experimenter) = struct.unpack_from(ofproto.OFP_ERROR_EXPERIMENTER_MSG_PACK_STR, buf, ofproto.OFP_HEADER_SIZE)
        data = buf[ofproto.OFP_ERROR_EXPERIMENTER_MSG_SIZE:]
        return (type_, exp_type, experimenter, data)

    def _serialize_body(self):
        assert (self.data is not None)
        if (self.type == ofproto.OFPET_EXPERIMENTER):
            msg_pack_into(ofproto.OFP_ERROR_EXPERIMENTER_MSG_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.type, self.exp_type, self.experimenter)
            self.buf += self.data
        else:
            msg_pack_into(ofproto.OFP_ERROR_MSG_PACK_STR, self.buf, ofproto.OFP_HEADER_SIZE, self.type, self.code)
            self.buf += self.data
class Net(nn.Module):
    """Small CNN producing either class logits or (when reid=True) an
    L2-normalized 512->256 embedding for re-identification."""

    def __init__(self, num_classes=576, reid=False):
        super(Net, self).__init__()
        # Stem: 3->64 conv + BN + ReLU, then overlapping max-pool.
        self.conv = nn.Sequential(
            nn.Conv2d(3, 64, 3, stride=1, padding=1),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(3, 2, padding=1),
        )
        # Four stages built by make_layers (last arg presumably toggles
        # downsampling — confirm against make_layers' definition).
        self.layer1 = make_layers(64, 64, 2, False)
        self.layer2 = make_layers(64, 128, 2, True)
        self.layer3 = make_layers(128, 256, 2, True)
        self.layer4 = make_layers(256, 512, 2, True)
        self.avgpool = nn.AvgPool2d((8, 4), 1)
        self.reid = reid
        self.classifier = nn.Sequential(
            nn.Linear(512, 256),
            nn.BatchNorm1d(256),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(256, num_classes),
        )

    def forward(self, x):
        x = self.conv(x)
        for stage in (self.layer1, self.layer2, self.layer3, self.layer4):
            x = stage(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), (- 1))
        if self.reid:
            # Re-id mode: return the L2-normalized pooled feature.
            return x.div(x.norm(p=2, dim=1, keepdim=True))
        return self.classifier(x)
def _check_preliminaries() -> None:
    """Verify aea is importable and black/isort/protoc (right version) exist."""
    try:
        import aea  # noqa: F401
    except ModuleNotFoundError:
        enforce(False, "'aea' package not installed.")
    # Required command-line tools must be on PATH.
    for tool in ('black', 'isort', 'protoc'):
        enforce((shutil.which(tool) is not None), f'{tool} command line tool not found.')
    # protoc must additionally report the pinned libprotoc version.
    completed = subprocess.run(['protoc', '--version'], stdout=subprocess.PIPE, check=True)
    version_output = completed.stdout.decode('utf-8')
    enforce((LIBPROTOC_VERSION in version_output), f'Invalid version for protoc. Found: {version_output}. Required: {LIBPROTOC_VERSION}.')
class ToptierAgencyPublishedDABSView(models.Model):
    """
    Read-only model over the vw_published_dabs_toptier_agency database view
    (managed = False: Django never creates or migrates this table).
    """
    toptier_code = models.TextField()
    name = models.TextField()
    abbreviation = models.TextField()
    # The toptier agency is the primary key of the view row.
    toptier_agency = models.OneToOneField('references.ToptierAgency', on_delete=models.DO_NOTHING, primary_key=True, related_name='%(class)s')
    agency = models.OneToOneField('references.Agency', on_delete=models.DO_NOTHING, primary_key=False, related_name='%(class)s')
    user_selectable = models.BooleanField()

    class Meta():
        db_table = 'vw_published_dabs_toptier_agency'
        managed = False
class OptionPlotoptionsPyramid3dEvents(Options):
    """
    Option wrapper (apparently generator-produced) for the
    plotOptions.pyramid3d.events configuration subtree: JS event callbacks
    for the series.

    NOTE(review): every event appears twice — a getter returning
    `self._config_get(None)` and a setter calling `self._config(...)`.
    The @property / @<name>.setter decorators appear to have been stripped
    in this copy; as written, each second definition shadows the first.
    """

    def afterAnimate(self):
        # Getter: configured afterAnimate handler (default None).
        return self._config_get(None)

    def afterAnimate(self, value: Any):
        # Setter: store the handler as a plain (non-JS) value.
        self._config(value, js_type=False)

    def checkboxClick(self):
        return self._config_get(None)

    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)

    def click(self):
        return self._config_get(None)

    def click(self, value: Any):
        self._config(value, js_type=False)

    def hide(self):
        return self._config_get(None)

    def hide(self, value: Any):
        self._config(value, js_type=False)

    def legendItemClick(self):
        return self._config_get(None)

    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)

    def mouseOut(self):
        return self._config_get(None)

    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    def mouseOver(self):
        return self._config_get(None)

    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    def show(self):
        return self._config_get(None)

    def show(self, value: Any):
        self._config(value, js_type=False)
class TestPythonProperty(unittest.TestCase):
    """Behavioural checks for Model's read-only and read/write properties."""

    def test_read_only_property(self):
        instance = Model()
        self.assertEqual(instance.read_only, 1729)
        # Neither assignment nor deletion may succeed on a read-only property.
        with self.assertRaises(AttributeError):
            instance.read_only = 2034
        with self.assertRaises(AttributeError):
            del instance.read_only

    def test_read_write_property(self):
        instance = Model()
        self.assertEqual(instance.value, 0)
        # Writes round-trip through the property.
        for new_value in (23, 77):
            instance.value = new_value
            self.assertEqual(instance.value, new_value)
        # Deletion stays forbidden even though writes are allowed.
        with self.assertRaises(AttributeError):
            del instance.value
class ThreeColumns(Layout):
    """
    Tiling layout: a main column plus one or two side columns. The second
    column holds up to `second_column_max` windows (0 = alternate evenly)
    and sits left or right of main per `second_column_position`.

    NOTE(review): `create` takes `cls` without a decorator — presumably a
    stripped @classmethod; confirm upstream.
    """

    def __init__(self, workspace_name: str, params: List[Any]):
        super().__init__(LayoutName.THREE_COLUMNS, workspace_name)
        # params: [two_col_ratio, three_col_ratio, second_col_max, second_col_pos],
        # each optional; any parse failure falls back to all defaults.
        try:
            self.two_columns_main_ratio = (float(params[0]) if (len(params) > 0) else 0.5)
            self.three_columns_main_ratio = (float(params[1]) if (len(params) > 1) else 0.5)
            self.second_column_max = (int(params[2]) if (len(params) > 2) else 0)
            self.second_column_position = (HorizontalPosition(params[3]) if (len(params) > 3) else HorizontalPosition.LEFT)
        except ValueError:
            self.two_columns_main_ratio = 0.5
            self.three_columns_main_ratio = 0.5
            self.second_column_max = 0
            self.second_column_position = HorizontalPosition.LEFT
            self._warn_wrong_parameters(params)

    def _params(self) -> List[Any]:
        # Serialize back in the same order __init__ consumes them.
        return [self.two_columns_main_ratio, self.three_columns_main_ratio, self.second_column_max, self.second_column_position.value]

    def anchor_mark(self) -> Optional[str]:
        return self.mark_last()

    def split_direction(self, context: Context) -> Optional[Direction]:
        # Split vertically when opening the 2nd/3rd container or the first
        # container of the third column; otherwise leave direction alone.
        third_column_container_index = (3 if (self.second_column_max == 0) else (self.second_column_max + 2))
        return (Direction.VERTICAL if (len(context.containers) in [2, 3, (third_column_container_index + 1)]) else None)

    def stack_direction(self, context: Context) -> Optional[Direction]:
        return Direction.VERTICAL

    def _update(self, context: Context):
        """Place the newest container and re-apply the column width ratios."""
        # Does the new container belong in the second column? Either the
        # columns alternate (max == 0) or the second column still has room.
        is_second_column = (((self.second_column_max == 0) and ((len(context.containers) % 2) == 0)) or ((len(context.containers) - 1) <= self.second_column_max))
        is_right = (((self.second_column_position == HorizontalPosition.RIGHT) and is_second_column) or ((self.second_column_position == HorizontalPosition.LEFT) and (not is_second_column)))
        third_column_container_index = (3 if (self.second_column_max == 0) else (self.second_column_max + 2))
        corners = Corners(context.containers)
        bottom_container = (corners.bottom_right() if is_right else corners.bottom_left())
        # Only push the container sideways when a new column is being formed.
        direction = (None if (len(context.containers) not in [2, third_column_container_index]) else ('right' if is_right else 'left'))
        Mover(context).move_to_container(bottom_container.id, direction)
        if (len(context.containers) == 2):
            # Two-column regime: just size the main column.
            main_width = context.workspace_width(self.two_columns_main_ratio)
            context.exec(f'[con_mark="{self.mark_main()}"] resize set {main_width}')
        elif (len(context.containers) == third_column_container_index):
            # Third column just appeared: split the remaining width evenly
            # between the two stacks and restore the main ratio.
            containers = context.resync().sorted_containers()
            stack_width = context.workspace_width(((1 - self.three_columns_main_ratio) / 2))
            stack_width_delta = (containers[1].rect.width - stack_width)
            self._resize(context, 'con_id', containers[1].id, stack_width_delta)
            main_width = context.workspace_width(self.three_columns_main_ratio)
            main_width_delta = ((containers[0].rect.width + stack_width_delta) - main_width)
            self._resize(context, 'con_mark', self.mark_main(), main_width_delta)

    def _resize(self, context: Context, attr: str, value: str, delta: int):
        # Positive delta shrinks, negative grows, toward the side opposite
        # the second column.
        resize_direction = self.second_column_position.opposite().value
        resize_expansion = ('shrink' if (delta >= 0) else 'grow')
        context.exec(f'[{attr}="{value}"] resize {resize_expansion} {resize_direction} {abs(delta)} px')

    def create(cls, workspace_name: str, params: List[Any]) -> Optional['Layout']:
        # Factory used by the layout registry.
        return ThreeColumns(workspace_name, params)
def upgrade():
    """Alembic migration: create the ``fidesopsuserpermissions`` table.

    One row of scopes per user (user_id is both a FK to fidesopsuser and
    unique), with timestamped audit columns and an index on id.
    """
    op.create_table(
        'fidesopsuserpermissions',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('user_id', sa.String(), nullable=False),
        sa.Column('scopes', sa.ARRAY(sa.String()), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['fidesopsuser.id']),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('user_id'),
    )
    op.create_index(op.f('ix_fidesopsuserpermissions_id'), 'fidesopsuserpermissions', ['id'], unique=False)
class ExperimentalFlagsManager(LocaleMixin):
    """Lookup table for the Telegram channel's experimental feature flags.

    Instances merge the hard-coded defaults with the channel's ``flags``
    config section and are called like a function: ``flags('send_to_last_chat')``.
    Unknown flag names raise ``ValueError``.
    """

    # Baseline values; a copy is taken per instance so this dict is never mutated.
    DEFAULT_VALUES = {
        'chats_per_page': 10,
        'multiple_slave_chats': True,
        'network_error_prompt_interval': 100,
        'prevent_message_removal': True,
        'auto_locale': True,
        'retry_on_error': False,
        'send_image_as_file': False,
        'message_muted_on_slave': 'normal',
        'your_message_on_slave': 'silent',
        'animated_stickers': False,
        'send_to_last_chat': 'warn',
        'default_media_prompt': 'emoji',
        'api_base_url': None,
        'api_base_file_url': None,
        'local_tdlib_api': False,
    }

    def __init__(self, channel: 'TelegramChannel'):
        self.channel = channel
        # Start from the defaults, then layer any user-configured overrides on top.
        self.config: Dict[(str, Any)] = ExperimentalFlagsManager.DEFAULT_VALUES.copy()
        overrides = channel.config.get('flags', dict()) or dict()
        self.config.update(overrides)

    def __call__(self, flag_key: str) -> Any:
        """Return the value of *flag_key*, raising ValueError for unknown flags."""
        if flag_key not in self.config:
            raise ValueError(self._('{0} is not a valid experimental flag').format(flag_key))
        return self.config[flag_key]
class DefaultScale(AbstractScale):
    """Scale that produces 'nice' tick intervals via Heckbert's algorithm."""

    def __init__(self, formatter=None):
        """Create the scale.

        Parameters
        ----------
        formatter : optional
            Tick-label formatter; defaults to a fresh ``BasicFormatter``.
        """
        if formatter is None:
            formatter = BasicFormatter()
        self.formatter = formatter

    def ticks(self, start, end, desired_ticks=8):
        """Return tick positions covering [start, end].

        Degenerate (start == end) or NaN ranges collapse to a single tick
        at *start*. Otherwise roughly *desired_ticks* 'nice' values are
        produced, enclosing the data range.
        """
        if (start == end) or isnan(start) or isnan(end):
            return [start]
        # Renamed from min/max/delta: the originals shadowed the builtins
        # `min` and `max` inside this method.
        lo, hi, step = heckbert_interval(start, end, desired_ticks, enclose=True)
        return frange(lo, hi, step)

    def num_ticks(self, start, end, desired_ticks=8):
        """Return how many ticks ``ticks`` would generate for this range."""
        return len(self.ticks(start, end, desired_ticks))
class BenjiStoreTestCase(BenjiTestCaseBase):
    """Exercises BenjiStore's NBD-style read / copy-on-write access to versions.

    NOTE(review): the bare tuple/list expressions before ``test_read`` and
    ``test_write_read`` look like parameterization decorators (e.g.
    ``@parameterized.expand``) stripped by decompilation — confirm upstream.
    """

    def generate_version(self, testpath):
        """Create a ~512 KiB test image, back it up, return (uid, size, path)."""
        size = ((512 * kB) + 123)
        image_filename = os.path.join(testpath, 'image')
        # Image layout: random data, a 128123-byte zero run (exercises zero/sparse
        # block handling), then another random tail.
        self.image = ((self.random_bytes((size - (2 * 128123))) + (b'\x00' * 128123)) + self.random_bytes(128123))
        with open(image_filename, 'wb') as f:
            f.write(self.image)
        benji_obj = self.benji_open(init_database=True)
        version = benji_obj.backup(version_uid=str(uuid.uuid4()), volume='data-backup', snapshot='snapshot-name', source=('file:' + image_filename))
        version_uid = version.uid
        benji_obj.close()
        return (version_uid, size, image_filename)

    def setUp(self):
        super().setUp()
        # Every test operates against one freshly backed-up version.
        (version_uid, size, image_filename) = self.generate_version(self.testpath.path)
        self.version_uid = version_uid
        self.size = size
        self.image_filename = image_filename

    def test_find_versions(self):
        # The store should expose exactly the one version created in setUp.
        benji_obj = self.benji_open()
        store = BenjiStore(benji_obj)
        versions = store.find_versions()
        self.assertEqual(1, len(versions))
        self.assertEqual(self.version_uid, versions[0].uid)
        self.assertEqual(self.size, versions[0].size)
        benji_obj.close()

    # NOTE(review): presumed stripped parameterization (read block sizes).
    ([(512,), (1024,), (4096,), (65536,), (1861,)])
    def test_read(self, block_size):
        """Reading the whole version in *block_size* chunks reproduces the image."""
        benji_obj = self.benji_open()
        store = BenjiStore(benji_obj)
        version = store.find_versions(version_uid=self.version_uid)[0]
        store.open(version)
        image = bytearray()
        for pos in range(0, self.size, block_size):
            # Clamp the final read to the end of the image.
            if ((pos + block_size) > self.size):
                read_length = (self.size - pos)
            else:
                read_length = block_size
            image = (image + store.read(version, None, pos, read_length))
        self.assertEqual(self.size, len(image))
        self.assertEqual(self.image, image)
        store.close(version)
        benji_obj.close()

    def test_create_cow_version(self):
        # A COW version mirrors the original in everything but the snapshot name.
        benji_obj = self.benji_open()
        store = BenjiStore(benji_obj)
        version = store.find_versions(version_uid=self.version_uid)[0]
        store.open(version)
        cow_version = store.create_cow_version(version)
        self.assertEqual(version.volume, cow_version.volume)
        self.assertEqual(version.size, cow_version.size)
        self.assertEqual(version.block_size, cow_version.block_size)
        self.assertEqual(version.storage_id, cow_version.storage_id)
        self.assertNotEqual(version.snapshot, cow_version.snapshot)
        store.fixate_cow_version(cow_version)
        store.close(version)
        benji_obj.close()

    # NOTE(review): presumed stripped parameterization — 51 randomized runs.
    ([['{:03}'.format(run)] for run in range(51)])
    def test_write_read(self, run):
        """Randomly overwrite ~25% of the image through a COW version, then verify
        that reads through the COW layer match the mutated reference image."""
        benji_obj = self.benji_open()
        store = BenjiStore(benji_obj)
        version = store.find_versions(version_uid=self.version_uid)[0]
        store.open(version)
        cow_version = store.create_cow_version(version)
        image_2_filename = os.path.join(self.testpath.path, 'image')
        image_2 = bytearray(self.image)
        # Randomized write granularity per run.
        block_size = random.randint(512, (2 * 65536))
        for pos in range(0, self.size, block_size):
            if ((pos + block_size) > self.size):
                write_length = (self.size - pos)
            else:
                write_length = block_size
            # ~25% of chunks get overwritten, half with random data, half with zeros.
            if (random.randint(1, 100) <= 25):
                if random.randint(0, 1):
                    image_2[pos:(pos + write_length)] = self.random_bytes(write_length)
                    store.write(cow_version, pos, image_2[pos:(pos + write_length)])
                else:
                    image_2[pos:(pos + write_length)] = (b'\x00' * write_length)
                    store.write(cow_version, pos, (b'\x00' * write_length))
        with open(image_2_filename, 'wb') as f:
            f.write(image_2)
        # Re-read through the COW layer at several block sizes and compare
        # byte-for-byte against the mutated reference image.
        for block_size in (512, 1024, 4096, 65536, 1861):
            image = bytearray()
            for pos in range(0, self.size, block_size):
                if ((pos + block_size) > self.size):
                    read_length = (self.size - pos)
                else:
                    read_length = block_size
                image = (image + store.read(version, cow_version, pos, read_length))
            self.assertEqual(self.size, len(image))
            for pos in range(0, self.size):
                if (image_2[pos] != image[pos]):
                    self.fail('Written image different at offset {} (block size {}).'.format(pos, block_size))
                    break
        store.fixate_cow_version(cow_version)
        # Deep scrub validates the fixated COW version against the file on disk.
        benji_obj.deep_scrub(cow_version.uid, 'file:{}'.format(image_2_filename))
        store.close(version)
        benji_obj.close()
class TernaryOp(Node):
    """AST node for the C conditional operator ``cond ? iftrue : iffalse``."""

    __slots__ = ('cond', 'iftrue', 'iffalse', 'coord', '__weakref__')

    def __init__(self, cond, iftrue, iffalse, coord=None):
        self.cond = cond
        self.iftrue = iftrue
        self.iffalse = iffalse
        self.coord = coord

    def children(self):
        """Return (name, node) pairs for all non-None children, in evaluation order."""
        candidates = (('cond', self.cond), ('iftrue', self.iftrue), ('iffalse', self.iffalse))
        return tuple((label, child) for label, child in candidates if child is not None)

    attr_names = ()
def get_brew_arch(wf):
    """Return the Homebrew architecture to use: 'ARM' or 'INTEL'.

    A previously saved choice in the workflow settings wins; otherwise ARM
    is chosen only when an ARM brew is installed and no Intel brew is.
    """
    installed = brew_installed()
    saved = wf.settings.get('HOMEBREW_OPTS', None)
    if saved is not None:
        return saved['current_brew']
    if installed['ARM'] and not installed['INTEL']:
        return 'ARM'
    return 'INTEL'
class OptionPlotoptionsFunnelSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Generated Highcharts wrapper for funnel sonification highpass-frequency mapping.

    NOTE(review): every name below appears twice (getter, then setter with an
    argument). In the generated original these are ``@property`` /
    ``@<name>.setter`` pairs whose decorators were stripped by decompilation —
    confirm before modifying; as written, the later def shadows the earlier one.
    """

    def mapFunction(self):
        # Getter: no configured mapping function by default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a plain value, not emitted as JavaScript.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property this mapping reads from.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range (unset by default).
        return self._config_get(None)

    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range (unset by default).
        return self._config_get(None)

    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def within(self):
        # Getter: scope the min/max applies within (unset by default).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
def set_develop_mode(on=True, intensive_logging=False, skip_wait_methods=False):
    """Toggle expyriment develop mode.

    When enabling, the current values of the affected defaults are saved in
    ``defaults._mode_settings`` so they can be restored when develop mode is
    switched off again.

    Parameters
    ----------
    on : bool
        True to enter develop mode, False to restore the saved settings.
    intensive_logging : bool
        If True (with ``on``), also force full event logging (level 2).
    skip_wait_methods : bool
        If True (with ``on``), additionally disable all wait methods.
    """
    from .. import io
    if on:
        # Remember current settings so they can be restored later. The
        # optional sixth entry (event_logging) exists only when
        # intensive_logging was requested.
        defaults._mode_settings = [defaults.initialize_delay, defaults.window_mode, defaults.fast_quit, io.defaults.outputfile_time_stamp, defaults.auto_create_subject_id]
        if intensive_logging:
            defaults._mode_settings.append(defaults.event_logging)
            defaults.event_logging = 2
        if skip_wait_methods:
            set_skip_wait_methods(True)
        print('*** DEVELOP MODE ***')
        defaults.initialize_delay = 0
        defaults.window_mode = True
        defaults.fast_quit = True
        io.defaults.outputfile_time_stamp = False
        defaults.auto_create_subject_id = True
    else:
        print('*** NORMAL MODE ***')
        if (defaults._mode_settings is not None):
            defaults.initialize_delay = defaults._mode_settings[0]
            defaults.window_mode = defaults._mode_settings[1]
            defaults.fast_quit = defaults._mode_settings[2]
            io.defaults.outputfile_time_stamp = defaults._mode_settings[3]
            defaults.auto_create_subject_id = defaults._mode_settings[4]
            # BUG FIX: restore event_logging BEFORE clearing _mode_settings.
            # The original set _mode_settings = None first and then indexed
            # it inside a try/except, so the restore always failed silently.
            if len(defaults._mode_settings) > 5:
                defaults.event_logging = defaults._mode_settings[5]
            defaults._mode_settings = None
            set_skip_wait_methods(False)
pass |
def expect(invocation, out, program=None, test='equals'):
    """Run ``fab <invocation>`` through *program* and check captured stdout.

    *test* selects the comparison: 'equals' (exact match), 'contains'
    (substring), or 'regex' (``re.match``). Stderr must remain empty.
    Assumes sys.stdout/sys.stderr have been replaced with StringIO-like
    objects that expose ``getvalue()``.
    """
    prog = make_program() if program is None else program
    prog.run('fab {}'.format(invocation), exit=False)
    captured = sys.stdout.getvalue()
    if test == 'equals':
        assert captured == out
    elif test == 'contains':
        assert out in captured
    elif test == 'regex':
        assert re.match(out, captured)
    else:
        err = "Don't know how to expect that <stdout> {} <expected>!"
        assert False, err.format(test)
    # Regardless of the stdout check, nothing may have been written to stderr.
    assert (not sys.stderr.getvalue())
def plugin_init(config, ingest_ref, callback):
    """Initialise the plugin: wire up ingest callbacks and start the RPC client.

    Mutates and returns the supplied *config* dict as the plugin handle,
    stashing the ingest reference, callback and an empty readings buffer.
    Also (re)creates the module-global RPC client and clears the shutdown flag.
    """
    global shutdown_in_progress, the_rpc
    _LOGGER.info('plugin_init called')
    handle = config
    handle['ingest_ref'] = ingest_ref
    handle['callback'] = callback
    # RPC client talks to the np_server module colocated with this plugin.
    the_rpc = iprpc.IPCModuleClient('np_server', _module_dir)
    handle['readings_buffer'] = list()
    shutdown_in_progress = False
    return handle
_cache(maxsize=1)
def get_yt_id(url, ignore_playlist=False):
    """Extract a YouTube video id (or playlist id) from *url*.

    Returns the playlist id when the URL carries a ``list=`` parameter and
    *ignore_playlist* is False; otherwise the video id. Returns None for
    unrecognised URLs.
    """
    query = urlparse(url)
    if (query.hostname == 'youtu.be'):
        # Short links carry the id directly in the path: youtu.be/<id>
        return query.path[1:]
    if (query.hostname in {'www.youtube.com', 'youtube.com', 'music.youtube.com'}):
        if (not ignore_playlist):
            with suppress(KeyError):
                return parse_qs(query.query)['list'][0]
        if (query.path == '/watch'):
            return parse_qs(query.query)['v'][0]
        if (query.path[:7] == '/watch/'):
            # BUG FIX: '/watch/<id>'.split('/') == ['', 'watch', '<id>'], so
            # the id is at index 2; the original returned index 1 ('watch').
            return query.path.split('/')[2]
        if (query.path[:7] == '/embed/'):
            return query.path.split('/')[2]
        if (query.path[:3] == '/v/'):
            return query.path.split('/')[2]
class Command(DanubeCloudCommand):
    """Management command that prints the Danube Cloud version string."""

    help = 'Display Danube Cloud version.'
    options = (CommandOption('-f', '--full', action='store_true', dest='full', default=False, help='Display full version string (including edition).'),)

    def handle(self, full=False, **options):
        # Imported lazily so the command module loads without core.version.
        from core.version import __version__, __edition__
        text = ('%s:%s' % (__edition__, __version__)) if full else __version__
        self.display(text)
class ContextVarsContext(BaseContext):
    """Transaction/span bookkeeping backed by ``contextvars``.

    The current transaction lives in one ContextVar; active spans are kept
    as an immutable tuple used like a stack (push on set, pop on unset).
    """

    elasticapm_transaction_var = contextvars.ContextVar('elasticapm_transaction_var')
    elasticapm_spans_var = contextvars.ContextVar('elasticapm_spans_var', default=())

    def get_transaction(self, clear: bool=False) -> 'elasticapm.traces.Transaction':
        """Return the current transaction (or None), optionally clearing it."""
        try:
            current = self.elasticapm_transaction_var.get()
        except LookupError:
            # Nothing has ever been set in this context.
            return None
        if clear:
            self.set_transaction(None)
        return current

    def set_transaction(self, transaction: 'elasticapm.traces.Transaction') -> None:
        self.elasticapm_transaction_var.set(transaction)

    def get_span(self) -> 'elasticapm.traces.Span':
        """Return the innermost active span, or None if the stack is empty."""
        stack = self.elasticapm_spans_var.get()
        if not stack:
            return None
        return stack[-1]

    def set_span(self, span: 'elasticapm.traces.Span') -> None:
        """Push *span* onto the span stack."""
        stack = self.elasticapm_spans_var.get()
        self.elasticapm_spans_var.set(stack + (span,))

    def unset_span(self, clear_all: bool=False) -> 'elasticapm.traces.Span':
        """Pop and return the innermost span; *clear_all* empties the stack."""
        stack = self.elasticapm_spans_var.get()
        if not stack:
            return None
        top = stack[-1]
        self.elasticapm_spans_var.set(() if clear_all else stack[:-1])
        return top
def preprocess_observation(observation: np.ndarray, key: jnp.ndarray) -> jnp.ndarray:
    """Quantize, rescale, dither and center a raw observation.

    Pixel values are floored into 8-wide buckets, scaled to [0, 1) (for
    0-255 inputs), dithered with uniform noise of one bucket's width, and
    shifted to be roughly zero-centered.
    """
    quantized = jnp.floor(observation.astype(jnp.float32) / 8)
    scaled = quantized / 32
    dithered = scaled + jax.random.uniform(key, scaled.shape) / 32
    return dithered - 0.5
class OptionPlotoptionsTimelineStatesInactive(Options):
    """Generated Highcharts wrapper for ``plotOptions.timeline.states.inactive``.

    NOTE(review): ``enabled`` and ``opacity`` each appear twice (getter then
    setter) — presumably ``@property`` / ``@<name>.setter`` pairs whose
    decorators were stripped by decompilation; as written the later def
    shadows the earlier one. Confirm against the generated original.
    """

    def animation(self) -> 'OptionPlotoptionsTimelineStatesInactiveAnimation':
        # Sub-options object describing the inactive-state animation.
        return self._config_sub_data('animation', OptionPlotoptionsTimelineStatesInactiveAnimation)

    def enabled(self):
        # Getter: the inactive state is enabled by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        # Setter.
        self._config(flag, js_type=False)

    def opacity(self):
        # Getter: default opacity applied to inactive points is 0.2.
        return self._config_get(0.2)

    def opacity(self, num: float):
        # Setter.
        self._config(num, js_type=False)
('/calls/view/<int:call_no>', methods=['GET'])
def calls_view(call_no):
sql = "SELECT\n a.CallLogID,\n CASE\n WHEN b.PhoneNo is not null then b.Name\n WHEN c.PhoneNo is not null then c.Name\n ELSE a.Name\n END Name,\n a.Number Number,\n a.Date,\n a.Time,\n a.Action,\n a.Reason,\n CASE WHEN b.PhoneNo is null THEN 'N' ELSE 'Y' END Whitelisted,\n CASE WHEN c.PhoneNo is null THEN 'N' ELSE 'Y' end Blacklisted,\n d.MessageID,\n d.Played,\n d.Filename,\n a.SystemDateTime\n FROM CallLog as a\n LEFT JOIN Whitelist AS b ON a.Number = b.PhoneNo\n LEFT JOIN Blacklist AS c ON a.Number = c.PhoneNo\n LEFT JOIN Message AS d ON a.CallLogID = d.CallLogID\n WHERE a.CallLogID={}".format(call_no)
g.cur.execute(sql)
row = g.cur.fetchone()
caller = {}
if (len(row) > 0):
number = row[2]
phone_no = format_phone_no(number)
filepath = row[11]
if (filepath is not None):
basename = os.path.basename(filepath)
filepath = os.path.join('../../static/messages', basename)
date_time = datetime.strptime(row[12][:19], '%Y-%m-%d %H:%M:%S')
caller.update(dict(call_no=row[0], phone_no=phone_no, name=row[1], date=date_time.strftime('%d-%b-%y'), time=date_time.strftime('%I:%M %p'), action=row[5], reason=row[6], whitelisted=row[7], blacklisted=row[8], msg_no=row[9], msg_played=row[10], wav_file=filepath))
else:
pass
return render_template('calls_view.html', caller=caller) |
def get_layouts(layouts=None):
    """Collect default layouts plus any extras into a deduplicated list.

    Layouts sharing a name overwrite earlier ones (unnamed layouts are keyed
    by position). Returns (layouts, remaining layouts-by-module mapping).
    """
    module_layouts = get_layouts_from_getters()
    defaults = module_layouts.pop('default_layouts')
    merged = {}
    for position, layout in enumerate(defaults + (layouts or [])):
        layout.module = 'default'
        # A falsy/missing name falls back to the positional index as key.
        merged[layout.name or position] = layout
    return (list(merged.values()), module_layouts)
class TraitEnum(TraitHandler):
    """Trait handler restricting a value to a fixed set of allowed values."""

    def __init__(self, *values):
        # A single sequence argument is unpacked into the value set.
        if (len(values) == 1) and (type(values[0]) in SequenceTypes):
            values = values[0]
        self.values = tuple(values)
        # Tuple consumed by the C-level fast validator.
        self.fast_validate = (ValidateTrait.enum, self.values)

    def validate(self, object, name, value):
        """Return *value* if allowed; otherwise raise via ``self.error``."""
        if value not in self.values:
            self.error(object, name, value)
        return value

    def info(self):
        """Human-readable description of the allowed values."""
        return ' or '.join(repr(v) for v in self.values)

    def get_editor(self, trait):
        # Imported lazily to avoid pulling in traitsui at module import time.
        from traitsui.api import EnumEditor
        return EnumEditor(values=self, cols=(trait.cols or 3), evaluate=trait.evaluate, mode=(trait.mode or 'radio'))
def _attr_to_optparse_option(at: Field, default: Any) -> Tuple[(dict, str)]:
    """Translate a settings dataclass field into (optparse kwargs, default help text).

    Branch order is significant: name-based special cases must run before the
    generic type checks, and narrower typing forms before broader ones.
    """
    # Special-cased by name: accepts either a comma list or a YAML dict.
    if (at.name == 'url_schemes'):
        return ({'metavar': '<comma-delimited>|<yaml-dict>', 'validator': _validate_url_schemes}, ','.join(default))
    if (at.type is int):
        return ({'metavar': '<int>', 'validator': _validate_int}, str(default))
    if (at.type is bool):
        return ({'metavar': '<boolean>', 'validator': frontend.validate_boolean}, str(default))
    # Plain strings; heading_slug_func is special-cased by name because it is
    # configured as a dotted-path string even though its type is not str.
    if ((at.type is str) or (at.name == 'heading_slug_func')):
        return ({'metavar': '<str>'}, f"(default: '{default}')")
    # Literal[...] of strings maps to an optparse 'choice' option.
    if ((get_origin(at.type) is Literal) and all((isinstance(a, str) for a in get_args(at.type)))):
        args = get_args(at.type)
        return ({'metavar': f"<{'|'.join((repr(a) for a in args))}>", 'type': 'choice', 'choices': args}, repr(default))
    if (at.type in (Iterable[str], Sequence[str])):
        return ({'metavar': '<comma-delimited>', 'validator': frontend.validate_comma_separated_list}, ','.join(default))
    if (at.type == Set[str]):
        return ({'metavar': '<comma-delimited>', 'validator': _validate_comma_separated_set}, ','.join(default))
    if (at.type == Tuple[(str, str)]):
        return ({'metavar': '<str,str>', 'validator': _create_validate_tuple(2)}, ','.join(default))
    if (at.type == Union[(int, type(None))]):
        return ({'metavar': '<null|int>', 'validator': _validate_int}, str(default))
    if (at.type == Union[(Iterable[str], type(None))]):
        return ({'metavar': '<null|comma-delimited>', 'validator': frontend.validate_comma_separated_list}, (','.join(default) if default else ''))
    if (get_origin(at.type) is dict):
        return ({'metavar': '<yaml-dict>', 'validator': _create_validate_yaml(at)}, (str(default) if default else ''))
    # Reaching here means a config field was added without CLI support.
    raise AssertionError(f'Configuration option {at.name} not set up for use in docutils.conf.')
# NOTE(review): decompilation residue — presumably @functools.cache()/@lru_cache().
_cache()
def _load_config(env_code=None) -> Type[DefaultConfig]:
    """Resolve and construct the runtime configuration class.

    The environment code comes from the argument, or else from the
    ENV_CODE_VAR environment variable (falling back to _FALLBACK_ENV_CODE).
    CLI overrides from --config are passed to the matched constructor.
    """
    if not env_code:
        env_code = os.environ.get(ENV_CODE_VAR, _FALLBACK_ENV_CODE)
    matched = next((env for env in ENVS if env['code'] == env_code), None)
    if matched is None:
        raise KeyError(f'Runtime environment with code={env_code} not found in supported runtime ENVS dict. Check that you are supplying the correct runtime env specifier in the {ENV_CODE_VAR} environment variable when running this program')
    overrides = _parse_config_arg()
    constructor = matched['constructor']
    return constructor(**overrides) if overrides else constructor()
class Cmd(object):
    """Base class for management commands that run locally or over SSH.

    Subclasses set ``CMD`` to the base command tuple; the helpers below run
    ``CMD + args`` via ``execute`` and raise ``CmdError`` on non-zero exit.
    """

    CMD = NotImplemented   # base command tuple, supplied by subclasses
    OK = 0                 # exit code: success
    ERR_HOST_CHECK = 3     # exit code: remote host unreachable
    ERR_UNKNOWN = 99       # exit code: unspecified failure
    msg = None
    _time_started = 0

    def __init__(self, host=None, verbose=False, **kwargs):
        # Extra keyword arguments become instance attributes so subclasses
        # can accept arbitrary options without redefining __init__.
        self.host = host
        self.verbose = verbose
        self.__dict__.update(kwargs)
        self._time_started = get_timestamp()

    def log(self, msg):
        # Timestamped logging to stderr, only in verbose mode.
        if self.verbose:
            print(('[%s] %s' % (get_timestamp(), msg)), file=sys.stderr)

    def _run_local(self, cmd, **kwargs):
        """Run ``CMD + cmd`` on this machine; raise CmdError on failure."""
        cmd = (self.CMD + cmd)
        self.log(('Running command: %s' % ' '.join(cmd)))
        (rc, stdout, stderr) = execute(cmd, **kwargs)
        self.log(('Return code: %d' % rc))
        if (rc != 0):
            raise CmdError(rc, (stderr or stdout))
        return stdout.strip()

    def _run_remote(self, cmd, **kwargs):
        # Wrap the command in run_ssh when a target host is configured;
        # without a host this degrades to a plain local run.
        if self.host:
            cmd = ((('run_ssh', ('%s' % self.host)) + self.CMD) + cmd)
        return self._run_local(cmd, **kwargs)

    def _run_cmd(self, *cmd, **kwargs):
        # Dispatch based on the (popped) `remote` keyword argument.
        if kwargs.pop('remote', False):
            return self._run_remote(cmd, **kwargs)
        else:
            return self._run_local(cmd, **kwargs)

    def _check_host(self, hostname=False):
        """Verify SSH reachability of self.host (optionally checking hostname)."""
        if self.host:
            if hostname:
                test_cmd = 'test_ssh_hostname'
            else:
                test_cmd = 'test_ssh'
            try:
                return self._run_local((test_cmd, self.host))
            except CmdError as e:
                raise CmdError(self.ERR_HOST_CHECK, (('Remote host is unreachable (%s)' % e.msg) or e.rc))

    def _get_hostname(self, remote=False):
        return self._run_cmd('get_hostname', remote=remote)

    def add_timestamps(self, output_dict):
        """Stamp start/end/elapsed times onto a command's output dict."""
        time_ended = get_timestamp()
        output_dict.update({'time_started': self._time_started, 'time_ended': time_ended, 'time_elapsed': (time_ended - self._time_started)})

    def cleanup(self, action, response):
        # Invoke an optional per-action `<action>_cleanup` hook if defined.
        fun = getattr(self, ('%s_cleanup' % action), None)
        if fun:
            self.log(('Running cleanup for "%s" action' % action))
            fun(response)

    # NOTE(review): no `self` parameter — presumably a stripped @staticmethod.
    def print_output(output_dict):
        print(json.dumps(output_dict, indent=4))

    # NOTE(review): takes `cls` — presumably a stripped @classmethod.
    def output_and_exit(cls, output_dict):
        cls.print_output(output_dict)
        sys.exit(output_dict['rc'])
def parse(definition, parcel: ParcelParser, parent: Field) -> None:
    """Dispatch *definition* to the parser matching its marker key.

    Marker precedence mirrors the original if/elif chain; a definition with
    no recognised marker is treated as a plain value.
    """
    handlers = (
        ('__parcelType', parse_parcel_type),
        ('__type', parse_generic_type),
        ('__repeated', parse_repeated_value),
        ('__conditional', parse_conditional),
    )
    for marker, handler in handlers:
        if marker in definition:
            handler(definition, parcel, parent)
            return
    parse_value_from_definition(definition, parcel, parent)
# NOTE(review): decompilation residue — presumably @pytest.fixture(scope='function').
(scope='function')
def strava(stravawidget):
    """Pytest fixture yielding a qtile config whose bar holds one Strava widget."""
    class StravaConfig(libqtile.confreader.Config):
        # Minimal config: one group, Max layout, and a 50px top bar containing
        # the widget under test (startup_delay=0 so it polls immediately).
        auto_fullscreen = True
        keys = []
        mouse = []
        groups = [libqtile.config.Group('a')]
        layouts = [libqtile.layout.Max()]
        floating_layout = libqtile.resources.default_config.floating_layout
        screens = [libqtile.config.Screen(top=libqtile.bar.Bar([stravawidget(startup_delay=0)], 50))]
    (yield StravaConfig)
class BasicEdge(GraphEdgeInterface):
    """Simple directed edge joining a source node to a sink node."""

    def __init__(self, source: GraphNodeInterface, sink: GraphNodeInterface):
        self._source: GraphNodeInterface = source
        self._sink: GraphNodeInterface = sink

    def source(self) -> GraphNodeInterface:
        """Return the edge's source node."""
        return self._source

    def sink(self) -> GraphNodeInterface:
        """Return the edge's sink node."""
        return self._sink

    def __eq__(self, other) -> bool:
        # Attribute-wise equality; None never compares equal.
        if other is None:
            return False
        return self.__dict__ == other.__dict__

    def copy(self, source: GraphNodeInterface=None, sink: GraphNodeInterface=None) -> GraphEdgeInterface:
        """Return a new edge, substituting any endpoint that was supplied."""
        new_source = self._source if source is None else source
        new_sink = self._sink if sink is None else sink
        return BasicEdge(new_source, new_sink)

    def __hash__(self) -> int:
        return hash((hash(self._source), hash(self._sink)))
class News(commands.Cog):
    """Red-DiscordBot cog fetching headlines from newsapi.org.

    NOTE(review): this copy is visibly decompiled/truncated — ``self.api``
    and ``self.session`` assignments and one ``except`` clause lost their
    right-hand sides, and command/listener decorators survive only as bare
    parenthesised expressions. The code below is preserved byte-for-byte;
    comments document apparent intent. Confirm against the original cog.
    """
    __version__ = '0.0.3'
    __author__ = 'flare#0001'

    def format_help_for_context(self, ctx):
        # Append cog version/author to the standard help text.
        pre_processed = super().format_help_for_context(ctx)
        return f'''{pre_processed}
Cog Version: {self.__version__}
Author: {self.__author__}'''

    def __init__(self, bot):
        self.bot = bot
        # NOTE(review): truncated — presumably the newsapi.org URL template
        # with five format slots (endpoint, query, country, key, extra).
        self.api = '
        # NOTE(review): truncated — presumably aiohttp.ClientSession().
        self.session = aio
        self.newsapikey = None

    async def red_get_data_for_user(self, *, user_id: int):
        # This cog stores no end-user data.
        return {}

    async def red_delete_data_for_user(self, *, requester, user_id: int) -> None:
        # Nothing to delete — no user data is stored.
        pass

    async def initalize(self):
        # (sic: "initalize") Load the stored API key from Red's shared tokens.
        token = (await self.bot.get_shared_api_tokens('newsapi'))
        self.newsapikey = token.get('key', None)

    # NOTE(review): presumably @commands.Cog.listener() with the prefix stripped.
    .listener()
    async def on_red_api_tokens_update(self, service_name, api_tokens):
        # Keep the cached key in sync when the token is changed at runtime.
        if (service_name == 'newsapi'):
            self.newsapikey = api_tokens.get('key', None)

    def cog_unload(self):
        # Close the HTTP session when the cog is unloaded.
        self.bot.loop.create_task(self.session.close())

    async def get(self, url):
        """GET *url*; return parsed JSON or a dict with a 'failed' message."""
        async with self.session.get(url) as response:
            data = (await response.json())
            if (response.status != 200):
                return {'failed': data['message']}
            try:
                return data
            # NOTE(review): truncated — presumably an aiohttp exception type.
            except aio
                return {'failed': 'Their appears to be an issue with the API. Please try again later.'}

    # NOTE(review): presumably @commands.group() — decorator garbled.
    ()
    async def news(self, ctx):
        # Command group container; subcommands below do the work.
        ()

    async def newssetup(self, ctx):
        # Walk the user through registering and storing a newsapi key.
        msg = f'''**News API Setup**
**1**. Visit and register for an API.
**2**. Use the following command: {ctx.prefix}set api newsapi key <api_key_here>
**3**. Reload the cog if it doesnt work immediately.'''
        (await ctx.maybe_send_embed(msg))

    # NOTE(review): presumably @news.command(hidden=True) — decorator garbled.
    (hidden=True)
    async def countrycodes(self, ctx):
        (await ctx.send('Valid country codes are:\nae ar at au be bg br ca ch cn co cu cz de eg fr gb gr hk hu id ie il in it jp kr lt lv ma mx my ng nl no nz ph pl pt ro rs ru sa se sg si sk th tr tw ua us ve za'))

    # NOTE(review): presumably @news.command() — decorator garbled.
    ()
    async def top(self, ctx, countrycode: str, *, query: str=None):
        # Top headlines for a country, optionally filtered by a query.
        async with ctx.typing():
            data = (await self.get(self.api.format('top-headlines', (f'q={query}' if (query is not None) else ''), f'&country={countrycode}', self.newsapikey, '')))
            if (data.get('failed') is not None):
                return (await ctx.send(data.get('failed')))
            if (data['totalResults'] == 0):
                return (await ctx.send(f"No results found, ensure you're looking up the correct country code. Check {ctx.prefix}countrycodes for a list. Alternatively, your query may be returning no results."))
            (await GenericMenu(source=ArticleFormat(data['articles'][:15]), ctx=ctx).start(ctx=ctx, wait=False))

    # NOTE(review): presumably @news.command(name='global') — decorator garbled.
    (name='global')
    async def global_all(self, ctx, *, query: str=None):
        # Search all articles worldwide for a query.
        async with ctx.typing():
            data = (await self.get(self.api.format('everything', f'q={query}', '', self.newsapikey, '')))
            if (data.get('failed') is not None):
                return (await ctx.send(data.get('failed')))
            if (data['totalResults'] == 0):
                return (await ctx.send('No results found.'))
            (await GenericMenu(source=ArticleFormat(data['articles']), ctx=ctx).start(ctx=ctx, wait=False))

    # NOTE(review): presumably @news.command() — decorator garbled.
    ()
    async def topglobal(self, ctx, *, query: str):
        # Top headlines worldwide matching a (required) query.
        async with ctx.typing():
            data = (await self.get(self.api.format('top-headlines', f'q={query}', '', self.newsapikey, '')))
            if (data.get('failed') is not None):
                return (await ctx.send(data.get('failed')))
            if (data['totalResults'] == 0):
                return (await ctx.send('No results found.'))
            (await GenericMenu(source=ArticleFormat(data['articles']), ctx=ctx).start(ctx=ctx, wait=False))
def test_example_tensor():
    """A task returning a torch.Tensor must serialize its output as the
    PyTorch blob format."""
    # Inner function name `t1` is kept — it may flow into the task spec.
    def t1(array: torch.Tensor) -> torch.Tensor:
        return torch.flatten(array)
    spec = get_serializable(OrderedDict(), serialization_settings, t1)
    output_type = spec.template.interface.outputs['o0'].type
    assert output_type.blob.format is PyTorchTensorTransformer.PYTORCH_FORMAT
def start_north(start_north_omf_as_a_service, fledge_url, num_assets, pi_host, pi_port, pi_admin, pi_passwd, clear_pi_system_through_pi_web_api, pi_db):
    """Fixture: purge expected assets from PI, then start the OMF north service.

    Builds the asset-name -> datapoint-list map for six services' worth of
    assets, clears those assets from the PI system, starts the north service
    and records its schedule id in the module-global ``north_schedule_id``.
    """
    global north_schedule_id
    af_hierarchy_level_list = AF_HIERARCHY_LEVEL.split('/')
    dp_list = ['']
    asset_dict = {}
    no_of_services = 6
    # Assets are named ASSET_NAME-<service><index>, both counters 1-based.
    num_assets_per_service = (num_assets // no_of_services)
    for service_count in range(no_of_services):
        for asst_count in range(num_assets_per_service):
            asset_name = (ASSET_NAME + '-{}{}'.format((service_count + 1), (asst_count + 1)))
            asset_dict[asset_name] = dp_list
    clear_pi_system_through_pi_web_api(pi_host, pi_admin, pi_passwd, pi_db, af_hierarchy_level_list, asset_dict)
    response = start_north_omf_as_a_service(fledge_url, pi_host, pi_port, pi_user=pi_admin, pi_pwd=pi_passwd, default_af_location=AF_HIERARCHY_LEVEL)
    north_schedule_id = response['id']
    # NOTE(review): yields the fixture function object itself rather than a
    # value — looks odd but is preserved as-is; confirm intent upstream.
    (yield start_north)
def events(request, **kwargs):
    """Django view serving Server-Sent Events via django-eventstream.

    Builds an EventRequest from the HTTP request, fetches events, and maps
    each error class to an appropriate SSE/HTTP error response. Default SSE
    headers are attached to every response, success or failure.
    """
    from .eventrequest import EventRequest
    from .eventstream import EventPermissionError, get_events
    from .utils import sse_error_response
    try:
        event_request = EventRequest(request, view_kwargs=kwargs)
        event_response = get_events(event_request)
        # NOTE(review): line truncated in this copy — most likely
        # ``event_response.to_http_response(request)``; confirm upstream.
        response = event_response.to_
    except EventRequest.ResumeNotAllowedError as e:
        response = HttpResponseBadRequest(('Invalid request: %s.\n' % str(e)))
    except EventRequest.GripError as e:
        # GRIP errors differ depending on whether the request came via the proxy.
        if request.grip.proxied:
            response = sse_error_response('internal-error', 'Invalid internal request.')
        else:
            response = sse_error_response('bad-request', ('Invalid request: %s.' % str(e)))
    except EventRequest.Error as e:
        response = sse_error_response('bad-request', ('Invalid request: %s.' % str(e)))
    except EventPermissionError as e:
        response = sse_error_response('forbidden', str(e), {'channels': e.channels})
    add_default_headers(response)
    return response
# NOTE(review): decompilation residue — presumably @pytest.mark.django_db.
.django_db
def test_spending_over_time_new_awards_only_filter(client, monkeypatch, elasticsearch_transaction_index, populate_models):
    """Spending-over-time grouped by month under several time_period filters,
    including the ``new_awards_only`` date_type."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
    # Case 1: date_signed window spanning Feb-Mar 2010 -> months 5 and 6.
    group = 'month'
    test_payload = {'group': group, 'subawards': False, 'filters': {'time_period': [{'date_type': 'date_signed', 'start_date': '2010-02-01', 'end_date': '2010-03-31'}]}, 'messages': [get_time_period_message()]}
    expected_response = {'results': [{'aggregated_amount': 0, 'time_period': {'fiscal_year': '2010', 'month': '5'}}, {'aggregated_amount': 100.0, 'time_period': {'fiscal_year': '2010', 'month': '6'}}], 'group': 'month', 'messages': [get_time_period_message()]}
    resp = client.post(get_spending_over_time_url(), content_type='application/json', data=json.dumps(test_payload))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (expected_response == resp.data), 'Unexpected or missing content!'
    confirm_proper_ordering(group, resp.data['results'])
    # Case 2: two-day date_signed window -> a single zero month.
    group = 'month'
    test_payload = {'group': group, 'subawards': False, 'filters': {'time_period': [{'date_type': 'date_signed', 'start_date': '2010-02-01', 'end_date': '2010-02-02'}]}, 'messages': [get_time_period_message()]}
    expected_response = {'results': [{'aggregated_amount': 0, 'time_period': {'fiscal_year': '2010', 'month': '5'}}], 'group': 'month', 'messages': [get_time_period_message()]}
    resp = client.post(get_spending_over_time_url(), content_type='application/json', data=json.dumps(test_payload))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (expected_response == resp.data), 'Unexpected or missing content!'
    confirm_proper_ordering(group, resp.data['results'])
    # Case 3: two time_period filters (one date_signed, one default) — the
    # response spans the whole range with zero-filled gap months.
    group = 'month'
    test_payload = {'group': group, 'subawards': False, 'filters': {'time_period': [{'date_type': 'date_signed', 'start_date': '2010-02-01', 'end_date': '2010-03-31'}, {'start_date': '2011-02-01', 'end_date': '2011-03-31'}]}, 'messages': [get_time_period_message()]}
    expected_response = {'results': [{'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '5'}}, {'aggregated_amount': 100.0, 'time_period': {'fiscal_year': '2010', 'month': '6'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '7'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '8'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '9'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '10'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '11'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '12'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2011', 'month': '1'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2011', 'month': '2'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2011', 'month': '3'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2011', 'month': '4'}}, {'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2011', 'month': '5'}}, {'aggregated_amount': 110.0, 'time_period': {'fiscal_year': '2011', 'month': '6'}}], 'group': 'month', 'messages': [get_time_period_message()]}
    resp = client.post(get_spending_over_time_url(), content_type='application/json', data=json.dumps(test_payload))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (expected_response == resp.data), 'Unexpected or missing content!'
    confirm_proper_ordering(group, resp.data['results'])
    # Case 4: new_awards_only window that excludes the award's signing date.
    group = 'month'
    test_payload = {'group': group, 'subawards': False, 'filters': {'time_period': [{'date_type': 'new_awards_only', 'start_date': '2010-02-14', 'end_date': '2010-02-16'}]}, 'messages': [get_time_period_message()]}
    expected_response = {'results': [{'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '5'}}], 'group': 'month', 'messages': [get_time_period_message()]}
    resp = client.post(get_spending_over_time_url(), content_type='application/json', data=json.dumps(test_payload))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (expected_response == resp.data), 'Unexpected or missing content!'
    confirm_proper_ordering(group, resp.data['results'])
    # Case 5: new_awards_only window that includes the signing date -> 100.0.
    group = 'month'
    test_payload = {'group': group, 'subawards': False, 'filters': {'time_period': [{'date_type': 'new_awards_only', 'start_date': '2010-02-13', 'end_date': '2010-03-02'}]}, 'messages': [get_time_period_message()]}
    expected_response = {'results': [{'aggregated_amount': 0.0, 'time_period': {'fiscal_year': '2010', 'month': '5'}}, {'aggregated_amount': 100.0, 'time_period': {'fiscal_year': '2010', 'month': '6'}}], 'group': 'month', 'messages': [get_time_period_message()]}
    resp = client.post(get_spending_over_time_url(), content_type='application/json', data=json.dumps(test_payload))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (expected_response == resp.data), 'Unexpected or missing content!'
    confirm_proper_ordering(group, resp.data['results'])
class PZEM():
    """Modbus-RTU driver for PZEM-01x power meters via minimalmodbus.

    Register offsets below follow the PZEM input-register map. From how
    ``read_value`` uses them, AMP/WATT/WHR appear to span two registers
    (low word first) while VOLT/FREQ/PWRF are single-register values —
    confirm against the PZEM-016 datasheet. The ``busy`` flag is a simple
    cooperative lock around the shared serial line (not thread-safe).
    """
    VOLT = 0   # voltage register offset
    AMP = 1    # current (2 registers, milliamps)
    WATT = 3   # power (2 registers, 0.1 W units)
    WHR = 5    # energy (2 registers)
    FREQ = 7   # frequency
    PWRF = 8   # power factor

    def __init__(self, port, slaveaddress, timeout=0.1):
        # serial device path, Modbus slave address, serial timeout in seconds
        self.busy = False
        self.initialized = False
        self.port = port
        self.timeout = timeout
        self.address = slaveaddress
        self.connect()

    def connect(self):
        """Open the Modbus instrument and probe it; sets ``initialized``."""
        try:
            self.dev = minimalmodbus.Instrument(self.port, self.address, close_port_after_each_call=True)
            self.dev.serial.timeout = self.timeout
            self.dev.serial.baudrate = 9600
            self.initialized = True
            self.busy = True
            # Probe with a voltage read; retry once with a current read
            # before declaring the device unreachable.
            testval = self.readraw(0, self.VOLT, 1)
            if ((testval == None) or (testval == False)):
                time.sleep(0.5)
                testval = self.readraw(1, self.AMP, 2)
                if ((testval == None) or (testval == False)):
                    self.initialized = False
                    self.dev = None
            self.busy = False
        except Exception as e:
            misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('PZEM exception: ' + str(e)))
            self.initialized = False
            self.dev = None
            self.busy = False

    def readraw(self, readtype, val, vallen):
        """Read raw register(s): readtype 0 -> read_register, else read_registers.

        Retries on exception (1 attempt for fast timeouts, 3 otherwise);
        returns None if every attempt failed.
        """
        res = None
        if (self.timeout < 1):
            cc = 1
        else:
            cc = 3
        while ((res is None) and (cc > 0)):
            try:
                if (readtype == 0):
                    res = self.dev.read_register(val, vallen, 4)
                else:
                    res = self.dev.read_registers(val, vallen, 4)
            except Exception as e:
                if (self.timeout > 1):
                    print('Slow PZEM error:', str(e))
            # NOTE(review): decrement placement reconstructed — counts every
            # loop pass, bounding the retries; confirm against the original.
            cc = (cc - 1)
        return res

    def read_value(self, valuetype=0):
        """Read and scale one measurement; returns None when unavailable.

        Waits up to ~3s for the busy flag before giving up; two-register
        values are combined low-word-first; AMP is scaled to amps and WATT
        to watts.
        """
        res = None
        if self.initialized:
            if self.busy:
                # Cooperative wait for a concurrent reader to finish.
                cc = 10
                while (self.busy and (cc > 0)):
                    time.sleep(0.3)
                    cc = (cc - 1)
            if (self.busy == False):
                self.busy = True
                vallen = 2
                if (valuetype in [self.VOLT, self.FREQ]):
                    vallen = 1
                try:
                    if (valuetype in [self.VOLT, self.FREQ, self.PWRF]):
                        res = self.readraw(0, int(valuetype), int(vallen))
                    else:
                        res = self.readraw(1, int(valuetype), int(vallen))
                        # Combine the two 16-bit registers, low word first.
                        res = (res[0] + (res[1] * 65536))
                except Exception as e:
                    pass
                if (res is not None):
                    if (valuetype == self.AMP):
                        res = round((res * 0.001), 3)
                    elif (valuetype == self.WATT):
                        res = round((res * 0.1), 1)
                self.busy = False
        return res

    def changeAddress(self, newaddress):
        """Write a new Modbus slave address (1-247) and reconnect."""
        if ((newaddress == 0) or (newaddress > 247)):
            return False
        res = False
        try:
            self.dev.write_register(2, newaddress, 0, 6)
            res = True
        except:
            res = False
        if res:
            self.address = newaddress
            self.connect()
        return res

    def resetenergy(self):
        """Issue the energy-reset command (0x42), trying both the new and the
        legacy minimalmodbus private command APIs."""
        res = False
        try:
            self.dev._perform_command(66, b'')
            res = True
        except Exception as e:
            res = False
        if (res == False):
            try:
                # Older minimalmodbus spelling of the same call.
                self.dev._performCommand(66, '')
                res = True
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('PZEM016 reset error: ' + str(e)))
                res = False
        return res
def export_speakers_csv(speakers):
    """Serialise speaker records into CSV-ready rows.

    :param speakers: iterable of speaker objects with the attributes listed
        in the header row
    :return: list of rows; the first row is the header
    """
    headers = ['Speaker Name', 'Speaker Email', 'Speaker Session(s)', 'Speaker Mobile', 'Speaker Bio', 'Speaker Organisation', 'Speaker Position', 'Speaker Experience', 'Speaker Sponsorship Required', 'Speaker City', 'Speaker Country', 'Speaker Website', 'Speaker Twitter', 'Speaker Facebook', 'Speaker Github', 'Speaker LinkedIn']
    rows = [headers]
    for speaker in speakers:
        row = [speaker.name or '', speaker.email or '']
        if speaker.sessions:
            # One "Title (state)" entry per non-deleted session, joined by "; ".
            live_sessions = [(((session.title + ' (') + session.state) + ')') for session in speaker.sessions if not session.deleted_at]
            row.append('; '.join(live_sessions))
        else:
            row.append('')
        # Remaining columns: pass the attribute through, blanking falsy values.
        for attr in ('mobile', 'short_biography', 'organisation', 'position', 'speaking_experience', 'sponsorship_required', 'city', 'country', 'website', 'twitter', 'facebook', 'github', 'linkedin'):
            row.append(getattr(speaker, attr) or '')
        rows.append(row)
    return rows
(scope='function')
# NOTE(review): the bare (scope='function') expression above looks like the
# argument of a stripped @pytest.fixture(scope='function') decorator --
# confirm against version control.
def redshift_connection_config(db: Session) -> Generator:
    """Yield a Redshift ConnectionConfig for integration tests.

    Connection secrets are taken from the integration config when present,
    falling back to REDSHIFT_TEST_* environment variables; they are attached
    only when every value is available. The config row is deleted on teardown.
    """
    connection_config = ConnectionConfig.create(db=db, data={'name': str(uuid4()), 'key': 'my_redshift_config', 'connection_type': ConnectionType.redshift, 'access': AccessLevel.write})
    host = (integration_config.get('redshift', {}).get('host') or os.environ.get('REDSHIFT_TEST_HOST'))
    port = (integration_config.get('redshift', {}).get('port') or os.environ.get('REDSHIFT_TEST_PORT'))
    user = (integration_config.get('redshift', {}).get('user') or os.environ.get('REDSHIFT_TEST_USER'))
    password = (integration_config.get('redshift', {}).get('password') or os.environ.get('REDSHIFT_TEST_PASSWORD'))
    database = (integration_config.get('redshift', {}).get('database') or os.environ.get('REDSHIFT_TEST_DATABASE'))
    db_schema = (integration_config.get('redshift', {}).get('db_schema') or os.environ.get('REDSHIFT_TEST_DB_SCHEMA'))
    if all([host, port, user, password, database, db_schema]):
        # Only attach secrets when the full set of values is configured.
        schema = RedshiftSchema(host=host, port=(int(port) if (port and port.isdigit()) else None), user=user, password=password, database=database, db_schema=db_schema)
        connection_config.secrets = schema.dict()
        connection_config.save(db=db)
    (yield connection_config)
    # Teardown: remove the config row created above.
    connection_config.delete(db)
def find_omorfi(large_coverage=False) -> str:
    """Locate an installed omorfi HFST automaton and return its path.

    Searches system-wide install locations, ``$HOME/.local/omorfi``, and a
    few paths under the current working directory.

    :param large_coverage: pick ``omorfi.describe.hfst`` instead of the
        default ``omorfi.analyse.hfst``
    :raises FileNotFoundError: when no candidate file exists
    """
    candidates = ['/usr/local/share/omorfi/', '/usr/share/omorfi/']
    home = os.getenv('HOME')
    if home:
        candidates.append(home + '/.local/omorfi/')
    cwd = os.getcwd()
    if cwd:
        candidates.extend([cwd + '/src/generated/', cwd + '/generated/', cwd + '/'])
    basename = 'omorfi.describe.hfst' if large_coverage else 'omorfi.analyse.hfst'
    for directory in candidates:
        candidate = (directory + '/') + basename
        if os.path.isfile(candidate):
            return candidate
    raise FileNotFoundError()
class CheckGradientAction(argparse.Action):
    """argparse action that accepts a plain color or an rgbgradient spec.

    On success the value is stored on the namespace under the upper-cased
    dest; otherwise an ``argparse.ArgumentError`` is raised.
    """

    def __call__(self, parser, namespace, value, option_string=None):
        # Plain colors are accepted as-is.
        if is_color(value):
            setattr(namespace, self.dest.upper(), value)
            return
        # Anything that does not even look like a parameter string is rejected.
        if not REGEXP_PARAM_STRING.match(value):
            raise argparse.ArgumentError(self, ("not a valid color or rgbgradient: '%s'" % value))
        is_valid, reason = is_rgbgradient(value)
        if not is_valid:
            raise argparse.ArgumentError(self, ('%s' % reason))
        setattr(namespace, self.dest.upper(), value)
def convert_partial_iso_format_to_full_iso_format(partial_iso_format_time: str) -> str:
    """Normalise a (possibly partial) ISO-8601 timestamp string.

    Parses the input, attaches a timezone (the parsed one when named,
    otherwise UTC), strips microseconds, and returns the full ISO string.
    On parse failure the original string is returned unchanged and the
    error is logged.
    """
    try:
        date = datetime.fromisoformat(partial_iso_format_time)
        # %Z renders as '' for naive datetimes; default those to UTC.
        time_zone_name = date.strftime('%Z')
        time_zone = (tz.gettz(time_zone_name) if time_zone_name else tz.UTC)
        date_with_timezone = date.replace(tzinfo=time_zone, microsecond=0)
        return date_with_timezone.isoformat()
    except ValueError:
        # Fixed typo in the log message: "covert" -> "convert".
        logger.exception(f'Failed to convert time string: "{partial_iso_format_time}" to ISO format')
        return partial_iso_format_time
def check_template(slug: str, tests_path: Path, tmpfile: Path):
    """Compare a freshly rendered test file against the committed one.

    A byte-for-byte match passes immediately; otherwise the files are
    diffed ignoring the first three (header) lines, which may legitimately
    differ. The tmp file is always removed before returning.

    :return: True when the files match, False otherwise
    """
    try:
        check_ok = True
        if (not tmpfile.is_file()):
            logger.debug(f'{slug}: tmp file {tmpfile} not found')
            check_ok = False
        if (not tests_path.is_file()):
            logger.debug(f'{slug}: tests file {tests_path} not found')
            check_ok = False
        if (check_ok and (not filecmp.cmp(tmpfile, tests_path))):
            # Byte comparison failed: fall back to a line diff that skips the
            # three-line generated header.
            with tests_path.open() as f:
                current_lines = f.readlines()[3:]
            with tmpfile.open() as f:
                rendered_lines = f.readlines()[3:]
            diff = list(difflib.unified_diff(current_lines, rendered_lines, fromfile=f'[current] {tests_path.name}', tofile=f'[generated] {tmpfile.name}', lineterm='\n'))
            if (not diff):
                check_ok = True
            else:
                logger.debug(f'{slug}: ##### DIFF START #####')
                for line in diff:
                    logger.debug(line.strip())
                logger.debug(f'{slug}: ##### DIFF END #####')
                check_ok = False
        if (not check_ok):
            logger.error(f'{slug}: check failed; tests must be regenerated with bin/generate_tests.py')
            return False
        logger.debug(f'{slug}: check passed')
    finally:
        logger.debug(f'{slug}: removing tmp file {tmpfile}')
        # BUG FIX: the unconditional unlink() raised FileNotFoundError from
        # this finally block when tmpfile never existed, masking the
        # function's return value with an exception.
        if tmpfile.is_file():
            tmpfile.unlink()
    return True
def main() -> None:
    """CLI entry point: parse GPTtrace arguments and dispatch to `cmd` or `execute`."""
    parser = argparse.ArgumentParser(prog='GPTtrace', description='Use ChatGPT to write eBPF programs (bpftrace, etc.)')
    parser.add_argument('-c', '--cmd', help='Use the bcc tool to complete the trace task', nargs=2, metavar=('CMD_NAME', 'QUERY'))
    parser.add_argument('-v', '--verbose', help='Show more details', action='store_true')
    parser.add_argument('-k', '--key', help='Openai api key, see ` or passed through `OPENAI_API_KEY`', metavar='OPENAI_API_KEY')
    parser.add_argument('input_string', type=str, help='Your question or request for a bpf program')
    args = parser.parse_args()
    # getenv falls back to args.key, so the environment variable wins over -k
    # when both are provided.
    if (os.getenv('OPENAI_API_KEY', args.key) is None):
        # NOTE(review): OPENAI_API_KEY inside this f-string must be a
        # module-level constant for this line to work; if it is not defined
        # elsewhere in the file this raises NameError -- confirm.
        print(f'Either provide your access token through `-k` or through environment variable {OPENAI_API_KEY}')
        return
    if (args.cmd is not None):
        cmd(args.cmd[0], args.cmd[1], args.verbose)
    elif (args.input_string is not None):
        # input_string is a required positional, so this branch is always
        # taken when --cmd is absent; the final else is effectively dead.
        execute(args.input_string, args.verbose)
    else:
        parser.print_help()
def lazy_import():
    """Import the Fastly response models on first use and publish each one in
    this module's globals under its own class name (deferred to avoid import
    cycles at module load time)."""
    from fastly.model.backend_response import BackendResponse
    from fastly.model.cache_setting_response import CacheSettingResponse
    from fastly.model.condition_response import ConditionResponse
    from fastly.model.director import Director
    from fastly.model.domain_response import DomainResponse
    from fastly.model.gzip_response import GzipResponse
    from fastly.model.header_response import HeaderResponse
    from fastly.model.healthcheck_response import HealthcheckResponse
    from fastly.model.request_settings_response import RequestSettingsResponse
    from fastly.model.response_object_response import ResponseObjectResponse
    from fastly.model.schemas_snippet_response import SchemasSnippetResponse
    from fastly.model.schemas_vcl_response import SchemasVclResponse
    from fastly.model.version_detail_settings import VersionDetailSettings
    # Each class is registered under its __name__, which matches the literal
    # keys used previously.
    for model in (BackendResponse, CacheSettingResponse, ConditionResponse, Director, DomainResponse, GzipResponse, HeaderResponse, HealthcheckResponse, RequestSettingsResponse, ResponseObjectResponse, SchemasSnippetResponse, SchemasVclResponse, VersionDetailSettings):
        globals()[model.__name__] = model
class DownBlock(nn.Module):
    """Downsampling branch: log2(scale) stride-2 conv + LeakyReLU stages,
    followed by a stride-1 projection conv back to ``out_channels``.

    Defaults for feature width and channel counts come from ``opt``
    (``n_feats`` / ``n_colors``); ``opt.negval`` sets the LeakyReLU slope.
    """

    def __init__(self, opt, scale, n_feat=None, in_channels=None, out_channels=None):
        super(DownBlock, self).__init__()
        slope = opt.negval
        n_feat = opt.n_feats if n_feat is None else n_feat
        in_channels = opt.n_colors if in_channels is None else in_channels
        out_channels = opt.n_colors if out_channels is None else out_channels
        # First stage maps the input channels up to the feature width.
        stages = [nn.Sequential(nn.Conv2d(in_channels, n_feat, kernel_size=3, stride=2, padding=1, bias=False), nn.LeakyReLU(negative_slope=slope, inplace=True))]
        # One extra stride-2 stage per additional factor of two in `scale`.
        for _ in range(int(np.log2(scale)) - 1):
            stages.append(nn.Sequential(nn.Conv2d(n_feat, n_feat, kernel_size=3, stride=2, padding=1, bias=False), nn.LeakyReLU(negative_slope=slope, inplace=True)))
        # Final stride-1 conv projects back to the requested channel count.
        stages.append(nn.Conv2d(n_feat, out_channels, kernel_size=3, stride=1, padding=1, bias=False))
        self.dual_module = nn.Sequential(*stages)

    def forward(self, x):
        """Apply the downsampling stack to ``x``."""
        return self.dual_module(x)
class ShutdownSignalHandler():
    """Track SIGINT/SIGTERM for graceful shutdown.

    The first signals flip ``is_shutting_down``; once more than one signal
    has already been counted, a further signal force-exits the process.
    """

    def __init__(self):
        # Number of shutdown signals received so far.
        self.shutdown_count: int = 0
        signal.signal(signal.SIGINT, self._on_signal_received)
        signal.signal(signal.SIGTERM, self._on_signal_received)

    def is_shutting_down(self):
        """Return True once at least one shutdown signal has been received."""
        return (self.shutdown_count > 0)

    def _on_signal_received(self, signum, frame):
        # `signum` renamed from `signal`, which shadowed the signal module.
        # NOTE(review): the > 1 threshold means the hard exit happens on the
        # third signal, not the second -- confirm that is intended.
        if (self.shutdown_count > 1):
            # BUG FIX: logger.warn is deprecated; use logger.warning.
            logger.warning('Forcibly killing exporter')
            sys.exit(1)
        logger.info('Exporter is shutting down')
        self.shutdown_count += 1
class AccessManualWebhooks(FidesSchema):
    """Schema wrapping the editable field list of an access manual webhook.

    NOTE(review): the bare ('fields') expressions below look like the
    arguments of stripped pydantic @validator('fields') decorators --
    confirm against version control. The two validator methods are
    byte-for-byte duplicates; one is likely redundant.
    """
    # Ordered list of manual webhook field definitions.
    fields: ManualWebhookFieldsList
    class Config():
        # Allow construction directly from ORM objects.
        orm_mode = True
    ('fields')
    def check_for_duplicates(cls, value: List[ManualWebhookField]) -> List[ManualWebhookField]:
        """Require unique pii_fields, default missing dsr_package_labels from
        the snake-cased pii_field, then require unique labels as well."""
        unique_pii_fields: Set[str] = {field.pii_field for field in value}
        if (len(value) != len(unique_pii_fields)):
            raise ValueError('pii_fields must be unique')
        for field in value:
            if (not field.dsr_package_label):
                # Default the DSR package label from the PII field name.
                field.dsr_package_label = DSRLabelFieldType(to_snake_case(field.pii_field))
        unique_dsr_package_labels: Set[Optional[str]] = {field.dsr_package_label for field in value}
        if (len(value) != len(unique_dsr_package_labels)):
            raise ValueError('dsr_package_labels must be unique')
        return value
    ('fields')
    def fields_must_exist(cls, value: List[ManualWebhookField]) -> List[ManualWebhookField]:
        """Byte-for-byte duplicate of check_for_duplicates above; see class note."""
        unique_pii_fields: Set[str] = {field.pii_field for field in value}
        if (len(value) != len(unique_pii_fields)):
            raise ValueError('pii_fields must be unique')
        for field in value:
            if (not field.dsr_package_label):
                field.dsr_package_label = DSRLabelFieldType(to_snake_case(field.pii_field))
        unique_dsr_package_labels: Set[Optional[str]] = {field.dsr_package_label for field in value}
        if (len(value) != len(unique_dsr_package_labels)):
            raise ValueError('dsr_package_labels must be unique')
        return value
class TestTransactionManager():
    """Unit tests for the TransactionManager (transactional producer glue).

    NOTE(review): the bare `()` lines appear to be stripped `@pytest.fixture`
    decorators, and the bare `.asyncio` / `.skip(...)` lines stripped
    `@pytest.mark.asyncio` / `@pytest.mark.skip(...)` markers -- as written
    this block is not syntactically valid Python; confirm against history.
    """
    ()
    def consumer(self):
        # Fixture: bare consumer mock.
        return Mock(name='consumer', spec=Consumer)
    ()
    def producer(self):
        # Fixture: producer mock with async transactional entry points.
        return Mock(name='producer', spec=Producer, create_topic=AsyncMock(), stop_transaction=AsyncMock(), maybe_begin_transaction=AsyncMock(), commit_transactions=AsyncMock(), send=AsyncMock(), flush=AsyncMock())
    ()
    def transport(self, *, app):
        # Fixture: transport mock bound to the app.
        return Mock(name='transport', spec=Transport, app=app)
    ()
    def manager(self, *, consumer, producer, transport):
        # Fixture: the TransactionManager under test.
        return TransactionManager(transport, consumer=consumer, producer=producer)
    .asyncio
    async def test_on_partitions_revoked(self, *, manager):
        # Revoking partitions must flush pending transactional messages.
        manager.flush = AsyncMock()
        (await manager.on_partitions_revoked({TP1}))
        manager.flush.assert_called_once_with()
    .asyncio
    async def test_on_rebalance(self, *, manager):
        # Rebalance stops transactions for revoked TPs and starts them for
        # newly assigned ones, using "<app id>-<group>-<partition>" ids.
        TP3_group = 0
        TP2_group = 2
        manager.app.assignor._topic_groups = {TP3.topic: TP3_group, TP2.topic: TP2_group}
        assert (TP3.topic != TP2.topic)
        manager._stop_transactions = AsyncMock()
        manager._start_transactions = AsyncMock()
        assigned = {TP2}
        revoked = {TP3}
        newly_assigned = {TP2}
        (await manager.on_rebalance(assigned, revoked, newly_assigned))
        manager._stop_transactions.assert_called_once_with([f'{manager.app.conf.id}-{TP3_group}-{TP3.partition}'])
        manager._start_transactions.assert_called_once_with([f'{manager.app.conf.id}-{TP2_group}-{TP2.partition}'])
        (await manager.on_rebalance(set(), set(), set()))
    .skip('Needs fixing')
    .asyncio
    async def test__stop_transactions(self, *, manager, producer):
        tids = ['0-0', '1-0']
        manager._start_new_producer = AsyncMock()
        (await manager._stop_transactions(tids))
        producer.stop_transaction.assert_called()
        producer.stop_transaction.assert_called_once_with([call('0-0'), call('1-0')])
    .skip('Needs fixing')
    .asyncio
    async def test_start_transactions(self, *, manager, producer):
        tids = ['0-0', '1-0']
        manager._start_new_producer = AsyncMock()
        (await manager._start_transactions(tids))
        producer.maybe_begin_transaction.assert_has_calls([call('0-0'), call('1-0')])
    .asyncio
    async def test_send(self, *, manager, producer):
        # A resolved partition yields a derived transactional_id on send.
        manager.app.assignor._topic_groups = {'t': 3}
        manager.consumer.key_partition.return_value = 1
        (await manager.send('t', 'k', 'v', partition=None, headers=None, timestamp=None))
        manager.consumer.key_partition.assert_called_once_with('t', 'k', None)
        producer.send.assert_called_once_with('t', 'k', 'v', 1, None, None, transactional_id='testid-3-1')
    .asyncio
    async def test_send__topic_not_transactive(self, *, manager, producer):
        # No partition resolved -> message sent without a transactional_id.
        manager.app.assignor._topic_groups = {'t': 3}
        manager.consumer.key_partition.return_value = None
        (await manager.send('t', 'k', 'v', partition=None, headers=None, timestamp=None))
        manager.consumer.key_partition.assert_called_once_with('t', 'k', None)
        producer.send.assert_called_once_with('t', 'k', 'v', None, None, None, transactional_id=None)
    def test_send_soon(self, *, manager):
        # send_soon is unsupported on the transactional manager.
        with pytest.raises(NotImplementedError):
            manager.send_soon(Mock(name='FutureMessage'))
    .asyncio
    async def test_send_and_wait(self, *, manager):
        # send_and_wait delegates to send with identical arguments.
        on_send = Mock()
        async def send(*args, **kwargs):
            on_send(*args, **kwargs)
            return done_future()
        manager.send = send
        (await manager.send_and_wait('t', 'k', 'v', 3, 43.2, {}))
        on_send.assert_called_once_with('t', 'k', 'v', 3, 43.2, {})
    .asyncio
    async def test_commit(self, *, manager, producer):
        # Offsets are grouped per transactional id before committing.
        manager.app.assignor._topic_groups = {'foo': 1, 'bar': 2}
        (await manager.commit({TP('foo', 0): 3003, TP('bar', 0): 3004, TP('foo', 3): 4004, TP('foo', 1): 4005}, start_new_transaction=False))
        producer.commit_transactions.assert_called_once_with({'testid-1-0': {TP('foo', 0): 3003}, 'testid-1-3': {TP('foo', 3): 4004}, 'testid-1-1': {TP('foo', 1): 4005}, 'testid-2-0': {TP('bar', 0): 3004}}, 'testid', start_new_transaction=False)
    .asyncio
    async def test_commit__empty(self, *, manager):
        # Committing an empty offset map is a no-op that must not raise.
        (await manager.commit({}, start_new_transaction=False))
    def test_key_partition(self, *, manager):
        # key_partition is unsupported on the transactional manager.
        with pytest.raises(NotImplementedError):
            manager.key_partition('topic', 'key')
    .asyncio
    async def test_flush(self, *, manager, producer):
        (await manager.flush())
        producer.flush.assert_called_once_with()
    .asyncio
    async def test_create_topic(self, *, manager):
        # create_topic passes through to the producer unchanged.
        (await manager.create_topic(topic='topic', partitions=100, replication=3, config={'C': 1}, timeout=30.0, retention=40.0, compacting=True, deleting=True, ensure_created=True))
        manager.producer.create_topic.assert_called_once_with('topic', 100, 3, config={'C': 1}, timeout=30.0, retention=40.0, compacting=True, deleting=True, ensure_created=True)
    def test_supports_headers(self, *, manager):
        # Header support is delegated to the producer.
        ret = manager.supports_headers()
        assert (ret is manager.producer.supports_headers.return_value)
class THBEventDispatcher(EventDispatcher):
    """Event dispatcher for a THBattle game instance."""
    # The game this dispatcher serves.
    game: 'THBattle'

    def populate_handlers(self) -> List[EventHandler]:
        """Collect handler classes from the common set, the game itself, and
        every player, then materialise them via EventHandler.make_list."""
        from thb.actions import COMMON_EVENT_HANDLERS
        game = self.game
        handler_classes = list(COMMON_EVENT_HANDLERS)
        handler_classes.extend(game.game_ehs)
        # `players` may not exist yet (e.g. before game start); default to ().
        for player in getattr(game, 'players', ()):
            handler_classes.extend(player.eventhandlers)
        return EventHandler.make_list(game, handler_classes)
def lincomb(numbers, factors, adder=(lambda x, y: (x + y)), zero=0):
    """Compute sum(factors[i] * numbers[i]) over an arbitrary group.

    Uses a double-and-add scheme over the bit decomposition of the factors:
    for every bit position j the subset of indices whose factor has bit j
    set is summed once (batched through multisubset2), then the partial sums
    are combined from the most significant bit down, doubling the
    accumulator at each step.

    :param numbers: group elements to combine
    :param factors: non-negative integer scalars, one per element
    :param adder: group addition (defaults to +)
    :param zero: group identity element
    """
    # Widest factor in bits (len(bin(f)) - 2 strips the '0b' prefix).
    maxbitlen = max(((len(bin(f)) - 2) for f in factors))
    # subsets[j] = indices whose factor has bit j set; the extra top entry
    # produced by range(maxbitlen + 1) is always empty and harmless.
    subsets = [{i for i in range(len(numbers)) if (factors[i] & (1 << j))} for j in range((maxbitlen + 1))]
    # Batched subset sums; multisubset2 is expected to return one group sum
    # per subset, in order -- TODO confirm its contract.
    subset_sums = multisubset2(numbers, subsets, adder=adder, zero=zero)
    # Horner-style accumulation: double (o + o), then add the subset sum for
    # the next lower bit.
    o = zero
    for i in range((len(subsets) - 1), (- 1), (- 1)):
        o = adder(adder(o, o), subset_sums[i])
    return o
class Components():
    """Bootstrap 5 component factory bound to a page.

    Registers the Bootstrap JS/CSS/icon packages on the page and exposes
    shortcut attributes plus factory methods for individual widgets.

    NOTE(review): the accessor methods below (icons, images, fields, ...)
    are used attribute-style in __init__ (e.g. ``self.lists.select``), which
    only works if they were originally decorated with @property -- the
    decorators appear to have been stripped; confirm against history.
    """
    def __init__(self, page: primitives.PageModel):
        self.page = page
        if (self.page.ext_packages is None):
            self.page.ext_packages = {}
        # Register the Bootstrap packages and pin the framework version.
        self.page.icons.add('bootstrap-icons', PkgImports.BOOTSTRAP)
        self.page.imports.pkgs.bootstrap.version = '5.1.0'
        self.page.jsImports.add('bootstrap')
        self.page.cssImport.add('bootstrap')
        # Convenience aliases onto the grouped component factories.
        self.select = self.lists.select
        self.slider = self.sliders.slider
        self.button = self.buttons.button
        self.check = self.buttons.check
        self.toggle = self.buttons.toggle
        self.icon = self.icons.icon
        self.table = self.tables.basic
        self.grid = self.layouts.grid
        self.row = self.layouts.row
        self.col = self.layouts.col
        self.div = self.layouts.container
    def date(self, value: str=None, width: types.SIZE_TYPE=(None, 'px'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, profile: types.PROFILE_TYPE=None, options: dict=None) -> HtmlBsDate.BsDatePicker:
        """Build a date-only picker; defaults to today when no value given."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        datepicker = HtmlBsDate.BsDatePicker(self.page, None, html_code, (options or {}), profile, {'width': width, 'height': height})
        datepicker.options.formats.date_only()
        if (value is not None):
            datepicker.options.date = self.page.js.moment.new(value)
        else:
            datepicker.options.date = self.page.js.moment.now()
        datepicker.options.buttons.showToday = True
        datepicker.options.buttons.showClose = True
        return datepicker
    def time(self, hour: int=None, minute: int=0, second: int=0, width: types.SIZE_TYPE=(None, 'px'), height: types.SIZE_TYPE=(None, 'px'), html_code: str=None, profile: types.PROFILE_TYPE=None, options=None) -> HtmlBsDate.BsDatePicker:
        """Build a time-only picker; defaults to the current time when no hour given."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        timepicker = HtmlBsDate.BsDatePicker(self.page, None, html_code, (options or {}), profile, {'width': width, 'height': height})
        if (hour is not None):
            timepicker.options.date = self.page.js.moment.time(hour, minute, second)
        else:
            timepicker.options.date = self.page.js.moment.now()
        timepicker.options.formats.time_only()
        return timepicker
    def loading(self, text: str='Loading...', width: types.SIZE_TYPE=(None, '%'), height: types.SIZE_TYPE=(None, '%'), category=None, options: dict=None, profile: types.PROFILE_TYPE=None):
        """Build a Bootstrap spinner with an (optionally hidden) status label."""
        options = (options or {})
        component = self.page.web.std.div(width=width, height=height, profile=profile)
        # options['kind'] selects the spinner flavour ('border' or 'grow').
        component.attr['class'].initialise([('spinner-%s' % options.get('kind', 'border'))])
        if (category is not None):
            component.attr['class'].add(('text-%s' % category))
        component.attr['role'] = 'status'
        component.span = self.page.web.std.texts.span(text)
        if options.get('visible', False):
            component.span.attr['class'].clear()
        else:
            # Keep the label for screen readers only.
            component.span.attr['class'].initialise(['visually-hidden'])
        return component
    # --- Grouped component factories (presumably @property accessors; see class note) ---
    def icons(self) -> groups.BsCompIcons.Components:
        """Icon components (pulls in the bootstrap-icons CSS)."""
        self.page.cssImport.add('bootstrap-icons')
        return groups.BsCompIcons.Components(self)
    def images(self) -> groups.BsCompImages.Components:
        return groups.BsCompImages.Components(self)
    def fields(self) -> groups.BsCompFields.Components:
        return groups.BsCompFields.Components(self)
    def texts(self) -> groups.BsCompFields.TextComponents:
        return groups.BsCompFields.TextComponents(self)
    def tables(self) -> groups.BsCompTables.Components:
        return groups.BsCompTables.Components(self)
    def lists(self) -> groups.BsCompLists.Components:
        return groups.BsCompLists.Components(self)
    def buttons(self) -> groups.BsCompBtns.Components:
        return groups.BsCompBtns.Components(self)
    def toasts(self):
        return groups.BsCompToasts.Components(self)
    def sliders(self):
        return groups.BsCompSliders.Components(self)
    def inputs(self) -> groups.BsCompInputs.Components:
        return groups.BsCompInputs.Components(self)
    def alerts(self) -> groups.BsCompAlerts.Components:
        return groups.BsCompAlerts.Components(self)
    def modals(self) -> groups.BsCompModals.Components:
        return groups.BsCompModals.Components(self)
    def offcanvas(self) -> groups.BsCompModals.OffComponents:
        return groups.BsCompModals.OffComponents(self)
    def navbars(self) -> groups.BsCompNavs.Components:
        return groups.BsCompNavs.Components(self)
    def panels(self) -> groups.BsCompPanels.Components:
        return groups.BsCompPanels.Components(self)
    def layouts(self) -> groups.BsCompLayouts.Components:
        return groups.BsCompLayouts.Components(self)
    def accordion(self, values=None, html_code: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, '%'), profile: types.PROFILE_TYPE=None, options: dict=None) -> HtmlBsWidgets.BsAccordion:
        """Build an accordion; `values` maps section titles to contents."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        component = HtmlBsWidgets.BsAccordion(self.page, None, html_code, (options or {}), profile, {'width': width, 'height': height})
        if (values is not None):
            # Sections are added in reverse so they render in input order --
            # TODO confirm against BsAccordion.add_section insertion behaviour.
            for (k, v) in reversed(list(values.items())):
                component.add_section(k, v)
        return component
    def breadcrumb(self, values: list=None, active: str=None, html_code: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, '%'), profile: types.PROFILE_TYPE=None, options: dict=None) -> HtmlBsWidgets.BsBreadcrumb:
        """Build a breadcrumb; the entry equal to `active` is marked active."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        component = HtmlBsWidgets.BsBreadcrumb(self.page, None, html_code, (options or {}), profile, {'width': width, 'height': height})
        if (values is not None):
            for v in values:
                component.add_section(v, active=(v == active))
        return component
    def offcanva(self, values: list=None, position: str='start', html_code: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, '%'), profile: types.PROFILE_TYPE=None, options: dict=None) -> HtmlBsWidgets.BsOffCanvas:
        """Build an offcanvas panel anchored at `position` with a dismiss button."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        component = HtmlBsWidgets.BsOffCanvas(self.page, None, html_code, (options or {}), profile, {'width': width, 'height': height})
        component.add_style([('offcanvas-%s' % position)])
        component.attr['aria-labelledby'] = 'offcanvasLabel'
        component.attr['tabindex'] = '-1'
        component.add_to_header(self.page.web.bs.offcanvas.dismiss(component))
        if (values is not None):
            for v in values:
                component.add_to_body(v)
        return component
    def modal(self, values: dict=None, html_code: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, '%'), profile: types.PROFILE_TYPE=None, options: dict=None) -> HtmlBsWidgets.BsModal:
        """Build a fading modal whose body is filled from `values`."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        component = HtmlBsWidgets.BsModal(self.page, None, html_code, (options or {}), profile, {'width': width, 'height': height})
        if (values is not None):
            for v in values:
                component.add_to_body(v)
        component.attr['tabindex'] = '-1'
        component.options.fade = True
        return component
    def navbar(self, values=None, html_code: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, '%'), profile: types.PROFILE_TYPE=None, options: dict=None) -> HtmlBsWidgets.BsNavBar:
        """Build a navbar; body entries come from `values`.

        NOTE(review): the offcanvas-style attributes and dismiss header below
        mirror `offcanva` -- possibly copy/paste; confirm intended.
        """
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        component = HtmlBsWidgets.BsNavBar(self.page, None, html_code, (options or {}), profile, {'width': width, 'height': height})
        component.attr['aria-labelledby'] = 'offcanvasLabel'
        component.attr['tabindex'] = '-1'
        component.add_to_header(self.page.web.bs.offcanvas.dismiss(component))
        if (values is not None):
            for v in values:
                component.add_to_body(v)
        return component
    def scrollspy(self, values=None, html_code: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, '%'), profile: types.PROFILE_TYPE=None, options: dict=None):
        """Not implemented yet."""
        pass
    def toast(self, values: List[primitives.HtmlModel]=None, html_code: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, '%'), profile: types.PROFILE_TYPE=None, options: dict=None) -> HtmlBsWidgets.BsToast:
        """Build an assertive, atomic toast whose body is filled from `values`."""
        width = Arguments.size(width, unit='px')
        height = Arguments.size(height, unit='px')
        component = HtmlBsWidgets.BsToast(self.page, None, html_code, (options or {}), profile, {'width': width, 'height': height})
        component.attr['role'] = 'alert'
        component.aria.live = 'assertive'
        component.aria.atomic = 'true'
        if (values is not None):
            for v in values:
                component.add_to_body(v)
        return component
def test_defaults_legacy():
    """htmlfill.render with force_defaults=True must overwrite every
    pre-filled value of the field, not just empty ones."""
    source_html = '\n<input type="text" name="foo" value="bar" />\n<input type="text" name="foo" value="biz" />\n<input type="text" name="foo" value="bash" />\n'
    expected = '\n<input type="text" name="foo" value="bang" />\n<input type="text" name="foo" value="bang" />\n<input type="text" name="foo" value="bang" />\n'
    result = htmlfill.render(source_html, defaults={'foo': 'bang'}, force_defaults=True)
    assert result == expected
def main():
    """Emit a Verilog design fuzzing 7-series clock routing.

    Instantiates MMCM/PLL clock sources, BUFHCE/BUFR/BUFIO buffers,
    IDELAYCTRLs and ISERDESE2s with randomized clock connections, honouring
    per-clock-region HCLK and CMT routing limits. The module is printed to
    stdout.
    """
    global_clock_sources = ClockSources()
    cmt_clock_sources = ClockSources()
    cmt_fast_clock_sources = ClockSources(4)
    bufr_clock_sources = ClockSources()
    bufio_clock_sources = ClockSources()  # NOTE(review): currently unused
    site_to_cmt = dict(read_site_to_cmt())
    clock_region_limit = dict()
    clock_region_serdes_location = dict()
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    def gen_sites(desired_site_type):
        # Yield (tile, site) pairs for every site of the requested type.
        for tile_name in sorted(grid.tiles()):
            loc = grid.loc_of_tilename(tile_name)
            gridinfo = grid.gridinfo_at_loc(loc)
            for (site, site_type) in gridinfo.sites.items():
                if (site_type == desired_site_type):
                    (yield (tile_name, site))
    def serdes_relative_location(tile, site):
        # Classify the site as TOP or BOTTOM relative to the HCLK_IOI3 row
        # of its own clock region.
        (serdes_loc_x, serdes_loc_y) = grid.loc_of_tilename(tile)
        serdes_clk_reg = site_to_cmt[site]
        for tile_name in sorted(grid.tiles()):
            if ('HCLK_IOI3' in tile_name):
                (hclk_tile_loc_x, hclk_tile_loc_y) = grid.loc_of_tilename(tile_name)
                if (hclk_tile_loc_x == serdes_loc_x):
                    gridinfo = grid.gridinfo_at_loc((hclk_tile_loc_x, hclk_tile_loc_y))
                    random_site = next(iter(gridinfo.sites.keys()))
                    hclk_clk_reg = site_to_cmt[random_site]
                    if (hclk_clk_reg == serdes_clk_reg):
                        if (serdes_loc_y < hclk_tile_loc_y):
                            return 'TOP'
                        elif (serdes_loc_y > hclk_tile_loc_y):
                            return 'BOTTOM'
                        else:
                            assert False
    clock_region_sites = set()
    def get_clock_region_site(site_type, clk_reg):
        # Pick a not-yet-used site of the given type within the clock region.
        for (site_name, reg) in site_to_cmt.items():
            if (site_name.startswith(site_type) and (reg in clk_reg)):
                if (site_name not in clock_region_sites):
                    clock_region_sites.add(site_name)
                    return site_name
    print('\nmodule top();\n    (* KEEP, DONT_TOUCH *)\n    LUT6 dummy();\n    ')
    luts = LutMaker()
    bufs = StringIO()
    # MMCMs: outputs 0-3 are "fast" CMT clocks, the rest regular CMT clocks.
    for (_, site) in gen_sites('MMCME2_ADV'):
        mmcm_clocks = ['mmcm_clock_{site}_{idx}'.format(site=site, idx=idx) for idx in range(13)]
        for (idx, clk) in enumerate(mmcm_clocks):
            if (idx < 4):
                cmt_fast_clock_sources.add_clock_source(clk, site_to_cmt[site])
            else:
                cmt_clock_sources.add_clock_source(clk, site_to_cmt[site])
        print('\n    wire cin1_{site}, cin2_{site}, clkfbin_{site}, {c0}, {c1}, {c2}, {c3}, {c4}, {c5};\n    (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n    MMCME2_ADV pll_{site} (\n        .CLKIN1(cin1_{site}),\n        .CLKIN2(cin2_{site}),\n        .CLKFBIN(clkfbin_{site}),\n        .CLKOUT0({c0}),\n        .CLKOUT0B({c4}),\n        .CLKOUT1({c1}),\n        .CLKOUT1B({c5}),\n        .CLKOUT2({c2}),\n        .CLKOUT2B({c6}),\n        .CLKOUT3({c3}),\n        .CLKOUT3B({c7}),\n        .CLKOUT4({c8}),\n        .CLKOUT5({c9}),\n        .CLKOUT6({c10}),\n        .CLKFBOUT({c11}),\n        .CLKFBOUTB({c12})\n    );\n    '.format(site=site, c0=mmcm_clocks[0], c1=mmcm_clocks[1], c2=mmcm_clocks[2], c3=mmcm_clocks[3], c4=mmcm_clocks[4], c5=mmcm_clocks[5], c6=mmcm_clocks[6], c7=mmcm_clocks[7], c8=mmcm_clocks[8], c9=mmcm_clocks[9], c10=mmcm_clocks[10], c11=mmcm_clocks[11], c12=mmcm_clocks[12]))
    for (_, site) in gen_sites('PLLE2_ADV'):
        pll_clocks = ['pll_clock_{site}_{idx}'.format(site=site, idx=idx) for idx in range(7)]
        for clk in pll_clocks:
            cmt_clock_sources.add_clock_source(clk, site_to_cmt[site])
        print('\n    wire cin1_{site}, cin2_{site}, clkfbin_{site}, {c0}, {c1}, {c2}, {c3}, {c4}, {c5}, {c6};\n    (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n    PLLE2_ADV pll_{site} (\n        .CLKIN1(cin1_{site}),\n        .CLKIN2(cin2_{site}),\n        .CLKFBIN(clkfbin_{site}),\n        .CLKOUT0({c0}),\n        .CLKOUT1({c1}),\n        .CLKOUT2({c2}),\n        .CLKOUT3({c3}),\n        .CLKOUT4({c4}),\n        .CLKOUT5({c5}),\n        .CLKFBOUT({c6})\n    );\n    '.format(site=site, c0=pll_clocks[0], c1=pll_clocks[1], c2=pll_clocks[2], c3=pll_clocks[3], c4=pll_clocks[4], c5=pll_clocks[5], c6=pll_clocks[6]))
    for (tile_name, site) in gen_sites('BUFHCE'):
        print('\n    wire I_{site};\n    wire O_{site};\n    (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n    BUFHCE buf_{site} (\n        .I(I_{site}),\n        .O(O_{site})\n    );'.format(site=site), file=bufs)
        global_clock_sources.add_clock_source('O_{site}'.format(site=site), site_to_cmt[site])
    # At most 12 distinct HCLK sources may be used per clock region.
    hclks_used_by_clock_region = {}
    for cmt in site_to_cmt.values():
        hclks_used_by_clock_region[cmt] = set()
    def check_hclk_src(src, src_cmt):
        if ((len(hclks_used_by_clock_region[src_cmt]) >= 12) and (src not in hclks_used_by_clock_region[src_cmt])):
            return None
        else:
            hclks_used_by_clock_region[src_cmt].add(src)
            return src
    # At most 4 CMT fast clocks may be used per clock region.
    cmt_clks_used_by_clock_region = {}
    for cmt in site_to_cmt.values():
        cmt_clks_used_by_clock_region[cmt] = list()
    def check_cmt_clk_src(src, src_clock_region):
        print('//src: {}, clk_reg: {}, len {}'.format(src, src_clock_region, len(cmt_clks_used_by_clock_region[src_clock_region])))
        if (len(cmt_clks_used_by_clock_region[src_clock_region]) >= 4):
            return None
        else:
            cmt_clks_used_by_clock_region[src_clock_region].append(src)
            return src
    # Randomly instantiate IDELAYCTRLs; regions with one lose an HCLK slot.
    idelayctrl_in_clock_region = {}
    for cmt in site_to_cmt.values():
        idelayctrl_in_clock_region[cmt] = False
    for (_, site) in gen_sites('IDELAYCTRL'):
        if (random.random() < 0.5):
            wire_name = global_clock_sources.get_random_source(site_to_cmt[site], no_repeats=False)
            if (wire_name is None):
                continue
            src_cmt = global_clock_sources.source_to_cmt[wire_name]
            wire_name = check_hclk_src(wire_name, src_cmt)
            if (wire_name is None):
                continue
            idelayctrl_in_clock_region[src_cmt] = True
            print('\n    assign I_{site} = {clock_source};\n    (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n    IDELAYCTRL idelay_ctrl_{site} (\n        .RDY(),\n        .REFCLK(I_{site}),\n        .RST()\n    );'.format(site=site, clock_source=wire_name))
    for (tile, site) in gen_sites('ILOGICE3'):
        wire_name = None
        clock_region = site_to_cmt[site]
        if (clock_region not in clock_region_limit):
            # Decide once per region how many SERDES to allow and on which
            # side of the HCLK row they may sit.
            serdes_location = random.choice(['TOP', 'BOTTOM', 'ANY'])
            if (serdes_location in 'ANY'):
                if idelayctrl_in_clock_region[clock_region]:
                    clock_region_limit[clock_region] = (0 if (random.random() < 0.2) else 11)
                else:
                    clock_region_limit[clock_region] = (0 if (random.random() < 0.2) else 12)
            elif idelayctrl_in_clock_region[clock_region]:
                clock_region_limit[clock_region] = (0 if (random.random() < 0.2) else 5)
            else:
                clock_region_limit[clock_region] = (0 if (random.random() < 0.2) else 6)
            clock_region_serdes_location[clock_region] = serdes_location
        if (clock_region_limit[clock_region] == 0):
            continue
        if ((clock_region_serdes_location[clock_region] not in 'ANY') and (serdes_relative_location(tile, site) != clock_region_serdes_location[clock_region])):
            continue
        if (random.random() > 0.1):
            # Usual case: clock the SERDES from a global (BUFHCE) source.
            wire_name = global_clock_sources.get_random_source(site_to_cmt[site], no_repeats=True)
            if (wire_name is None):
                continue
            src_cmt = global_clock_sources.source_to_cmt[wire_name]
            wire_name = check_hclk_src(wire_name, src_cmt)
            if (wire_name is None):
                print('//wire is None')
                continue
            clock_region_limit[clock_region] -= 1
            print('\n    assign serdes_clk_{site} = {clock_source};'.format(site=site, clock_source=wire_name))
        else:
            # Rare case: clock the SERDES through a BUFIO fed by a fast CMT clock.
            wire_name = cmt_fast_clock_sources.get_random_source(site_to_cmt[site], no_repeats=False)
            if (wire_name is None):
                continue
            src_cmt = cmt_fast_clock_sources.source_to_cmt[wire_name]
            wire_name = check_cmt_clk_src(wire_name, src_cmt)
            if (wire_name is None):
                continue
            bufio_site = get_clock_region_site('BUFIO', clock_region)
            if (bufio_site is None):
                continue
            print('\n    assign serdes_clk_{serdes_loc} = O_{site};\n    assign I_{site} = {clock_source};\n    (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n    BUFIO bufio_{site} (\n        .O(O_{site}),\n        .I(I_{site})\n    );'.format(site=bufio_site, clock_source=wire_name, serdes_loc=site))
        print('// clock_region: {} {}'.format(clock_region, clock_region_serdes_location[clock_region]))
        print('\n    (* KEEP, DONT_TOUCH, LOC = "{loc}" *)\n    ISERDESE2 #(\n        .DATA_RATE("SDR"),\n        .DATA_WIDTH(4),\n        .DYN_CLKDIV_INV_EN("FALSE"),\n        .DYN_CLK_INV_EN("FALSE"),\n        .INIT_Q1(1\'b0),\n        .INIT_Q2(1\'b0),\n        .INIT_Q3(1\'b0),\n        .INIT_Q4(1\'b0),\n        .INTERFACE_TYPE("OVERSAMPLE"),\n        .IOBDELAY("NONE"),\n        .NUM_CE(2),\n        .OFB_USED("FALSE"),\n        .SERDES_MODE("MASTER"),\n        .SRVAL_Q1(1\'b0),\n        .SRVAL_Q2(1\'b0),\n        .SRVAL_Q3(1\'b0),\n        .SRVAL_Q4(1\'b0)\n    )\n    ISERDESE2_inst_{loc} (\n        .CLK(serdes_clk_{loc}),\n        .CLKB(),\n        .CLKDIV(),\n        .D(1\'b0),\n        .DDLY(),\n        .OFB(),\n        .OCLKB(),\n        .RST(),\n        .SHIFTIN1(),\n        .SHIFTIN2()\n    );\n    '.format(loc=site, clock_source=wire_name))
    for (_, site) in gen_sites('BUFR'):
        if (random.random() < 0.6):
            if (random.random() < 0.5):
                wire_name = luts.get_next_output_net()
            else:
                wire_name = cmt_fast_clock_sources.get_random_source(site_to_cmt[site], no_repeats=False)
                if (wire_name is None):
                    continue
                src_cmt = cmt_fast_clock_sources.source_to_cmt[wire_name]
                wire_name = check_cmt_clk_src(wire_name, src_cmt)
                if (wire_name is None):
                    continue
            bufr_clock_sources.add_clock_source('O_{site}'.format(site=site), site_to_cmt[site])
            divide = 'BYPASS'
            if (random.random() < 0.5):
                # BUG FIX: was ''.format(random.randint(2, 8)), which always
                # produced an empty BUFR_DIVIDE string. The randint draw is
                # kept so the RNG stream stays aligned with the original.
                divide = '{}'.format(random.randint(2, 8))
            print('\n    assign I_{site} = {clock_source};\n    (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n    BUFR #(.BUFR_DIVIDE("{divide}")) bufr_{site} (\n        .O(O_{site}),\n        .I(I_{site})\n    );'.format(site=site, clock_source=wire_name, divide=divide), file=bufs)
    for (_, site) in gen_sites('MMCME2_ADV'):
        # Close the loop: feed BUFR outputs back into MMCM CLKIN1 pins.
        wire_name = bufr_clock_sources.get_random_source(site_to_cmt[site], no_repeats=True)
        if (wire_name is None):
            continue
        print('\n    assign cin1_{site} = {wire_name};'.format(site=site, wire_name=wire_name))
    print(bufs.getvalue())
    for l in luts.create_wires_and_luts():
        print(l)
    print('endmodule')
def test_strict_bytes_type_checking_turns_on_and_off(w3):
    """Toggling ``strict_bytes_type_checking`` flips whether short bytes encode.

    Under strict checking, ``b'\\x01'`` is rejected for ``bytes2`` (wrong
    length); with the flag off, it is padded/accepted.
    """
    # Strict mode is the default: a 1-byte value is not encodable as bytes2.
    assert w3.strict_bytes_type_checking
    assert not w3.is_encodable('bytes2', b'\x01')

    # Relax the check — the same value now encodes.
    w3.strict_bytes_type_checking = False
    assert not w3.strict_bytes_type_checking
    assert w3.is_encodable('bytes2', b'\x01')

    # Re-enable and confirm strict behavior is restored.
    w3.strict_bytes_type_checking = True
    assert w3.strict_bytes_type_checking
    assert not w3.is_encodable('bytes2', b'\x01')
class OptionPlotoptionsFunnel3dDatalabels(Options):
    """Highcharts ``plotOptions.funnel3d.dataLabels`` configuration accessors.

    Each option is exposed as a getter (returns the configured value or the
    Highcharts default passed to ``_config_get``) paired with a setter that
    writes through ``_config``.  Nested option groups (``animation``,
    ``filter``, ``textPath``) are returned as sub-option objects via
    ``_config_sub_data``.

    NOTE(review): every getter/setter pair shares one name — in generated
    sources like this the pairs are normally wrapped with ``@property`` /
    ``@<name>.setter`` decorators that appear to have been stripped here;
    confirm against the original source.
    """

    def align(self):
        return self._config_get('right')

    def align(self, text: str):
        self._config(text, js_type=False)

    def allowOverlap(self):
        return self._config_get(False)

    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    def animation(self) -> 'OptionPlotoptionsFunnel3dDatalabelsAnimation':
        return self._config_sub_data('animation', OptionPlotoptionsFunnel3dDatalabelsAnimation)

    def backgroundColor(self):
        return self._config_get(None)

    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    def borderColor(self):
        return self._config_get(None)

    def borderColor(self, text: str):
        self._config(text, js_type=False)

    def borderRadius(self):
        return self._config_get(0)

    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    def borderWidth(self):
        return self._config_get(0)

    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    def className(self):
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def crop(self):
        return self._config_get(False)

    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    def defer(self):
        return self._config_get(True)

    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    def enabled(self):
        return self._config_get(False)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def filter(self) -> 'OptionPlotoptionsFunnel3dDatalabelsFilter':
        return self._config_sub_data('filter', OptionPlotoptionsFunnel3dDatalabelsFilter)

    def format(self):
        return self._config_get('point.value')

    def format(self, text: str):
        self._config(text, js_type=False)

    def formatter(self):
        return self._config_get(None)

    def formatter(self, value: Any):
        self._config(value, js_type=False)

    def inside(self):
        return self._config_get(False)

    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    def nullFormat(self):
        return self._config_get(None)

    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    def nullFormatter(self):
        return self._config_get(None)

    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    def overflow(self):
        return self._config_get('allow')

    def overflow(self, text: str):
        self._config(text, js_type=False)

    def padding(self):
        return self._config_get(5)

    def padding(self, num: float):
        self._config(num, js_type=False)

    def position(self):
        return self._config_get('center')

    def position(self, text: str):
        self._config(text, js_type=False)

    def rotation(self):
        return self._config_get(0)

    def rotation(self, num: float):
        self._config(num, js_type=False)

    def shadow(self):
        return self._config_get(False)

    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    def shape(self):
        return self._config_get('square')

    def shape(self, text: str):
        self._config(text, js_type=False)

    def style(self):
        return self._config_get(None)

    def style(self, value: Any):
        self._config(value, js_type=False)

    def textPath(self) -> 'OptionPlotoptionsFunnel3dDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionPlotoptionsFunnel3dDatalabelsTextpath)

    def useHTML(self):
        return self._config_get(False)

    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    def verticalAlign(self):
        return self._config_get('undefined')

    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    def x(self):
        return self._config_get(0)

    def x(self, num: float):
        self._config(num, js_type=False)

    def y(self):
        return self._config_get('undefined')

    def y(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        return self._config_get(6)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
class CheckMeasureDefinitionsTests(TestCase):
    """Exercise ``import_measures --check`` against good and broken definitions.

    NOTE(review): the bare ``_settings(...)`` lines before the two "bad"
    tests look like stripped ``@override_settings`` decorators — as written
    they are plain expressions evaluated at class-definition time; confirm
    against the original source.
    """

    def setUpTestData(cls):
        # Shared fixtures for the whole class: import log, BigQuery test
        # environment, and presentation data.
        # NOTE(review): Django's setUpTestData is normally a @classmethod —
        # the decorator appears to have been stripped here.
        create_import_log()
        set_up_bq()
        upload_presentations()

    def test_check_definition(self):
        # A well-formed measure definition passes the check without raising.
        upload_dummy_prescribing(['0703021Q0AAAAAA', '0703021Q0BBAAAA'])
        with patched_global_matrixstore_from_data_factory(build_factory()):
            call_command('import_measures', measure='desogestrel', check=True)
    _settings(MEASURE_DEFINITIONS_PATH=os.path.join(settings.MEASURE_DEFINITIONS_PATH, 'bad', 'json'))

    def test_check_definition_bad_json(self):
        # Malformed JSON in a definition surfaces as a ValueError.
        with self.assertRaises(ValueError) as command_error:
            call_command('import_measures', check=True)
        self.assertIn('Problems parsing JSON', str(command_error.exception))
    _settings(MEASURE_DEFINITIONS_PATH=os.path.join(settings.MEASURE_DEFINITIONS_PATH, 'bad', 'sql'))

    def test_check_definition_bad_sql(self):
        # Broken SQL in a definition surfaces as a BigQuery BadRequest.
        with self.assertRaises(BadRequest) as command_error:
            call_command('import_measures', check=True)
        self.assertIn('SQL error', str(command_error.exception))
def knapsack_top_down_alt(items, total_weight):
    """Solve 0/1 knapsack top-down (memoized) and return the optimal result.

    Args:
        items: sequence of items (each expected to expose a ``weight``);
            must not be None.
        total_weight: knapsack capacity; must not be None.

    Returns:
        0 when ``items`` is empty or ``total_weight`` is falsy, otherwise
        the result object produced by ``_knapsack_top_down_alt``.

    Raises:
        TypeError: if either argument is None.
    """
    if (items is None) or (total_weight is None):
        raise TypeError('input_items or total_weight cannot be None')
    if (not items) or (not total_weight):
        return 0
    memo = {}
    result = _knapsack_top_down_alt(items, total_weight, memo, index=0)
    # NOTE(review): the original attempted to reconstruct the picked items by
    # walking `memo` here, but the loop never updated its own loop variable
    # (`curr_weight`) nor grew `picked_items`, so it hung forever whenever the
    # first item had positive weight — and the reconstructed list was discarded
    # anyway (only `result` was returned).  The broken, unused reconstruction
    # has been removed.
    return result
def hsluv_to_luv(hsluv: Vector) -> Vector:
    """Convert an HSLuv triple ``[h, s, l]`` to Luv-space ``[l, a, b]``.

    Saturation is scaled by the maximum representable chroma for the given
    lightness/hue, then the polar (chroma, hue) pair is converted to
    rectangular (a, b) components.
    """
    hue, sat, light = hsluv
    # Near the lightness extremes chroma collapses to zero; the asymmetric
    # thresholds (1e-7 upper, 1e-8 lower) follow the HSLuv reference code.
    if light > (100 - 1e-07):
        light = 100.0
        chroma = 0.0
    elif light < 1e-08:
        light = 0.0
        chroma = 0.0
    else:
        chroma = (max_chroma_for_lh(light, hue) / 100.0) * sat
    a, b = alg.polar_to_rect(chroma, hue)
    return [light, a, b]
def get_archlinux_aur_helper():
    """Return the first installed Arch Linux AUR helper found on PATH.

    Probes a fixed preference order of known helpers.  When none is
    installed, prints a hint to stderr and exits the process with status 1.
    """
    for candidate in ('paru', 'pacaur', 'yay', 'yaourt', 'aura'):
        if which(candidate):
            return candidate
    print("Please install one of AUR's helper, such as 'pacaur', 'yay', 'yaourt', 'paru', etc.", file=sys.stderr)
    sys.exit(1)
def upgrade():
    """Rename the submission-date column on ``session`` (and its version table).

    Implemented as add-new-column / drop-old-column rather than a rename.
    NOTE(review): values in ``date_of_submission`` are NOT copied into
    ``submission_date`` — existing rows lose the date; confirm this is
    intended or insert an UPDATE between the two steps.
    """
    op.add_column('session', sa.Column('submission_date', sa.DateTime(), nullable=True))
    op.drop_column('session', 'date_of_submission')
    op.add_column('session_version', sa.Column('submission_date', sa.DateTime(), autoincrement=False, nullable=True))
    op.drop_column('session_version', 'date_of_submission')
def test_detect_invalid_initial_magic(tmpdir: Path):
    """Both reader types must reject a file whose leading bytes are not MCAP magic."""
    filepath = tmpdir / 'invalid_magic.mcap'
    # Content must be at least as long as the magic, but start with wrong bytes.
    with open(filepath, 'w') as out:
        out.write('some bytes longer than the initial magic bytes')
    # The seeking reader validates the magic on construction...
    with open(filepath, 'rb') as stream:
        with pytest.raises(InvalidMagic):
            SeekingReader(stream)
    # ...the non-seeking reader only once the header is actually read.
    with open(filepath, 'rb') as stream:
        with pytest.raises(InvalidMagic):
            NonSeekingReader(stream).get_header()
def get_next_page(html, url):
    """Return the URL of the next listing page to fetch, or None when done.

    Project listing URLs advance their ``page`` query parameter until the
    total count (from the JSON body) is exhausted; a user home URL maps to
    page 1 of that user's project feed.  Other URLs yield None implicitly.
    """
    if is_project(url):
        current_page = int(parse_qs(urlparse(url).query)['page'][0])
        last_page = math.ceil(json.loads(html)['total_count'] / EP_PER_PAGE)
        if current_page < last_page:
            return update_qs(url, {'page': current_page + 1})
        return None
    if is_user_home(url):
        user = re.search('www\\.artstation\\.com/([^/]+)', url).group(1)
        return urljoin(url, '/users/{user}/projects.json?page=1'.format(user=user))
class Solution():
    """Search in a rotated sorted array that may contain duplicates (LeetCode 81)."""

    def search(self, nums: List[int], target: int) -> bool:
        """Return True iff ``target`` occurs in ``nums``.

        Modified binary search: one half of [lo, hi] is always sorted; decide
        which, then keep the half that could contain ``target``.  Duplicates
        equal to ``nums[mid]`` at the left edge are skipped linearly, so the
        worst case degrades to O(n).
        """
        lo, hi = 0, len(nums) - 1
        while lo <= hi:
            mid = lo + (hi - lo) // 2
            if nums[mid] == target:
                return True
            # Skip leading duplicates so the sorted-half test is decidable.
            while lo < mid and nums[lo] == nums[mid]:
                lo += 1
            if nums[lo] <= nums[mid]:
                # Left half [lo, mid] is sorted.
                if nums[lo] <= target < nums[mid]:
                    hi = mid - 1
                else:
                    lo = mid + 1
            elif nums[mid] < target <= nums[hi]:
                # Right half is sorted and contains target.
                lo = mid + 1
            else:
                hi = mid - 1
        return False
def generate_fixture_tests(metafunc: Any, base_fixture_path: str, filter_fn: Callable[(..., Any)]=identity, preprocess_fn: Callable[(..., Any)]=identity) -> None:
    """Parametrize any test that requests the ``fixture_data`` fixture.

    Fixtures discovered under ``base_fixture_path`` are piped through
    ``preprocess_fn`` and then ``filter_fn`` before parametrization.

    Raises:
        AssertionError: if no fixtures are found at all (almost certainly a
            bad path).
    """
    if 'fixture_data' not in metafunc.fixturenames:
        return
    discovered = find_fixtures(base_fixture_path)
    if not discovered:
        raise AssertionError(f'Suspiciously found zero fixtures: {base_fixture_path}')
    selected = filter_fn(preprocess_fn(discovered))
    metafunc.parametrize('fixture_data', selected, ids=idfn)
def upgrade():
    """Create the ``settings`` table holding third-party integration
    credentials (AWS, Google, Facebook, SendGrid) plus an application secret,
    keyed by an integer primary key."""
    op.create_table('settings', sa.Column('id', sa.Integer(), nullable=False), sa.Column('aws_key', sa.String(), nullable=True), sa.Column('aws_secret', sa.String(), nullable=True), sa.Column('aws_bucket_name', sa.String(), nullable=True), sa.Column('google_client_id', sa.String(), nullable=True), sa.Column('google_client_secret', sa.String(), nullable=True), sa.Column('fb_client_id', sa.String(), nullable=True), sa.Column('fb_client_secret', sa.String(), nullable=True), sa.Column('sendgrid_key', sa.String(), nullable=True), sa.Column('secret', sa.String(), nullable=True), sa.PrimaryKeyConstraint('id'))
# NOTE(review): the bare '.django_db' below looks like a stripped
# '@pytest.mark.django_db' decorator — as written it is not valid syntax;
# confirm against the original source.
.django_db
def test_signals__encoding_failed(monkeypatch, mocker, local_video: models.Video) -> None:
    """Both format signals still fire when encoding raises, with FAILED result."""
    # Restrict conversion to a single FFmpeg format for determinism.
    encoding_format = tasks.settings.VIDEO_ENCODING_FORMATS['FFmpeg'][0]
    monkeypatch.setattr(tasks.settings, 'VIDEO_ENCODING_FORMATS', {'FFmpeg': [encoding_format]})
    # Make the encode step itself fail.
    mocker.patch.object(tasks, '_encode', side_effect=VideoEncodingError())
    listener = mocker.MagicMock()
    signals.format_started.connect(listener)
    signals.format_finished.connect(listener)
    tasks.convert_video(local_video.file)
    # Exactly two signal deliveries: one started, one finished.
    assert (listener.call_count == 2)
    (_, kwargs) = listener.call_args_list[0]
    assert matches(kwargs, {'signal': signals.format_started, ...: ...})
    (_, kwargs) = listener.call_args_list[1]
    # The finished signal carries the FAILED conversion result.
    assert matches(kwargs, {'signal': signals.format_finished, 'sender': models.Format, 'instance': local_video, 'format': ..., 'result': signals.ConversionResult.FAILED})
    assert isinstance(kwargs['format'], models.Format)
    assert (kwargs['format'].format == encoding_format['name'])
# NOTE(review): '.feature(...)' and '.story(...)' look like stripped allure
# decorators ('@allure.feature' / '@allure.story') — as written they are not
# valid syntax; confirm against the original source.
.feature('unit')
.story('payload_builder')
class TestPayloadBuilderCreate():
    """Verify PayloadBuilder INSERT variants serialize to the expected JSON fixtures."""

    def test_insert_payload(self):
        # INSERT with an explicit key matches the canned payload fixture.
        res = PayloadBuilder().INSERT(key='x').payload()
        assert (_payload('data/payload_insert1.json') == json.loads(res))

    def test_insert_into_payload(self):
        # INSERT_INTO with a table name matches the canned payload fixture.
        res = PayloadBuilder().INSERT_INTO('test').payload()
        assert (_payload('data/payload_from1.json') == json.loads(res))
def extractItsladygreyWordpressCom(item):
    """Build a release message from a parsed feed item, or reject it.

    Returns None for previews and items with neither chapter nor volume,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag to look for, release name, translation type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the two bare route strings below look like stripped routing
# decorators (e.g. '@mod.route(...)') — as written they are no-op expressions;
# confirm against the original source.
('/mod_board/<board:board>/log')
('/mod_board/<board:board>/log/<int(max=14):page>')
def mod_board_log(board: BoardModel, page=0):
    """Render the moderator action log for ``board``, paginated (100/page)."""
    per_page = 100
    pages = 15
    moderator = request_moderator()
    logs = moderator_service.user_get_logs(moderator, board, page, per_page)

    def get_log_type(typeid):
        # Map a numeric log-type id to its enum name; unknown ids render empty.
        try:
            return ModeratorLogType(typeid).name
        except ValueError:
            return ''
    return render_template('mod_board_log.html', board=board, page=page, pages=pages, logs=logs, get_log_type=get_log_type)
def upgrade():
    """Switch video-stream moderators from user-id linkage to email linkage.

    Adds a unique (email, video_stream_id) pair and removes the old user_id
    column, its FK, and its unique constraint.
    NOTE(review): ``email`` is added NOT NULL without a server default — this
    fails if ``video_stream_moderators`` already contains rows; confirm the
    table is empty at migration time or backfill first.
    """
    op.add_column('video_stream_moderators', sa.Column('email', sa.String(), nullable=False))
    op.create_unique_constraint('uq_user_email_video_stream_moderator', 'video_stream_moderators', ['email', 'video_stream_id'])
    op.drop_constraint('user_video_stream_id', 'video_stream_moderators', type_='unique')
    op.drop_constraint('video_stream_moderators_user_id_fkey', 'video_stream_moderators', type_='foreignkey')
    op.drop_column('video_stream_moderators', 'user_id')
def main():
    """Parse CLI arguments and convert a .fasm file into its binary frame form.

    Wires the database-root/part arguments from ``util``, then delegates the
    actual conversion to ``run``.
    """
    parser = argparse.ArgumentParser(description='Convert FPGA configuration description ("FPGA assembly") into binary frame equivalent')
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('--sparse', action='store_true', help="Don't zero fill all frames")
    parser.add_argument('--roi', help='ROI design.json file defining which tiles are within the ROI.')
    parser.add_argument('--emit_pudc_b_pullup', help='Emit an IBUF and PULLUP on the PUDC_B pin if unused', action='store_true')
    parser.add_argument('--debug', action='store_true', help='Print debug dump')
    parser.add_argument('fn_in', help='Input FPGA assembly (.fasm) file')
    parser.add_argument('fn_out', default='/dev/stdout', nargs='?', help='Output FPGA frame (.frm) file')
    args = parser.parse_args()
    # Use a context manager so the output file is flushed and closed even if
    # run() raises (the original opened the handle inline and leaked it until
    # process exit).
    with open(args.fn_out, 'w') as f_out:
        run(db_root=args.db_root, part=args.part, filename_in=args.fn_in, f_out=f_out, sparse=args.sparse, roi=args.roi, debug=args.debug, emit_pudc_b_pullup=args.emit_pudc_b_pullup)
class Highlights(MixHtmlState.HtmlStates, Html.Html):
    """Bootstrap alert-style highlight box with optional title, icon and close button."""
    name = 'Highlights'
    tag = 'div'
    requirements = ('bootstrap',)
    _option_cls = OptText.OptionsHighlights

    def __init__(self, page: primitives.PageModel, text, title, icon, type, color, width, height, html_code, helper, options, profile):
        """Build the component: helper, colour, title, icon and alert CSS classes.

        ``type`` selects the Bootstrap alert variant — it becomes the
        'alert alert-<type>' class (e.g. 'danger', 'info').
        """
        super(Highlights, self).__init__(page, text, css_attrs={'width': width, 'height': height}, html_code=html_code, profile=profile, options=options)
        self.add_helper(helper)
        # Fall back to the theme's last grey when no colour is given.
        self.style.css.color = (color if (color is not None) else self.page.theme.greys[(- 1)])
        self.add_title(title, css={'width': 'none', 'font-weight': 'bold', 'margin-top': 0}, options={'content_table': False})
        self.add_icon(icon, {'float': 'left', 'color': 'inherit'}, html_code=self.htmlCode, family=options.get('icon_family'))
        if ((self.icon is not None) and (self.icon != '')):
            self.icon.style.css.font_factor(2)
        self.css({'margin': '5px 0', 'padding': '5px', 'min-height': '25px'})
        self.attr['class'].add(('alert alert-%s' % type))
        self.set_attrs(name='role', value='alert')
        self.dom.display_value = 'block'

    def options(self) -> OptText.OptionsHighlights:
        """Component options.

        NOTE(review): likely a stripped ``@property`` — ``__str__`` below
        reads ``self.options.showdown`` / ``self.options.close`` as
        attributes, not as a method call; confirm against the original source.
        """
        return super().options

    def __str__(self):
        """Return the component's HTML, optionally with a close ('×') glyph."""
        self.onReady([self.dom.setAttribute('data-content', self.dom.content)])
        # Render markdown unless explicitly disabled via options.showdown.
        val = (self.page.py.markdown.all(self.val) if (self.options.showdown is not False) else self.val)
        if self.options.close:
            return ("<%s %s>\n<span aria-hidden='true' style='float:right;font-size:20px;cursor:pointer' onclick='this.parentNode.remove()'>×</span>\n<div name='content'>%s</div></%s>%s" % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), val, self.tag, self.helper))
        return ("<%s %s><div name='content'>%s</div></%s>%s\n " % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), val, self.tag, self.helper))
class Immutable():
    """Base class whose instances reject attribute assignment after __init__.

    Subclasses declare their attributes via ``__slots__``; this base keeps an
    empty slots tuple so no per-instance ``__dict__`` is created.
    """
    # __slots__ entries are attribute *names*, so the element type is str
    # (the original annotated it as tuple[Any, ...]).
    __slots__: tuple[str, ...] = ()

    def __init__(self, **kwargs: Any) -> None:
        """Bind each keyword argument via object.__setattr__, bypassing the
        immutability guard exactly once at construction time."""
        for name, value in kwargs.items():
            super(Immutable, self).__setattr__(name, value)

    def __setattr__(self, name: str, value: Any) -> None:
        """Reject all attribute mutation after construction."""
        raise AttributeError('Class is immutable!')
def moyle_processor(df) -> list:
    """Convert a SONI Moyle-interconnector dataframe into exchange datapoints.

    Each row yields a dict with a tz-aware timestamp (day-first parsing),
    the net flow in MW, the data source, and the GB->GB-NIR zone key.
    """
    datapoints = []
    for _, row in df.iterrows():
        timestamp = add_default_tz(parser.parse(row['TimeStamp'], dayfirst=True))
        datapoints.append({
            'datetime': timestamp,
            'netFlow': row['Total_Moyle_Load_MW'],
            'source': 'soni.ltd.uk',
            'sortedZoneKeys': 'GB->GB-NIR',
        })
    return datapoints
def main():
    """Run inference over ``Global.infer_img`` images and log post-processed results.

    Relies on the module-level ``config`` (built elsewhere): constructs the
    model and post-processor from it, adapts the eval transforms for
    inference, then prints one result line per image.
    """
    global_config = config['Global']
    post_process_class = build_post_process(config['PostProcess'], global_config)
    model = build_model(config['Architecture'])
    load_model(config, model)
    transforms = []
    for op in config['Eval']['dataset']['transforms']:
        op_name = list(op)[0]
        # Label ops are training-only; drop them for inference.
        if ('Label' in op_name):
            continue
        elif (op_name == 'KeepKeys'):
            # Only the image tensor is needed at inference time.
            op[op_name]['keep_keys'] = ['image']
        transforms.append(op)
    global_config['infer_mode'] = True
    ops = create_operators(transforms, global_config)
    model.eval()
    for file in get_image_file_list(config['Global']['infer_img']):
        logger.info('infer_img: {}'.format(file))
        with open(file, 'rb') as f:
            img = f.read()
        data = {'image': img}
        batch = transform(data, ops)
        # Add the batch dimension before handing the array to Paddle.
        images = np.expand_dims(batch[0], axis=0)
        images = paddle.to_tensor(images)
        preds = model(images)
        post_result = post_process_class(preds)
        for rec_reuslt in post_result:  # NOTE(review): 'rec_reuslt' is a typo for 'rec_result'
            logger.info('\t result: {}'.format(rec_reuslt))
    logger.info('success!')
def test_offset_limit(response_with_body):
    """Offset pagination stops (returns None) once the page reaches the limit."""
    config = OffsetPaginationConfiguration(incremental_param='page', increment_by=1, limit=10)
    # Current page already equals the configured limit of 10, so there is no
    # next page to request.
    request_params: SaaSRequestParams = SaaSRequestParams(method=HTTPMethod.GET, path='/conversations', query_params={'page': 10})
    paginator = OffsetPaginationStrategy(config)
    next_request: SaaSRequestParams = paginator.get_next_request(request_params, {}, response_with_body, 'conversations')
    assert (next_request is None)
class TestMerging(object):
    """Integration tests for importing a package across subsets of branches
    and verifying which branches converge on (or diverge from) shared commits.

    Fixtures provide an origin repo plus three branch groupings: all branches,
    "middle" branches, and "border" branches.
    """

    def commit_to_branches(self, to_branches, opts, version):
        """Import srpm ``version`` of the test package into ``to_branches``;
        return the resulting per-branch commit hashes."""
        (srpm_path, package_desc) = get_srpm(version)
        name = package_desc['package_name']
        import_result = import_package(opts, 'bob/blah', to_branches, srpm_path, name)
        return import_result['branch_commits']

    def setup_method(self, method):
        # NOTE(review): this assigns a local that is immediately discarded —
        # probably intended `self.srpm_cache = {}`; confirm against any code
        # reading srpm_cache.
        srpm_cache = {}

    def test_merged_everything(self, initial_commit_everywhere, mc_setup_git_repo):
        """Committing to border branches leaves the middle ones untouched; a
        later import to all branches produces a new, distinct hash."""
        (branches, opts, v1_hash) = initial_commit_everywhere
        (origin, all_branches, middle_branches, border_branches) = branches
        result = self.commit_to_branches(border_branches, opts, 2)
        v2_hash = compare_branches(border_branches, origin, result_hash=result)
        # Middle branches were not part of the v2 import.
        unchanged = compare_branches(middle_branches, origin)
        assert (unchanged == v1_hash)
        assert (v2_hash != v1_hash)
        result = self.commit_to_branches(all_branches, opts, 3)
        v3_hash = compare_branches(all_branches, origin, result_hash=result)
        assert (v3_hash != v1_hash)
        assert (v3_hash != v2_hash)

    def test_diverge_middle_branches(self, initial_commit_everywhere, mc_setup_git_repo):
        """Committing v2 only to middle branches makes a later all-branch
        import land on different hashes for middle vs border groups."""
        (branches, opts, v1_hash) = initial_commit_everywhere
        (origin, all_branches, middle_branches, border_branches) = branches
        result = self.commit_to_branches(middle_branches, opts, 2)
        v2_hash = compare_branches(middle_branches, origin, result_hash=result)
        # Border branches were not part of the v2 import.
        unchanged = compare_branches(border_branches, origin)
        assert (unchanged == v1_hash)
        assert (v2_hash != v1_hash)
        result = self.commit_to_branches(all_branches, opts, 3)
        v3_hash_a = compare_branches(middle_branches, origin, result_hash=result)
        v3_hash_b = compare_branches(border_branches, origin, result_hash=result)
        # The two groups diverged: every hash is pairwise distinct.
        assert (v3_hash_a != v3_hash_b)
        assert (v3_hash_a != v2_hash)
        assert (v3_hash_b != v2_hash)
        assert (v3_hash_a != v1_hash)
        assert (v3_hash_b != v1_hash)

    def test_no_op_1(self, initial_commit_everywhere, mc_setup_git_repo):
        """Re-importing an already-present version is a no-op; later imports
        of the same version to overlapping subsets reuse the same commit."""
        (branches, opts, v1_hash) = initial_commit_everywhere
        (origin, all_branches, middle_branches, border_branches) = branches
        # Importing v1 again changes nothing.
        result = self.commit_to_branches(all_branches, opts, 1)
        unchanged = compare_branches(all_branches, origin, result_hash=result)
        assert (unchanged == v1_hash)
        result = self.commit_to_branches(border_branches, opts, 2)
        v2_hash_a = compare_branches(border_branches, origin, result_hash=result)
        assert (v2_hash_a != v1_hash)
        # Importing v2 to all branches reuses the existing v2 commit.
        result = self.commit_to_branches(all_branches, opts, 2)
        v2_hash_b = compare_branches(all_branches, origin, result_hash=result)
        assert (v2_hash_a == v2_hash_b)
        result = self.commit_to_branches(middle_branches, opts, 3)
        v3_hash_a = compare_branches(middle_branches, origin, result_hash=result)
        assert (v3_hash_a != v2_hash_a)
        # NOTE(review): the sleep presumably forces distinct commit
        # timestamps so the second v3 import cannot be identical — confirm.
        time.sleep(1)
        result = self.commit_to_branches(all_branches, opts, 3)
        v3_hash_b = compare_branches(border_branches, origin, result_hash=result)
        assert (v3_hash_a != v3_hash_b)
        assert (v3_hash_a != v2_hash_a)
def _report_races(alphas, rates_for_alpha):
    """Run 100 races per alpha and print tail probabilities and equiv rates.

    ``rates_for_alpha`` maps an alpha to the (attacker, honest) rate pair
    passed to ``race``.  For each alpha, prints the empirical P(result >= i)
    for i in 1..8 and the corresponding standard-race equivalent rates.
    """
    for alpha in alphas:
        attacker_rate, honest_rate = rates_for_alpha(alpha)
        race_results = [race(attacker_rate, honest_rate, 1000) for _ in range(100)]
        # Empirical tail probability P(result >= i) over the 100 runs.
        probs = [(len([x for x in race_results if (x >= i)]) / 100) for i in range(1, 9)]
        print(('Probs at %.2f: %r' % (alpha, probs)))
        print(('Standard race equiv rate: %r' % [standard_race_equiv_rate(x, (i + 1)) for (i, x) in enumerate(probs)]))


def test():
    """Compare race win probabilities for the plain and 2/2-notarized settings.

    The two scenarios differed only in how alpha maps to race rates, so the
    duplicated reporting loop is factored into ``_report_races``.
    """
    print('Basic tests\n')
    _report_races((0.4, 0.38, 0.36, 0.34, 0.32, 0.3, 0.28), lambda a: (a, (1 - a)))
    print('\nRequiring 2/2 notarization\n')
    _report_races((0.45, 0.44, 0.43, 0.42, 0.41, 0.4, 0.39), lambda a: ((a ** 3), ((1 - a) ** 3)))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.