code stringlengths 281 23.7M |
|---|
class TransactionViewSet(APIView):
    """Return all transactions associated with a single award, paginated and sorted."""

    endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/transactions.md'

    # Maps API-facing field names to ORM lookup paths on TransactionNormalized.
    transaction_lookup = {
        'id': 'transaction_unique_id',
        'type': 'type',
        'type_description': 'type_description',
        'action_date': 'action_date',
        'action_type': 'action_type',
        'action_type_description': 'action_type_description',
        'modification_number': 'modification_number',
        'description': 'description',
        'federal_action_obligation': 'federal_action_obligation',
        'face_value_loan_guarantee': 'face_value_loan_guarantee',
        'original_loan_subsidy_cost': 'original_loan_subsidy_cost',
        'is_fpds': 'is_fpds',
        'cfda_number': 'assistance_data__cfda_number',
    }

    def __init__(self):
        # Build the TinyShield validation models once per view instance.
        models = customize_pagination_with_sort_columns(
            list(TransactionViewSet.transaction_lookup.keys()), 'action_date'
        )
        models.extend([
            get_internal_or_generated_award_id_model(),
            {'key': 'idv', 'name': 'idv', 'type': 'boolean', 'default': True, 'optional': True},
        ])
        self._tiny_shield_models = update_model_in_list(
            model_list=models, model_name='limit', new_dict={'max': 5000}
        )
        super(TransactionViewSet, self).__init__()

    def _parse_and_validate_request(self, request_dict: dict) -> dict:
        """Validate the raw request payload; deepcopy so TinyShield cannot mutate the template models."""
        return TinyShield(deepcopy(self._tiny_shield_models)).block(request_dict)

    def _business_logic(self, request_data: dict) -> list:
        """Query one page (plus one row for pagination detection) of transactions for the award."""
        award_id = request_data['award_id']
        # Integer ids are internal surrogate keys; strings are generated unique award ids.
        award_id_column = 'award_id' if isinstance(award_id, int) else 'award__generated_unique_award_id'
        filters = {award_id_column: award_id}
        if request_data['sort'] == 'cfda_number':
            # The API-facing sort key differs from the ORM lookup path.
            request_data['sort'] = 'assistance_data__cfda_number'
        lower_limit = (request_data['page'] - 1) * request_data['limit']
        upper_limit = request_data['page'] * request_data['limit']
        queryset = (
            TransactionNormalized.objects.all()
            .filter(**filters)
            .select_related('assistance_data')
            .values(*list(self.transaction_lookup.values()))
        )
        if request_data['order'] == 'desc':
            queryset = queryset.order_by(F(request_data['sort']).desc(nulls_last=True))
        else:
            queryset = queryset.order_by(F(request_data['sort']).asc(nulls_first=True))
        # Fetch one extra row so pagination can tell whether a next page exists.
        rows = list(queryset[lower_limit:upper_limit + 1])
        return self._format_results(rows)

    def _format_results(self, rows):
        """Translate ORM rows to API field names and prefix ids by transaction kind."""
        results = []
        for row in rows:
            unique_prefix = 'ASST_TX'
            result = {k: row.get(v) for k, v in self.transaction_lookup.items() if k != 'award_id'}
            if result['is_fpds']:
                unique_prefix = 'CONT_TX'
                # CFDA numbers only apply to assistance (non-FPDS) transactions.
                del result['cfda_number']
            result['id'] = f"{unique_prefix}_{result['id']}"
            del result['is_fpds']
            results.append(result)
        return results

    @cache_response()  # NOTE(review): restored from mangled "_response()" artifact — confirm the import
    def post(self, request: Request) -> Response:
        """POST entry point: validate, query, and wrap results with pagination metadata."""
        request_data = self._parse_and_validate_request(request.data)
        results = self._business_logic(request_data)
        page_metadata = get_simple_pagination_metadata(
            len(results), request_data['limit'], request_data['page']
        )
        response = {'page_metadata': page_metadata, 'results': results[:request_data['limit']]}
        return Response(response)
@BMPMessage.register_type(BMP_MSG_STATISTICS_REPORT)  # NOTE(review): restored from mangled "_type(...)" artifact
class BMPStatisticsReport(BMPPeerMessage):
    """BMP Statistics Report message: a sequence of {type, len, value} counter TLVs."""

    _TLV_PACK_STR = '!HH'  # TLV header: 16-bit type + 16-bit length, network byte order
    _MIN_LEN = struct.calcsize(_TLV_PACK_STR)

    def __init__(self, stats, peer_type, is_post_policy, peer_distinguisher,
                 peer_address, peer_as, peer_bgp_id, timestamp, version=VERSION,
                 type_=BMP_MSG_STATISTICS_REPORT, len_=None, is_adj_rib_out=False):
        super(BMPStatisticsReport, self).__init__(
            peer_type=peer_type,
            is_post_policy=is_post_policy,
            peer_distinguisher=peer_distinguisher,
            peer_address=peer_address,
            peer_as=peer_as,
            peer_bgp_id=peer_bgp_id,
            timestamp=timestamp,
            len_=len_,
            type_=type_,
            version=version,
            is_adj_rib_out=is_adj_rib_out)
        self.stats = stats

    @classmethod  # NOTE(review): restored — method takes `cls` and is called on the class
    def parser(cls, buf):
        """Parse the stats TLVs that follow the common per-peer header; return kwargs."""
        (kwargs, rest) = super(BMPStatisticsReport, cls).parser(buf)
        (stats_count,) = struct.unpack_from('!I', six.binary_type(rest))
        buf = rest[struct.calcsize('!I'):]
        stats = []
        while len(buf):
            if len(buf) < cls._MIN_LEN:
                raise stream_parser.StreamParser.TooSmallException(
                    '%d < %d' % (len(buf), cls._MIN_LEN))
            (type_, len_) = struct.unpack_from(cls._TLV_PACK_STR, six.binary_type(buf))
            if len(buf) < (cls._MIN_LEN + len_):
                raise stream_parser.StreamParser.TooSmallException(
                    '%d < %d' % (len(buf), cls._MIN_LEN + len_))
            value = buf[cls._MIN_LEN:cls._MIN_LEN + len_]
            # 32-bit counter stat types.
            if type_ in (BMP_STAT_TYPE_REJECTED,
                         BMP_STAT_TYPE_DUPLICATE_PREFIX,
                         BMP_STAT_TYPE_DUPLICATE_WITHDRAW,
                         BMP_STAT_TYPE_INV_UPDATE_DUE_TO_CLUSTER_LIST_LOOP,
                         BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_PATH_LOOP,
                         BMP_STAT_TYPE_INV_UPDATE_DUE_TO_ORIGINATOR_ID,
                         BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_CONFED_LOOP):
                (value,) = struct.unpack_from('!I', six.binary_type(value))
            # 64-bit gauge stat types.
            elif type_ in (BMP_STAT_TYPE_ADJ_RIB_IN,
                           BMP_STAT_TYPE_LOC_RIB,
                           BMP_STAT_TYPE_ADJ_RIB_OUT,
                           BMP_STAT_TYPE_EXPORT_RIB):
                (value,) = struct.unpack_from('!Q', six.binary_type(value))
            buf = buf[cls._MIN_LEN + len_:]
            stats.append({'type': type_, 'len': len_, 'value': value})
        kwargs['stats'] = stats
        return kwargs

    def serialize_tail(self):
        """Serialize the per-peer header followed by the stats count and each TLV."""
        msg = super(BMPStatisticsReport, self).serialize_tail()
        stats_count = len(self.stats)
        msg += bytearray(struct.pack('!I', stats_count))
        for v in self.stats:
            t = v['type']
            if t in (BMP_STAT_TYPE_REJECTED,
                     BMP_STAT_TYPE_DUPLICATE_PREFIX,
                     BMP_STAT_TYPE_DUPLICATE_WITHDRAW,
                     BMP_STAT_TYPE_INV_UPDATE_DUE_TO_CLUSTER_LIST_LOOP,
                     BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_PATH_LOOP,
                     BMP_STAT_TYPE_INV_UPDATE_DUE_TO_ORIGINATOR_ID,
                     BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_CONFED_LOOP):
                valuepackstr = 'I'
            elif t in (BMP_STAT_TYPE_ADJ_RIB_IN,
                       BMP_STAT_TYPE_LOC_RIB,
                       BMP_STAT_TYPE_ADJ_RIB_OUT,
                       BMP_STAT_TYPE_EXPORT_RIB):
                valuepackstr = 'Q'
            else:
                # Unknown stat types are silently skipped (cannot size their value).
                continue
            v['len'] = struct.calcsize(valuepackstr)
            msg += bytearray(struct.pack(self._TLV_PACK_STR + valuepackstr, t, v['len'], v['value']))
        return msg
class RiverSegment:
    """One edge-aligned segment of a river, linked to the next downstream segment."""

    def __init__(self, grid, x, y, side, is_source=False):
        self.grid = grid
        self.x = x
        self.y = y
        self.side = side
        self.is_source = is_source
        self.next = None  # downstream RiverSegment, or None at the end
        self.id = uuid.uuid4()

    @property  # NOTE(review): restored — `edge` and `__eq__` access self.hex / self.edge as attributes
    def hex(self):
        """The hex cell this segment sits on."""
        return self.grid.find_hex(self.x, self.y)

    @property
    def edge(self):
        """The hex edge this segment occupies."""
        return self.hex.get_edge(self.side)

    def __repr__(self):
        return '<RiverSegment X: {}, Y: {}, side: {}>'.format(self.x, self.y, self.side)

    def size(self):
        """Number of segments from here to the river's end, inclusive."""
        count = 1
        river = self
        while river.next is not None:
            count += 1
            river = river.next
        return count

    def __eq__(self, other):
        # Two segments are equal when they occupy the same hex edge.
        return self.edge == other.edge
class SigningDialogue(BaseSigningDialogue):
    """Signing dialogue that remembers the FIPA (and optional cosm trade) dialogue it serves."""

    # NOTE(review): '_associated_cosm_trade_dialogue' added — __init__ assigns it, and with
    # __slots__ the assignment would otherwise rely on a base class providing __dict__.
    __slots__ = ('_associated_fipa_dialogue', '_associated_cosm_trade_dialogue')

    def __init__(
        self,
        dialogue_label: DialogueLabel,
        self_address: Address,
        role: Dialogue.Role,
        message_class: Type[SigningMessage] = SigningMessage,
    ) -> None:
        BaseSigningDialogue.__init__(
            self,
            dialogue_label=dialogue_label,
            self_address=self_address,
            role=role,
            message_class=message_class,
        )
        self._associated_fipa_dialogue: Optional[FipaDialogue] = None
        self._associated_cosm_trade_dialogue: Optional[CosmTradeDialogue] = None

    @property  # NOTE(review): property/setter decorators reconstructed from mangled ".setter" artifacts
    def associated_fipa_dialogue(self) -> FipaDialogue:
        """Get the associated FIPA dialogue; raise if it was never set."""
        if self._associated_fipa_dialogue is None:
            raise ValueError('associated_fipa_dialogue not set!')
        return self._associated_fipa_dialogue

    @associated_fipa_dialogue.setter
    def associated_fipa_dialogue(self, associated_fipa_dialogue: FipaDialogue) -> None:
        """Set the associated FIPA dialogue; may only be set once."""
        enforce(self._associated_fipa_dialogue is None, 'associated_fipa_dialogue already set!')
        self._associated_fipa_dialogue = associated_fipa_dialogue

    @property
    def associated_cosm_trade_dialogue(self) -> Optional[CosmTradeDialogue]:
        """Get the associated cosm trade dialogue, or None if not set."""
        return self._associated_cosm_trade_dialogue

    @associated_cosm_trade_dialogue.setter
    def associated_cosm_trade_dialogue(self, associated_cosm_trade_dialogue: CosmTradeDialogue) -> None:
        """Set the associated cosm trade dialogue; may only be set once."""
        enforce(self._associated_cosm_trade_dialogue is None, 'associated_cosm_trade_dialogue already set!')
        self._associated_cosm_trade_dialogue = associated_cosm_trade_dialogue
def dbus_introspection_add_properties(obj, data, interface):
    """Inject property declarations from obj._fw_dbus_properties into introspection XML.

    Returns the rewritten XML string when properties were added for `interface`;
    otherwise returns `data` unchanged.
    """
    modified = False
    if hasattr(obj, '_fw_dbus_properties'):
        dip = getattr(obj, '_fw_dbus_properties')
        if isinstance(dip, dict) and interface in dip:
            doc = ET.fromstring(data)
            for node in doc.iter('interface'):
                if 'name' in node.attrib and node.attrib['name'] == interface:
                    for key, value in dip[interface].items():
                        attrib = {'name': key, 'type': value['type'], 'access': value['access']}
                        ET.SubElement(node, 'property', attrib)
                        modified = True
    if modified:
        data = ET.tostring(doc, encoding='unicode')
        log.debug10(data)  # presumably a trace-level project logger — confirm placement inside the if
    return data
class ChainedWizard(Wizard):
    """Wizard that chains to a follow-on wizard, keeping the controllers in sync."""

    # The wizard to run after this one (if any).
    next_wizard = Instance(IWizard)

    def _controller_default(self):
        # Imported lazily to avoid a circular import at module load time.
        from .chained_wizard_controller import ChainedWizardController
        return ChainedWizardController()

    @observe('next_wizard')  # NOTE(review): decorator reconstructed from "('next_wizard')" artifact
    def _reset_next_controller_and_update(self, event):
        if event.new is not None:
            self.controller.next_controller = event.new.controller
        if self.control is not None:
            self._update()

    @observe('controller')  # NOTE(review): decorator reconstructed from "('controller')" artifact
    def _reset_traits_on_controller_and_update(self, event):
        if (event.new is not None) and (self.next_wizard is not None):
            self.controller.next_controller = self.next_wizard.controller
        if self.control is not None:
            self._update()
        return
def extractKylerwebWordpressCom(item):
    """Map a parsed feed item to a release message, or reject it.

    Returns None for items without volume/chapter info or preview posts,
    a release message for recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag to match, series name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@pytest.mark.parametrize('formatters, value, expected', LOOSE_SEQUENCE_FORMATTER_PARAMETERS)  # NOTE(review): restored from ".parametrize(...)" artifact
def test_combine_argument_formatters(formatters, value, expected):
    """combine_argument_formatters is deprecated but must still behave per the table."""
    with pytest.warns(DeprecationWarning):
        list_formatter = eth_utils.combine_argument_formatters(*formatters)
    if isinstance(expected, type) and issubclass(expected, Exception):
        with pytest.raises(expected):
            list_formatter(value)
    else:
        assert list_formatter(value) == expected
class SettingsTests(TestCase):
    """Tests for the JWT settings helpers (import resolution and reload)."""

    def test_perform_import(self):
        # Callables pass through unchanged; dotted paths are imported.
        f = settings.perform_import(id, '')
        self.assertEqual(f, id)
        f = settings.perform_import('datetime.timedelta', '')
        self.assertEqual(f, timedelta)

    def test_import_from_string_error(self):
        with self.assertRaises(ImportError):
            settings.import_from_string('import.error', '')

    def test_reload_settings(self):
        # Touch an attribute so it lands in the cache, then verify reload clears it.
        getattr(settings.jwt_settings, 'JWT_ALGORITHM')
        settings.reload_settings(setting='TEST')
        self.assertTrue(settings.jwt_settings._cached_attrs)
        delattr(settings.jwt_settings, '_user_settings')
        settings.jwt_settings.reload()
        self.assertFalse(settings.jwt_settings._cached_attrs)
def clean_pipelines(app='', settings=None):
    """Delete managed pipelines whose region is no longer configured.

    Args:
        app: Spinnaker application name.
        settings: Application settings mapping; must contain ['pipeline']['env'].

    Returns:
        True on completion.

    Raises:
        SpinnakerPipelineCreationFailed: when an environment config file is missing.
    """
    pipelines = get_all_pipelines(app=app)
    envs = settings['pipeline']['env']
    LOG.debug('Find Regions in: %s', envs)
    regions = set()
    for env in envs:
        try:
            regions.update(settings[env]['regions'])
        except KeyError as error:
            error_msg = 'Missing "{}/application-master-{}.json".'.format(RUNWAY_BASE_PATH, env)
            # Chain the KeyError so the missing key shows up in tracebacks.
            raise SpinnakerPipelineCreationFailed(error_msg) from error
    LOG.debug('Regions defined: %s', regions)
    for pipeline in pipelines:
        pipeline_name = pipeline['name']
        try:
            region = check_managed_pipeline(name=pipeline_name, app_name=app)
        except ValueError:
            # Unmanaged pipelines are left alone.
            LOG.info('"%s" is not managed.', pipeline_name)
            continue
        LOG.debug('Check "%s" in defined Regions.', region)
        if region not in regions:
            delete_pipeline(app=app, pipeline_name=pipeline_name)
    return True
def rgb2rgbp(rgb, gamma=None):
    """Convert linear RGB to gamma-companded R'G'B'.

    With gamma=None the piecewise sRGB transfer function is applied
    (linear scaling below ~0.0031308, power 1/2.4 above); otherwise a
    plain power law rgb ** (1/gamma) is used.
    """
    rgb = np.asarray(rgb)
    if gamma is None:
        eps = 0.0031308  # sRGB linear-segment threshold
        mask = rgb < eps
        rgbp = np.empty_like(rgb)
        rgbp[mask] = 12.92 * rgb[mask]
        rgbp[~mask] = 1.055 * rgb[~mask] ** (1.0 / 2.4) - 0.055
        return rgbp
    return rgb ** (1.0 / gamma)
def tokenize_strmat_1(text):
    """Whitespace-tokenize `text`, then recursively peel known affixes off each token.

    Relies on module-level STARTS and ENDS (collections of prefix/suffix strings).
    The honorific abbreviations 'Mr.' and 'Ms.' are kept intact.
    """
    tokens = text.split()
    new_tokens = []

    def aux(token):
        # Peel one leading affix, if any, and recurse on the remainder.
        start = next((t for t in STARTS if token.startswith(t)), None)
        if start:
            n = len(start)
            new_tokens.append(token[:n])
            aux(token[n:])
            return
        # Peel one trailing affix, unless that would split 'Mr.' / 'Ms.'.
        end = next((t for t in ENDS if token.endswith(t)), None)
        if end:
            n = len(end)
            t1, t2 = token[:-n], token[-n:]
            if not ((t1 in {'Mr', 'Ms'}) and (t2 == '.')):
                aux(t1)
                new_tokens.append(t2)
                return
        new_tokens.append(token)

    for token in tokens:
        aux(token)
    return new_tokens
@pytest.mark.parametrize('xml_string,output', [('', {}), ('<123>', {}), ('<d>blah</d>', {'d': ['blah']}), ('<a>1</a><a>2</a>', {'a': ['1', '2']}), ('<a>1</a><b>2</b>', {'a': ['1'], 'b': ['2']}), ('<a><a1>1</a1><a2>2</a2></a><b>2</b>', {'a': [{'a1': ['1'], 'a2': ['2']}], 'b': ['2']}), ('<a><a1>1</a1><a2>2</a2></a><b>2</b><a><a1>1</a1></a>', {'a': [{'a1': ['1'], 'a2': ['2']}, {'a1': ['1']}], 'b': ['2']})])  # NOTE(review): decorator restored from ".parametrize(...)" artifact
def test_xml_decode(xml_string: str, output: Any) -> None:
    """XMLEncoder.decode maps XML fragments to dict-of-lists structures."""
    encoder = XMLEncoder()
    assert encoder.decode(xml_string) == output
def main(args):
    """Convert a genomic-region file between tabular formats (CLI entry point)."""
    logging.info('Converting %s%s to %s',
                 ('input' if args.infile is sys.stdin else args.infile),
                 ((' from ' + args.in_fmt) if args.in_fmt != 'auto' else ''),
                 args.out_fmt)
    if args.in_fmt == 'auto':
        args.in_fmt = tabio.sniff_region_format(args.infile)
    # Per-format reader options.
    kwargs = {}
    if args.in_fmt == 'gff':
        if args.gff_tag:
            kwargs['tag'] = args.gff_tag
        if args.gff_type:
            kwargs['keep_type'] = args.gff_type
    elif args.in_fmt == 'refflat':
        if args.refflat_type == 'exon':
            kwargs['exons'] = True
        elif args.refflat_type == 'cds':
            kwargs['cds'] = True
    regions = tabio.read(args.infile, args.in_fmt, **kwargs)
    # Flatten and merge are mutually exclusive post-processing steps.
    if args.flatten:
        regions = regions.flatten()
    elif args.merge:
        regions = regions.merge(bp=args.merge)
    tabio.write(regions, args.output, args.out_fmt)
class OptionSeriesParetoSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Generated options wrapper for the sonification tremolo mapping (depth/speed)."""

    def depth(self) -> 'OptionSeriesParetoSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        # Returns the nested 'depth' sub-configuration object.
        return self._config_sub_data('depth', OptionSeriesParetoSonificationDefaultinstrumentoptionsMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesParetoSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        # Returns the nested 'speed' sub-configuration object.
        return self._config_sub_data('speed', OptionSeriesParetoSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
class ValenceDataset_CPCLoss(object):
    """Dataset pairing each sample's mel features with a mel from a contrasting class.

    The label file and contrastive-feature directory default to the original
    hard-coded locations but can now be injected, keeping callers compatible.
    """

    DEFAULT_LABELS_FILE = '/home1/srallaba/challenges/compare2020/ComParE2020_Elderly/lab/labels.csv'
    DEFAULT_CONTRASTIVE_DIR = 'vox/festival/falcon_mfcc/'

    def __init__(self, X, Mel, fnames,
                 labels_file=DEFAULT_LABELS_FILE,
                 contrastive_dir=DEFAULT_CONTRASTIVE_DIR):
        self.X = X
        self.Mel = Mel
        self.labels_file = labels_file
        self.column_num = 5  # presumably the valence column in labels.csv — TODO confirm
        (self.low, self.medium, self.high, self.fname2label_dict) = get_label_arrays(self.labels_file, self.column_num)
        self.fnames = fnames
        self.labels = [0, 1, 2]  # low / medium / high classes
        self.fnamearrays = [self.low, self.medium, self.high]
        self.contrastive_dir = contrastive_dir

    def __getitem__(self, idx):
        label = self.X[idx]
        mel = self.Mel[idx]
        # Draw a random file from a *different* class for the contrastive pair.
        contrastive_label = get_contrastive_label(self.labels, label)
        random_contrastive_fname = random.choice(self.fnamearrays[contrastive_label])
        contrastive_mel = np.load(self.contrastive_dir + random_contrastive_fname + '.feats.npy')
        return (label, mel, contrastive_mel)

    def __len__(self):
        return len(self.X)
class CreateCommandTestCase(TestCase):
    """Tests for `aea create` failure modes when author configuration is missing."""

    def setUp(self):
        self.runner = CliRunner()

    def test_create_no_init(self):
        """Creation fails with a setup hint when `aea init` was never run."""
        result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'create', 'agent_name', '--author=some'], standalone_mode=False)
        self.assertEqual(result.exception.message, "Author is not set up. Please use 'aea init' to initialize.")

    @mock.patch('aea.cli.create.get_or_create_cli_config', return_value={})  # NOTE(review): decorator restored from mangled artifact
    def test_create_no_author_local(self, *mocks):
        """Creation fails with a hint when the CLI config contains no author."""
        result = self.runner.invoke(cli, [*CLI_LOG_OPTION, 'create', '--local', 'agent_name'], standalone_mode=False)
        expected_message = 'The AEA configurations are not initialized. Uses `aea init` before continuing or provide optional argument `--author`.'
        self.assertEqual(result.exception.message, expected_message)
def set_t_ids(save_stats: dict[str, Any]) -> dict[str, Any]:
    """Prompt the user for treasure ids and write the selected values into the save data."""
    unit_drops_stats = save_stats['unit_drops']
    # Game data differs between the JP and global versions.
    data = get_data(helper.check_data_is_jp(save_stats))
    usr_t_ids = user_input_handler.get_range(
        user_input_handler.colored_input(
            'Enter treasures ids (Look up item drop cats battle cats to find ids)(You can enter &all& to get all, a range e.g &1&-&50&, or ids separate by spaces e.g &5 4 7&):'
        ),
        all_ids=data['t_ids'],
    )
    unit_drops_stats = set_t_ids_val(unit_drops_stats, data, usr_t_ids)
    save_stats['unit_drops'] = unit_drops_stats
    return save_stats
@pytest.mark.parametrize('backend', ['pycryptodome', 'pysha3'])  # NOTE(review): decorator restored from ".parametrize(...)" artifact
def test_load_by_env(monkeypatch, backend):
    """A backend named in ETH_HASH_BACKEND but not installed raises a helpful ImportError."""
    clean_module('eth_hash.auto')
    from eth_hash.auto import keccak
    monkeypatch.setenv('ETH_HASH_BACKEND', backend)
    # Hide both backend modules so the lazy import is guaranteed to fail.
    with mock.patch.dict('sys.modules', {'sha3': None, 'Crypto.Hash': None}):
        with pytest.raises(ImportError) as excinfo:
            keccak(b'triggered')
    expected_msg = f"""The backend specified in ETH_HASH_BACKEND, '{backend}', is not installed. Install with `python -m pip install "eth-hash[{backend}]"`."""
    assert expected_msg in str(excinfo.value)
class TestMarkTopicsReadView(BaseClientTestCase):
    """Integration tests for the forum_tracking:mark_topics_read view."""

    @pytest.fixture(autouse=True)  # NOTE(review): decorator restored from "(autouse=True)" artifact
    def setup(self):
        # Users sharing one group.
        self.u1 = UserFactory.create()
        self.u2 = UserFactory.create()
        self.g1 = GroupFactory.create()
        self.u1.groups.add(self.g1)
        self.u2.groups.add(self.g1)
        self.user.groups.add(self.g1)
        self.perm_handler = PermissionHandler()
        self.tracks_handler = TrackingHandler()
        # Forum tree: two top-level categories with several forums and one child forum.
        self.top_level_cat_1 = create_category_forum()
        self.top_level_cat_2 = create_category_forum()
        self.forum_1 = create_forum(parent=self.top_level_cat_1)
        self.forum_2 = create_forum(parent=self.top_level_cat_1)
        self.forum_2_child_1 = create_forum(parent=self.forum_2)
        self.forum_3 = create_forum(parent=self.top_level_cat_1)
        self.forum_4 = create_forum(parent=self.top_level_cat_2)
        self.topic = create_topic(forum=self.forum_2, poster=self.u1)
        PostFactory.create(topic=self.topic, poster=self.u1)
        ForumReadTrackFactory.create(forum=self.forum_2, user=self.u2)
        ForumReadTrackFactory.create(forum=self.forum_2, user=self.user)
        # Read permission for the group (forum_3 deliberately left unreadable — TODO confirm).
        assign_perm('can_read_forum', self.g1, self.top_level_cat_1)
        assign_perm('can_read_forum', self.g1, self.top_level_cat_2)
        assign_perm('can_read_forum', self.g1, self.forum_1)
        assign_perm('can_read_forum', self.g1, self.forum_2)
        assign_perm('can_read_forum', self.g1, self.forum_2_child_1)
        assign_perm('can_read_forum', self.g1, self.forum_4)

    def test_browsing_works(self):
        """GET on the mark-read URL renders successfully for a permitted user."""
        correct_url = reverse('forum_tracking:mark_topics_read', kwargs={'pk': self.forum_2.pk})
        response = self.client.get(correct_url, follow=True)
        assert response.status_code == 200

    def test_can_mark_forum_topics_read(self):
        """POST marks all of the forum's topics as read for the current user."""
        new_topic = create_topic(forum=self.forum_4, poster=self.u1)
        PostFactory.create(topic=new_topic, poster=self.u1)
        correct_url = reverse('forum_tracking:mark_topics_read', kwargs={'pk': self.forum_4.pk})
        response = self.client.post(correct_url, follow=True)
        assert response.status_code == 200
        assert list(self.tracks_handler.get_unread_topics(self.forum_4.topics.all(), self.user)) == []

    def test_do_not_perform_anything_if_the_user_has_not_the_required_permission(self):
        """Without group membership (and thus read permission) the view returns 403."""
        self.user.groups.clear()
        correct_url = reverse('forum_tracking:mark_topics_read', kwargs={'pk': self.forum_2.pk})
        response = self.client.get(correct_url, follow=True)
        assert response.status_code == 403
class Widget(app.JsComponent):
DEFAULT_MIN_SIZE = (0, 0)
CSS = '\n\n .flx-Widget {\n box-sizing: border-box;\n overflow: hidden;\n position: relative; /* helps with absolute positioning of content */\n }\n\n /* Main widget to fill the whole page */\n .flx-main-widget {\n position: absolute;\n left: 0;\n right: 0;\n width: 100%;\n top: 0;\n bottom: 0;\n height: 100%;\n }\n\n /* to position children absolute */\n .flx-abs-children > .flx-Widget {\n position: absolute;\n }\n\n /* Fix issue flexbox > Widget > layout on Chrome */\n .flx-Widget:not(.flx-Layout) > .flx-Layout {\n position: absolute;\n }\n '
container = event.StringProp('', settable=True, doc="\n The id of the DOM element that contains this widget if\n parent is None. Use 'body' to make this widget the root.\n ")
parent = event.ComponentProp(None, doc='\n The parent widget, or None if it has no parent. Setting\n this property will update the "children" property of the\n old and new parent.\n ')
children = app.LocalProperty((), doc='\n The child widgets of this widget. This property is not settable and\n only present in JavaScript.\n ')
title = event.StringProp('', settable=True, doc="\n The string title of this widget. This is used to mark\n the widget in e.g. a tab layout or form layout, and is used\n as the app's title if this is the main widget.\n ")
icon = app.LocalProperty('', settable=False, doc="\n The icon for this widget. This is used is some widgets classes,\n and is used as the app's icon if this is the main widget.\n It is settable from Python, but only present in JavaScript.\n ")
css_class = event.StringProp('', settable=True, doc="\n The extra CSS class name to asign to the DOM element.\n Spaces can be used to delimit multiple names. Note that the\n DOM element already has a css class-name corresponding to\n its class (e.g. 'flx-Widget) and all its superclasses.\n ")
flex = event.FloatPairProp((0, 0), settable=True, doc='\n How much space this widget takes (relative to the other\n widgets) when contained in a flexible layout such as HBox,\n HFix, HSplit or FormLayout. A flex of 0 means to take\n the minimum size. Flex is a two-element tuple, but both values\n can be specified at once by specifying a scalar.\n ')
size = event.FloatPairProp((0, 0), settable=False, doc="\n The actual size of the widget (readonly). Flexx tries to keep\n this value up-to-date, but in e.g. a box layout, a change in a\n Button's text can change the size of sibling widgets.\n ")
minsize = event.FloatPairProp((0, 0), settable=True, doc='\n The user-defined minimum size (width, height) of this widget in pixels.\n The default value differs per widget (``Widget.DEFAULT_MIN_SIZE``).\n Note that using "min-width" or "min-height" in ``apply_style()``.\n (and in the ``style`` kwarg) also set this property. Minimum sizes set\n in CSS are ignored.\n ')
minsize_from_children = event.BoolProp(True, settable=True, doc="\n Whether the children are taken into account to calculate this\n widget's size constraints. Default True: both the ``minsize``\n of this widget and the size constraints of its children (plus\n spacing and padding for layout widgets) are used to calculate\n the size constraints for this widget.\n\n Set to False to prevent the content in this widget to affect\n the parent's layout, e.g. to allow fully collapsing this widget\n when the parent is a splitter. If this widget has a lot of\n content, you may want to combine with ``style='overflow-y: auto'``.\n ")
maxsize = event.FloatPairProp((.0, .0), settable=True, doc='\n The user-defined maximum size (width, height) of this widget in pixels.\n Note that using "max-width" or "max-height" in ``apply_style()``.\n (and in the ``style`` kwarg) also set this property. Maximum sizes set\n in CSS are ignored.\n ')
_size_limits = event.TupleProp((0, .0, 0, .0), settable=True, doc='\n A 4-element tuple (minWidth, maxWidth, minHeight, maxHeight) in pixels,\n based on ``minsize``, ``maxsize`` (and for some layouts the size limits\n of the children). Private prop for internal use.\n ')
tabindex = event.IntProp((- 2), settable=True, doc='\n The index used to determine widget order when the user\n iterates through the widgets using tab. This also determines\n whether a widget is able to receive key events. Flexx automatically\n sets this property when it should emit key events.\n Effect of possible values on underlying DOM element:\n\n * -2: element cannot have focus unless its a special element like\n a link or form control (default).\n * -1: element can have focus, but is not reachable via tab.\n * 0: element can have focus, and is reachable via tab in the order\n at which the element is defined.\n * 1 and up: element can have focus, and the tab-order is determined\n by the value of tabindex.\n ')
capture_mouse = event.IntProp(1, settable=True, doc='\n To what extend the mouse is "captured".\n\n * If 0, the mouse is not captured, and move events are only emitted\n when the mouse is pressed down (not recommended).\n * If 1 (default) the mouse is captured when pressed down, so move\n and up events are received also when the mouse is outside the widget.\n * If 2, move events are also emitted when the mouse is not pressed down\n and inside the widget.\n ')
def set_icon(self, val):
if (not isinstance(val, str)):
raise TypeError('Icon must be a string')
self._mutate_icon(val)
def __init__(self, *init_args, **kwargs):
try:
given_parent = parent = kwargs.pop('parent')
parent_given = True
except KeyError:
given_parent = parent = None
parent_given = False
if (parent is None):
active_components = loop.get_active_components()
for active_component in reversed(active_components):
if isinstance(active_component, Widget):
parent = active_component
break
if ((parent is not None) and (not kwargs.get('flx_session', None))):
kwargs['flx_session'] = parent.session
style = kwargs.pop('style', '')
is_app = kwargs.get('flx_is_app', False)
super().__init__(*init_args, **kwargs)
if (parent_given is True):
self.set_parent(given_parent)
elif (parent is not None):
self.set_parent(parent)
elif (self.container == ''):
if window.flexx.need_main_widget:
window.flexx.need_main_widget = False
self.set_container('body')
if (kwargs.get('minsize', None) is None):
self.set_minsize(self.DEFAULT_MIN_SIZE)
if style:
self.apply_style(style)
def _comp_init_property_values(self, property_values):
super()._comp_init_property_values(property_values)
nodes = self._create_dom()
assert (nodes is not None)
if (not isinstance(nodes, list)):
nodes = [nodes]
assert ((len(nodes) == 1) or (len(nodes) == 2))
if (len(nodes) == 1):
self.outernode = self.node = self.__render_resolve(nodes[0])
else:
self.outernode = self.__render_resolve(nodes[0])
self.node = self.__render_resolve(nodes[1])
cls = self.__class__
for i in range(32):
self.outernode.classList.add(('flx-' + cls.__name__))
if (cls is Widget.prototype):
break
cls = cls._base_class
else:
raise RuntimeError(('Error determining class names for %s' % self.id))
self._init_events()
def init(self):
pass
def _create_dom(self):
return create_element('div')
def _render_dom(self):
nodes = []
for i in range(len(self.outernode.childNodes)):
node = self.outernode.childNodes[i]
if (not (node.classList and node.classList.contains('flx-Widget'))):
nodes.push(node)
for widget in self.children:
nodes.push(widget.outernode)
return nodes
def __render(self):
vnode = self._render_dom()
if ((vnode is None) or (vnode is self.outernode)):
return
elif isinstance(vnode, list):
vnode = dict(type=self.outernode.nodeName, props={}, children=vnode)
elif isinstance(vnode, dict):
if (vnode.type.toLowerCase() != self.outernode.nodeName.toLowerCase()):
raise ValueError('Widget._render_dom() must return root node with same element type as outernode.')
else:
raise TypeError('Widget._render_dom() must return None, list or dict.')
node = self.__render_resolve(vnode, self.outernode)
assert (node is self.outernode)
def __render_resolve(self, vnode, node=None):
if (vnode and vnode.nodeName):
return vnode
elif isinstance(vnode, str):
return window.document.createTextNode(vnode)
elif (not isinstance(vnode, dict)):
raise TypeError(('Widget._render_dom() needs virtual nodes to be dicts, not ' + vnode))
if (not isinstance(vnode.type, str)):
raise TypeError(('Widget._render_dom() needs virtual node type to be str, not ' + vnode.type))
if (not isinstance(vnode.props, dict)):
raise TypeError(('Widget._render_dom() needs virtual node props as dict, not ' + vnode.props))
if ((node is None) or (node.nodeName.toLowerCase() != vnode.type.toLowerCase())):
node = window.document.createElement(vnode.type)
map = {'css_class': 'className', 'class': 'className'}
for (key, val) in vnode.props.items():
ob = node
parts = key.replace('__', '.').split('.')
for i in range((len(parts) - 1)):
ob = ob[parts[i]]
key = parts[(len(parts) - 1)]
ob[map.get(key, key)] = val
if (vnode.children is None):
pass
elif isinstance(vnode.children, list):
while (len(node.childNodes) > len(vnode.children)):
node.removeChild(node.childNodes[(len(node.childNodes) - 1)])
i1 = (- 1)
for i2 in range(len(vnode.children)):
i1 += 1
vsubnode = vnode.children[i2]
subnode = None
if (i1 < len(node.childNodes)):
subnode = node.childNodes[i1]
if ((subnode.nodeName == '#text') and isinstance(vsubnode, str)):
if (subnode.data != vsubnode):
subnode.data = vsubnode
continue
new_subnode = self.__render_resolve(vsubnode, subnode)
if (subnode is None):
node.appendChild(new_subnode)
elif (subnode is not new_subnode):
node.insertBefore(new_subnode, subnode)
node.removeChild(subnode)
else:
window.flexx_vnode = vnode
raise TypeError(('Widget._render_dom() needs virtual node children to be None or list, not %s' % vnode.children))
return node
def _repr_html_(self):
if self.container:
return ('<i>Th widget %s is already shown in this notebook</i>' % self.id)
container_id = (self.id + '_container')
self.set_container(container_id)
return ("<div class='flx-container' id='%s' />" % container_id)
def dispose(self):
children = self.children
for child in children:
child.dispose()
super().dispose()
self.set_parent(None)
self._children_value = ()
def apply_style(self, style):
if isinstance(style, dict):
style = [('%s: %s' % (k, v)) for (k, v) in style.items()]
style = '; '.join(style)
d = {}
if style:
for part in style.split(';'):
if (':' in part):
(key, val) = part.split(':', 1)
(key, val) = (key.trim(), val.trim())
self.outernode.style[key] = val
d[key] = val
(w1, h1) = self.minsize
(w2, h2) = self.maxsize
mima = (w1, w2, h1, h2)
size_limits_keys = ('min-width', 'max-width', 'min-height', 'max-height')
size_limits_changed = False
for i in range(4):
key = size_limits_keys[i]
if (key in d):
val = d[key]
if (val == '0'):
mima[i] = 0
size_limits_changed = True
elif val.endswith('px'):
mima[i] = float(val[:(- 2)])
size_limits_changed = True
if size_limits_changed:
self.set_minsize((mima[0], mima[2]))
self.set_maxsize((mima[1], mima[3]))
('css_class')
def __css_class_changed(self, *events):
if len(events):
for cn in events[0].old_value.split(' '):
if cn:
self.outernode.classList.remove(cn)
for cn in events[(- 1)].new_value.split(' '):
if cn:
self.outernode.classList.add(cn)
('title')
def __title_changed(self, *events):
if ((self.parent is None) and (self.container == 'body')):
window.document.title = (self.title or 'Flexx app')
('icon')
def __icon_changed(self, *events):
if ((self.parent is None) and (self.container == 'body')):
window.document.title = (self.title or 'Flexx app')
link = window.document.createElement('link')
oldLink = window.document.getElementById('flexx-favicon')
link.id = 'flexx-favicon'
link.rel = 'shortcut icon'
link.href = events[(- 1)].new_value
if oldLink:
window.document.head.removeChild(oldLink)
window.document.head.appendChild(link)
def __update_tabindex(self, *events):
ti = self.tabindex
if (ti < (- 1)):
self.node.removeAttribute('tabIndex')
else:
self.node.tabIndex = ti
def _update_minmaxsize(self):
(w1, w2, h1, h2) = self._query_min_max_size()
w1 = max(0, w1)
h1 = max(0, h1)
self._set_size_limits((w1, w2, h1, h2))
s = self.outernode.style
s['min-width'] = (w1 + 'px')
s['max-width'] = (w2 + 'px')
s['min-height'] = (h1 + 'px')
s['max-height'] = (h2 + 'px')
def _query_min_max_size(self):
(w1, h1) = self.minsize
(w2, h2) = self.maxsize
if (self.outernode.classList.contains('flx-Layout') is False):
if ((self.minsize_from_children is True) and (len(self.children) == 1)):
child = self.children[0]
if (child.outernode.classList.contains('flx-Layout') is True):
(w3, w4, h3, h4) = child._query_min_max_size()
(w1, w2) = (max(w1, w3), min(w2, w4))
(h1, h2) = (max(h1, h3), min(h2, h4))
return (w1, w2, h1, h2)
def check_real_size(self):
n = self.outernode
cursize = self.size
if ((cursize[0] != n.clientWidth) or (cursize[1] != n.clientHeight)):
self._mutate_size([n.clientWidth, n.clientHeight])
('container', 'parent.size', 'children')
def __size_may_have_changed(self, *events):
self.check_real_size()
def _set_size(self, prefix, w, h):
size = (w, h)
for i in range(2):
if ((size[i] <= 0) or (size is None) or (size is undefined)):
size[i] = ''
elif (size[i] > 1):
size[i] = (size[i] + 'px')
else:
size[i] = ((size[i] * 100) + '%')
self.outernode.style[(prefix + 'width')] = size[0]
self.outernode.style[(prefix + 'height')] = size[1]
    def set_parent(self, parent, pos=None):
        """Re-parent this widget, optionally inserting at *pos* in the new parent's children.

        A negative *pos* counts from the end; None appends. Mutates the
        children lists of both the old and the new parent.
        """
        old_parent = self.parent
        new_parent = parent
        if ((new_parent is old_parent) and (pos is None)):
            return  # nothing to do
        if (not ((new_parent is None) or isinstance(new_parent, Widget))):
            raise ValueError(('%s.parent must be a Widget or None' % self.id))
        self._mutate_parent(new_parent)
        # Remove self from the old parent's children (PScript: list.push == append).
        if (old_parent is not None):
            children = []
            for i in range(len(old_parent.children)):
                child = old_parent.children[i]
                if (child is not self):
                    children.push(child)
            if (old_parent is not new_parent):
                old_parent._mutate_children(children)
        if (new_parent is not None):
            if (old_parent is not new_parent):
                children = []
                for i in range(len(new_parent.children)):
                    child = new_parent.children[i]
                    if (child is not self):
                        children.push(child)
            if (pos is None):
                children.push(self)
            elif (pos >= 0):
                children.insert(pos, self)
            elif (pos < 0):
                # Negative insert: pad with a placeholder so the index counts
                # from the end, then drop the placeholder.
                children.append(None)
                children.insert(pos, self)
                children.pop((- 1))
            else:
                children.push(self)
            new_parent._mutate_children(children)
    ('container')  # NOTE(review): bare expression -- presumably a stripped @reaction('container') decorator; confirm upstream
    def __container_changed(self, *events):
        """Mount a parentless widget into the DOM element named by the container property."""
        id = self.container
        self.outernode.classList.remove('flx-main-widget')
        if self.parent:
            return  # only parentless widgets live in a container
        self._session.keep_checking_size_of(self, bool(id))
        if id:
            if (id == 'body'):
                el = window.document.body
                self.outernode.classList.add('flx-main-widget')
                window.document.title = (self.title or 'Flexx app')
            else:
                el = window.document.getElementById(id)
                if (el is None):
                    # Target element not in the DOM yet; retry shortly.
                    window.setTimeout(self.__container_changed, 100)
                    return
            el.appendChild(self.outernode)
    def _release_child(self, widget):
        """Hook invoked with a child widget; no-op by default (subclasses may override)."""
        pass
    def _registered_reactions_hook(self):
        """Make the widget focusable (tabindex -1) when key-event reactions get registered."""
        event_types = super()._registered_reactions_hook()
        if (self.tabindex < (- 1)):
            for event_type in event_types:
                if (event_type in ('key_down', 'key_up', 'key_press')):
                    self.set_tabindex((- 1))
        return event_types
    def _init_events(self):
        """Attach DOM event listeners and set up mouse-capture bookkeeping.

        _capture_flag states: 0 = idle, 1 = tracking moves without document
        capture, 2 = capturing via document-level listeners, -1 = capture just
        released (swallow the next inside move).
        """
        self._addEventListener(self.node, 'wheel', self.pointer_wheel, 0)
        self._addEventListener(self.node, 'keydown', self.key_down, 0)
        self._addEventListener(self.node, 'keyup', self.key_up, 0)
        self._addEventListener(self.node, 'keypress', self.key_press, 0)
        self._addEventListener(self.node, 'mousedown', self.pointer_down, 0)
        self._addEventListener(self.node, 'click', self.pointer_click, 0)
        self._addEventListener(self.node, 'dblclick', self.pointer_double_click, 0)
        self._addEventListener(self.node, 'touchstart', self.pointer_down, 0)
        self._addEventListener(self.node, 'touchmove', self.pointer_move, 0)
        self._addEventListener(self.node, 'touchend', self.pointer_up, 0)
        self._addEventListener(self.node, 'touchcancel', self.pointer_cancel, 0)
        self._capture_flag = 0
        def mdown(e):
            # capture_mouse == 0 -> just track moves; otherwise capture at document level.
            if (self.capture_mouse == 0):
                self._capture_flag = 1
            else:
                self._capture_flag = 2
                window.document.addEventListener('mousemove', mmove_outside, True)
                window.document.addEventListener('mouseup', mup_outside, True)
        def mmove_inside(e):
            if (self._capture_flag == (- 1)):
                self._capture_flag = 0  # swallow first inside move after capture released
            elif (self._capture_flag == 1):
                self.pointer_move(e)
            elif ((self._capture_flag == 0) and (self.capture_mouse > 1)):
                self.pointer_move(e)
        def mup_inside(e):
            if (self._capture_flag == 1):
                self.pointer_up(e)
            self._capture_flag = 0
        def mmove_outside(e):
            # Only relevant while capturing; prefer window.event when present (legacy IE).
            if (self._capture_flag == 2):
                e = (window.event if window.event else e)
                self.pointer_move(e)
        def mup_outside(e):
            if (self._capture_flag == 2):
                e = (window.event if window.event else e)
                stopcapture()
                self.pointer_up(e)
        def stopcapture():
            # Tear down document-level listeners and mark capture as just-released.
            if (self._capture_flag == 2):
                self._capture_flag = (- 1)
                window.document.removeEventListener('mousemove', mmove_outside, True)
                window.document.removeEventListener('mouseup', mup_outside, True)
        def losecapture(e):
            stopcapture()
            self.pointer_cancel(e)
        self._addEventListener(self.node, 'mousedown', mdown, True)
        self._addEventListener(self.node, 'losecapture', losecapture)
        self._addEventListener(self.node, 'mousemove', mmove_inside, False)
        self._addEventListener(self.node, 'mouseup', mup_inside, False)
    def pointer_down(self, e):
        """Translate a DOM down event into a normalized pointer event dict."""
        return self._create_pointer_event(e)
    def pointer_up(self, e):
        """Translate a DOM up event into a normalized pointer event dict."""
        return self._create_pointer_event(e)
    def pointer_cancel(self, e):
        """Translate a DOM cancel event into a normalized pointer event dict."""
        return self._create_pointer_event(e)
    def pointer_click(self, e):
        """Translate a DOM click event into a normalized pointer event dict."""
        return self._create_pointer_event(e)
    def pointer_double_click(self, e):
        """Translate a DOM dblclick event into a normalized pointer event dict."""
        return self._create_pointer_event(e)
    def pointer_move(self, e):
        """Translate a DOM move event; button is forced to 0 for moves."""
        ev = self._create_pointer_event(e)
        ev.button = 0
        return ev
    def pointer_wheel(self, e):
        """Translate a wheel event, normalizing deltaMode (pixel/line/page) to pixels."""
        ev = self._create_pointer_event(e)
        ev.button = 0
        # deltaMode: 0 = pixels, 1 = lines (~16px each), 2 = pages (~600px each)
        ev.hscroll = (e.deltaX * [1, 16, 600][e.deltaMode])
        ev.vscroll = (e.deltaY * [1, 16, 600][e.deltaMode])
        return ev
    def _create_pointer_event(self, e):
        """Build a normalized event dict (pos, page_pos, touches, button, buttons,
        modifiers) from a raw mouse or touch DOM event.

        Positions are relative to this widget's node.
        """
        rect = self.node.getBoundingClientRect()
        offset = (rect.left, rect.top)
        if e.type.startswith('touch'):
            # Touch event: position comes from the first changed touch.
            t = e.changedTouches[0]
            pos = (float((t.clientX - offset[0])), float((t.clientY - offset[1])))
            page_pos = (t.pageX, t.pageY)
            button = 0
            buttons = []
            touches = {}
            for i in range(e.changedTouches.length):
                t = e.changedTouches[i]
                if (t.target is not e.target):
                    continue  # ignore touches that target other elements
                touches[t.identifier] = (float((t.clientX - offset[0])), float((t.clientY - offset[1])), t.force)
        else:
            pos = (float((e.clientX - offset[0])), float((e.clientY - offset[1])))
            page_pos = (e.pageX, e.pageY)
            if e.buttons:
                # Bitmask -> string of bits, least-significant button first.
                buttons_mask = RawJS("e.buttons.toString(2).split('').reverse().join('')")
            else:
                buttons_mask = [e.button.toString(2)]
            buttons = [(i + 1) for i in range(5) if (buttons_mask[i] == '1')]
            # Remap DOM button codes (0=left,1=middle,2=right) to 1=left,2=right,3=middle.
            button = {0: 1, 1: 3, 2: 2, 3: 4, 4: 5}[e.button]
            touches = {(- 1): (pos[0], pos[1], 1)}  # synthesize one pseudo-touch for mice
        modifiers = [n for n in ('Alt', 'Shift', 'Ctrl', 'Meta') if e[(n.toLowerCase() + 'Key')]]
        return dict(pos=pos, page_pos=page_pos, touches=touches, button=button, buttons=buttons, modifiers=modifiers)
    def key_down(self, e):
        """Translate a DOM keydown event into a normalized key event dict."""
        return self._create_key_event(e)
    def key_up(self, e):
        """Translate a DOM keyup event into a normalized key event dict."""
        return self._create_key_event(e)
    def key_press(self, e):
        """Translate a DOM keypress event into a normalized key event dict."""
        return self._create_key_event(e)
    def _create_key_event(self, e):
        """Build a key event dict (key, modifiers) from a DOM keyboard event.

        Falls back to e.code when e.key is absent; strips 'Key'/'Digit'
        prefixes, lower-casing letters unless Shift is held; maps legacy
        'Esc'/'Del' names to 'Escape'/'Delete'.
        """
        modifiers = [n for n in ('Alt', 'Shift', 'Ctrl', 'Meta') if e[(n.toLowerCase() + 'Key')]]
        key = e.key
        if ((not key) and e.code):
            key = e.code
        if key.startswith('Key'):
            key = key[3:]
            if ('Shift' not in modifiers):
                key = key.toLowerCase()
        elif key.startswith('Digit'):
            key = key[5:]
        key = {'Esc': 'Escape', 'Del': 'Delete'}.get(key, key)
        return dict(key=key, modifiers=modifiers)
def import_comments_from_node(node: cst.SimpleStatementLine) -> ImportComments:
    """Collect the comments attached to a single import statement line.

    Buckets them into: leading comment lines (``before``), comments right
    after an opening paren (``first_inline`` / ``initial``), comments just
    before the closing paren (``final`` / ``inline``), and the statement's
    trailing comment (``last_inline`` when parenthesized, otherwise
    ``first_inline``).
    """
    comments = ImportComments()
    assert (len(node.body) == 1), 'lines with multiple statements are unsupported'
    assert isinstance(node.body[0], (cst.Import, cst.ImportFrom))
    imp: Union[(cst.Import, cst.ImportFrom)] = node.body[0]
    # Leading lines: keep blanks as '' placeholders so positions are preserved.
    for line in node.leading_lines:
        if line.comment:
            comments.before.append(line.comment.value)
        else:
            comments.before.append('')
    if isinstance(imp, cst.ImportFrom):
        if imp.lpar:
            # Parenthesized from-import: gather comments after '('...
            if isinstance(imp.lpar.whitespace_after, cst.ParenthesizedWhitespace):
                ws = imp.lpar.whitespace_after
                if ws.first_line.comment:
                    comments.first_inline.extend(split_inline_comment(ws.first_line.comment.value))
                comments.initial.extend((line.comment.value for line in ws.empty_lines if line.comment))
            assert (imp.rpar is not None)
            # ...and comments before ')'.
            if isinstance(imp.rpar.whitespace_before, cst.ParenthesizedWhitespace):
                comments.final.extend((line.comment.value for line in imp.rpar.whitespace_before.empty_lines if line.comment))
                if imp.rpar.whitespace_before.first_line.comment:
                    comments.inline.extend(split_inline_comment(imp.rpar.whitespace_before.first_line.comment.value))
            if (node.trailing_whitespace and node.trailing_whitespace.comment):
                comments.last_inline.extend(split_inline_comment(node.trailing_whitespace.comment.value))
        elif (node.trailing_whitespace and node.trailing_whitespace.comment):
            # Unparenthesized from-import: trailing comment sits inline with the names.
            comments.first_inline.extend(split_inline_comment(node.trailing_whitespace.comment.value))
    elif isinstance(imp, cst.Import):
        if (node.trailing_whitespace and node.trailing_whitespace.comment):
            comments.first_inline.extend(split_inline_comment(node.trailing_whitespace.comment.value))
    else:
        raise TypeError
    return comments
def _manage_post(details: ManagePostDetails, moderator: ModeratorModel):
    """Dispatch a moderation action (delete or report) for the post in *details*."""
    target = posts.find_post_by_id(details.post_id, include_thread=True)
    if not target:
        message = MESSAGE_NO_POST_ID if not details.post_id else MESSAGE_POST_NOT_FOUND
        raise BadRequestError(message)
    # Unknown modes fall through silently, matching the original if/elif chain.
    handlers = {
        ManagePostDetails.DELETE: _manage_delete,
        ManagePostDetails.REPORT: _manage_report,
    }
    handler = handlers.get(details.mode)
    if handler is not None:
        handler(details, moderator, target)
class TestJSONField(FieldValues):
    """Exercise serializers.JSONField against valid/invalid/output fixtures."""
    valid_inputs = [({'a': 1, 'b': ['some', 'list', True, 1.23], '3': None}, {'a': 1, 'b': ['some', 'list', True, 1.23], '3': None})]
    invalid_inputs = [({'a': set()}, ['Value must be valid JSON.']), ({'a': float('inf')}, ['Value must be valid JSON.'])]
    outputs = [({'a': 1, 'b': ['some', 'list', True, 1.23], '3': 3}, {'a': 1, 'b': ['some', 'list', True, 1.23], '3': 3})]
    field = serializers.JSONField()

    def test_html_input_as_json_string(self):
        """A JSON string submitted via an HTML form (QueryDict) is parsed on validation."""
        class ConfigSerializer(serializers.Serializer):
            config = serializers.JSONField()

        form_input = QueryDict(mutable=True)
        form_input.update({'config': '{"a":1}'})
        serializer = ConfigSerializer(data=form_input)
        assert serializer.is_valid()
        assert serializer.validated_data == {'config': {'a': 1}}
class map(_coconut_baseclass, _coconut.map):
    # Coconut's map: remembers func and iterables so the object can also be
    # indexed, sliced, reversed, sized, pickled, copied, and fmapped.
    __slots__ = ('func', 'iters')
    __doc__ = getattr(_coconut.map, '__doc__', '<see help(py_map)>')
    def __new__(cls, function, *iterables, **kwargs):
        # Only the 'strict' keyword is accepted (mirrors zip(strict=...)).
        strict = kwargs.pop('strict', False)
        if kwargs:
            raise _coconut.TypeError(((cls.__name__ + '() got unexpected keyword arguments ') + _coconut.repr(kwargs)))
        if (strict and (_coconut.len(iterables) > 1)):
            # Strict multi-iterable map: delegate length checking to zip(strict=True).
            return starmap(function, zip(*iterables, strict=True))
        self = _coconut.map.__new__(cls, function, *iterables)
        self.func = function
        self.iters = iterables
        return self
    def __getitem__(self, index):
        # Slices stay lazy (a new map over sliced iterables); a single index
        # applies func to the items at that index.
        if _coconut.isinstance(index, _coconut.slice):
            return self.__class__(self.func, *(_coconut_iter_getitem(it, index) for it in self.iters))
        return self.func(*(_coconut_iter_getitem(it, index) for it in self.iters))
    def __reversed__(self):
        return self.__class__(self.func, *(reversed(it) for it in self.iters))
    def __len__(self):
        # Only defined when every underlying iterable is Sized.
        if (not _coconut.all((_coconut.isinstance(it, _coconut.abc.Sized) for it in self.iters))):
            return _coconut.NotImplemented
        return _coconut.min((_coconut.len(it) for it in self.iters))
    def __repr__(self):
        return ('%s(%r, %s)' % (self.__class__.__name__, self.func, ', '.join((_coconut.repr(it) for it in self.iters))))
    def __reduce__(self):
        # Pickle by reconstructing from func and the iterables.
        return (self.__class__, ((self.func,) + self.iters))
    def __copy__(self):
        # Make stored iterables re-iterable so the copy is independent of self.
        self.iters = _coconut.tuple((reiterable(it) for it in self.iters))
        return self.__class__(self.func, *self.iters)
    def __iter__(self):
        return _coconut.map(self.func, *self.iters)
    def __fmap__(self, func):
        # fmap composes the functions instead of nesting two map objects.
        return self.__class__(_coconut_forward_compose(self.func, func), *self.iters)
class FQ12(FQP):
    """Degree-12 polynomial field extension; subclasses must set FQ12_MODULUS_COEFFS."""
    degree = 12
    FQ12_MODULUS_COEFFS = None

    def __init__(self, coeffs: Sequence[IntOrFQ]) -> None:
        """Initialize from *coeffs*, caching the nonzero modulus coefficients."""
        mod_coeffs = self.FQ12_MODULUS_COEFFS
        if mod_coeffs is None:
            raise AttributeError("FQ12 Modulus Coeffs haven't been specified")
        # Keep only (index, coefficient) pairs with a nonzero coefficient.
        self.mc_tuples = [(idx, coeff) for idx, coeff in enumerate(mod_coeffs) if coeff]
        super().__init__(coeffs, mod_coeffs)
def position_window(window, width=None, height=None, parent=None):
    """Size and place *window*: centered on screen when parentless, otherwise
    relative to *parent* (a wx.Window or an (x, y, w, h) tuple)."""
    cur_w, cur_h = window.GetSize()
    width = width or cur_w
    height = height or cur_h
    if parent is None:
        parent = window._parent
    if parent is None:
        # No parent at all: center the window on the screen.
        window.SetSize(
            ((SystemMetrics().screen_width - width) // 2),
            ((SystemMetrics().screen_height - height) // 2),
            width,
            height,
        )
        return
    if isinstance(parent, wx.Window):
        x, y = parent.ClientToScreen(0, 0)
        parent_w, parent_h = parent.GetSize()
    else:
        x, y, parent_w, parent_h = parent
    # Popups open adjacent (below) their parent.
    adjacent = getattr(window, '_kind', 'popup') == 'popup'
    width = min(max(parent_w, width), SystemMetrics().screen_width)
    height = min(height, SystemMetrics().screen_height)
    display = find_closest_display(x, y)
    if adjacent:
        y += parent_h
    x, y, final_w, final_h = get_position_for_display(x, y, width, height, display)
    window.SetSize(x, y, final_w, final_h)
class BoundLibraryFunction(BoundFunctionBase):
    # Lowers a call through a member access into jump/continuation IR blocks.
    def setup_impl(self, call_info: FunctionCallInfo):
        """Build the CFG for invoking this bound library function.

        Creates one return Argument per result and a continuation block; when
        the referenced declaration resolves, emits a Jump that passes the
        bound expression as the implicit first argument.
        """
        returns = [ir.Argument(call_info.ast_node) for _ in range(call_info.result_arity)]
        continuation = ir.Block(call_info.ast_node, returns, info='CONTINUATION')
        destination = ir.JumpDestination(self.member_access, self.member_access.referenced_declaration)
        implemented = (call_info.ast_node.resolve_reference(self.member_access.referenced_declaration) is not None)
        if implemented:
            # The bound expression is prepended to the explicit call arguments.
            arguments = ([self.bound_expression] + call_info.arguments)
            transfer = ir.Jump(call_info.ast_node, destination, continuation, arguments, call_info.ast_node.names)
            self.flattened_expression_values = returns
            self.cfg = ((self.bound_cfg >> transfer) >> continuation)
        else:
            # NOTE(review): unresolved declarations are silently skipped --
            # confirm whether this should raise or record a diagnostic.
            pass
class Use():
    """A USE-style module import: module name plus optional ONLY list and rename map.

    All names are normalized to lower case (apparently modeling a
    case-insensitive language such as Fortran -- confirm against callers).
    """

    def __init__(self, mod_name: str, only_list: set[str] = None, rename_map: dict[(str, str)] = None, line_number: int = 0):
        if (only_list is None):
            only_list = set()
        if (rename_map is None):
            rename_map = {}
        self.mod_name: str = mod_name.lower()
        self._line_no: int = line_number
        self.only_list: set[str] = only_list
        self.rename_map: dict[(str, str)] = rename_map
        if only_list:
            self.only_list = {only.lower() for only in only_list}
        if rename_map:
            self.rename_map = {k.lower(): v.lower() for (k, v) in rename_map.items()}

    @property
    def line_number(self):
        """Line number where the USE statement appears."""
        return self._line_no

    # BUG FIX: the original had a bare '_number.setter' expression (a NameError
    # when the class body executes) where the @property/@line_number.setter
    # decorators clearly belonged; restored the property pairing.
    @line_number.setter
    def line_number(self, line_number: int):
        self._line_no = line_number

    def rename(self, only_list: list[str] = None):
        """Return *only_list* (defaults to this USE's only-list) with renames applied."""
        if (only_list is None):
            only_list = []
        if (not only_list):
            only_list = self.only_list
        return [self.rename_map.get(only_name, only_name) for only_name in only_list]
class OptionSeriesPieSonificationContexttracksMappingPlaydelay(Options):
    """Option wrapper for series.pie.sonification.contextTracks.mapping.playDelay.

    NOTE(review): the original block defined every name twice, so each setter
    def silently shadowed its getter (the getters were unreachable). Restored
    the evidently intended @property/@setter pairing -- confirm against the
    code generator upstream.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the next line is an orphaned '.skipcomplex(...)' attribute call --
# presumably the tail of a stripped pytest marker decorator; as written it is a
# syntax error. Confirm against the upstream test module.
.skipcomplex(reason="Hypre doesn't support complex mode")
def test_homogeneous_field_matfree():
    """Solve a curl-curl problem (Hypre AMS, matrix-free) and check B = curl(A) == (0, 0, 1)."""
    mesh = UnitCubeMesh(5, 5, 5)
    V = FunctionSpace(mesh, 'N1curl', 1)
    V0 = VectorFunctionSpace(mesh, 'DG', 0)
    u = TrialFunction(V)
    v = TestFunction(V)
    a = (inner(curl(u), curl(v)) * dx)
    L = (inner(Constant((0.0, 0.0, 0.0)), v) * dx)
    (x, y, z) = SpatialCoordinate(mesh)
    B0 = 1
    # Vector potential of a uniform z-directed field: curl(A) = (0, 0, B0).
    constant_field = as_vector([(((- 0.5) * B0) * (y - 0.5)), ((0.5 * B0) * (x - 0.5)), 0])
    bc = DirichletBC(V, constant_field, (1, 2, 3, 4))
    params = {'snes_type': 'ksponly', 'mat_type': 'matfree', 'ksp_type': 'cg', 'ksp_max_it': '30', 'ksp_rtol': '2e-15', 'pc_type': 'python', 'pc_python_type': 'firedrake.AssembledPC', 'assembled_pc_type': 'python', 'assembled_pc_python_type': 'firedrake.HypreAMS', 'assembled_pc_hypre_ams_zero_beta_poisson': True}
    A = Function(V)
    solve((a == L), A, bc, solver_parameters=params)
    B = project(curl(A), V0)
    assert numpy.allclose(B.dat.data_ro, numpy.array((0.0, 0.0, 1.0)), atol=1e-06)
class PygmentsHighlighter(QtGui.QSyntaxHighlighter):
    """Qt syntax highlighter driven by a Pygments lexer.

    Lexer state is carried between text blocks via BlockUserData so that
    multi-line constructs highlight correctly.
    """

    def __init__(self, parent, lexer=None):
        super().__init__(parent)
        try:
            self._lexer = get_lexer_by_name(lexer)
        except Exception:
            # BUG FIX: was a bare 'except:' which would also swallow
            # KeyboardInterrupt/SystemExit. Any failure to resolve the named
            # lexer falls back to Python highlighting.
            self._lexer = PythonLexer()
        self._style = DefaultStyle
        self._brushes = {}   # color string -> QBrush cache
        self._formats = {}   # pygments token -> QTextCharFormat cache

    def highlightBlock(self, qstring):
        """Highlight one block of text (Qt callback)."""
        qstring = str(qstring)
        prev_data = self.previous_block_data()
        # Restore lexer state left over from the previous block, if any.
        if (prev_data is not None):
            self._lexer._saved_state_stack = prev_data.syntax_stack
        elif hasattr(self._lexer, '_saved_state_stack'):
            del self._lexer._saved_state_stack
        index = 0
        for (token, text) in self._lexer.get_tokens(qstring):
            l = len(text)
            format = self._get_format(token)
            if (format is not None):
                self.setFormat(index, l, format)
            index += l
        # Persist the lexer state for the next block.
        if hasattr(self._lexer, '_saved_state_stack'):
            data = BlockUserData(syntax_stack=self._lexer._saved_state_stack)
            self.currentBlock().setUserData(data)
            data = self.currentBlock().userData()  # kept from original (value unused)
            del self._lexer._saved_state_stack

    def previous_block_data(self):
        """Return the user data of the preceding text block (or None)."""
        return self.currentBlock().previous().userData()

    def _get_format(self, token):
        """Return (and cache) the QTextCharFormat for a pygments token, or None."""
        if (token in self._formats):
            return self._formats[token]
        result = None
        # Walk up the token hierarchy until the style defines the token.
        while (not self._style.styles_token(token)):
            token = token.parent
        for (key, value) in self._style.style_for_token(token).items():
            if value:
                if (result is None):
                    result = QtGui.QTextCharFormat()
                if (key == 'color'):
                    result.setForeground(self._get_brush(value))
                elif (key == 'bgcolor'):
                    result.setBackground(self._get_brush(value))
                elif (key == 'bold'):
                    result.setFontWeight(QtGui.QFont.Weight.Bold)
                elif (key == 'italic'):
                    result.setFontItalic(True)
                elif (key == 'underline'):
                    result.setUnderlineStyle(QtGui.QTextCharFormat.UnderlineStyle.SingleUnderline)
                elif (key == 'sans'):
                    result.setFontStyleHint(QtGui.QFont.SansSerif)
                elif (key == 'roman'):
                    result.setFontStyleHint(QtGui.QFont.StyleHint.Times)
                elif (key == 'mono'):
                    result.setFontStyleHint(QtGui.QFont.TypeWriter)
                elif (key == 'border'):
                    # Borders are not directly supported -- presumably
                    # approximated with a colored wave underline; confirm.
                    result.setUnderlineStyle(QtGui.QTextCharFormat.UnderlineStyle.WaveUnderline)
                    result.setUnderlineColor(self._get_color(value))
        self._formats[token] = result
        return result

    def _get_brush(self, color):
        """Return (and cache) a QBrush for an 'rrggbb' color string."""
        result = self._brushes.get(color)
        if (result is None):
            qcolor = self._get_color(color)
            result = QtGui.QBrush(qcolor)
            self._brushes[color] = result
        return result

    def _get_color(self, color):
        """Convert an 'rrggbb' hex string into a QColor."""
        qcolor = QtGui.QColor()
        qcolor.setRgb(int(color[:2], base=16), int(color[2:4], base=16), int(color[4:6], base=16))
        return qcolor
class SVRegTestnet(object):
    """Network configuration constants for a Bitcoin SV regtest setup."""
    # Well-known regtest funding key -- regtest only, never reuse elsewhere.
    REGTEST_FUNDS_PRIVATE_KEY: PrivateKey = PrivateKey(bytes.fromhex('a2d9803c912ab380c1491d3bd1aaab34ca06742d7885a224ec8d386182d26ed2'), coin=BitcoinRegtest)
    REGTEST_FUNDS_PRIVATE_KEY_WIF = REGTEST_FUNDS_PRIVATE_KEY.to_WIF()
    REGTEST_FUNDS_PUBLIC_KEY: PublicKey = REGTEST_FUNDS_PRIVATE_KEY.public_key
    # NOTE(review): annotated P2PKH_Address, but .to_string() yields a str -- confirm intended type.
    REGTEST_P2PKH_ADDRESS: P2PKH_Address = REGTEST_FUNDS_PUBLIC_KEY.to_address().to_string()
    REGTEST_DEFAULT_ACCOUNT_SEED = 'tprv8ZgxMBicQKsPd4wsdaJ11eH84eq4hHLX1K6Mx8EQQhJzq8jr25WH1m8hgGkCqnksJDCZPZbDoMbQ6QtroyCyn5ZckCmsLeiHDb1MAxhNUHN'
    MIN_CHECKPOINT_HEIGHT = 0
    # Address version bytes for base58 encoding.
    ADDRTYPE_P2PKH = 111
    ADDRTYPE_P2SH = 196
    CASHADDR_PREFIX = 'bchtest'
    DEFAULT_PORTS = {'t': '51001'}
    DEFAULT_SERVERS = read_json_dict('servers_regtest.json')
    # NOTE(review): only 34 hex chars -- looks truncated for a 32-byte block hash; verify.
    GENESIS = 'ea01ad0eebaaec3ced90fa3ff8d77f4943'
    NAME = 'regtest'
    BITCOIN_URI_PREFIX = 'bitcoin'
    PAY_URI_PREFIX = 'pay'
    WIF_PREFIX = 239
    BIP276_VERSION = 2
    COIN = BitcoinRegtest
    CHECKPOINT = CheckPoint(bytes.fromhex('ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4adae5494dffff001d1aa4ae18'), height=0, prev_work=0)
    VERIFICATION_BLOCK_MERKLE_ROOT = None
    # BIP44 coin type 1 == testnet coins.
    BIP44_COIN_TYPE = 1
    BLOCK_EXPLORERS: Dict[(str, Tuple[(str, Dict[(str, str)])])] = {}
    FAUCET_URL = ''
    KEEPKEY_DISPLAY_COIN_NAME = 'Testnet'
    TREZOR_COIN_NAME = 'Bcash Testnet'
    TWENTY_MINUTE_RULE = True
class OptionPlotoptionsScatter3dSonificationTracksPointgrouping(Options):
    """Option wrapper for plotOptions.scatter3d.sonification.tracks.pointGrouping.

    NOTE(review): the original block defined every name twice, so each setter
    def silently shadowed its getter. Restored the evidently intended
    @property/@setter pairing -- confirm against the generator upstream.
    """

    @property
    def algorithm(self):
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class VideoStreamExtraOptionsSchema(JsonSchema):
    """Schema of optional meeting-creation settings (field names match the
    BigBlueButton create API, presumably -- confirm against the consumer).

    NOTE(review): 'default=' is marshmallow's serialization default; if a
    load-time default is intended, marshmallow 3 uses 'load_default' -- verify
    the marshmallow version in use.
    """
    record = fields.Boolean(default=True)
    autoStartRecording = fields.Boolean(default=False)
    muteOnStart = fields.Boolean(default=True)
    welcome = fields.String(required=False, allow_none=True)
    maxParticipants = fields.Integer(required=False, allow_none=True)
    duration = fields.Integer(required=False, allow_none=True)
    moderatorOnlyMessage = fields.String(required=False, allow_none=True)
    logo = fields.URL(required=False, allow_none=True)
    bannerText = fields.String(required=False, allow_none=True)
    bannerColor = fields.String(required=False, allow_none=True)
    guestPolicy = fields.String(required=False, allow_none=True)
    allowModsToUnmuteUsers = fields.Boolean(default=True)
    endCurrentMeeting = fields.Boolean(default=False)
def test_sync_checkpoint_restore_default_path(tmpdir):
    """SyncCheckpoint reads the previous checkpoint's bytes and restores from that path."""
    base = Path(tmpdir)
    dest_dir = base / 'dest'
    dest_dir.mkdir()
    src_dir = base / 'src'
    src_dir.mkdir()
    payload = b'prev-bytes'
    (src_dir / 'prev').write_bytes(payload)
    checkpoint = SyncCheckpoint(checkpoint_dest=str(dest_dir), checkpoint_src=str(src_dir))
    assert checkpoint.read() == payload
    assert checkpoint._prev_download_path is not None
    assert checkpoint.restore() == checkpoint._prev_download_path
def bool_comparison_fixer(bmg: BMGraphBuilder, typer: LatticeTyper) -> NodeFixer:
    """Build a first-match node fixer that rewrites boolean comparison nodes."""
    fixer = BoolComparisonFixer(bmg, typer)
    # Order matters: the first matching guard wins.
    guards = [
        type_guard(bn.EqualNode, fixer._replace_bool_equals),
        type_guard(bn.GreaterThanEqualNode, fixer._replace_bool_gte),
        type_guard(bn.GreaterThanNode, fixer._replace_bool_gt),
        type_guard(bn.LessThanEqualNode, fixer._replace_bool_lte),
        type_guard(bn.LessThanNode, fixer._replace_bool_lt),
        type_guard(bn.NotEqualNode, fixer._replace_bool_not_equals),
    ]
    return node_fixer_first_match(guards)
def topological_sort(digraph, nodes_sort=node_list_sort):
    """Return the nodes of *digraph* in dependency (reverse post-order) order.

    *nodes_sort* controls the deterministic visiting order of node lists.
    Recursive DFS: very deep graphs may hit the interpreter recursion limit.
    """
    ordering = []
    # PERF FIX: the original tracked visited nodes in a list, making each
    # membership test O(n) (O(n^2) overall); a set gives O(1) lookups.
    # NOTE(review): assumes graph nodes are hashable -- confirm.
    seen = set()

    def visit(node):
        if node not in seen:
            seen.add(node)
            for successor in nodes_sort(node.outgoing):
                visit(successor)
            # Append after all successors: produces a topological ordering.
            ordering.append(node)

    for node in nodes_sort(digraph.nodes):
        visit(node)
    return ordering
def cmd_queue_remove(jobs: Jobs, reqid: RequestID) -> None:
    """Remove the request *reqid* from its worker's job queue (best-effort).

    Missing jobs return early; a paused queue, stale status, or a job that
    was never queued are only logged as warnings. A job still PENDING is
    reset to 'created' after removal.
    """
    reqid = RequestID.from_raw(reqid)
    logger.info('Removing job %s from the queue...', reqid)
    job = jobs.get(reqid)
    if (not job):
        logger.warning('request %s not found', reqid)
        return
    if jobs.queues[reqid.workerid].paused:
        # Warn but proceed: removal from a paused queue is still attempted.
        logger.warning('job queue is paused')
    status = job.get_status()
    if (not status):
        logger.warning('request %s not found', reqid)
    elif (status is not Result.STATUS.PENDING):
        logger.warning('request %s has been updated since queued', reqid)
    try:
        jobs.queues[reqid.workerid].remove(reqid)
    except JobNotQueuedError:
        logger.warning('%s was not queued', reqid)
    if (status is Result.STATUS.PENDING):
        # The job never ran; roll its status back to freshly created.
        job.set_status('created')
    logger.info('...done!')
class ListProduct():
    """Lazy cartesian product over a list whose entries may be iterables or scalars.

    Iteration yields the same internal value list on every step (it is mutated
    in place), so callers must copy it to keep a snapshot.
    """

    def __init__(self, iter_list: List[Any]):
        self.iter_list: List[Any] = iter_list

    def __iter__(self):
        return self.Iterator(self.iter_list, ([None] * len(self.iter_list)), 0)

    class Iterator():
        """Generator-backed iterator producing each combination of values."""

        def __init__(self, iter_list: List[Any], val_list: List[Any], idx: int):
            self.generator = self._generate_next(iter_list, val_list, idx)

        def __iter__(self):
            return self

        def _generate_next(self, iter_list: List[Any], val_list: List[Any], idx: int):
            # Empty product: yield the (empty) list exactly once.
            if not iter_list:
                yield iter_list
                return
            head = iter_list[0]
            # A scalar entry behaves like a one-element iterable.
            choices = head if type(head) in iterable_types else (head,)
            for choice in choices:
                val_list[idx] = choice
                if len(iter_list) == 1:
                    yield val_list
                else:
                    yield from self._generate_next(iter_list[1:], val_list, idx + 1)

        def __next__(self):
            return next(self.generator)
def fmap(func, obj, **kwargs):
    """Functorial map: apply *func* over *obj*, preserving its type.

    Tries, in order: the object's own __fmap__ protocol; pandas/jax/numpy
    vectorized application; async iterables via _coconut_amap; and finally
    rebuilding the object's type from mapping over its items.
    """
    starmap_over_mappings = kwargs.pop('starmap_over_mappings', False)
    fallback_to_init = kwargs.pop('fallback_to_init', False)
    if kwargs:
        raise _coconut.TypeError(('fmap() got unexpected keyword arguments ' + _coconut.repr(kwargs)))
    # 1) Object-defined __fmap__ wins, unless it declines via
    #    NotImplementedError or by returning NotImplemented.
    obj_fmap = _coconut.getattr(obj, '__fmap__', None)
    if (obj_fmap is not None):
        try:
            result = obj_fmap(func)
        except _coconut.NotImplementedError:
            pass
        else:
            if (result is not _coconut.NotImplemented):
                return result
    # 2) Array-ish objects get vectorized application.
    obj_module = _coconut_get_base_module(obj)
    if (obj_module in _coconut.pandas_numpy_modules):
        if (obj.ndim <= 1):
            return obj.apply(func)
        # Higher-dimensional pandas objects apply along the last axis.
        return obj.apply(func, axis=(obj.ndim - 1))
    if (obj_module in _coconut.jax_numpy_modules):
        import jax.numpy as jnp
        return jnp.vectorize(func)(obj)
    if (obj_module in _coconut.numpy_modules):
        return _coconut.numpy.vectorize(func)(obj)
    # 3) Async iterables map through _coconut_amap.
    obj_aiter = _coconut.getattr(obj, '__aiter__', None)
    if ((obj_aiter is not None) and (_coconut_amap is not None)):
        try:
            aiter = obj_aiter()
        except _coconut.NotImplementedError:
            pass
        else:
            if (aiter is not _coconut.NotImplemented):
                return _coconut_amap(func, aiter)
    # 4) Generic fallback: rebuild obj's type from the mapped items; mappings
    #    map over .items() (star-applied when starmap_over_mappings is set).
    if starmap_over_mappings:
        return _coconut_base_makedata(obj.__class__, (_coconut_starmap(func, obj.items()) if _coconut.isinstance(obj, _coconut.abc.Mapping) else _coconut_map(func, obj)), from_fmap=True, fallback_to_init=fallback_to_init)
    else:
        return _coconut_base_makedata(obj.__class__, _coconut_map(func, (obj.items() if _coconut.isinstance(obj, _coconut.abc.Mapping) else obj)), from_fmap=True, fallback_to_init=fallback_to_init)
def test_index_can_be_created_with_settings_and_mappings(write_client):
    """Creating an Index applies both document mappings and explicit settings."""
    blog_index = Index('test-blog', using=write_client)
    blog_index.document(Post)
    blog_index.settings(number_of_replicas=0, number_of_shards=1)
    blog_index.create()
    expected_mappings = {'test-blog': {'mappings': {'properties': {'title': {'type': 'text', 'analyzer': 'my_analyzer'}, 'published_from': {'type': 'date'}}}}}
    assert expected_mappings == write_client.indices.get_mapping(index='test-blog')
    index_settings = write_client.indices.get_settings(index='test-blog')['test-blog']['settings']['index']
    assert index_settings['number_of_replicas'] == '0'
    assert index_settings['number_of_shards'] == '1'
    assert index_settings['analysis'] == {'analyzer': {'my_analyzer': {'type': 'custom', 'tokenizer': 'keyword'}}}
('urllib3.poolmanager.PoolManager.urlopen')  # NOTE(review): bare no-op string -- presumably a stripped @mock.patch(...) decorator; confirm upstream
def test_generic_error(mock_urlopen, elasticapm_client):
    """A generic exception raised by urlopen is wrapped in a TransportException."""
    # BUG FIX: the original tuple literal was garbled (unbalanced quotes --
    # a syntax error); reconstructed from the variable names. NOTE(review):
    # confirm the URL value against the upstream test suite.
    (url, status, message, body) = ('http://localhost:9999', 418, "I'm a teapot", 'Nothing')
    elasticapm_client.server_version = (8, 0, 0)
    transport = Transport(url, client=elasticapm_client)
    transport.start_thread()
    mock_urlopen.side_effect = Exception('Oopsie')
    try:
        with pytest.raises(TransportException) as exc_info:
            transport.send('x')
        # The original exception's message is surfaced in the wrapper.
        assert ('Oopsie' in str(exc_info.value))
    finally:
        # Always stop the transport thread, even if the assertions fail.
        transport.close()
class DataStabilityTestPreset(TestPreset):
    """Preset bundling data-stability tests, optionally restricted to *columns*."""
    columns: Optional[List[str]]

    def __init__(self, columns: Optional[List[str]] = None):
        super().__init__()
        self.columns = columns

    def generate_tests(self, data_definition: DataDefinition, additional_data: Optional[Dict[(str, Any)]]):
        """Return the list of stability tests for the configured columns."""
        cols = self.columns
        return [
            TestNumberOfRows(),
            TestNumberOfColumns(),
            TestColumnsType(),
            TestAllColumnsShareOfMissingValues(columns=cols),
            TestNumColumnsOutOfRangeValues(columns=cols),
            TestCatColumnsOutOfListValues(columns=cols),
            TestNumColumnsMeanInNSigmas(columns=cols),
        ]
def test_dummy_heuristic_on_standard_jump_table_block():
    """SwitchVariableDetection rewrites the IndirectBranch to branch on the
    original switch variable (a2) instead of the computed jump-table address (a4).
    """
    # v0: bounds check on the switch variable; v1: jump-table address
    # computation (a4 = *(a2 << 2 + offsets)) followed by the indirect branch.
    v0 = BasicBlock(0, instructions=[Assignment(a1, a0), Assignment(a2, Variable('x')), Branch(gt([a2, Constant(7)]))])
    v1 = BasicBlock(1, instructions=[Assignment(a3, shl([a2, Constant(2)])), Assignment(a4, dereference([add([add([a3, Constant(JT_OFFSET)]), Constant(JT_OFFSET)])])), IndirectBranch(a4)])
    v2 = BasicBlock(2, instructions=[call_assignment(Call(function_symbol('func1'), []))])
    v3 = BasicBlock(3, instructions=[call_assignment(Call(function_symbol('func2'), []))])
    v4 = BasicBlock(4, instructions=[call_assignment(Call(function_symbol('func3'), []))])
    v5 = BasicBlock(5, instructions=[call_assignment(Call(function_symbol('error'), []))])
    v6 = BasicBlock(6, instructions=[Return([Constant(0)])])
    cfg = ControlFlowGraph()
    cfg.add_edge(UnconditionalEdge(v0, v1))
    cfg.add_edge(UnconditionalEdge(v0, v5))
    cfg.add_edge(SwitchCase(v1, v2, []))
    cfg.add_edge(SwitchCase(v1, v3, []))
    cfg.add_edge(SwitchCase(v1, v4, []))
    cfg.add_edge(SwitchCase(v1, v5, []))
    cfg.add_edge(UnconditionalEdge(v5, v6))
    task = MockTask(cfg)
    SwitchVariableDetection().run(task)
    # Only v1's IndirectBranch operand should change (a4 -> a2); all other
    # blocks must be left untouched.
    assert (v1.instructions == [Assignment(a3, shl([a2, Constant(2)])), Assignment(a4, dereference([add([add([a3, Constant(JT_OFFSET)]), Constant(JT_OFFSET)])])), IndirectBranch(a2)])
    assert (v2.instructions == [call_assignment(Call(function_symbol('func1'), []))])
    assert (v3.instructions == [call_assignment(Call(function_symbol('func2'), []))])
    assert (v4.instructions == [call_assignment(Call(function_symbol('func3'), []))])
    assert (v5.instructions == [call_assignment(Call(function_symbol('error'), []))])
    assert (v6.instructions == [Return([Constant(0)])])
def _fuse_split_and_strided_op(sorted_graph: List[Tensor]) -> List[Tensor]:
    """Fuse 'split' ops into their downstream strided consumers.

    When every output of a split feeds only supported strided ops (or
    concatenates along the same dim) with compatible alignment, the split is
    removed and each consumer reads directly from the split's input through a
    TensorAccessor offset.
    """
    sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
    for op in sorted_ops:
        op_type = op._attrs['op']
        if (op_type != 'split'):
            continue
        split_op = op
        split_dim = split_op._attrs['split_dim']
        split_input = split_op._attrs['inputs'][0]
        # Offsets are only computable when the split dim is static.
        if (not transform_strided_ops_utils.cat_split_dim_is_static(split_op, split_dim)):
            continue
        outputs = split_op._attrs['outputs']
        can_fuse_split = True
        stride = get_stride(split_input, split_dim)
        dim_offset = 0
        output_offsets = []
        total_elems_from_split_dim = (stride * split_input._attrs['shape'][split_dim].value())
        # Check every consumer of every split output for fusibility:
        # either a supported strided op (sole consumer) or a concatenate
        # along the same dim, each with a valid alignment at its offset.
        for output in outputs:
            can_fuse_split &= ((len(output.dst_ops()) > 0) and all((((_is_supported_op(next_op._attrs['op']) and _check_alignment(next_op, (dim_offset * stride), total_elems_from_split_dim) and (len(output.dst_ops()) == 1)) or ((next_op._attrs['op'] == 'concatenate') and (next_op._attrs['concat_dim'] == split_dim) and _check_alignment(next_op, (dim_offset * stride), total_elems_from_split_dim))) for next_op in output.dst_ops())))
            for next_op in output.dst_ops():
                for (idx, input) in enumerate(next_op._attrs['inputs']):
                    if (input == output):
                        # Gemm-style consumers additionally need a valid stride
                        # on the matching input accessor.
                        can_fuse_split = (can_fuse_split and transform_strided_ops_utils.gemm_stride_checker(next_op._attrs['input_accessors'][idx], split_dim))
            output_offsets.append(dim_offset)
            dim_offset += output._size(split_dim).value()
        if (not can_fuse_split):
            continue
        _LOGGER.debug('Remove split from graph')
        # Rewire: consumers read from split_input via offset accessors.
        split_input.dst_ops().remove(split_op)
        for (output, offset) in zip(outputs, output_offsets):
            for next_op in output.dst_ops():
                for (idx, input) in enumerate(next_op._attrs['inputs']):
                    if (input == output):
                        next_op._attrs['input_accessors'][idx].update_base_tensor(split_input, split_dim, offset)
                        next_op._attrs['inputs'][idx] = split_input
                        split_input.dst_ops().add(next_op)
        # Drop the now-unused split outputs from the graph.
        for output in outputs:
            transform_utils.remove_tensor_from_sorted_graph(output)
    return transform_utils.sanitize_sorted_graph(sorted_graph)
class OptionPlotoptionsVectorSonificationContexttracksPointgrouping(Options):
    """Option wrapper for plotOptions.vector.sonification.contextTracks.pointGrouping.

    NOTE(review): the original block defined every name twice, so each setter
    def silently shadowed its getter. Restored the evidently intended
    @property/@setter pairing -- confirm against the generator upstream.
    """

    @property
    def algorithm(self):
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
@attr.s(slots=True, repr=False)  # NOTE(review): decorator restored -- the original line was the bare argument tuple '(slots=True, repr=False)', apparently a stripped @attr.s; confirm upstream.
class XTxOutput(TxOutput):
    """Transaction output extended with script type and candidate public keys."""
    script_type: ScriptType = attr.ib(default=ScriptType.NONE)
    x_pubkeys: List[XPublicKey] = attr.ib(default=attr.Factory(list))

    def estimated_size(self) -> int:
        """Size in bytes of this output's serialized form."""
        return len(self.to_bytes())

    def __repr__(self):
        return f'XTxOutput(value={self.value}, script_pubkey="{self.script_pubkey}", script_type={self.script_type}, x_pubkeys={self.x_pubkeys})'
class TestHasCorrectScopes():
    """has_scope_subset should require every endpoint scope to be granted."""

    def test_missing_scopes(self):
        granted = [DATASET_CREATE_OR_UPDATE, USER_READ]
        required = SecurityScopes([PRIVACY_REQUEST_READ])
        assert not has_scope_subset(user_scopes=granted, endpoint_scopes=required)

    def test_has_correct_scopes(self):
        granted = [DATASET_CREATE_OR_UPDATE, USER_READ, PRIVACY_REQUEST_READ]
        required = SecurityScopes([PRIVACY_REQUEST_READ])
        assert has_scope_subset(user_scopes=granted, endpoint_scopes=required)
class TestESQLRules(BaseRuleTest):
    def run_esql_test(self, esql_query, expectation, message):
        """Load a production rule with *esql_query* swapped in and assert the
        expected validation outcome (*expectation* context, matching *message*).
        """
        rc = RuleCollection()
        file_path = Path(get_path('tests', 'data', 'command_control_dummy_production_rule.toml'))
        original_production_rule = load_rule_contents(file_path)
        # Deep-copy so the loaded rule contents are not mutated between cases.
        production_rule = deepcopy(original_production_rule)[0]
        production_rule['rule']['query'] = esql_query
        expectation.match_expr = message
        with expectation:
            rc.load_dict(production_rule)
def test_esql_queries(self): |
class OptionPlotoptionsBarSonificationTracksMappingLowpassFrequency(Options):
    """Option wrapper for plotOptions.bar.sonification.tracks.mapping.lowpass.frequency.

    NOTE(review): the original block defined every name twice, so each setter
    def silently shadowed its getter. Restored the evidently intended
    @property/@setter pairing -- confirm against the generator upstream.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesFunnel3dLabel(Options):
    """Label options for a funnel-3d series.

    NOTE(review): getter/setter pairs shared names with no decorators, so
    setters shadowed getters; restored @property / @<name>.setter pairs.
    ``style`` is a read-only sub-configuration accessor.
    """

    @property
    def boxesToAvoid(self):
        """Boxes that labels must avoid; no default."""
        return self._config_get(None)

    @boxesToAvoid.setter
    def boxesToAvoid(self, value: Any):
        self._config(value, js_type=False)

    @property
    def connectorAllowed(self):
        """Whether a connector line may be drawn; defaults to False."""
        return self._config_get(False)

    @connectorAllowed.setter
    def connectorAllowed(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def connectorNeighbourDistance(self):
        """Connector neighbour distance; defaults to 24."""
        return self._config_get(24)

    @connectorNeighbourDistance.setter
    def connectorNeighbourDistance(self, num: float):
        self._config(num, js_type=False)

    @property
    def enabled(self):
        """Whether labels are enabled; defaults to True."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def format(self):
        """Label format string; defaults to 'undefined'."""
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        """Label formatter callback; defaults to 'undefined'."""
        return self._config_get('undefined')

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def maxFontSize(self):
        """Maximum font size; no default."""
        return self._config_get(None)

    @maxFontSize.setter
    def maxFontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def minFontSize(self):
        """Minimum font size; no default."""
        return self._config_get(None)

    @minFontSize.setter
    def minFontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def onArea(self):
        """Whether labels render on the area; no default."""
        return self._config_get(None)

    @onArea.setter
    def onArea(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def style(self) -> 'OptionSeriesFunnel3dLabelStyle':
        """CSS style sub-options for the label."""
        return self._config_sub_data('style', OptionSeriesFunnel3dLabelStyle)

    @property
    def useHTML(self):
        """Whether to render the label as HTML; defaults to False."""
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
def extractLyran1Mtranslation(item):
    """Map a feed item's tags to a release message for known series.

    Returns the built release message, or False when the item has no
    chapter/volume info, is a preview, or carries no recognized tag.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Drop items without chapter/volume numbering and preview posts.
    if (not chp and not vol) or 'preview' in item['title'].lower():
        return False
    tagmap = [
        ('Villainess Brother Reincarnation', 'Villainess Brother Reincarnation', 'translated'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class ConnectionBinarySensor(EntityBase, BinarySensorEntity):
    """Binary sensor reflecting an appliance's cloud connection state.

    NOTE(review): Home Assistant entities expose ``available`` / ``is_on``
    as properties; the decorators were missing here (bare methods would be
    truthy callables), so ``@property`` has been restored.
    """

    @property
    def available(self) -> bool:
        # The connectivity sensor itself is always available.
        return True

    @property
    def is_on(self) -> bool:
        """True while the appliance reports as connected."""
        return self._appliance.connected

    async def async_on_update(self, appliance: Appliance, key: str, value) -> None:
        """Push the latest state to Home Assistant on any appliance update."""
        self.async_write_ha_state()
def _allow_deprecated_init(func):
(func)
def wrapper(cls, *args, **kwargs):
if (args and args[0] and (not isinstance(args[0][0], Well))):
warnings.warn("Initializing directly from file name is deprecated and will be removed in xtgeo version 4.0. Use: mywells = xtgeo.wells_from_files(['some_name.w']) instead", DeprecationWarning)
return func(xtgeo.wells_from_files(*args, **kwargs))
return func(cls, *args, **kwargs)
return wrapper |
def set_trade_progress(save_stats: dict[str, Any]) -> dict[str, Any]:
    """Let the user edit their rare-ticket count, convert it to trade
    progress, and stage a cat in storage for the in-game ticket trade.

    Returns the (possibly updated) save_stats mapping; aborts without
    saving when cat storage has no free slot.
    """
    trade_progress = save_stats['trade_progress']
    # Cap editable tickets so the total can never exceed 299.
    max_value = helper.clamp(299 - save_stats['rare_tickets']['Value'], 0, 299)
    storage = save_stats['cat_storage']
    tickets = item.IntItem(
        name='Rare Tickets',
        max_value=max_value,
        value=item.Int(save_stats['rare_tickets']['Value']),
    )
    tickets.edit()
    # One ticket corresponds to 5 units of trade progress.
    trade_progress['Value'] = tickets.get_value() * 5
    storage, has_space = set_trade_progress_val(storage)
    if not has_space:
        helper.colored_text('Your cat storage is full, please free 1 space!')
        return save_stats
    save_stats['cat_storage'] = storage
    save_stats['trade_progress'] = trade_progress
    helper.colored_text('You now need to go into your storage and press &"Use all"& and then press &"Trade for Ticket"&')
    return save_stats
def extractLilpaintedlifeblBlogspotCom(item):
    """Map a feed item's tags to a release message for known series.

    Returns None for items without chapter/volume info or previews, the
    built release message on a tag match, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not chp and not vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('rebirth: recapture the entertainment industry', 'rebirth: recapture the entertainment industry', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class JsHtmlLink(JsHtml):
    """JavaScript accessors for an HTML anchor (<a>) element."""

    def content(self):
        """Expose the link's innerText through a content formatter."""
        return ContentFormatters(self.page, "%s.innerText" % self.varName)

    def url(self, url: str):
        """Set the link's href attribute (alias of href())."""
        js_value = JsUtils.jsConvertData(url, None)
        return JsFncs.JsFunctions("%s.href = %s" % (self.varName, js_value))

    def href(self, url: str):
        """Set the link's href attribute."""
        js_value = JsUtils.jsConvertData(url, None)
        return JsFncs.JsFunctions("%s.href = %s" % (self.varName, js_value))

    def target(self, name: str):
        """Set the link's target attribute (e.g. '_blank')."""
        js_value = JsUtils.jsConvertData(name, None)
        return JsFncs.JsFunctions("%s.target = %s" % (self.varName, js_value))
def reinterpret_windows1252_as_utf8(wrong_text):
    """Undo a mojibake round-trip: re-encode wrongly decoded text back to
    bytes, then decode those bytes as UTF-8 (with replacement).

    Characters in WINDOWS_1252_GREMLINS are re-encoded via WINDOWS_1252;
    all other characters go through latin-1 with replacement.
    """
    raw_chunks = [
        py2_encode(ch, 'WINDOWS_1252')
        if ord(ch) in WINDOWS_1252_GREMLINS
        else py2_encode(ch, 'latin-1', 'replace')
        for ch in wrong_text
    ]
    return py2_decode(''.join(raw_chunks), 'utf-8', 'replace')
@_PREPARE_FOR_EXPORT_REGISTRY.register()
def default_rcnn_prepare_for_export(self, cfg, inputs, predictor_type):
    """Build a PredictorExportConfig for exporting a GeneralizedRCNN model.

    Caffe2/ONNX-style predictor types get wrapped in a caffe2-compatible
    meta-architecture; other types use the D2RCNNInferenceWrapper path.

    NOTE(review): the registry call above this function was a bare statement
    (`_PREPARE_FOR_EXPORT_REGISTRY.register()`), i.e. a decorator whose
    leading '@' had been stripped — restored so the function is registered.
    """
    pytorch_model = self
    if ('_ops' in predictor_type) or ('caffe2' in predictor_type) or ('onnx' in predictor_type):
        from detectron2.export.caffe2_modeling import META_ARCH_CAFFE2_EXPORT_TYPE_MAP
        # Wrap in the caffe2-compatible meta-arch matching the config.
        C2MetaArch = META_ARCH_CAFFE2_EXPORT_TYPE_MAP[cfg.MODEL.META_ARCHITECTURE]
        c2_compatible_model = C2MetaArch(cfg, pytorch_model)
        preprocess_info = FuncInfo.gen_func_info(D2Caffe2MetaArchPreprocessFunc, params=D2Caffe2MetaArchPreprocessFunc.get_params(cfg, c2_compatible_model))
        postprocess_info = FuncInfo.gen_func_info(D2Caffe2MetaArchPostprocessFunc, params=D2Caffe2MetaArchPostprocessFunc.get_params(cfg, c2_compatible_model))
        preprocess_func = preprocess_info.instantiate()
        model_export_kwargs = {}
        if 'torchscript' in predictor_type:
            model_export_kwargs['force_disable_tracing_adapter'] = True
        return PredictorExportConfig(
            model=c2_compatible_model,
            data_generator=(lambda x: (preprocess_func(x),)),
            # Strip the '_ops' suffix to get the base export method name.
            model_export_method=predictor_type.replace('_ops', '', 1),
            model_export_kwargs=model_export_kwargs,
            preprocess_info=preprocess_info,
            postprocess_info=postprocess_info,
        )
    else:
        do_postprocess = cfg.RCNN_EXPORT.INCLUDE_POSTPROCESS
        preprocess_info = FuncInfo.gen_func_info(D2RCNNInferenceWrapper.Preprocess, params={})
        preprocess_func = preprocess_info.instantiate()
        return PredictorExportConfig(
            model=D2RCNNInferenceWrapper(pytorch_model, do_postprocess=do_postprocess),
            data_generator=(lambda x: (preprocess_func(x),)),
            model_export_method=predictor_type,
            preprocess_info=preprocess_info,
            postprocess_info=FuncInfo.gen_func_info(
                D2RCNNInferenceWrapper.Postprocess,
                params={'detector_postprocess_done_in_model': do_postprocess},
            ),
        )
def decode(byte_string):
    """Decode a base-128 varint byte sequence into a list of integers.

    Seven value bits per byte; a set continuation bit (EIGHT_BIT_MASK)
    means more bytes follow. Raises ValueError when the sequence ends
    with its continuation bit still set.
    """
    values = []
    acc = 0
    last_index = len(byte_string) - 1
    for idx, byte in enumerate(byte_string):
        acc = (acc << 7) + (byte & SEVEN_BIT_MASK)
        if not (byte & EIGHT_BIT_MASK):
            # Terminal byte: the accumulated number is complete.
            values.append(acc)
            acc = 0
        elif idx == last_index:
            raise ValueError('incomplete sequence')
    return values
def gen_function_call(func_attrs, indent=' ') -> str:
    """Render the FUNC_CALL_TEMPLATE for a single-axis reduction op.

    Raises NotImplementedError for multi-axis reductions.
    """
    input_tensor = func_attrs['inputs'][0]
    output_tensor = func_attrs['outputs'][0]
    reduction_axes = func_attrs['reduction_axes']
    if len(reduction_axes) != 1:
        raise NotImplementedError('Multiple reduction axes are not supported yet')
    input_shape = input_tensor._attrs['shape']
    input_dims = ', '.join(dim._attrs['name'] for dim in input_shape)
    # Output dims are taken from the accessor's original (pre-fusion) shapes.
    output_shape = func_attrs['output_accessors'][0].original_shapes
    output_dim_refs = ', '.join('&' + dim._attrs['name'] for dim in output_shape)
    return FUNC_CALL_TEMPLATE.render(
        indent=indent,
        func_name=func_attrs['name'],
        output_name=output_tensor._attrs['name'],
        input_name=input_tensor._attrs['name'],
        input_dims=input_dims,
        output_dim_refs=output_dim_refs,
        reduction_axis=reduction_axes[0],
        rank=str(len(input_shape)),
        keep_dim='true' if func_attrs['keepdim'] else 'false',
    )
def test_serializable_deserialization(type_1_a, type_1_b, type_2):
    """Round-trip each fixture through its sedes and check equality."""
    serial_1_a = RLPType1.serialize(type_1_a)
    serial_1_b = RLPType1.serialize(type_1_b)
    serial_2 = RLPType2.serialize(type_2)
    # Deserializing the serialized form must reproduce the original object.
    assert RLPType1.deserialize(serial_1_a) == type_1_a
    assert RLPType1.deserialize(serial_1_b) == type_1_b
    assert RLPType2.deserialize(serial_2) == type_2
class ConfigDialog(wx.Dialog):
    """Settings dialog: a label column (textPanel) beside a control column
    (contPanel), built row by row via addText/addCheckBox/addSpacer."""

    def __init__(self, config, *args, **kwargs):
        # Widget registries and layout constants.
        self.config = None
        self.button = {}
        self.checkbox = {}
        self.combobox = {}
        self.textWidth = 150
        self.contWidth = 350
        self.height = 500
        self.yPos = 10  # running vertical cursor for row placement
        self.H = 30  # base row height in pixels
        self.textPanel = None
        self.contPanel = None
        self.sizer = None
        self.config = config
        wx.Dialog.__init__(self, *args, **kwargs)
        self.SetTitle(lt('_configure'))
        self.sizer = wx.BoxSizer(wx.HORIZONTAL)
        self.textPanel = wx.Panel(self, size=(self.textWidth, self.height))
        self.textPanel.SetBackgroundColour('#E5E5E5')
        self.contPanel = wx.Panel(self, size=(self.contWidth, self.height))
        self.bottomPanel = wx.Panel(self.contPanel, size=(self.contWidth, self.H), pos=((0.5 * self.H), (self.height - (2 * self.H))))
        self.sizer.Add(self.textPanel)
        self.sizer.Add(self.contPanel)
        self.addText(lt('Geekey Enabled'))
        self.addCheckBox('geekeyenabled', lt('Enable GeeKey HotKey system'))
        self.addSpacer()
        self.addText(lt('GeeKey Mode'))
        self.combobox['geekeymode'] = wx.ComboBox(self.contPanel, pos=self.contPos(), size=self.contSize(), choices=[lt('block'), lt('longblock')])
        self.combobox['geekeymode'].SetValue((lt('block') if (self.config['geekeymode'] in ('block', 'Block', '')) else lt('longblock')))
        self.addText('')
        wx.StaticText(self.contPanel, pos=self.contPos(), size=((self.contWidth - (3 * self.H)), (self.H * 1.5)), label=lt("'block' mode blocks original function. While 'longblock' mode blocks only when pressed longer or in combo keys."))
        self.addSpacer()
        self.addText(lt('_language'))
        self.combobox['language'] = wx.ComboBox(self.contPanel, pos=self.contPos(), size=self.contSize(), choices=['', 'English'])
        self.combobox['language'].SetValue(('English' if (self.config['language'] in ('en', 'en_US')) else ''))
        self.addSpacer()
        self.addText(lt('_general'))
        self.addCheckBox('startup', lt('_startup'))
        self.addText(lt(''))
        self.addCheckBox('startvim', lt('_startvim'))
        self.addText(lt(''))
        self.addCheckBox('doubleclickfix', lt('_doubleclickfix'))
        self.addText(lt(''))
        self.addCheckBox('printkeyevent', lt('_print_key_event'))
        self.addSpacer()
        self.button['ok'] = wx.Button(self.bottomPanel, label=lt('OK'), pos=(self.H, 0), size=(80, self.H))
        self.button['cancel'] = wx.Button(self.bottomPanel, label=lt('Cancel'), pos=(((2 * self.H) + 80), 0), size=(80, self.H))
        self.button['ok'].Bind(wx.EVT_BUTTON, self.OnOk)
        self.button['cancel'].Bind(wx.EVT_BUTTON, self.OnCancel)
        self.SetSizerAndFit(self.sizer)
        self.Bind(wx.EVT_CHAR_HOOK, self.OnKeyUP)

    def contSize(self, y=None):
        """Default (width, height) for a control row; *y* overrides height.

        NOTE(review): addConText() calls contSize(y=...), which previously
        raised TypeError because contSize accepted no arguments — added the
        optional *y* parameter (backward compatible for no-arg callers).
        """
        return ((self.contWidth - (3 * self.H)), (self.H if y is None else y))

    def contPos(self):
        """Position for the next control row in the content panel."""
        return ((1.5 * self.H), self.yPos)

    def addConText(self, cont, height):
        """Add a multi-row static text to contPanel; *height* is in row units."""
        text = wx.StaticText(self.contPanel, label=cont, pos=self.contPos(), size=self.contSize(y=(height * self.H)))
        return text

    def addText(self, label):
        """Advance one row and add a right-aligned label in the label column."""
        self.yPos += self.H
        text = wx.StaticText(self.textPanel, label=label, pos=(0, self.yPos), size=((self.textWidth - self.H), self.H), style=wx.ALIGN_RIGHT)
        return text

    def addSpacer(self, h=None):
        """Advance the row cursor by *h* pixels (default: one row height)."""
        self.yPos += (self.H if (not h) else h)

    def addCheckBox(self, key, label):
        """Add a checkbox initialized from config[key]."""
        self.checkbox[key] = wx.CheckBox(self.contPanel, label=label, size=self.contSize(), pos=self.contPos())
        self.checkbox[key].SetValue(str2bool(self.config[key]))

    def OnCancel(self, evt):
        self.EndModal(wx.ID_CANCEL)

    def OnOk(self, evt):
        """Persist widget state back into the config mapping and close."""
        for (key, value) in self.checkbox.items():
            self.config[key] = str(value.GetValue())
        self.config['language'] = ('en' if (self.combobox['language'].GetValue() == 'English') else 'zh')
        self.config['geekeymode'] = ('block' if (self.combobox['geekeymode'].GetValue() in ('block', '')) else 'longblock')
        self.EndModal(wx.ID_OK)

    def GetConfig(self):
        """Return the (possibly updated) config mapping."""
        return self.config

    def OnKeyUP(self, evt):
        """Map Escape to Cancel and Return to OK."""
        if (evt.GetKeyCode() == wx.WXK_ESCAPE):
            self.OnCancel(evt)
        if (evt.GetKeyCode() == wx.WXK_RETURN):
            self.OnOk(evt)
        evt.Skip()
def extractMashintransWordpressCom(item):
    """Map a feed item's tags to a release message for known series.

    Returns None for items without chapter/volume info or previews, the
    built release message on a tag match, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not chp and not vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('Shinka no Mi', 'Shinka no Mi', 'translated'),
        ('about my login bonus after i was transferred to another world being obviously too strong', 'about my login bonus after i was transferred to another world being obviously too strong', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_profile_string_escaping():
    """Escape sequences in a C2 profile's 'append' statements must decode to
    the expected byte strings.

    NOTE(review): the final assert below is syntactically corrupted in this
    copy — a string/URL appears to have been stripped mid-expression
    (`profile.properties[' == [...`); recover the original properties-key
    comparison from upstream before running.
    """
    source = '\n {\n server {\n output {\n append " append "double escaped \\\\u1234 unicode";\n append "double escaped \\\\xff hex";\n append "\\u0050wned \\xff\\x00\\xbb";\n append "H\\x65x";\n append "escaped chars \\r\\n\\t\\\\ \\"foo\\" \\\'bar\\\' \'test\'";\n append "mw-redirectedfrom>(Redirected from <a href=/w/";\n print;\n }\n }\n }\n '
    profile = c2profile.C2Profile.from_text(source)
    assert (profile.properties[' == [('append', b' ('append', b'double escaped \\u1234 unicode'), ('append', b'double escaped \\xff hex'), ('append', b'Pwned \xff\x00\xbb'), ('append', b'Hex'), ('append', b'escaped chars \r\n\t\\ "foo" \'bar\' \'test\''), ('append', b'mw-redirectedfrom>\xc2\xa0\xc2\xa0(Redirected from <a href=/w/'), 'print'])
def generate_graph(data, path):
    """Render a bar chart of per-group values to *path* and return the path.

    data: mapping of group key -> value series; keys become human-readable
    labels via label_for_group.
    """
    # Fixed typo in the user-visible title ("successfull" -> "successful").
    title = 'How long before devs submit a successful package after a failure?'
    chart = pygal.Bar(title=title, print_values=True, legend_at_bottom=True)
    for key, value in data.items():
        label = label_for_group(key)
        chart.add(label, value)
    chart.render_to_file(path)
    return path
def deletePiece(tpc, fpc):
    """Capture *tpc* (the piece on the target square) by *fpc*.

    On a genuine capture (different colors, target not empty) the captured
    piece moves into the opposing side's removed list, piece counts are
    updated, king capture is recorded, and the board square is cleared.
    """
    if fpc.color == tpc.color or tpc.color == 'none':
        return
    globVar.removed = True
    globVar.removed_label = tpc.label
    globVar.removed_color = tpc.color
    index = findIndex(tpc.label, tpc.color)
    if fpc.color == 'W':
        # White captures a black piece.
        globVar.r_b_pieces.append(tpc)
        globVar.b_pieces.pop(index)
        globVar.b_NumPieces -= 1
        globVar.r_b_NumPieces += 1
        if tpc.type == 'king':
            globVar.no_b_king = True
    else:
        # Black captures a white piece.
        globVar.r_w_pieces.append(tpc)
        globVar.w_pieces.pop(index)
        globVar.w_NumPieces -= 1
        globVar.r_w_NumPieces += 1
        if tpc.type == 'king':
            globVar.no_w_king = True
    remove_from_board(tpc.label, tpc.color)
def build_workflow_rule_index(workflow_dict: Dict[int, Workflow], workflow_trigger_dict: Dict[int, WorkflowEventTriggerMeta]) -> Dict[EventKeyTuple, Set[int]]:
    """Invert workflow event triggers into an index mapping each expected
    event key to the set of workflow ids whose rules expect it.

    Triggers referencing workflows absent from *workflow_dict* are skipped;
    a trigger whose pickled rule fails to load is logged and ignored.
    """
    workflow_rule_index: Dict[EventKeyTuple, Set[int]] = {}
    for workflow_trigger_meta in workflow_trigger_dict.values():
        if workflow_trigger_meta.workflow_id not in workflow_dict:
            continue
        try:
            workflow: Workflow = workflow_dict.get(workflow_trigger_meta.workflow_id)
            # Rules are stored pickled; NOTE: unpickling untrusted data is unsafe.
            rule: WorkflowRule = cloudpickle.loads(workflow_trigger_meta.rule)
            expect_keys = expect_keys_to_tuple_set(workflow.namespace, rule.condition.expect_event_keys)
            for key in expect_keys:
                # setdefault replaces the manual "if key not in index" dance.
                workflow_rule_index.setdefault(key, set()).add(workflow_trigger_meta.workflow_id)
        except Exception as e:
            logging.exception('Failed to load workflow trigger: %s, %s', workflow_trigger_meta.id, e)
    return workflow_rule_index
def action_stubber():
    """Fixture-style generator: yield a factory that installs action stubs,
    then unwind every installed stub in reverse order on teardown."""
    stubbies = []

    def _do_stub(service, action, body=None, errors=None, side_effect=None):
        # Install the stub immediately and hand back its entered context.
        stubby = stub_action(service, action, body, errors, side_effect)
        stubbies.append(stubby)
        return stubby.__enter__()

    yield _do_stub
    # Tear down LIFO. __exit__ requires the three exc-info arguments; the
    # original called it with none, which raises TypeError at teardown.
    for stub in reversed(stubbies):
        stub.__exit__(None, None, None)
class Job():
    """A running plotting job discovered from the process table.

    NOTE(review): get_running_jobs takes `cls` and calls `cls(...)` but had
    no decorator — the `@classmethod` was evidently stripped; restored.
    """
    # Job state: the plotter driving the job, its log file, and the process.
    plotter: 'plotman.plotters.Plotter'
    logfile: typing.Optional[str] = None
    job_id: int = 0
    proc: psutil.Process

    @classmethod
    def get_running_jobs(cls, logroot: str, cached_jobs: typing.Sequence['Job']=()) -> typing.List['Job']:
        """Scan the process table for plotter processes and return Jobs,
        reusing entries from *cached_jobs* when the pid matches."""
        jobs: typing.List[Job] = []
        cached_jobs_by_pid = {j.proc.pid: j for j in cached_jobs}
        with contextlib.ExitStack() as exit_stack:
            processes = []
            pids = set()
            ppids = set()
            for process in psutil.process_iter():
                with contextlib.suppress(psutil.NoSuchProcess, psutil.AccessDenied):
                    exit_stack.enter_context(process.oneshot())
                    import plotman.plotters
                    if plotman.plotters.is_plotting_command_line(process.cmdline()):
                        ppids.add(process.ppid())
                        pids.add(process.pid)
                        processes.append(process)
            # Parent shells of plotters also match; keep only leaf processes.
            wanted_pids = (pids - ppids)
            wanted_processes = [process for process in processes if (process.pid in wanted_pids)]
            for proc in wanted_processes:
                with contextlib.suppress(psutil.NoSuchProcess, psutil.AccessDenied):
                    if (proc.pid in cached_jobs_by_pid.keys()):
                        jobs.append(cached_jobs_by_pid[proc.pid])
                    else:
                        with proc.oneshot():
                            command_line = list(proc.cmdline())
                            if (len(command_line) == 0):
                                # An empty command line: process is going away.
                                continue
                            import plotman.plotters
                            plotter_type = plotman.plotters.get_plotter_from_command_line(command_line=command_line)
                            plotter = plotter_type()
                            plotter.parse_command_line(command_line=command_line, cwd=proc.cwd())
                            if (plotter.parsed_command_line is None):
                                continue
                            if (plotter.parsed_command_line.error is not None):
                                continue
                            if plotter.parsed_command_line.help:
                                continue
                            job = cls(proc=proc, plotter=plotter, logroot=logroot)
                            if (job.logfile is not None):
                                # Seed the plotter state from the existing log.
                                with open(job.logfile, 'rb') as f:
                                    r = f.read()
                                job.plotter.update(chunk=r)
                            jobs.append(job)
        return jobs

    def __init__(self, proc: psutil.Process, plotter: 'plotman.plotters.Plotter', logroot: str) -> None:
        self.proc = proc
        self.plotter = plotter
        # Locate the job's log file among the process's open files under logroot.
        for f in self.proc.open_files():
            if (logroot in f.path):
                if self.logfile:
                    assert (self.logfile == f.path)
                else:
                    self.logfile = f.path
                break

    def progress(self) -> Phase:
        """Return the plotter's current phase."""
        return self.plotter.common_info().phase

    def plot_id_prefix(self) -> str:
        """Return the first 8 chars of the plot id, or '' if unknown yet."""
        plot_id = self.plotter.common_info().plot_id
        if (plot_id is None):
            return ''
        return plot_id[:8]

    def status_str_long(self) -> str:
        """Multi-line human-readable status summary."""
        info = self.plotter.common_info()
        return '{plot_id}\npid:{pid}\ntmp:{tmp}\ndst:{dst}\nlogfile:{logfile}'.format(plot_id=info.plot_id, pid=self.proc.pid, tmp=info.tmpdir, dst=info.dstdir, logfile=self.logfile)

    def print_logs(self, follow: bool=False) -> None:
        """Print the job's log file; with follow=True, tail it forever."""
        if (self.logfile is None):
            print('no log file available for this plotting process', file=sys.stderr)
            return
        with open(self.logfile, 'r') as f:
            if follow:
                line = ''
                while True:
                    tmp = f.readline()
                    if (tmp is not None):
                        line += tmp
                        if line.endswith('\n'):
                            print(line.rstrip('\n'))
                            line = ''
                    else:
                        time.sleep(0.1)
            else:
                print(f.read())

    def to_dict(self) -> typing.Dict[(str, object)]:
        """Snapshot of the job for reporting/serialization."""
        info = self.plotter.common_info()
        return dict(plot_id=self.plot_id_prefix(), tmp_dir=info.tmpdir, dst_dir=info.dstdir, progress=str(self.progress()), tmp_usage=self.get_tmp_usage(), pid=self.proc.pid, run_status=self.get_run_status(), mem_usage=self.get_mem_usage(), time_wall=self.get_time_wall(), time_user=self.get_time_user(), time_sys=self.get_time_sys(), time_iowait=self.get_time_iowait())

    def get_mem_usage(self) -> int:
        """Virtual memory size of the plotting process, in bytes."""
        return self.proc.memory_info().vms

    def get_tmp_usage(self) -> int:
        """Total size of this job's temp files in its tmp directory."""
        total_bytes = 0
        info = self.plotter.common_info()
        with contextlib.suppress(FileNotFoundError):
            with os.scandir(info.tmpdir) as it:
                for entry in it:
                    if ((info.plot_id is not None) and (info.plot_id in entry.name)):
                        # Files can vanish between listing and stat.
                        with contextlib.suppress(FileNotFoundError):
                            total_bytes += entry.stat().st_size
        return total_bytes

    def get_run_status(self) -> str:
        """Short status code: RUN/SLP/DSK/STP, or the raw psutil status."""
        status = self.proc.status()
        if (status == psutil.STATUS_RUNNING):
            return 'RUN'
        elif (status == psutil.STATUS_SLEEPING):
            return 'SLP'
        elif (status == psutil.STATUS_DISK_SLEEP):
            return 'DSK'
        elif (status == psutil.STATUS_STOPPED):
            return 'STP'
        else:
            return self.proc.status()

    def get_time_wall(self) -> int:
        """Seconds of wall-clock time since the process started."""
        create_time = datetime.fromtimestamp(self.proc.create_time())
        return int((datetime.now() - create_time).total_seconds())

    def get_time_user(self) -> int:
        return int(self.proc.cpu_times().user)

    def get_time_sys(self) -> int:
        return int(self.proc.cpu_times().system)

    def get_time_iowait(self) -> typing.Optional[int]:
        """I/O wait seconds, or None on platforms without the field."""
        cpu_times = self.proc.cpu_times()
        iowait = getattr(cpu_times, 'iowait', None)
        if (iowait is None):
            return None
        return int(iowait)

    def suspend(self, reason: str='') -> None:
        self.proc.suspend()
        self.status_note = reason

    def resume(self) -> None:
        self.proc.resume()

    def get_temp_files(self) -> typing.Set[str]:
        """Glob this job's plot-*.tmp files across tmp/tmp2/dst directories."""
        temp_files = set([])
        info = self.plotter.common_info()
        for dir in [info.tmpdir, info.tmp2dir, info.dstdir]:
            if (dir is not None):
                temp_files.update(glob.glob(os.path.join(dir, f'plot-*-{info.plot_id}*.tmp')))
        return temp_files

    def cancel(self) -> None:
        # Resume first so a suspended process can actually handle the signal.
        self.proc.resume()
        self.proc.terminate()
class MacOSDNSForcePublicDNSServersDisrupter(Disrupter):
    """Disrupter that points the primary macOS network service at public
    DNS servers, restoring the original servers on teardown."""

    def __init__(self, device, parameters):
        super().__init__(device, parameters)
        self._restrict_parameters(must_disrupt=True, must_restore=False, must_wait=False)
        self.primary_service = self._find_primary_service()
        # Remember pre-disruption servers (DHCP-provided ones excluded).
        self.original_dns_servers = self.primary_service.dns_servers(include_dhcp_servers=False)

    def _find_primary_service(self):
        """Return the highest-priority active network service."""
        services = self._device['network_tool'].network_services_in_priority_order()
        active_services = [service for service in services if service.active()]
        primary = active_services[0]
        L.info('Primary network service is {}'.format(primary.name()))
        return primary

    def _restore_dns_servers(self):
        """Put back the saved servers; 'Empty' clears manual DNS on macOS."""
        restored = self.original_dns_servers or ['Empty']
        self.primary_service.set_dns_servers(restored)

    def disrupt(self):
        L.describe('Set the primary service DNS servers to public DNS servers')
        self.primary_service.set_dns_servers(['37.235.1.174', '37.235.1.177'])

    def teardown(self):
        self._restore_dns_servers()
        super().teardown()
class IOTests(unittest.TestCase):
    """Reading tests for tabio across its supported tabular file formats.

    All fixtures live under 'formats/' (and 'picard/') relative to the
    test working directory.
    """

    def test_empty(self):
        """An empty file parses to zero regions under every format."""
        for fmt in ('auto', 'bed', 'interval', 'tab', 'text'):
            regions = tabio.read('formats/empty', fmt=fmt)
            self.assertEqual(len(regions), 0)

    def test_read_auto(self):
        """Format auto-detection works from both a path and an open handle."""
        for (fname, nrows) in (('formats/empty', 0), ('formats/agilent.bed', 11), ('formats/amplicon.bed', 1433), ('formats/amplicon.text', 1433), ('formats/nv2_baits.interval_list', 6809), ('formats/refflat-mini.txt', 100), ('formats/example.gff', 6)):
            self.assertEqual(len(tabio.read_auto(fname)), nrows)
            with open(fname) as handle:
                self.assertEqual(len(tabio.read_auto(handle)), nrows)

    def test_read_bed(self):
        """BED files yield one region per line; sample id from file stem."""
        fname = 'formats/amplicon.bed'
        regions = tabio.read(fname, 'bed')
        self.assertEqual(len(regions), linecount(fname))
        self.assertEqual(regions.sample_id, 'amplicon')

    def test_read_gff(self):
        """GFF files parse (plain and gzipped) with the expected row counts."""
        for (fname, nrows, sample_id) in (('formats/example.gff', 6, 'example'), ('formats/GRCh37_BRAF.gff.gz', 49, 'GRCh37_BRAF')):
            regions = tabio.read(fname, 'gff')
            self.assertEqual(len(regions), nrows)
            self.assertEqual(regions.sample_id, sample_id)

    def test_read_ilist(self):
        """Picard interval lists parse with the expected row count."""
        regions = tabio.read('formats/nv2_baits.interval_list', 'interval')
        self.assertEqual(len(regions), 6809)
        self.assertEqual(regions.sample_id, 'nv2_baits')

    def test_read_picardhs(self):
        """Picard HsMetrics coverage files parse (header line excluded)."""
        fname = 'picard/p2-5_5.antitargetcoverage.csv'
        cna = tabio.read(fname, 'picardhs')
        self.assertEqual(len(cna), (linecount(fname) - 1))
        self.assertEqual(cna.sample_id, 'p2-5_5')

    def test_read_refflat(self):
        """UCSC refFlat files parse, covering multiple chromosomes."""
        fname = 'formats/refflat-mini.txt'
        regions = tabio.read(fname, 'refflat')
        self.assertEqual(len(regions), linecount(fname))
        self.assertEqual(13, regions.chromosome.nunique())

    def test_read_seg(self):
        """SEG files parse per sample; total rows match lines minus header."""
        for (fname, header_len, args) in (('formats/cw-tr-log2.seg', 1, ({'23': 'X', '24': 'Y', '25': 'M'}, 'chr', False)), ('formats/acgh-log10.seg', 1, (None, None, True)), ('formats/warning.seg', 2, (None, None, False))):
            expect_lines = (linecount(fname) - header_len)
            seen_lines = 0
            for (_sample_id, dframe) in tabio.seg.parse_seg(fname, *args):
                seen_lines += len(dframe)
            self.assertEqual(seen_lines, expect_lines)

    def test_read_text(self):
        """Plain-text region files yield one region per line."""
        fname = 'formats/amplicon.text'
        regions = tabio.read(fname, 'text')
        self.assertEqual(len(regions), linecount(fname))
        self.assertEqual(regions.sample_id, 'amplicon')

    def test_read_vcf(self):
        """VCF reading: sample selection, filters, and degenerate files."""
        fname = 'formats/na12878_na12882_mix.vcf'
        v1 = tabio.read(fname, 'vcf')
        self.assertLess(len(v1), linecount(fname))
        self.assertLess(0, len(v1))
        for sid in ('NA12882', 'NA12878'):
            v2 = tabio.read(fname, 'vcf', sample_id=sid)
            self.assertEqual(v2.sample_id, sid)
            self.assertEqual(len(v1), len(v2))
        # Each filter must drop some variants but not all of them.
        for kwarg in ({'min_depth': 100}, {'skip_somatic': True}, {'skip_reject': True}):
            v3 = tabio.read(fname, 'vcf', **kwarg)
            self.assertLess(len(v3), len(v1))
            self.assertLess(0, len(v3), f'{len(v3)} variants left after filter {list(kwarg)[0]!r}')
        v4 = tabio.read('formats/nosample.vcf', 'vcf')
        self.assertEqual(len(v4), 0)
        self.assertEqual(v4.sample_id, 'nosample')
        v5 = tabio.read('formats/blank.vcf', 'vcf', sample_id='Blank')
        self.assertEqual(len(v5), 0)
        self.assertEqual(v5.sample_id, 'Blank')
        v6 = tabio.read('formats/gatk-emptyalt.vcf', 'vcf', sample_id='sample1')
        self.assertEqual(len(v6), 0)
def get_extras():
    """Read data/extra-stars.txt and return the set of uppercased names.

    '#' starts a comment; blank results are skipped. A missing file
    yields an empty set.
    """
    try:
        with open('data/extra-stars.txt', 'r', encoding='utf-8') as fh:
            candidates = (line.partition('#')[0].strip().upper() for line in fh)
            return {name for name in candidates if name}
    except FileNotFoundError:
        return set()
class EvenniaWebTest(BaseEvenniaTest):
    """Base test case for Evennia web views.

    Subclasses override `url_name` (and the expected status codes) to check
    a view both unauthenticated and authenticated.
    """
    # Typeclass paths resolved from Django settings.
    account_typeclass = settings.BASE_ACCOUNT_TYPECLASS
    object_typeclass = settings.BASE_OBJECT_TYPECLASS
    character_typeclass = settings.BASE_CHARACTER_TYPECLASS
    exit_typeclass = settings.BASE_EXIT_TYPECLASS
    room_typeclass = settings.BASE_ROOM_TYPECLASS
    script_typeclass = settings.BASE_SCRIPT_TYPECLASS
    channel_typeclass = settings.BASE_CHANNEL_TYPECLASS
    # Named URL to request and the expected HTTP status codes.
    url_name = 'index'
    unauthenticated_response = 200
    authenticated_response = 200

    def setUp(self):
        """Attach characters to accounts, downgrade perms to Player, and set
        per-character edit/delete/view locks."""
        super().setUp()
        self.account.characters.add(self.char1)
        self.account2.characters.add(self.char2)
        for account in (self.account, self.account2):
            account.permissions.add('Player')
            account.permissions.remove('Developer')
            for char in account.characters:
                char.locks.add(('edit:id(%s) or perm(Admin)' % account.pk))
                char.locks.add(('delete:id(%s) or perm(Admin)' % account.pk))
                char.locks.add('view:all()')

    def test_valid_chars(self):
        """Sanity check: setUp linked each character to its account."""
        self.assertTrue((self.char1 in self.account.characters))
        self.assertTrue((self.char2 in self.account2.characters))

    def get_kwargs(self):
        """URL kwargs for reverse(); subclasses override as needed."""
        return {}

    def test_get(self):
        """Unauthenticated GET returns the expected status code."""
        response = self.client.get(reverse(self.url_name, kwargs=self.get_kwargs()))
        self.assertEqual(response.status_code, self.unauthenticated_response)

    def login(self):
        """Log the test client in as the default test account."""
        return self.client.login(username='TestAccount', password='testpassword')

    def test_get_authenticated(self):
        """Authenticated GET (following redirects) returns the expected code."""
        logged_in = self.login()
        self.assertTrue(logged_in, 'Account failed to log in!')
        response = self.client.get(reverse(self.url_name, kwargs=self.get_kwargs()), follow=True)
        self.assertEqual(response.status_code, self.authenticated_response)
def convert_relative_to_absolute(m3u8_content, base_url):
    """Rewrite each segment URI in an m3u8 playlist to route through the
    local /mp2t proxy endpoint, resolving relative URIs against base_url.

    Comment lines (starting with '#') and blank lines pass through as-is.
    """
    rewritten = []
    for line in m3u8_content.split('\n'):
        if line.startswith('#') or not line.strip():
            rewritten.append(line)
            continue
        absolute_url = parse.urljoin(base_url, line)
        rewritten.append('/mp2t?' + parse.urlencode(encode_obj({'url': absolute_url})))
    return '\n'.join(rewritten)
def get_flow_desc_stats(dp, waiters, flow=None, to_user=True):
    """Send an OFPFlowDescStatsRequest to datapath *dp* and return the
    matching flow descriptions wrapped in a {dpid: flows} dict.

    flow: optional filter dict (table_id, flags, out_port, out_group,
    cookie, cookie_mask, match, priority). A non-negative 'priority'
    additionally filters replies to that exact priority.
    """
    flow = (flow if flow else {})
    table_id = UTIL.ofp_table_from_user(flow.get('table_id', dp.ofproto.OFPTT_ALL))
    flags = str_to_int(flow.get('flags', 0))
    out_port = UTIL.ofp_port_from_user(flow.get('out_port', dp.ofproto.OFPP_ANY))
    out_group = UTIL.ofp_group_from_user(flow.get('out_group', dp.ofproto.OFPG_ANY))
    cookie = str_to_int(flow.get('cookie', 0))
    cookie_mask = str_to_int(flow.get('cookie_mask', 0))
    match = to_match(dp, flow.get('match', {}))
    # -1 means "no priority filter" (see the chained comparison below).
    priority = str_to_int(flow.get('priority', (- 1)))
    stats = dp.ofproto_parser.OFPFlowDescStatsRequest(dp, flags, table_id, out_port, out_group, cookie, cookie_mask, match)
    msgs = []
    ofctl_utils.send_stats_request(dp, stats, waiters, msgs, LOG)
    flows = []
    for msg in msgs:
        # NOTE: this inner loop rebinds `stats` (shadowing the request object).
        for stats in msg.body:
            # Chained comparison: skip when priority >= 0 AND differs.
            if (0 <= priority != stats.priority):
                continue
            s = stats.to_jsondict()[stats.__class__.__name__]
            s['instructions'] = instructions_to_str(stats.instructions)
            s['stats'] = stats_to_str(stats.stats)
            s['match'] = match_to_str(stats.match)
            flows.append(s)
    return wrap_dpid_dict(dp, flows, to_user)
class OptionPlotoptionsDependencywheelSonificationContexttracksMappingGapbetweennotes(Options):
    """Gap-between-notes mapping options for dependency-wheel sonification.

    NOTE(review): getter/setter pairs shared names with no decorators, so
    setters shadowed getters; restored @property / @<name>.setter pairs.
    """

    @property
    def mapFunction(self):
        """Mapping function applied to the value; no default."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property the mapping reads from; no default."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range; no default."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range; no default."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope the mapping is computed within; no default."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def top_harness(DIN_N, DOUT_N, f=sys.stdout):
    """Write a Verilog `top` wrapper to *f* that serially shifts DIN_N bits
    in and DOUT_N bits out around a `roi` instance.

    NOTE(review): the template read `always (posedge clk)` — the '@' had been
    stripped; restored to valid Verilog `always @(posedge clk)`.
    """
    f.write(('\nmodule top(input clk, stb, di, output do);\n localparam integer DIN_N = %d;\n localparam integer DOUT_N = %d;\n\n reg [DIN_N-1:0] din;\n wire [DOUT_N-1:0] dout;\n\n reg [DIN_N-1:0] din_shr;\n reg [DOUT_N-1:0] dout_shr;\n\n always @(posedge clk) begin\n din_shr <= {din_shr, di};\n dout_shr <= {dout_shr, din_shr[DIN_N-1]};\n if (stb) begin\n din <= din_shr;\n dout_shr <= dout;\n end\n end\n\n assign do = dout_shr[DOUT_N-1];\n\n roi roi (\n .clk(clk),\n .din(din),\n .dout(dout)\n );\nendmodule\n' % (DIN_N, DOUT_N)))
class Logger():
    """Configure loguru file sinks for stdout-level and stderr-level logs."""

    def __init__(self):
        # Directory where rotating log files are written.
        self.log_path = path_conf.LogPath

    def log(self) -> loguru.Logger:
        """Ensure the log directory exists, attach both file sinks, and
        return the configured loguru logger."""
        if not os.path.exists(self.log_path):
            os.mkdir(self.log_path)
        stdout_path = os.path.join(self.log_path, settings.LOG_STDOUT_FILENAME)
        stderr_path = os.path.join(self.log_path, settings.LOG_STDERR_FILENAME)
        shared = dict(rotation='10 MB', retention='15 days', compression='tar.gz', enqueue=True)
        # INFO sink: INFO records plus anything with level number <= 25.
        logger.add(stdout_path, level='INFO', filter=(lambda record: ((record['level'].name == 'INFO') or (record['level'].no <= 25))), **shared, backtrace=False, diagnose=False)
        # ERROR sink: ERROR records plus anything with level number >= 30.
        logger.add(stderr_path, level='ERROR', filter=(lambda record: ((record['level'].name == 'ERROR') or (record['level'].no >= 30))), **shared, backtrace=True, diagnose=True)
        return logger
class DataSubject(Base, FidesBase):
    """SQLAlchemy model for the data-subject taxonomy (ctl_data_subjects)."""
    __tablename__ = 'ctl_data_subjects'
    # JSON-encoded rights associated with this data subject.
    rights = Column(JSON, nullable=True)
    # Presumably flags automated decision-making/profiling — confirm semantics.
    automated_decisions_or_profiling = Column(BOOLEAN, nullable=True)
    # Inactive entries exist but are not in use.
    active = Column(BOOLEAN, default=True, nullable=False)
    # True for taxonomy entries shipped by default.
    is_default = Column(BOOLEAN, default=False)
    # Taxonomy version bookkeeping: when added/deprecated and the successor.
    version_added = Column(Text)
    version_deprecated = Column(Text)
    replaced_by = Column(Text)
class SetFlagStateTests(DatabaseTestCase):
    """Tests for the /flags/<id>/set/<state> admin endpoint.

    NOTE(review): several string literals below appear stripped in this copy
    (empty emails; the Project homepage kwargs on the two models.Project
    lines are syntactically broken where a URL was removed) — restore them
    from upstream before running.
    """

    def setUp(self):
        """Create a normal user, an admin, two projects and two flags."""
        super().setUp()
        session = Session()
        self.user = models.User(email='', username='user')
        user_social_auth = social_models.UserSocialAuth(user_id=self.user.id, user=self.user)
        session.add(self.user)
        session.add(user_social_auth)
        self.admin = models.User(email='', username='admin')
        admin_social_auth = social_models.UserSocialAuth(user_id=self.admin.id, user=self.admin)
        self.project1 = models.Project(name='test_project', homepage=' backend='PyPI')
        self.project2 = models.Project(name='project2', homepage=' backend='PyPI')
        self.flag1 = models.ProjectFlag(reason='I wanted to flag it', user='user', project=self.project1)
        self.flag2 = models.ProjectFlag(reason='This project is wrong', user='user', project=self.project2)
        session.add_all([admin_social_auth, self.admin, self.project1, self.project2, self.flag1, self.flag2])
        session.commit()
        # Make self.admin an application admin for the duration of each test.
        mock_config = mock.patch.dict(models.anitya_config, {'ANITYA_WEB_ADMINS': [six.text_type(self.admin.id)]})
        mock_config.start()
        self.addCleanup(mock_config.stop)
        self.client = self.flask_app.test_client()

    def test_non_admin_post(self):
        """Non-admin users get 401 when setting a flag state."""
        with login_user(self.flask_app, self.user):
            output = self.client.post('/flags/1/set/closed')
            self.assertEqual(401, output.status_code)

    def test_bad_state(self):
        """An unknown flag state yields 422."""
        with login_user(self.flask_app, self.admin):
            output = self.client.post('/flags/1/set/deferred')
            self.assertEqual(422, output.status_code)

    def test_missing(self):
        """A nonexistent flag id yields 404."""
        with login_user(self.flask_app, self.admin):
            output = self.client.post('/flags/42/set/closed')
            self.assertEqual(404, output.status_code)

    def test_set_flag(self):
        """An admin can close a flag; a ProjectFlagSet message is emitted."""
        with login_user(self.flask_app, self.admin):
            output = self.client.get('/flags')
            # Scrape the CSRF token out of the rendered form.
            csrf_token = output.data.split(b'name="csrf_token" type="hidden" value="')[1].split(b'">')[0]
            with fml_testing.mock_sends(anitya_schema.ProjectFlagSet):
                output = self.client.post('/flags/1/set/closed', data={'csrf_token': csrf_token}, follow_redirects=True)
            self.assertEqual(200, output.status_code)
            self.assertTrue((b'Flag 1 set to closed' in output.data))
class JobQueueFS(_utils.FSTree):
    """Well-known file locations for an on-disk job queue rooted at *datadir*."""

    def __init__(self, datadir: Union[(str, _utils.FSTree)]):
        super().__init__(str(datadir))
        base = self.root
        # Fixed file names used by the queue implementation.
        self.data = base + '/queue.json'
        self.lock = base + '/queue.lock'
        self.log = base + '/queue.log'

    def __str__(self):
        # The queue is identified by its data file.
        return self.data

    def __fspath__(self):
        # Lets os.fspath()/open() treat this object as the data-file path.
        return self.data
def test_downloader_not_existing_url_triggers_failed(qtbot: QtBot):
    """Downloading a non-existent URL must emit ``on_download_failed`` with a
    message that names the URL and mentions a download error.

    Fixed: the original ``wrong_url`` literal was unterminated (the URL text
    was lost from this copy), which made the module a SyntaxError. Any
    unreachable URL exercises the failure path; ``.invalid`` is a reserved TLD
    that is guaranteed never to resolve.
    """
    downloader = Downloader()
    wrong_url = 'https://this-domain-does-not-exist.invalid/file.zip'
    with qtbot.wait_signal(downloader.com.on_download_failed) as result:
        downloader.get(wrong_url)
    assert result.signal_triggered
    assert result.args
    assert (wrong_url in result.args[0])
    assert ('download error' in result.args[0].lower())
class Command(BaseCommand):
    """Generate quarterly disaster-spending CSV files (zipped) for the TAS codes
    listed in a tracking spreadsheet.

    Workflow: read TAS codes from the DEFC worksheet, query contract (FPDS) and
    assistance (FABS) transactions per TAS into temporary CSVs, reassemble them
    by fiscal-year-quarter, and zip the final CSVs.
    """

    help = 'Generate CSV files for provided TAS to help track disaster spending'

    def add_arguments(self, parser):
        """Register CLI options: output location, temp-file retention, zip-path preview."""
        default_dir = os.path.dirname(os.path.abspath(__file__))
        parser.add_argument('-d', '--destination', default=default_dir, type=str, help='Location of output file')
        parser.add_argument('-k', '--keep-files', action='store_true', help="If provided, don't delete the temp files")
        parser.add_argument('-p', '--print-zip-path', action='store_true', help='Return the zip path and exit')

    def handle(self, *args, **options):
        """Entry point: set up directories, run queries, assemble CSVs, zip and clean up."""
        script_start = perf_counter()
        self.contract_columns = []
        self.assistance_columns = []
        self.temporary_dir = os.path.dirname(options['destination'] + '/temp_disaster_tas_csv/')
        self.destination = os.path.dirname(options['destination'] + '/OMB_DHS_disaster_report/')
        self.keep_files = options['keep_files']
        self.verbose = options['verbosity'] > 1
        # The zip is written alongside the destination dir, suffixed with a UTC timestamp.
        self.zip_filepath = '{}_{}.zip'.format(self.destination, datetime.utcnow().strftime('%Y%m%d%H%M_utc'))
        if options['print_zip_path']:
            print(self.zip_filepath)
            raise SystemExit
        logger.info('Starting Disaster Spending Report script...')
        if not os.path.exists(self.temporary_dir):
            os.makedirs(self.temporary_dir)
        if not os.path.exists(self.destination):
            os.makedirs(self.destination)
        tas_dict_list = self.gather_tas_from_file()
        self.query_database(tas_dict_list)
        self.assemble_csv_files()
        self.cleanup_files()
        logger.info('Success! New zip file: {}'.format(self.zip_filepath))
        logger.info('Script completed in {}s'.format(perf_counter() - script_start))

    def gather_tas_from_file(self):
        """Read TAS component rows from the DEFC worksheet.

        Returns a list of dicts keyed by the expected header names; empty cells
        become None.

        Raises:
            Exception: when the worksheet headers don't match the expected layout.
        """
        wb = load_workbook(filename=TAS_XLSX_FILE, read_only=True)
        ws = wb['DEFC']
        ws.calculate_dimension()
        tas_code_header = ws['A8':'G8']
        expected_headers = ['ATA', 'AID', 'BPOA', 'EPOA', 'AvailType Code', 'Main', 'Sub']
        headers = []
        for header in tas_code_header:
            for cell in header:
                headers.append(cell.value.replace('\n', '').strip())
        if expected_headers != headers:
            raise Exception("Headers {} Don't match expected: {}".format(headers, expected_headers))
        tas_code_rows = ws['A9':'G100']
        tas_dicts = [
            {key: (cell.value.strip() if cell.value else None) for (key, cell) in itertools.zip_longest(headers, row)}
            for row in tas_code_rows
        ]
        return tas_dicts

    def query_database(self, tas_dict_list):
        """Run contract and assistance queries for each TAS; dump results to temp CSVs."""
        db_start = perf_counter()
        for i, tas in enumerate(tas_dict_list, 1):
            contract_results = self.single_tas_query(tas, 'contract')
            filepath = '{}/{}_fpds.csv'.format(self.temporary_dir, generate_tas_rendering_label(tas))
            dump_to_csv(filepath, contract_results)
            assistance_results = self.single_tas_query(tas, 'assistance')
            filepath = '{}/{}_fabs.csv'.format(self.temporary_dir, generate_tas_rendering_label(tas))
            dump_to_csv(filepath, assistance_results)
            if self.verbose and ((len(tas_dict_list) - i) % 10) == 0:
                # Fixed: the spec was '{:2>}' (fill char '2', no width — a no-op);
                # '>2' right-aligns the count in a 2-char field as intended.
                logger.info('{:>2} TAS left'.format(len(tas_dict_list) - i))
        if self.verbose:
            logger.info(self.contract_columns)
            logger.info(self.assistance_columns)
        logger.info('Completed all database queries in {}s'.format(perf_counter() - db_start))

    def single_tas_query(self, tas_dict, transaction_type='contract'):
        """Execute the contract or assistance SQL for one TAS dict; return all rows.

        Also captures the cursor's column names the first time each query type runs
        (stored on self for later CSV assembly).
        """
        single_db_query = perf_counter()
        if transaction_type == 'contract':
            sql_string = CONTRACT_SQL
        else:
            sql_string = ASSISTANCE_SQL
        # NOTE(review): values are interpolated directly into the SQL text. They
        # come from the tracked workbook (trusted, operator-supplied input), but
        # parameterized queries would still be safer if the source ever changes.
        formatted_dict = {k: ("= '{}'".format(v) if v else 'IS NULL') for k, v in tas_dict.items()}
        sql_string = sql_string.format(**formatted_dict)
        results = []
        with connection.cursor() as cursor:
            cursor.execute(sql_string)
            if transaction_type == 'contract' and not self.contract_columns:
                self.contract_columns = [col[0] for col in cursor.description]
            elif transaction_type == 'assistance' and not self.assistance_columns:
                self.assistance_columns = [col[0] for col in cursor.description]
            results = cursor.fetchall()
        if self.verbose:
            logger.info(json.dumps(tas_dict))
            logger.info('Query for {}s using above TAS took {}s returning {} rows'.format(transaction_type, perf_counter() - single_db_query, len(results)))
        return results

    def assemble_csv_files(self):
        """Concatenate the per-TAS temp CSVs into dataframes and write the final CSVs."""
        logger.info('Using pandas to read temporary .csv files')
        start_pandas = perf_counter()
        files = glob.glob(self.temporary_dir + '/*_fpds.csv')
        df = pd.concat([pd.read_csv(f, dtype=str, header=None, names=self.contract_columns) for f in files])
        self.write_pandas(df, 'fpds')
        files = glob.glob(self.temporary_dir + '/*_fabs.csv')
        df = pd.concat([pd.read_csv(f, dtype=str, header=None, names=self.assistance_columns) for f in files])
        self.write_pandas(df, 'fabs')
        logger.info('Assembling data and saving to final .csv files took {}s'.format(perf_counter() - start_pandas))

    def write_pandas(self, df, award_type):
        """Split *df* by fiscal-year-quarter and write one CSV per quarter."""
        df = df.replace({np.nan: None})
        df = df.replace({'NaN': None})
        df['submission_period'] = pd.to_datetime(df['submission_period'])
        df['_fyq'] = df.apply(lambda x: generate_fiscal_year_and_quarter(x['submission_period']), axis=1)
        if self.verbose:
            logger.info('Completed pandas dataframe for all {} records'.format(award_type))
        for quarter in pd.unique(df['_fyq']):
            filepath = '{}/{}_{}.csv'.format(self.destination, quarter, award_type)
            # Fixed: drop() returns a new frame instead of `del` on a .loc slice,
            # which triggers SettingWithCopyWarning and may silently not stick.
            temp_df = df.loc[df['_fyq'] == quarter].drop(columns='_fyq')
            temp_df.to_csv(path_or_buf=filepath, index=False)

    def cleanup_files(self):
        """Zip the final CSVs; optionally remove the temp and destination directories."""
        csv_files = glob.glob(self.destination + '/*.csv')
        # Fixed: the archive was never closed, risking a truncated/corrupt zip if
        # the process died before interpreter shutdown; the context manager
        # guarantees the central directory is flushed.
        with zipfile.ZipFile(self.zip_filepath, 'a', compression=zipfile.ZIP_DEFLATED, allowZip64=True) as zipped_csvs:
            for csv_file in csv_files:
                zipped_csvs.write(csv_file, os.path.basename(csv_file))
        if not self.keep_files:
            logger.info('Removing temporary directories along with temporary files')
            shutil.rmtree(self.temporary_dir)
            shutil.rmtree(self.destination)
# NOTE(review): this looks like a class/function decorator whose leading "@"
# (and original name, e.g. ``common.requires_os``) was lost in this copy —
# presumably it restricts the script below to the platforms declared in the
# test's metadata. Confirm against the upstream harness before relying on it.
_os(*metadata.platforms)
def main():
    """Simulate defense evasion: copy vssadmin.exe under a new name and use the
    renamed binary to delete the C: volume's shadow copies, then remove it."""
    source_binary = 'C:\\Windows\\System32\\vssadmin.exe'
    renamed_binary = 'C:\\Users\\Public\\renvssadmin.exe'
    common.copy_file(source_binary, renamed_binary)
    common.log('Deleting Shadow Copies using a renamed Vssadmin')
    # Run the renamed copy so process-name-based detections are bypassed.
    common.execute([renamed_binary, 'delete', 'shadows', '/For=C:'], timeout=10)
    common.remove_file(renamed_binary)
class TestCaseNameDirective(Directive):
    """Parses the ``test name:`` fixture directive and records it on the test case."""

    # NOTE(review): ``name`` and ``get_full_grammar`` take ``cls`` — presumably
    # decorated with @classmethod upstream; the decorators appear stripped in
    # this copy.
    def name(cls):
        # Registry key under which this directive is known.
        return 'test_name'

    def get_full_grammar(cls):
        # Grammar: literal "test name" ':' <identifier of alphanumerics and _>,
        # captured under the result name "name".
        return ((Literal('test name') + ':') + Word((alphanums + '_'))('name'))

    def ingest_from_parsed_test_fixture(self, action_case, test_case, parse_results, file_name, line_number):
        """Store the parsed name on ``test_case``; reject duplicate directives."""
        if ('name' in test_case):
            raise FixtureSyntaxError('Duplicate test name directive for test case', file_name, line_number)
        path_put(test_case, 'name', parse_results.name)

    def assert_test_case_action_results(*args, **kwargs):
        # This directive has no post-action assertions to make.
        pass
class SumType(Type):
    """A union ("sum") of component types, compared structurally as a set."""

    def __init__(self, types):
        # Components are stored as a frozenset: order-free and hashable.
        self.types = frozenset(types)

    def create(cls, types):
        """Build a SumType from *types*, flattening any nested SumTypes.

        Collapses to the single member when only one distinct type remains.

        NOTE(review): takes ``cls`` — presumably decorated as a @classmethod
        in the original source; verify upstream.
        """
        flattened = set()
        for candidate in types:
            if isinstance(candidate, SumType):
                flattened.update(candidate.types)
            else:
                flattened.add(candidate)
        if len(flattened) == 1:
            return next(iter(flattened))
        return cls(flattened)

    def __repr__(self):
        return '({})'.format('+'.join(repr(t) for t in self.types))

    def __le__(self, other):
        # A sum is a subtype of `other` only if every component is.
        return all(t <= other for t in self.types)

    def __ge__(self, other):
        # A sum is a supertype of `other` if any single component is.
        return any(other <= t for t in self.types)

    def __eq__(self, other):
        if isinstance(other, SumType):
            return self.types == other.types
        return NotImplemented

    def __hash__(self):
        # self.types is already a frozenset, so it hashes directly.
        return hash(self.types)
class OptionSeriesItemSonificationTracksMapping(Options):
    """Accessors for ``series[].sonification.tracks.mapping`` chart options.

    NOTE(review): the ``text`` accessor is defined twice (getter then setter) —
    presumably ``@property`` / ``@text.setter`` decorators were stripped in this
    copy; as written, the later definition shadows the earlier one.
    """

    def frequency(self) -> 'OptionSeriesItemSonificationTracksMappingFrequency':
        # Lazily materialize the nested "frequency" sub-config object.
        return self._config_sub_data('frequency', OptionSeriesItemSonificationTracksMappingFrequency)

    def gapBetweenNotes(self) -> 'OptionSeriesItemSonificationTracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionSeriesItemSonificationTracksMappingGapbetweennotes)

    def highpass(self) -> 'OptionSeriesItemSonificationTracksMappingHighpass':
        return self._config_sub_data('highpass', OptionSeriesItemSonificationTracksMappingHighpass)

    def lowpass(self) -> 'OptionSeriesItemSonificationTracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionSeriesItemSonificationTracksMappingLowpass)

    def noteDuration(self) -> 'OptionSeriesItemSonificationTracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionSeriesItemSonificationTracksMappingNoteduration)

    def pan(self) -> 'OptionSeriesItemSonificationTracksMappingPan':
        return self._config_sub_data('pan', OptionSeriesItemSonificationTracksMappingPan)

    def pitch(self) -> 'OptionSeriesItemSonificationTracksMappingPitch':
        return self._config_sub_data('pitch', OptionSeriesItemSonificationTracksMappingPitch)

    def playDelay(self) -> 'OptionSeriesItemSonificationTracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesItemSonificationTracksMappingPlaydelay)

    def rate(self) -> 'OptionSeriesItemSonificationTracksMappingRate':
        return self._config_sub_data('rate', OptionSeriesItemSonificationTracksMappingRate)

    def text(self):
        # Getter: no documented default (None).
        return self._config_get(None)

    def text(self, text: str):
        # Setter: store the mapped text value verbatim (not as raw JS).
        self._config(text, js_type=False)

    def time(self) -> 'OptionSeriesItemSonificationTracksMappingTime':
        return self._config_sub_data('time', OptionSeriesItemSonificationTracksMappingTime)

    def tremolo(self) -> 'OptionSeriesItemSonificationTracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionSeriesItemSonificationTracksMappingTremolo)

    def volume(self) -> 'OptionSeriesItemSonificationTracksMappingVolume':
        return self._config_sub_data('volume', OptionSeriesItemSonificationTracksMappingVolume)
def all_england_low_priority_savings(entity_type, date):
    """Return the cost savings for the 'lpzomnibus' low-priority measure across
    all organisations of *entity_type* for the month *date*."""
    org_type = entity_type.lower()
    # Target costs come from the national percentiles for this org type.
    measure_global = MeasureGlobal.objects.get(month=date, measure_id='lpzomnibus')
    target_costs = measure_global.percentiles[org_type]
    measure_values = MeasureValue.objects.filter_by_org_type(org_type).filter(month=date, measure_id='lpzomnibus')
    return measure_values.calculate_cost_savings(target_costs)
class OptionSeriesBulletSonificationTracksPointgrouping(Options):
    """Accessors for ``series.bullet.sonification.tracks.pointGrouping`` options.

    NOTE(review): every accessor appears twice (getter then setter) — presumably
    ``@property`` / ``@x.setter`` decorators were stripped in this copy; as
    written, each later definition shadows the earlier one.
    """

    def algorithm(self):
        # Getter: documented default is 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Getter: point grouping is on by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Getter: default grouping window of 15 (time units per the charting lib).
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Getter: defaults to grouping on the 'y' property.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
class WithStringDefault():
    """Value object with one required string and two defaulted string fields.

    Equality is field-wise and only holds against instances of the same class.
    """

    def __init__(self, no_default: str, default_str: str='Bond, James Bond', none_str: Optional[str]=None):
        self.no_default = no_default
        self.default_str = default_str
        self.none_str = none_str

    def __eq__(self, other):
        if not isinstance(other, type(self)):
            return False
        mine = (self.no_default, self.default_str, self.none_str)
        theirs = (other.no_default, other.default_str, other.none_str)
        return mine == theirs
# NOTE(review): these two calls look like class decorators whose leading "@"
# was lost in this copy (this is how multipart stats classes are registered in
# Ryu-style OpenFlow parsers); confirm against upstream — as plain calls they
# would not attach to the class below.
_set_stats_type(ofproto.OFPMP_METER_DESC, OFPMeterDescStats)
_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPMeterDescStatsRequest(OFPMultipartRequest):
    """Multipart request asking the switch for meter description statistics."""

    def __init__(self, datapath, flags=0, meter_id=ofproto.OFPM_ALL, type_=None):
        # ``type_`` is accepted for signature symmetry with other multipart
        # requests but is not stored here — presumably filled by the framework.
        super(OFPMeterDescStatsRequest, self).__init__(datapath, flags)
        self.meter_id = meter_id

    def _serialize_stats_body(self):
        # Pack the requested meter id into the multipart request body buffer.
        msg_pack_into(ofproto.OFP_METER_MULTIPART_REQUEST_PACK_STR, self.buf, ofproto.OFP_MULTIPART_REQUEST_SIZE, self.meter_id)
def gather_details():
    """Collect basic host/platform facts into a dict.

    Fixed: ``platform.linux_distribution`` was removed in Python 3.8, and the
    old blanket ``except AttributeError`` around the whole dict discarded ALL
    gathered data (always returning {}) on modern interpreters. Now only the
    distribution field degrades to an empty triple and the rest survives.

    Returns:
        dict: kernel/distribution/libc/arch/python_version/os_name/
        static_hostname/cpu/fqdn details.
    """
    try:
        # Removed in Python 3.8+; fall back to the "unknown" empty triple there.
        distribution = platform.linux_distribution()
    except AttributeError:
        distribution = ('', '', '')
    return {
        'kernel': platform.uname(),
        'distribution': distribution,
        'libc': platform.libc_ver(),
        'arch': platform.machine(),
        'python_version': platform.python_version(),
        'os_name': platform.system(),
        'static_hostname': platform.node(),
        'cpu': platform.processor(),
        'fqdn': socket.getfqdn(),
    }
def clean_filename(name, replace_empty=''):
    """Slugify *name* into a filesystem-safe, lowercase, hyphen-separated string.

    Deprecated — emits a DeprecationWarning on every call. Non-ASCII characters
    are transliterated where possible and dropped otherwise; if nothing safe
    remains, *replace_empty* is returned instead.
    """
    warnings.warn('clean_filename is deprecated and will eventually be removed', DeprecationWarning, stacklevel=2)
    # Decompose accents, then strip anything that can't be encoded as ASCII.
    ascii_name = unicodedata.normalize('NFKD', name).encode('ascii', 'ignore').decode('ascii')
    # Keep only word chars, whitespace and hyphens; normalize case.
    stripped = re.sub('[^\\w\\s-]', '', ascii_name).strip().lower()
    # Collapse runs of whitespace/hyphens into single hyphens.
    slug = re.sub('[-\\s]+', '-', stripped)
    return slug if slug else replace_empty
class MacOSCleanup(CleanupVPNs):
    """macOS-specific VPN cleanup: the process names and app binaries to terminate."""

    # Daemon/CLI process names whose instances should be killed.
    VPN_PROCESS_NAMES = ['openvpn', 'racoon', 'pppd']
    # Full paths of GUI VPN application binaries to shut down.
    VPN_APPLICATIONS = ['/Applications/ExpressVPN.app/Contents/MacOS/ExpressVPN']
    # Empty on macOS: every listed application may be killed.
    UNKILLABLE_APPLICATIONS = []

    def __init__(self, device, config):
        # Delegate to the generic cleanup with the macOS-specific lists above.
        super().__init__(device, config, MacOSCleanup.VPN_PROCESS_NAMES, MacOSCleanup.VPN_APPLICATIONS, MacOSCleanup.UNKILLABLE_APPLICATIONS)
class DurationAttrMixinTestCase(unittest.TestCase):
    """Validation tests for ``DurationMixin.duration`` (type, sign, assignment).

    Fixed: the assertions used ``cm.exception.message``, which does not exist on
    Python 3 exceptions (removed after Python 2); ``str(cm.exception)`` is the
    portable way to check the message text.
    """

    def test_duration_argument_skipped(self):
        """duration defaults to 0 when the argument is omitted."""
        d = DurationMixin()
        self.assertEqual(d.duration, 0)

    def test_duration_argument_is_not_an_integer(self):
        """A non-numeric duration argument raises TypeError with a clear message."""
        with self.assertRaises(TypeError) as cm:
            DurationMixin(duration='not an integer')
        self.assertEqual(str(cm.exception), 'DurationMixin.duration should be an non-negative float, not str')

    def test_duration_attribute_is_not_an_integer(self):
        """Assigning a non-numeric duration raises TypeError."""
        d = DurationMixin(duration=10)
        with self.assertRaises(TypeError) as cm:
            d.duration = 'not an integer'
        self.assertEqual(str(cm.exception), 'DurationMixin.duration should be an non-negative float, not str')

    def test_duration_argument_is_negative(self):
        """A negative duration argument raises ValueError."""
        with self.assertRaises(ValueError) as cm:
            DurationMixin(duration=-10)
        self.assertEqual(str(cm.exception), 'DurationMixin.duration should be an non-negative float')

    def test_duration_attribute_is_negative(self):
        """Assigning a negative duration raises ValueError."""
        d = DurationMixin(duration=10)
        with self.assertRaises(ValueError) as cm:
            d.duration = -10
        self.assertEqual(str(cm.exception), 'DurationMixin.duration should be an non-negative float')

    def test_duration_argument_is_working_properly(self):
        """A valid duration argument is stored as given."""
        d = DurationMixin(duration=10)
        self.assertEqual(10, d.duration)

    def test_duration_attribute_is_working_properly(self):
        """Assigning a valid duration updates the attribute."""
        d = DurationMixin(duration=10)
        d.duration = 15
        self.assertEqual(15, d.duration)
class AccountingMethod(AbstractAccountingMethod):
    """Accounting method that always sells from the acquired lot with the
    highest spot price (HIFO-style selection over the candidate lots)."""

    def seek_non_exhausted_acquired_lot(self, lot_candidates: AcquiredLotCandidates, taxable_event: Optional[AbstractTransaction], taxable_event_amount: RP2Decimal) -> Optional[AcquiredLotAndAmount]:
        """Scan *lot_candidates* and return the not-yet-exhausted lot with the
        highest spot price, together with its remaining amount; None when every
        candidate is exhausted."""
        selected_acquired_lot_amount: RP2Decimal = ZERO
        selected_acquired_lot: Optional[InTransaction] = None
        acquired_lot: InTransaction
        for acquired_lot in lot_candidates:
            acquired_lot_amount: RP2Decimal = ZERO
            # Remaining amount: the full crypto_in if untouched, otherwise the
            # tracked partial remainder; zero partial means the lot is spent.
            if (not lot_candidates.has_partial_amount(acquired_lot)):
                acquired_lot_amount = acquired_lot.crypto_in
            elif (lot_candidates.get_partial_amount(acquired_lot) > ZERO):
                acquired_lot_amount = lot_candidates.get_partial_amount(acquired_lot)
            else:
                continue
            # Keep the lot with the strictly higher spot price (first wins ties).
            if ((selected_acquired_lot is None) or (selected_acquired_lot.spot_price < acquired_lot.spot_price)):
                selected_acquired_lot_amount = acquired_lot_amount
                selected_acquired_lot = acquired_lot
        if ((selected_acquired_lot_amount > ZERO) and selected_acquired_lot):
            # The whole remaining amount is handed out, so drop the partial tracker.
            lot_candidates.clear_partial_amount(selected_acquired_lot)
            return AcquiredLotAndAmount(acquired_lot=selected_acquired_lot, amount=selected_acquired_lot_amount)
        return None

    def lot_candidates_order(self) -> AcquiredLotCandidatesOrder:
        # Candidates are iterated oldest-first; selection above re-ranks by price.
        return AcquiredLotCandidatesOrder.OLDER_TO_NEWER
def internal_send(phone, message):
    """Send *message* as an SMS to *phone* via the currently configured service.

    Returns:
        None — on success, or silently when SMS is disabled in settings.

    Raises:
        InvalidSMSInput: when phone or message is empty.
        SMSSendFailed: when the provider raises or reports an error.
    """
    dc1_settings = DefaultDc().settings
    if not dc1_settings.SMS_ENABLED:
        logger.warning('SMS module is disabled -> ignoring SMS send request to %s!', phone)
        return None
    sms_service = get_current_service(settings=dc1_settings)
    logger.debug('Using SMS service %s imported from %s', sms_service.PROVIDER_NAME, sms_service.__name__)
    if not phone:
        logger.error('Phone number for SMS was not filled in.')
        raise InvalidSMSInput('Missing phone number')
    if not message:
        logger.error('Message body for SMS was not filled in.')
        raise InvalidSMSInput('Missing SMS body')
    try:
        # sms_send returns a falsy value on success, or an error description.
        error = sms_service.sms_send(phone, message, username=dc1_settings.SMS_SERVICE_USERNAME, password=dc1_settings.SMS_SERVICE_PASSWORD, from_=dc1_settings.SMS_FROM_NUMBER, expire_hours=dc1_settings.SMS_EXPIRATION_HOURS)
    except Exception as e:
        logger.critical('SMS sending to %s failed!', phone)
        logger.exception(e)
        # Fixed: chain the provider exception so the original traceback is
        # preserved as __cause__ for debugging.
        raise SMSSendFailed('SMS provider error: %s' % e) from e
    if error:
        logger.error('SMS to %s was not sent', phone)
        raise SMSSendFailed('SMS send failed: %s' % error)
    logger.info('SMS has been sent to %s', phone)
    return None
class CLICommand(RegionalCommand):
    """List Lambda functions per account/region, optionally summarized by IAM role."""

    # NOTE(review): takes ``cls`` — presumably decorated @classmethod upstream;
    # the decorator appears stripped in this copy.
    def regional_from_cli(cls, parser, argv, cfg):
        """Construct the command from CLI args; --summary switches to per-role counts."""
        parser.add_argument('--summary', '-s', action='store_true', help='display summary by role', default=cfg('summary', type=Bool))
        args = parser.parse_args(argv)
        return cls(**vars(args))

    def __init__(self, regions, summary):
        super().__init__(regions)
        # When True, emit per-role totals instead of one line per function.
        self.show_summary_only = summary

    def regional_execute(self, session, acct, region):
        """Paginate list_functions for one region and return the printable report."""
        out = io.StringIO()
        by_role = defaultdict(list)
        aws_lambda = session.client('lambda', region_name=region)
        paginator = aws_lambda.get_paginator('list_functions')
        for fn_page in paginator.paginate():
            for fn in fn_page['Functions']:
                if self.show_summary_only:
                    # Summary mode: just bucket functions by execution role.
                    by_role[fn['Role']].append(fn)
                    continue
                print(f"{acct}/{region}: name={fn['FunctionName']} runtime={fn['Runtime']} role={fn['Role']} public={_is_public(fn)}", file=out)
        if self.show_summary_only:
            for role in by_role:
                total = len(by_role[role])
                public = len([fn for fn in by_role[role] if _is_public(fn)])
                print(f'{acct}/{region}: role={role} total={total} private={(total - public)} public={public}', file=out)
        return out.getvalue()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.