code stringlengths 281 23.7M |
|---|
def link_functions(functions, functions_mro):
    """Resolve symbolic ``Jump`` destinations to concrete function objects.

    Builds id- and name-keyed lookup tables from ``functions_mro``; iterating
    ``reversed(functions_mro)`` means the *first* MRO entry wins when keys
    collide (later dict assignments overwrite earlier ones). Each block whose
    transfer is a ``Jump`` has its ``dst`` rewritten in place from a
    ``JumpDestination`` to the resolved function.
    """
    functions_by_name = {f.ast_node.name: f for (f, _) in reversed(functions_mro)}
    functions_by_id = {f.ast_node.id: f for (f, _) in reversed(functions_mro)}
    for (_, blocks) in functions.values():
        for block in blocks:
            if isinstance(block.transfer, ir.Jump):
                assert isinstance(block.transfer.dst, ir.JumpDestination), block.transfer.dst
                function_id = block.transfer.dst.function
                if isinstance(function_id, int):
                    block.transfer.dst = functions_by_id.get(function_id, None)
                elif isinstance(function_id, str):
                    block.transfer.dst = functions_by_name.get(function_id, None)
                else:
                    raise NotImplementedError('Unexpected function id type', function_id)
                # Resolution must succeed: a dangling jump is a hard error.
                assert (block.transfer.dst is not None)
            elif isinstance(block.transfer, ir.Call):
                # NOTE(review): Call targets are only sanity-checked here, not
                # resolved -- presumably handled elsewhere; confirm.
                assert isinstance(block.transfer.dst, ir.CallTarget), block.transfer.dst
def _print_info(portal_info_dict, server_info_dict):
    """Pretty-print portal and/or server info dicts inside an Evennia banner.

    List values are joined onto indented continuation lines, blank lines are
    stripped from the filled templates, and the result is framed by a top
    border carrying the 'Evennia' label and a plain bottom border.
    """
    indent = (' ' * 8)

    def _flatten(mapping):
        # Render list values as one entry per line, aligned under their key.
        flat = {}
        for key, val in mapping.items():
            if isinstance(val, list):
                val = ('\n' + indent).join(str(item) for item in val)
            flat[key] = val
        return flat

    def _drop_blank(text):
        return '\n'.join(line for line in text.split('\n') if line.strip())

    pstr = ''
    sstr = ''
    if portal_info_dict:
        pstr = _drop_blank(PORTAL_INFO.format_map(_flatten(portal_info_dict)))
    if server_info_dict:
        sstr = _drop_blank(SERVER_INFO.format_map(_flatten(server_info_dict)))
    info = pstr + ('\n\n' + sstr if sstr else '')
    maxwidth = max(len(line) for line in info.split('\n'))
    top_border = ('-' * (maxwidth - 11)) + ' Evennia ' + '---'
    border = '-' * (maxwidth + 1)
    print(top_border + '\n' + info + '\n' + border)
def lazy_import():
    """Import the WAF model classes on demand and publish them module-wide.

    Deferring these imports until first use avoids circular-import problems at
    module load time (the usual pattern in generated OpenAPI clients). Each
    class is written into ``globals()`` so later attribute lookups on this
    module resolve without re-importing.
    """
    from fastly.model.schemas_waf_firewall_version import SchemasWafFirewallVersion
    from fastly.model.schemas_waf_firewall_version_data import SchemasWafFirewallVersionData
    from fastly.model.type_waf_rule_revision import TypeWafRuleRevision
    from fastly.model.waf_rule_revision import WafRuleRevision
    from fastly.model.waf_rule_revision_attributes import WafRuleRevisionAttributes
    globals()['SchemasWafFirewallVersion'] = SchemasWafFirewallVersion
    globals()['SchemasWafFirewallVersionData'] = SchemasWafFirewallVersionData
    globals()['TypeWafRuleRevision'] = TypeWafRuleRevision
    globals()['WafRuleRevision'] = WafRuleRevision
    globals()['WafRuleRevisionAttributes'] = WafRuleRevisionAttributes
class SevenSegmentClock(SevenSegmentPattern):
    """Box generator for a four-digit seven-segment LED clock housing."""

    description = 'You need a LED stripe that is wound through all segments in an S pattern and then continuing to the next digit while the stripe being upright on its side. Selecting *debug* gives a better idea how things fit together.\n\nAdding a diffuser on top or at the bottom of the segment holes will probably enhance the visuals. Just using paper may be enough.\n\nThere is currently not a lot of space for electronics and this generator is still untested. Good luck!\n'
    # Fix: the original assigned ui_group twice ('Misc' then 'Unstable');
    # the first assignment was dead code and has been removed.
    ui_group = 'Unstable'

    def __init__(self):
        # NOTE(review): calls Boxes.__init__ directly rather than
        # super().__init__() -- presumably intentional in this generator
        # framework; confirm before changing.
        Boxes.__init__(self)
        self.addSettingsArgs(edges.FingerJointSettings)
        self.argparser.add_argument('--height', action='store', type=float, default=100.0, help='height of the front panel (with walls if outside is selected) in mm')
        self.argparser.add_argument('--h', action='store', type=float, default=20.0, help='depth (with walls if outside is selected) in mm')
        self.buildArgParser(outside=False)

    def frontCB(self):
        """Callback drawing the front panel: two dot holes plus 2x2 digits."""
        x = self.height
        # Colon dots between the hour and minute digit pairs.
        self.hole((1.27 * x), (0.4 * x), (0.05 * x))
        self.hole((1.27 * x), (0.6 * x), (0.05 * x))
        self.moveTo((0.1 * x), (0.1 * x))
        for i in range(2):
            for j in range(2):
                self.seven_segments((0.8 * x))
                self.moveTo((0.6 * x))
            # Extra gap between the digit pairs (where the colon sits).
            self.moveTo((0.1 * x))

    def backCB(self):
        """Callback drawing the back panel: matching segment wiring holes."""
        x = self.height
        self.moveTo((0.1 * x), (0.1 * x))
        for i in range(2):
            for j in range(2):
                self.seven_segment_holes((0.8 * x))
                self.moveTo((0.6 * x))
            self.moveTo((0.1 * x))

    def render(self):
        """Emit all walls of the clock box plus the segment separators."""
        (height, h) = (self.height, self.h)
        if self.outside:
            height = self.height = self.adjustSize(height)
            h = self.h = self.adjustSize(h)
        t = self.thickness
        # Overall width: four digits with gaps plus the trailing segment span.
        y = (((((3 * 0.6) + 0.1) + 0.2) * height) + ((0.55 * 0.8) * height))
        self.rectangularWall(height, h, 'FFFF', move='right')
        self.rectangularWall(y, h, 'FfFf', move='up')
        self.rectangularWall(y, h, 'FfFf')
        self.rectangularWall(height, h, 'FFFF', move='left up')
        with self.saved_context():
            self.rectangularWall(y, height, 'ffff', callback=[self.frontCB], move='right')
            self.rectangularWall(y, height, 'ffff', callback=[self.backCB], move='right')
        self.rectangularWall(y, height, 'ffff', move='up only')
        self.seven_segment_separators((0.8 * height), h, 4)
class TestAsyncExampleWeights():
    """Tests for the async example-weight configs and their computed weights.

    Fixes: the ``@pytest.mark`` decorator prefixes were missing (a bare
    ``.parametrize`` line is a SyntaxError), and ``assertEqual`` was called
    unqualified in a pytest-style (non-unittest) class; plain ``assert`` is
    used instead.
    """

    @pytest.mark.parametrize('example_weight_config, example_weight_class', AsyncExampleWeightsTestUtils.EXAMPLE_WEIGHT_TEST_CONFIGS)
    def test_string_conversion(self, example_weight_config: AsyncExampleWeightConfig, example_weight_class: ExampleWeight) -> None:
        """Instantiating a config must yield the matching weight class."""
        obj = instantiate(example_weight_config)
        assert obj.__class__ == example_weight_class

    @pytest.mark.parametrize('example_weight_config, example_weight_class', AsyncExampleWeightsTestUtils.EXAMPLE_WEIGHT_TEST_CONFIGS)
    @pytest.mark.parametrize('avg_num_examples', AsyncExampleWeightsTestUtils.AVG_NUMBER_OF_EXAMPLES)
    def test_example_weight_compute(self, example_weight_config: AsyncExampleWeightConfig, example_weight_class: ExampleWeight, avg_num_examples: int) -> None:
        """Computed weights must match the reference formula for random sizes."""
        max_num_examples = 10000
        for _ in range(10):
            num_examples = np.random.randint(1, max_num_examples)
            example_weight_config.avg_num_examples = avg_num_examples
            obj = instantiate(example_weight_config)
            assert obj.weight(num_examples) == AsyncExampleWeightsTestUtils.expected_weight(avg_num_examples=avg_num_examples, num_examples=num_examples, example_weight_class=example_weight_class)
class SpinnakerDns():
    """Manage Route 53 DNS records for a Spinnaker application's ELB."""

    def __init__(self, app=None, env=None, region=None, elb_subnet=None, prop_path=None):
        """Gather app details, properties and DNS TTL for later record updates.

        Args:
            app: Spinnaker application name.
            env: Deployment environment (account) name.
            region: AWS region of the ELB.
            elb_subnet: Subnet purpose used to pick internal/external zones.
            prop_path: Path to the rendered application properties file.
        """
        self.log = logging.getLogger(__name__)
        self.domain = DOMAIN
        self.env = env
        self.region = region
        self.elb_subnet = elb_subnet
        self.app = app
        self.generated = get_details(app, env=self.env, region=self.region)
        self.app_name = self.generated.app_name()
        self.properties = get_properties(properties_file=prop_path, env=self.env, region=self.region)
        self.dns_ttl = self.properties['dns']['ttl']
        self.header = {'content-type': 'application/json'}

    def create_elb_dns(self, regionspecific=False):
        """Create/update the application's ELB DNS record in every matching zone.

        Args:
            regionspecific: Use the region-qualified DNS name when True.

        Returns:
            The DNS record name that was updated.
        """
        if regionspecific:
            dns_elb = self.generated.dns()['elb_region']
        else:
            dns_elb = self.generated.dns()['elb']
        dns_elb_aws = find_elb(name=self.app_name, env=self.env, region=self.region)
        zone_ids = get_dns_zone_ids(env=self.env, facing=self.elb_subnet)
        self.log.info('Updating Application URL: %s', dns_elb)
        dns_kwargs = {'dns_name': dns_elb, 'dns_name_aws': dns_elb_aws, 'dns_ttl': self.dns_ttl}
        for zone_id in zone_ids:
            self.log.debug('zone_id: %s', zone_id)
            update_dns_zone_record(self.env, zone_id, **dns_kwargs)
        return dns_elb

    def create_failover_dns(self, primary_region='us-east-1'):
        """Create/update a failover (PRIMARY/SECONDARY) record for the global DNS name.

        The record is PRIMARY when the ELB's AWS DNS name contains
        ``primary_region``, SECONDARY otherwise.

        Returns:
            The global DNS record name that was updated.
        """
        dns_record = self.generated.dns()['global']
        zone_ids = get_dns_zone_ids(env=self.env, facing=self.elb_subnet)
        elb_dns_aws = find_elb(name=self.app_name, env=self.env, region=self.region)
        elb_dns_zone_id = find_elb_dns_zone_id(name=self.app_name, env=self.env, region=self.region)
        if (primary_region in elb_dns_aws):
            failover_state = 'PRIMARY'
        else:
            failover_state = 'SECONDARY'
        self.log.info('%s set as %s record', elb_dns_aws, failover_state)
        self.log.info('Updating Application Failover URL: %s', dns_record)
        dns_kwargs = {'dns_name': dns_record, 'elb_dns_zone_id': elb_dns_zone_id, 'elb_aws_dns': elb_dns_aws, 'dns_ttl': self.dns_ttl, 'failover_state': failover_state}
        for zone_id in zone_ids:
            self.log.debug('zone_id: %s', zone_id)
            update_failover_dns_record(self.env, zone_id, **dns_kwargs)
        return dns_record
def create_file_csv(userCheckIns):
    """Build CSV rows (header row first) for a list of check-in records.

    Each check-in contributes one row with its ticket id, formatted check-in
    time, track/session/speaker names and the type of station used.
    """
    rows = [['Ticket Id', 'Date Time', 'Track Name', 'Session Name', 'Speaker Name', 'Type']]
    for check_in in userCheckIns:
        rows.append([
            check_in.ticket_holder_id,
            # Spreadsheet-friendly timestamp format.
            check_in.created_at.strftime('%Y-%m-%d %H:%M:%S'),
            check_in.track_name,
            check_in.session_name,
            check_in.speaker_name,
            # 'Type' comes from the station the user checked in at.
            check_in.station.station_type,
        ])
    return rows
class Technology(Base):
    """ORM model: a technology belonging to a country, with completion state."""
    __tablename__ = 'technology'
    technology_id = Column(Integer, primary_key=True)
    # Owning country; indexed for per-country technology queries.
    country_id = Column(ForeignKey(Country.country_id), index=True)
    # Points at the shared description row holding the technology's name id.
    technology_name_id = Column(ForeignKey(SharedDescription.description_id))
    is_completed = Column(Boolean, index=True, default=False)
    db_description = relationship('SharedDescription')
    country = relationship('Country', back_populates='technologies')

    def name(self):
        """Return the human-readable technology name resolved from its description id."""
        return game_info.convert_id_to_name(self.db_description.text)
def set_my_commands(token, commands, scope=None, language_code=None):
    """Register the bot command list via the Telegram ``setMyCommands`` method.

    Optional ``scope`` (serialized to JSON) and ``language_code`` are only
    included in the payload when provided and truthy.
    """
    payload = {'commands': _convert_list_json_serializable(commands)}
    if scope:
        payload['scope'] = scope.to_json()
    if language_code:
        payload['language_code'] = language_code
    return _make_request(token, 'setMyCommands', params=payload, method='post')
def process_value(setting_info, colors):
    """Encode a color or rgbgradient setting into the device byte payload.

    ``colors`` may be a color tuple/list, a color string, an rgbgradient dict
    or an rgbgradient string; static colors are encoded as a one-stop
    gradient. Returns the header bytes followed by the gradient body.

    Raises:
        ValueError: on an unrecognized color spec, an empty gradient, or more
            than 14 color stops.
    """
    # Layout of the device header comes from the per-setting metadata.
    header_length = setting_info['rgbgradient_header']['header_length']
    led_id_offsets = setting_info['rgbgradient_header']['led_id_offsets']
    duration_offset = setting_info['rgbgradient_header']['duration_offset']
    duration_length = setting_info['rgbgradient_header']['duration_length']
    repeat_offset = setting_info['rgbgradient_header']['repeat_offset']
    triggers_offset = setting_info['rgbgradient_header']['triggers_offset']
    color_count_offset = setting_info['rgbgradient_header']['color_count_offset']
    is_gradient = False
    duration = _default_duration
    repeat = 0
    triggers = 0
    led_id = setting_info['led_id']
    gradient = []
    if isinstance(colors, (tuple, list)):
        is_gradient = False
        gradient = _handle_color_tuple(colors)
    elif (isinstance(colors, str) and is_color(colors)):
        is_gradient = False
        gradient = _handle_color_string(colors)
    elif isinstance(colors, dict):
        is_gradient = True
        (duration, gradient) = _handle_rgbgradient_dict(colors)
    elif is_rgbgradient(colors)[0]:
        is_gradient = True
        (duration, gradient) = _handle_rgbgradient_string(colors)
    else:
        raise ValueError(('Not a valid color or rgbgradient %s' % str(colors)))
    # Static colors play the "animation" once so the color just sticks.
    # NOTE(review): ``triggers`` is always 0 here, so the second condition is
    # currently dead -- presumably kept for future trigger support; confirm.
    if ((not is_gradient) or (triggers != 0)):
        repeat = 1
    if (len(gradient) == 0):
        raise ValueError(('no color: %s' % str(colors)))
    if (len(gradient) > 14):
        raise ValueError('a maximum of 14 color stops are allowed')
    header = ([0] * header_length)
    header[repeat_offset] = repeat
    header[triggers_offset] = triggers
    header[color_count_offset] = len(gradient)
    # The LED id may need to appear at several positions in the header.
    for led_id_offset in led_id_offsets:
        header[led_id_offset] = led_id
    # Duration is stored little-endian over ``duration_length`` bytes.
    offset = 0
    for b in uint_to_little_endian_bytearray(duration, duration_length):
        header[(duration_offset + offset)] = b
        offset += 1
    # Body starts with the first stop's color, then per-stop deltas: each
    # stop's position (percent) is rescaled to 0-255 and encoded as the
    # distance from the previous stop.
    body = list(gradient[0]['color'])
    last_real_pos = 0
    for (pos, color) in [(item['pos'], item['color']) for item in gradient]:
        real_pos = int(((pos * 255) / 100))
        body = merge_bytes(body, color, (real_pos - last_real_pos))
        last_real_pos = real_pos
    return merge_bytes(header, body)
def extractPhosphorescesWordpressCom(item):
    """Map a phosphoresces.wordpress.com feed item to a release message.

    Returns None for preview posts or items without chapter/volume info,
    a release message for recognized tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('ilybf', 'Ive Liked Your Boyfriend for A Long Time', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@pytest.fixture(name='max_abs_diff_analytic')
def fixture_max_abs_diff_analytic(bottom, top, quadratic_params, quadratic_density, straight_line_analytic):
    """Fixture: (radius, diff) where the quadratic profile deviates most from the line.

    Fix: the original had only the bare ``(name='max_abs_diff_analytic')``
    argument list -- the ``@pytest.fixture`` decorator prefix had been
    stripped, leaving a SyntaxError.
    """
    (factor, vertex_radius, _) = quadratic_params
    (density_bottom, density_top) = (quadratic_density(bottom), quadratic_density(top))
    # Slope of the chord joining the endpoint densities.
    slope = ((density_top - density_bottom) / (top - bottom))
    # A parabola deviates most from its chord where the derivatives match:
    # 2 * factor * (r - vertex_radius) = slope.
    radius_split = (((0.5 * slope) / factor) + vertex_radius)
    max_diff = np.abs((quadratic_density(radius_split) - straight_line_analytic(radius_split)))
    return (radius_split, max_diff)
@pytest.mark.parametrize('center, ref_candidates', ((5, (8, 7, 0)), (4, (12, 13, 3)), (0, (6, 1, 5))))
def test_find_candidates(center, ref_candidates, geom):
    """find_candidates must return the expected neighbor set for each center atom.

    Fix: the ``@pytest.mark`` decorator prefix was missing (a bare
    ``.parametrize`` line is a SyntaxError).
    """
    # Bonds as sets so candidate comparison is order-independent.
    bond_sets = [set(bond) for bond in find_bonds(geom.atoms, geom.coords3d).tolist()]
    candidates = find_candidates(center, bond_sets)
    assert (set(candidates) == set(ref_candidates))
class Modals():
    """Factory methods building modal / popup components bound to a page UI."""

    def __init__(self, ui):
        # Keep a handle on the page that owns every created component.
        self.page = ui.page

    def forms(self, components: html.Html.Html, action: str, method: str, header=None, footer=None, helper: types.HELPER_TYPE=None) -> html.HtmlContainer.Modal:
        """Return a modal wrapping *components* inside a form submitting to *action*."""
        if (not (type(components) == list)):
            components = [components]
        form = html.HtmlContainer.Form(self.page, components, helper)
        form.submit(method, action)
        modal = html.HtmlContainer.Modal(self.page, [], header, footer, False, helper)
        modal += form
        modal.form = form
        html.Html.set_component_skin(modal)
        return modal

    def disclaimer(self, disc_list, header=None, footer=None, submit: bool=True, validation_text: str='AGREE', action: str=None, add_buttons=None, helper: types.HELPER_TYPE=None) -> html.HtmlContainer.Modal:
        """Return a disclaimer modal with an optional agree button row."""
        for obj in disc_list:
            obj.css({'margin': '40px', 'width': 'auto', 'text-align': 'justify'})
        modal = html.HtmlContainer.Modal(self.page, disc_list, header, footer, False, helper)
        modal.col.css({'width': '450px', 'height': '700px'})
        if (add_buttons or submit):
            submit_row = (self.page.ui.row([]) if (not add_buttons) else self.page.ui.row(add_buttons))
            if submit:
                submit_btn = self.page.ui.buttons.important(validation_text)
                if action:
                    submit_btn.click(action)
                else:
                    # NOTE(review): modal.close() is invoked here, so its return
                    # value becomes the click handler -- presumably it returns a
                    # JS snippet; confirm against the Modal API.
                    submit_btn.click(modal.close())
                # ``+`` appends a component into a row / column container.
                (submit_row + submit_btn)
            (modal.col + submit_row)
        html.Html.set_component_skin(modal)
        return modal

    def dialog(self, text, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(20, 'px'), html_code: str=None, helper: types.HELPER_TYPE=None, options: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlEvent.Dialog:
        """Return a simple dialog component showing *text*."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        html_pr = html.HtmlEvent.Dialog(self.page, text, width, height, helper, (options or {}), html_code, profile)
        html.Html.set_component_skin(html_pr)
        return html_pr

    def icon(self, components: List[html.Html.Html]=None, icon: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), options: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlPopup.Popup:
        """Return a popup with a centered icon header and an 'Ok' button."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dfl_options = {'margin': 10, 'closure': False, 'top': 100, 'escape': False}
        if (options is not None):
            dfl_options.update(options)
        if (not isinstance(components, list)):
            components = [components]
        if (icon is not None):
            icon_success = self.page.ui.icon(icon)
            icon_success.style.css.font_size = 50
            icon_success.style.css.margin_bottom = 20
            icon_success.style.css.margin_top = 10
            success_div = self.page.ui.div(icon_success)
            success_div.style.css.text_align = 'center'
            # Icon goes above all supplied components.
            components.insert(0, success_div)
        acknowledgement = self.page.ui.button('Ok', align='center', options=dfl_options.get('button', {}))
        acknowledgement.style.css.margin_top = 10
        components.append(acknowledgement)
        popup = html.HtmlPopup.Popup(self.page, components, width, height, dfl_options, profile)
        popup.acknowledgement = acknowledgement
        # Clicking 'Ok' simply hides the popup.
        acknowledgement.click([popup.dom.hide()])
        html.Html.set_component_skin(popup)
        return popup

    def validation(self, components: List[html.Html.Html]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), options: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlPopup.Popup:
        """Return a popup exposing Validate / Cancel buttons (as .validate / .cancel)."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        icon_details = self.page.icons.get('close')
        dfl_options = {'margin': 10, 'closure': icon_details['icon'], 'top': 100}
        if (options is not None):
            dfl_options.update(options)
        if (not isinstance(components, list)):
            components = [components]
        validate = self.page.ui.buttons.validate('Validate')
        cancel = self.page.ui.buttons.cancel()
        row = self.page.ui.row([validate, cancel], position='top', align='center')
        row.options.autoSize = False
        components.append(row)
        popup = html.HtmlPopup.Popup(self.page, components, width, height, dfl_options, profile)
        popup.validate = validate
        popup.cancel = cancel
        cancel.click([popup.dom.hide()])
        html.Html.set_component_skin(popup)
        return popup

    def acknowledge(self, components: List[html.Html.Html]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), options: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlPopup.Popup:
        """Return a popup closed by a single centered 'Ok' button."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        dfl_options = {'margin': 10, 'closure': False, 'top': 100, 'escape': False}
        if (options is not None):
            dfl_options.update(options)
        if (not isinstance(components, list)):
            components = [components]
        acknowledgement = self.page.ui.button('Ok', align='center')
        components.append(acknowledgement)
        popup = html.HtmlPopup.Popup(self.page, components, width, height, dfl_options, profile)
        popup.acknowledgement = acknowledgement
        acknowledgement.click([popup.dom.hide()])
        html.Html.set_component_skin(popup)
        return popup

    def popup(self, components: List[html.Html.Html]=None, title: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), options: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlPopup.Popup:
        """Return a generic popup, optionally with a title component."""
        width = Arguments.size(width, unit='%')
        height = Arguments.size(height, unit='px')
        icon_details = self.page.icons.get('close')
        dfl_options = {'margin': 10, 'closure': icon_details['icon'], 'top': 100}
        if (options is not None):
            dfl_options.update(options)
        popup = html.HtmlPopup.Popup(self.page, components, width, height, dfl_options, profile)
        if (title is not None):
            popup.add_title(title)
        html.Html.set_component_skin(popup)
        return popup

    def error(self, components: List[html.Html.Html]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), options: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlPopup.Popup:
        """Return an icon popup styled with the theme's danger colors."""
        dfl_options = {'button': {'category': 'delete'}}
        if (options is not None):
            dfl_options.update(options)
        popup = self.icon(components=components, icon='error', width=width, height=height, options=dfl_options, profile=profile)
        popup.window.style.css.border = ('3px solid %s' % self.page.theme.danger.light)
        popup.container[0].style.css.color = self.page.theme.danger.base
        html.Html.set_component_skin(popup)
        return popup

    def info(self, components: List[html.Html.Html]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), options: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlPopup.Popup:
        """Return an icon popup using the question icon."""
        popup = self.icon(components=components, icon='question', width=width, height=height, options=options, profile=profile)
        html.Html.set_component_skin(popup)
        return popup

    def success(self, components: List[html.Html.Html]=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), options: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlPopup.Popup:
        """Return an icon popup styled with the theme's success colors."""
        popup = self.icon(components=components, icon='check', width=width, height=height, options=options, profile=profile)
        popup.window.style.css.border = ('3px solid %s' % self.page.theme.success.light)
        popup.container[0].style.css.color = self.page.theme.success.base
        html.Html.set_component_skin(popup)
        return popup

    def loading(self, text: str='', width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), options: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlPopup.Popup:
        """Return a spinner popup whose text can be updated via popup.build."""
        component = self.page.ui.text(text)
        popup = self.icon(components=[component], icon='fas fa-spinner fa-pulse', width=width, height=height, options=options, profile=profile)
        popup.window.style.css.border = ('3px solid %s' % self.page.theme.success.light)
        popup.container[0].style.css.color = self.page.theme.success.base
        popup.text = component

        def build_text(data=None, options: dict=None, profile: types.PROFILE_TYPE=False):
            # Delegate popup.build to the inner text component so callers can
            # refresh the loading message.
            return component.build(data, options, profile)
        popup.build = build_text
        html.Html.set_component_skin(popup)
        return popup

    def stepper(self, records=None, components: List[html.Html.Html]=None, shape: str='arrow', title: str=None, width: types.SIZE_TYPE=(100, '%'), height: types.SIZE_TYPE=(None, 'px'), options: dict=None, profile: types.PROFILE_TYPE=None) -> html.HtmlPopup.Popup:
        """Return a popup embedding a stepper component of the given shape."""
        if (components is not None):
            if (not isinstance(components, list)):
                components = [components]
        else:
            components = []
        # ``shape`` selects the stepper factory (e.g. 'arrow') on the UI object.
        stepper = getattr(self.page.ui.steppers, shape)(records)
        stepper.style.css.inline_block()
        stepper.style.css.margin = 'auto'
        stepper.style.css.width = 'auto'
        stepper.options.line = False
        components.insert(0, self.page.ui.div([stepper], align='center'))
        if (title is not None):
            title = self.page.ui.title(title)
            components.insert(0, title)
        popup = self.popup(components=components, width=width, height=height, options=options, profile=profile)
        popup.title = title
        popup.window.style.css.min_width = 'auto'
        popup.window.style.css.width = 'auto'
        popup.stepper = stepper
        html.Html.set_component_skin(popup)
        return popup
class Keyboard(BaseKeyboard):
    """Dispatching wrapper that morphs into the backend keyboard class.

    Based on the configured display type, the instance's class is replaced by
    the matching backend implementation and that backend's __init__ is run.
    """

    def __init__(self, disptype=settings.DISPTYPE, **args):
        if (disptype == 'pygame'):
            from pygaze._keyboard.pygamekeyboard import PyGameKeyboard as Keyboard
        elif (disptype == 'psychopy'):
            from pygaze._keyboard.psychopykeyboard import PsychoPyKeyboard as Keyboard
        elif (disptype == 'opensesame'):
            from pygaze._keyboard.oskeyboard import OSKeyboard as Keyboard
        else:
            # Fix: the original used "'%s'.format(disptype)", mixing %-style
            # with str.format, so the message was never interpolated.
            raise Exception('Unexpected disptype: {}'.format(disptype))
        # Morph this instance into the selected backend class and initialize it.
        self.__class__ = Keyboard
        self.__class__.__init__(self, **args)
        # Propagate documentation from the abstract base onto the backend.
        copy_docstr(BaseKeyboard, Keyboard)
class IsCloseTests(unittest.TestCase):
isclose = staticmethod(isclose)
def assertIsClose(self, a, b, *args, **kwargs):
self.assertTrue(self.isclose(a, b, *args, **kwargs), msg=('%s and %s should be close!' % (a, b)))
def assertIsNotClose(self, a, b, *args, **kwargs):
self.assertFalse(self.isclose(a, b, *args, **kwargs), msg=('%s and %s should not be close!' % (a, b)))
def assertAllClose(self, examples, *args, **kwargs):
for (a, b) in examples:
self.assertIsClose(a, b, *args, **kwargs)
def assertAllNotClose(self, examples, *args, **kwargs):
for (a, b) in examples:
self.assertIsNotClose(a, b, *args, **kwargs)
def test_negative_tolerances(self):
with self.assertRaises(ValueError):
self.assertIsClose(1, 1, rel_tol=(- 1e-100))
with self.assertRaises(ValueError):
self.assertIsClose(1, 1, rel_tol=1e-100, abs_tol=(- .0))
def test_identical(self):
identical_examples = [(2.0, 2.0), (1e+199, 1e+199), (1.123e-300, 1.123e-300), (12345, 12345.0), (0.0, (- 0.0)), (345678, 345678)]
self.assertAllClose(identical_examples, rel_tol=0.0, abs_tol=0.0)
def test_eight_decimal_places(self):
eight_decimal_places_examples = [(.0, (.0 + 1)), ((- 1e-08), (- 1.e-08)), (1., 1.)]
self.assertAllClose(eight_decimal_places_examples, rel_tol=1e-08)
self.assertAllNotClose(eight_decimal_places_examples, rel_tol=1e-09)
def test_near_zero(self):
near_zero_examples = [(1e-09, 0.0), ((- 1e-09), 0.0), ((- 1e-150), 0.0)]
self.assertAllNotClose(near_zero_examples, rel_tol=0.9)
self.assertAllClose(near_zero_examples, abs_tol=1e-08)
def test_identical_infinite(self):
self.assertIsClose(INF, INF)
self.assertIsClose(INF, INF, abs_tol=0.0)
self.assertIsClose(NINF, NINF)
self.assertIsClose(NINF, NINF, abs_tol=0.0)
def test_inf_ninf_nan(self):
not_close_examples = [(NAN, NAN), (NAN, 1e-100), (1e-100, NAN), (INF, NAN), (NAN, INF), (INF, NINF), (INF, 1.0), (1.0, INF), (INF, 1e+308), (1e+308, INF)]
self.assertAllNotClose(not_close_examples, abs_tol=0.)
def test_zero_tolerance(self):
zero_tolerance_close_examples = [(1.0, 1.0), ((- 3.4), (- 3.4)), ((- 1e-300), (- 1e-300))]
self.assertAllClose(zero_tolerance_close_examples, rel_tol=0.0)
zero_tolerance_not_close_examples = [(1.0, 1.), (0., 1.0), (1e+200, 9.e+199)]
self.assertAllNotClose(zero_tolerance_not_close_examples, rel_tol=0.0)
def test_assymetry(self):
self.assertAllClose([(9, 10), (10, 9)], rel_tol=0.1)
def test_integers(self):
integer_examples = [(, ), (, )]
self.assertAllClose(integer_examples, rel_tol=1e-08)
self.assertAllNotClose(integer_examples, rel_tol=1e-09)
def test_decimals(self):
from decimal import Decimal
decimal_examples = [(Decimal('1.'), Decimal('1.0')), (Decimal('1.e-20'), Decimal('1.0e-20')), (Decimal('1.e-100'), Decimal('1.0e-100'))]
self.assertAllClose(decimal_examples, rel_tol=1e-08)
self.assertAllNotClose(decimal_examples, rel_tol=1e-09)
def test_fractions(self):
from fractions import Fraction
fraction_examples = [((Fraction(1, ) + 1), Fraction(1))]
self.assertAllClose(fraction_examples, rel_tol=1e-08)
self.assertAllNotClose(fraction_examples, rel_tol=1e-09) |
class DbtRunResult():
    """Thin wrapper exposing the results of a dbt ``RunResultsArtifact``.

    Fix: the original contained a bare ``(details='Use native_run_result
    instead')`` line -- the remnants of a stripped deprecation decorator --
    which is a SyntaxError; it is removed and the deprecation recorded in the
    docstring instead.
    """
    # Parsed run-results artifact; may be falsy when dbt produced none.
    native_run_result: Optional[RunResultsArtifact]

    def nativeRunResult(self):
        """Deprecated: use ``native_run_result`` directly instead."""
        return self.native_run_result

    def results(self) -> Sequence[RunResultOutput]:
        """Return the individual run results, or [] when there is no artifact."""
        if self.native_run_result:
            return self.native_run_result.results
        else:
            return []
def prepare_message(caller: Address, target: Union[(Bytes0, Address)], value: U256, data: Bytes, gas: Uint, env: Environment, code_address: Optional[Address]=None, should_transfer_value: bool=True, is_static: bool=False, preaccessed_addresses: FrozenSet[Address]=frozenset(), preaccessed_storage_keys: FrozenSet[Tuple[(Address, Bytes32)]]=frozenset()) -> Message:
    """Build an EVM ``Message`` for either a contract creation or a call.

    An empty target (``Bytes0``) means contract creation: the new contract
    address is derived from the caller and its nonce, and ``data`` becomes
    the init code. A 20-byte ``Address`` target means a plain call whose code
    is loaded from the target account.

    Raises:
        AssertionError: if ``target`` is neither ``Bytes0`` nor ``Address``.
    """
    if isinstance(target, Bytes0):
        # Creation: the caller's nonce was already incremented before this
        # point, hence the -1 to recover the nonce used for address derivation.
        current_target = compute_contract_address(caller, (get_account(env.state, caller).nonce - U256(1)))
        msg_data = Bytes(b'')
        code = data
    elif isinstance(target, Address):
        current_target = target
        msg_data = data
        code = get_account(env.state, target).code
        if (code_address is None):
            code_address = target
    else:
        raise AssertionError('Target must be address or empty bytes')
    # Warm-access set: target, caller, all precompiles and any addresses
    # pre-declared by the transaction (presumably the EIP-2929 warm set --
    # confirm against the fork's access-list rules).
    accessed_addresses = set()
    accessed_addresses.add(current_target)
    accessed_addresses.add(caller)
    accessed_addresses.update(PRE_COMPILED_CONTRACTS.keys())
    accessed_addresses.update(preaccessed_addresses)
    return Message(caller=caller, target=target, gas=gas, value=value, data=msg_data, code=code, depth=Uint(0), current_target=current_target, code_address=code_address, should_transfer_value=should_transfer_value, is_static=is_static, accessed_addresses=accessed_addresses, accessed_storage_keys=set(preaccessed_storage_keys), parent_evm=None)
@pytest.mark.parametrize('number_of_realizations', [100, 200])
def test_that_update_for_a_linear_model_works_with_rowscaling(number_of_realizations):
    """Posterior means must converge toward the maximum-likelihood estimate
    as the observation error shrinks, with row scaling applied.

    Fix: the ``@pytest.mark`` decorator prefix was missing (a bare
    ``.parametrize`` line is a SyntaxError).
    """
    true_model = LinearModel(a_true, b_true)
    ensemble = [LinearModel.random() for _ in range(number_of_realizations)]
    A = np.array([[realization.a for realization in ensemble], [realization.b for realization in ensemble]])
    mean_prior = np.mean(A, axis=1)
    times = np.arange(number_of_observations)
    S = np.array([[realization.eval(t) for realization in ensemble] for t in times])
    observations = np.array([(true_model.eval(t) + np.random.normal(0.0, 1.0)) for t in times])
    # Closed-form least-squares fit of a line to the noisy observations.
    mean_observations = np.mean(observations)
    times_mean = np.mean(times)
    times_square_sum = sum(np.square(times))
    a_maximum_likelihood = (sum(((t * (observations[t] - mean_observations)) for t in times)) / (times_square_sum - (times_mean * sum(times))))
    b_maximum_likelihood = (mean_observations - (a_maximum_likelihood * times_mean))
    maximum_likelihood = np.array([a_maximum_likelihood, b_maximum_likelihood])
    previous_mean_posterior = mean_prior
    epsilon = 0.01
    # Decreasing observation error should pull the posterior monotonically
    # toward the maximum-likelihood solution.
    for error in [10000.0, 100.0, 10.0, 1.0, 0.1]:
        A = np.asfortranarray([[realization.a for realization in ensemble], [realization.b for realization in ensemble]])
        row_scaling = RowScaling()
        row_scaling[0] = 1.0
        row_scaling[1] = 0.7
        ((A_posterior, _),) = ensemble_smoother_update_step_row_scaling(Y=S, X_with_row_scaling=[(A, row_scaling)], covariance=np.full(observations.shape, error), observations=observations, seed=42)
        mean_posterior = np.mean(A_posterior, axis=1)
        assert np.all(((distance(mean_posterior, maximum_likelihood) - distance(mean_prior, maximum_likelihood)) < epsilon))
        assert np.all(((distance(mean_prior, mean_posterior) - distance(mean_prior, maximum_likelihood)) < epsilon))
        assert np.all(((distance(mean_posterior, maximum_likelihood) - distance(previous_mean_posterior, maximum_likelihood)) < epsilon))
        previous_mean_posterior = mean_posterior
def topic(request, topic_id):
    """Show a single topic and all its entries, newest first.

    Non-public topics are visible only to their owner (404 otherwise).
    The original compared ``request.user`` against ``topic.owner`` twice in
    two different branches; the duplicate comparison is collapsed.
    """
    topic = Topic.objects.get(id=topic_id)
    is_owner = (request.user == topic.owner)
    # Non-owners may only view public topics.
    if ((not is_owner) and (not topic.public)):
        raise Http404
    entries = topic.entry_set.order_by('-date_added')
    context = {'topic': topic, 'entries': entries, 'is_owner': is_owner}
    return render(request, 'learning_logs/topic.html', context)
class TreasureHuntHandler(THBEventHandler):
    """Before a fatetell stage, lets the Treasure Hunt holder repeatedly draw."""
    interested = ['action_before']
    execute_before = ['CiguateraHandler']

    def handle(self, evt_type, act):
        """Offer TreasureHuntAction in a loop while the player opts in and it succeeds."""
        if ((evt_type == 'action_before') and isinstance(act, FatetellStage)):
            tgt = act.target
            if (not tgt.has_skill(TreasureHunt)):
                return act
            g = self.game
            while True:
                # Stop as soon as the player declines the prompt...
                if (not g.user_input([tgt], ChooseOptionInputlet(self, (False, True)))):
                    return act
                # ...or the hunt action itself fails.
                if (not g.process_action(TreasureHuntAction(tgt, tgt))):
                    return act
        return act
@pytest.mark.parametrize('batch_size, drop_last', [(2, False), (8, False), (2, True), (8, True)])
def test_batch_sampler_array(dataset, batch_size, drop_last):
    """LengthBasedBatchSampler must cover the dataset and group by length.

    Fixes: the ``@pytest.mark`` decorator prefix was missing; the coverage
    assertion's else-branch was a bare truthy ``len(dataset)`` (always
    passing); and the non-drop_last expected length used
    ``SAMPLES % batch_size`` instead of ceiling division.
    """
    sampler = LengthBasedBatchSampler(dataset, batch_size, drop_last)
    # Ceiling division when the last partial batch is kept.
    EXPECTED_LENGTH = ((SAMPLES // batch_size) if drop_last else ((SAMPLES // batch_size) + (1 if (SAMPLES % batch_size) else 0)))
    all_ids = [i for b in sampler for i in b]
    # Every sample appears exactly once (minus any dropped tail).
    assert (len(set(all_ids)) == ((EXPECTED_LENGTH * batch_size) if drop_last else len(dataset)))
    assert (len(sampler) == EXPECTED_LENGTH)
    # Each batch must be homogeneous: all long samples or all short ones.
    is_long = [(len(d) >= 10) for d in dataset]

    def check_batch(batch):
        return (all(batch) or (not any(batch)))
    assert all((check_batch((is_long[i] for i in b)) for b in sampler))
class ArrayMeta(Meta):
def __getitem__(self, parameters):
if (not isinstance(parameters, tuple)):
parameters = (parameters,)
dtype = None
ndim = None
memview = False
mem_layout = MemLayout.C_or_F
shape = None
positive_indices = False
params_filtered = []
for param in parameters:
if (param is None):
continue
if isinstance(param, (Type, type, np.dtype)):
if (dtype is not None):
raise ValueError('Array should be defined with only one variable defining the types. For more than one type, use for example Type(float, int)')
if isinstance(param, np.dtype):
param = param.type
dtype = param
if isinstance(param, NDim):
if (ndim is not None):
raise ValueError('Array should be defined with only one NDim. For more than one dimension, use for example NDim(2, 3).')
ndim = param
if (isinstance(param, str) and (param[(- 1)] == 'd') and param[:(- 1)].isnumeric()):
try:
tmp = int(param[:(- 1)])
except ValueError:
pass
else:
if (ndim is not None):
raise ValueError('Array should be defined with only one string fixing the number of dimension. Use for example NDim(2, 3).')
param = ndim = NDim(tmp, name_calling_module=get_name_calling_module())
if isinstance(param, str):
param = param.strip()
if (param == 'memview'):
memview = True
continue
if (param == 'positive_indices'):
positive_indices = True
continue
if (param.startswith('[') and param.endswith(']')):
shape = str2shape(param)
continue
try:
mem_layout = MemLayout[param]
continue
except KeyError:
pass
raise ValueError(f'{param} cannot be interpretted...')
params_filtered.append(param)
if (shape is not None):
if (ndim is None):
ndim = NDim(len(shape), name_calling_module=get_name_calling_module())
params_filtered.append(ndim)
elif (ndim != len(shape)):
raise ValueError('ndim != len(shape)')
if (not any(shape)):
shape = None
if (dtype is None):
raise ValueError('No way to determine the dtype of the array')
if (ndim is None):
raise ValueError('No way to determine the ndim of the array')
parameters = {p.__name__: p for p in params_filtered}
assert isinstance(ndim, NDim)
if hasattr(dtype, 'short_repr'):
dtype_name = dtype.short_repr()
else:
dtype_name = dtype.__name__
return type(f'Array_{dtype_name}_{ndim.short_repr()}', (Array,), {'dtype': dtype, 'ndim': ndim, 'parameters': parameters, 'memview': memview, 'mem_layout': mem_layout, 'shape': shape, 'positive_indices': positive_indices})
def get_parameters(self):
    """Return this type's template-parameter mapping, or an empty dict when
    no ``parameters`` attribute has been attached yet."""
    return getattr(self, "parameters", {})
def get_template_parameters(self):
    """Return only the TemplateVar entries among this type's parameters, as a tuple."""
    candidates = self.get_parameters().values()
    return tuple(candidate for candidate in candidates if isinstance(candidate, TemplateVar))
def __repr__(self):
    """Render the type as ``Array[...]``, listing template parameters plus any
    shape / memview / memory-layout / positive-indices modifiers."""
    if (not hasattr(self, 'parameters')):
        # Unparameterized base type: fall back to the default repr.
        return super().__repr__()
    if (self.shape is not None):
        # A fixed shape already determines the number of dimensions,
        # so the NDim parameter is omitted from the display.
        parameters = [param for param in self.parameters.values() if (not isinstance(param, NDim))]
    else:
        parameters = self.parameters.values()
    strings = []
    for p in parameters:
        if isinstance(p, type):
            # Plain Python types print as their bare name (e.g. 'int').
            string = p.__name__
        else:
            string = repr(p)
        strings.append(string)
    if (self.shape is not None):
        strings.append(shape2str(self.shape))
    if self.memview:
        strings.append('"memview"')
    if (self.mem_layout is not MemLayout.C_or_F):
        # Only non-default memory layouts are shown.
        strings.append(repr(self.mem_layout))
    if self.positive_indices:
        strings.append('"positive_indices"')
    return f"Array[{', '.join(strings)}]"
def format_as_backend_type(self, backend_type_formatter, **kwargs):
    """Resolve the concrete dtype and ndim for this array type and delegate
    code generation to ``backend_type_formatter.make_array_code``.

    Raises ValueError if either dtype or ndim cannot be determined, or if a
    kwarg names a parameter that is neither a Type nor an NDim.
    """
    dtype = ndim = None
    # First pass: use values already fixed on the template parameters.
    for var in self.parameters.values():
        if (isinstance(var, Type) and var.values):
            dtype = var.values[0]
        elif (isinstance(var, NDim) and var.values):
            ndim = var.values[0]
        elif isinstance(var, type):
            dtype = var
    # Second pass: kwargs override parameters of the same name.
    for (key, value) in kwargs.items():
        try:
            template_var = self.parameters[key]
        except KeyError:
            # kwargs unrelated to template parameters (e.g. 'memview') are ignored here.
            continue
        if isinstance(template_var, Type):
            dtype = value
        elif isinstance(template_var, NDim):
            # NDim variables may carry a constant shift (e.g. "N + 1").
            ndim = (value + template_var.shift)
        else:
            raise ValueError
    if ((dtype is None) or (ndim is None)):
        raise ValueError
    memview = kwargs.get('memview', self.memview)
    return backend_type_formatter.make_array_code(dtype, ndim, self.shape, memview, self.mem_layout, self.positive_indices)
class StartCLIParser():
    """Command-line parser for 'dagda.py start'.

    Parses server/MongoDB/Falco options from sys.argv[2:], validates them and
    exits the process with a non-zero status on invalid input.
    """

    def __init__(self):
        super(StartCLIParser, self).__init__()
        self.parser = DagdaStartParser(prog='dagda.py start', usage=start_parser_text)
        self.parser.add_argument('-d', '--debug', action='store_true')
        self.parser.add_argument('-s', '--server_host', type=str)
        self.parser.add_argument('-p', '--server_port', type=int)
        self.parser.add_argument('-m', '--mongodb_host', type=str)
        self.parser.add_argument('-mp', '--mongodb_port', type=int)
        self.parser.add_argument('--mongodb_ssl', action='store_true')
        self.parser.add_argument('--mongodb_user', type=str)
        self.parser.add_argument('--mongodb_pass', type=str)
        self.parser.add_argument('--falco_rules_file', type=argparse.FileType('r'))
        self.parser.add_argument('-ef', '--external_falco', type=argparse.FileType('r'))
        (self.args, self.unknown) = self.parser.parse_known_args(sys.argv[2:])
        status = self.verify_args(self.args)
        if status != 0:
            # sys.exit instead of the interactive-only exit() builtin.
            sys.exit(status)

    # -- Simple getters with defaults ---------------------------------------

    def is_debug_logging_required(self):
        return self.args.debug

    def get_server_host(self):
        return self.args.server_host if self.args.server_host is not None else '127.0.0.1'

    def get_server_port(self):
        return self.args.server_port if self.args.server_port is not None else 5000

    def get_mongodb_host(self):
        return self.args.mongodb_host if self.args.mongodb_host is not None else '127.0.0.1'

    def get_mongodb_port(self):
        return self.args.mongodb_port if self.args.mongodb_port is not None else 27017

    def is_mongodb_ssl_enabled(self):
        return self.args.mongodb_ssl

    def get_mongodb_user(self):
        return self.args.mongodb_user

    def get_mongodb_pass(self):
        return self.args.mongodb_pass

    def get_falco_rules_filename(self):
        if self.args.falco_rules_file is None:
            return None
        return self.args.falco_rules_file.name

    def get_external_falco_output_filename(self):
        if self.args.external_falco is None:
            return None
        return self.args.external_falco.name

    @staticmethod
    def verify_args(args):
        """Validate the parsed arguments; return 0 on success, else an error code 1-6.

        Bug fix: this method took no 'self' yet was called as
        'self.verify_args(self.args)', which raised TypeError; it is now a
        proper @staticmethod.
        """
        if args.server_port and args.server_port not in range(1, 65536):
            DagdaLogger.get_logger().error('Argument -p/--server_port: The port must be between 1 and 65535.')
            return 1
        elif args.mongodb_port and args.mongodb_port not in range(1, 65536):
            DagdaLogger.get_logger().error('Argument -mp/--mongodb_port: The port must be between 1 and 65535.')
            return 2
        elif args.mongodb_user and not args.mongodb_pass:
            DagdaLogger.get_logger().error('Argument --mongodb_pass: this argument should not be empty if you set "--mongodb_user".')
            return 3
        elif args.mongodb_pass and not args.mongodb_user:
            DagdaLogger.get_logger().error('Argument --mongodb_user: this argument should not be empty if you set "--mongodb_pass".')
            return 4
        elif args.falco_rules_file and not args.external_falco:
            with args.falco_rules_file as content_file:
                try:
                    yaml.safe_load(content_file.read())
                except yaml.YAMLError:
                    # Narrowed from a bare 'except:' which also swallowed
                    # KeyboardInterrupt/SystemExit.
                    DagdaLogger.get_logger().error('Argument --falco_rules_file: Malformed yaml file.')
                    return 5
        elif args.falco_rules_file and args.external_falco:
            DagdaLogger.get_logger().error(('Argument --external_falco: this argument is not compatible with ' + '--falco_rules_file.'))
            return 6
        return 0
class OptionSeriesBarSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
    """Generated option accessors for the note-duration mapping of a bar
    series' sonification default instrument.

    NOTE(review): each name below is defined twice (getter then setter);
    without @property / @<name>.setter decorators the second def simply
    shadows the first — the decorators were presumably lost in extraction,
    confirm against the upstream/generated source.
    """

    def mapFunction(self):
        # Getter: current mapFunction option value (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store the raw value (not serialized as JS).
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class ReceiveView(QWidget):
    """Wallet 'Receive' tab: a payment-request form with QR code plus the list
    of existing payment requests for one account."""

    # Detached QR window; created lazily in _toggle_qr_window.
    _qr_window: Optional[QR_Window] = None

    def __init__(self, main_window: 'ElectrumWindow', account_id: int) -> None:
        super().__init__(main_window)
        # weakref.proxy avoids a reference cycle with the owning main window.
        self._main_window = weakref.proxy(main_window)
        self._account_id = account_id
        self._account = main_window._wallet.get_account(account_id)
        self._logger = logs.get_logger(f'receive-view[{self._account_id}]')
        # Key instance currently used as the receiving destination (None until allocated).
        self._receive_key_id: Optional[int] = None
        self._request_list_toolbar_layout = TableTopButtonLayout()
        self._request_list_toolbar_layout.refresh_signal.connect(self._main_window.refresh_wallet_display)
        self._request_list_toolbar_layout.filter_signal.connect(self._filter_request_list)
        form_layout = self.create_form_layout()
        self._request_list = RequestList(self, main_window)
        request_container = self.create_request_list_container()
        vbox = QVBoxLayout(self)
        vbox.addLayout(form_layout)
        vbox.addSpacing(20)
        vbox.addWidget(request_container, 1)
        self.setLayout(vbox)

    def clean_up(self) -> None:
        """Release child resources (QR widget and detached QR window) on teardown."""
        if (self._receive_qr is not None):
            self._receive_qr.clean_up()
        if (self._qr_window is not None):
            self._qr_window.close()

    def create_form_layout(self) -> QHBoxLayout:
        """Build the payment-request form (destination, description, amount,
        expiry, buttons) with the QR widget to its right."""
        grid = QGridLayout()
        grid.setSpacing(8)
        grid.setColumnStretch(3, 1)
        self._receive_destination_e = ButtonsLineEdit()
        self._receive_destination_e.addCopyButton(app_state.app)
        self._receive_destination_e.setReadOnly(True)
        msg = _('Bitcoin SV payment destination where the payment should be received. Note that each payment request uses a different Bitcoin SV payment destination.')
        receive_address_label = HelpLabel(_('Receiving destination'), msg)
        self._receive_destination_e.textChanged.connect(self._update_receive_qr)
        self._receive_destination_e.setFocusPolicy(Qt.NoFocus)
        grid.addWidget(receive_address_label, 0, 0)
        grid.addWidget(self._receive_destination_e, 0, 1, 1, (- 1))
        self._receive_message_e = QLineEdit()
        grid.addWidget(QLabel(_('Description')), 1, 0)
        grid.addWidget(self._receive_message_e, 1, 1, 1, (- 1))
        self._receive_message_e.textChanged.connect(self._update_receive_qr)
        self._receive_amount_e = BTCAmountEdit()
        grid.addWidget(QLabel(_('Requested amount')), 2, 0)
        grid.addWidget(self._receive_amount_e, 2, 1)
        self._receive_amount_e.textChanged.connect(self._update_receive_qr)
        self._fiat_receive_e = AmountEdit((app_state.fx.get_currency if app_state.fx else ''))
        if ((not app_state.fx) or (not app_state.fx.is_enabled())):
            # Fiat entry only shown when exchange-rate support is active.
            self._fiat_receive_e.setVisible(False)
        grid.addWidget(self._fiat_receive_e, 2, 2, Qt.AlignLeft)
        self._main_window.connect_fields(self._receive_amount_e, self._fiat_receive_e)
        self._expires_combo = QComboBox()
        self._expires_combo.addItems([i[0] for i in expiration_values])
        self._expires_combo.setCurrentIndex(3)
        self._expires_combo.setFixedWidth(self._receive_amount_e.width())
        msg = ' '.join([_('Expiration date of your request.'), _('This information is seen by the recipient if you send them a signed payment request.'), _('Expired requests have to be deleted manually from your list, in order to free the corresponding Bitcoin SV addresses.'), _('The Bitcoin SV address never expires and will always be part of this ElectrumSV wallet.')])
        grid.addWidget(HelpLabel(_('Request expires'), msg), 3, 0)
        grid.addWidget(self._expires_combo, 3, 1)
        # Read-only label shown in place of the combo for already-saved requests.
        # NOTE(review): setReadOnly(1) passes an int where Qt expects a bool — works, but confirm.
        self._expires_label = QLineEdit('')
        self._expires_label.setReadOnly(1)
        self._expires_label.setFocusPolicy(Qt.NoFocus)
        self._expires_label.hide()
        grid.addWidget(self._expires_label, 3, 1)
        self._save_request_button = EnterButton(_('Save request'), self._save_form_as_request)
        self._new_request_button = EnterButton(_('New'), self._new_payment_request)
        self._receive_qr = QRCodeWidget(fixedSize=200)
        self._receive_qr.link_to_window(self._toggle_qr_window)
        buttons = QHBoxLayout()
        buttons.addStretch(1)
        buttons.addWidget(self._save_request_button)
        buttons.addWidget(self._new_request_button)
        grid.addLayout(buttons, 4, 1, 1, 2)
        vbox_g = QVBoxLayout()
        vbox_g.addLayout(grid)
        vbox_g.addStretch()
        hbox = QHBoxLayout()
        hbox.addLayout(vbox_g)
        hbox.addWidget(self._receive_qr)
        return hbox

    def create_request_list_container(self) -> QGroupBox:
        """Wrap the request list and its toolbar in a titled group box."""
        layout = QVBoxLayout()
        layout.setSpacing(0)
        layout.setContentsMargins(6, 0, 6, 6)
        layout.addLayout(self._request_list_toolbar_layout)
        layout.addWidget(self._request_list)
        request_box = QGroupBox()
        request_box.setTitle(_('Requests'))
        request_box.setAlignment(Qt.AlignCenter)
        request_box.setContentsMargins(0, 0, 0, 0)
        request_box.setLayout(layout)
        return request_box

    def update_widgets(self) -> None:
        """Refresh the request list display."""
        self._request_list.update()

    def update_destination(self) -> None:
        """Show the payment destination string for the current receive key (or blank)."""
        text = ''
        if (self._receive_key_id is not None):
            script_template = self._account.get_script_template_for_id(self._receive_key_id)
            if (script_template is not None):
                text = script_template_to_string(script_template)
        self._receive_destination_e.setText(text)

    def update_contents(self) -> None:
        """Reset the form to a fresh receiving key (deterministic accounts only)."""
        self._expires_label.hide()
        self._expires_combo.show()
        if self._account.is_deterministic():
            fresh_key = self._account.get_fresh_keys(RECEIVING_SUBPATH, 1)[0]
            self.set_receive_key(fresh_key)

    def update_for_fx_quotes(self) -> None:
        """Re-emit the last-edited amount field so fiat/BSV amounts resync on new quotes."""
        if (self._account_id is not None):
            edit = (self._fiat_receive_e if self._fiat_receive_e.is_last_edited else self._receive_amount_e)
            edit.textEdited.emit(edit.text())

    def _update_receive_qr(self) -> None:
        """Rebuild the payment URI/QR from current form contents; no-op without a key."""
        if (self._receive_key_id is None):
            return
        amount = self._receive_amount_e.get_amount()
        message = self._receive_message_e.text()
        # Saving only makes sense once there is an amount or a description.
        self._save_request_button.setEnabled(((amount is not None) or (message != '')))
        script_template = self._account.get_script_template_for_id(self._receive_key_id)
        address_text = script_template_to_string(script_template)
        uri = web.create_URI(address_text, amount, message)
        self._receive_qr.setData(uri)
        if (self._qr_window and self._qr_window.isVisible()):
            self._qr_window.set_content(self._receive_destination_e.text(), amount, message, uri)

    def _toggle_qr_window(self, event: QEvent) -> None:
        """Show/hide the detached QR window, preserving its geometry across toggles."""
        if (self._receive_key_id is None):
            self._main_window.show_message(_('No available receiving destination.'))
            return
        if (not self._qr_window):
            # First use: create the window lazily.
            self._qr_window = QR_Window(self)
            self._qr_window.setVisible(True)
            self._qr_window_geometry = self._qr_window.geometry()
        elif (not self._qr_window.isVisible()):
            self._qr_window.setVisible(True)
            self._qr_window.setGeometry(self._qr_window_geometry)
        else:
            self._qr_window_geometry = self._qr_window.geometry()
            self._qr_window.setVisible(False)
        self._update_receive_qr()

    def set_fiat_ccy_enabled(self, flag: bool) -> None:
        """Show or hide the fiat amount entry."""
        self._fiat_receive_e.setVisible(flag)

    def get_bsv_edits(self) -> List[BTCAmountEdit]:
        """Return the BSV-denominated amount edits owned by this view."""
        return [self._receive_amount_e]

    def _save_form_as_request(self) -> None:
        """Create or update the payment request for the current key from the form."""
        if (not self._receive_key_id):
            self._main_window.show_error(_('No receiving payment destination'))
            return
        amount = self._receive_amount_e.get_amount()
        message = self._receive_message_e.text()
        if ((not message) and (not amount)):
            self._main_window.show_error(_('No message or amount'))
            return

        def callback(exc_value: Optional[Exception]=None) -> None:
            # Completion callback from the database layer; re-raise failures
            # and trigger a request-list refresh on success.
            if (exc_value is not None):
                raise exc_value
            self._request_list.update_signal.emit()
        i = self._expires_combo.currentIndex()
        expiration = [x[1] for x in expiration_values][i]
        row = self._account.requests.get_request_for_key_id(self._receive_key_id)
        if (row is None):
            row = self._account.requests.create_request(self._receive_key_id, PaymentFlag.UNPAID, amount, expiration, message, callback)
        else:
            # Existing request: state and expiration are preserved.
            self._account.requests.update_request(row.paymentrequest_id, row.state, amount, row.expiration, message, callback)
        self._save_request_button.setEnabled(False)

    def _new_payment_request(self) -> None:
        """Allocate a fresh receiving key and reset the form for a new request."""
        keyinstances: List[KeyInstanceRow] = []
        if self._account.is_deterministic():
            keyinstances = self._account.get_fresh_keys(RECEIVING_SUBPATH, 1)
        if (not len(keyinstances)):
            if (not self._account.is_deterministic()):
                msg = [_('No more payment destinations in your wallet.'), _('You are using a non-deterministic account, which cannot create new payment destinations.'), _('If you want to create new payment destinations, use a deterministic account instead.')]
                self._main_window.show_message(' '.join(msg))
                return
            self._main_window.show_message(_('Your wallet is broken and could not allocate a new payment destination.'))
        self.update_contents()
        self._new_request_button.setEnabled(False)
        # NOTE(review): setFocus(1) passes an int where Qt expects a FocusReason — confirm.
        self._receive_message_e.setFocus(1)

    def get_receive_key_id(self) -> Optional[int]:
        return self._receive_key_id

    def receive_at_id(self, key_id: int) -> None:
        """Switch the view to receive at the given key and bring the tab forward."""
        self._receive_key_id = key_id
        self._new_request_button.setEnabled(True)
        self.update_destination()
        self._main_window.show_receive_tab()

    def set_receive_key_id(self, key_id: int) -> None:
        self._receive_key_id = key_id

    def set_receive_key(self, keyinstance: KeyInstanceRow) -> None:
        """Point the form at a key instance and clear message/amount."""
        self._receive_key_id = keyinstance.keyinstance_id
        self._receive_message_e.setText('')
        self._receive_amount_e.setAmount(None)
        self.update_destination()

    def set_form_contents(self, address_text: str, value: int, description: Optional[str]=None, expires_description: str='') -> None:
        """Populate the form from an existing request (read-only expiry text)."""
        self._receive_destination_e.setText(address_text)
        self._receive_message_e.setText((description or ''))
        self._receive_amount_e.setAmount(value)
        self._expires_combo.hide()
        self._expires_label.show()
        self._expires_label.setText(expires_description)
        self._new_request_button.setEnabled(True)

    def set_new_button_enabled(self, flag: bool) -> None:
        self._new_request_button.setEnabled(flag)

    def _filter_request_list(self, text: str) -> None:
        """Forward toolbar filter text to the request list."""
        self._request_list.filter(text)
def malformed_replyto_error(status):
    """Return a 500 JSON error for JSON clients, otherwise a 400 HTML error
    page explaining that the submitted _replyto/email value is invalid."""
    if request_wants_json():
        return jsonerror(500, {'error': '_replyto or email field has not been sent correctly'})
    body = u'You entered <span class="code">{address}</span>. That is an invalid email address. Please correct the form and try to submit again <a href="{back}">here</a>.<p style="font-size: small">This could also be a problem with the form. For example, there could be two fields with <span class="code">_replyto</span> or <span class="code">email</span> name attribute. If you suspect the form is broken, please contact the form owner and ask them to investigate</p>'.format(address=status['address'], back=status['referrer'])
    return (render_template('error.html', title='Invalid email address', text=body), 400)
def create_plot():
    """Build a Chaco Plot of the Bessel functions j0..j9 over [-5, 15]."""
    num_points = 100
    x = linspace(-5, 15.0, num_points)
    data = ArrayPlotData(index=x)
    plot = Plot(data, bgcolor='oldlace', padding=50, border_visible=True)
    for i in range(10):
        series = 'y' + str(i)
        data.set_data(series, jn(i, x))
        plot.plot(('index', series), color=tuple(COLOR_PALETTE[i]), width=2.0 * dpi_scale)
    # Enable the grids, scaling their line width for high-DPI displays.
    for grid in (plot.x_grid, plot.y_grid):
        grid.visible = True
        grid.line_width *= dpi_scale
    plot.legend.visible = True
    return plot
def read_beats_schema(version: str=None):
    """Load a gzipped beats schema by version.

    'main' loads the development schema; None loads the newest known version;
    any other value must match a known released version or ValueError is raised.
    """
    if version and version.lower() == 'main':
        return json.loads(read_gzip(get_etc_path('beats_schemas', 'main.json.gz')))
    parsed = Version.parse(version) if version else None
    known_versions = get_versions()
    if parsed and parsed not in known_versions:
        raise ValueError(f'Unknown beats schema: {parsed}')
    target = parsed or get_max_version()
    return json.loads(read_gzip(get_etc_path('beats_schemas', f'v{target}.json.gz')))
class ValveRestBcastTestCase(ValveTestBases.ValveTestNetwork):
    """Broadcast flooding with restricted_bcast_arpnd: an IP broadcast arriving
    on a restricted port must flood only to unrestricted ports, while one from
    an unrestricted port floods everywhere."""

    # p1 and p3 have restricted_bcast_arpnd enabled; p2 does not.
    # NOTE(review): the YAML indentation inside this string appears collapsed
    # (single spaces) — confirm against the original config literal.
    CONFIG = ('\ndps:\n s1:\n%s\n interfaces:\n p1:\n number: 1\n native_vlan: 0x100\n restricted_bcast_arpnd: true\n p2:\n number: 2\n native_vlan: 0x100\n p3:\n number: 3\n native_vlan: 0x100\n restricted_bcast_arpnd: true\n' % DP1_CONFIG)

    def setUp(self):
        """Build the test network from CONFIG."""
        self.setup_valves(self.CONFIG)

    def test_rest_bcast(self):
        """Broadcast from restricted p1 reaches p2 but not restricted p3;
        broadcast from unrestricted p2 reaches both p1 and p3."""
        match = {'in_port': 1, 'vlan_vid': 0, 'eth_type': ether.ETH_TYPE_IP, 'eth_src': self.P1_V100_MAC, 'eth_dst': mac.BROADCAST_STR}
        table = self.network.tables[self.DP_ID]
        self.assertTrue(table.is_output(match, port=2))
        self.assertFalse(table.is_output(match, port=3))
        match = {'in_port': 2, 'vlan_vid': 0, 'eth_type': ether.ETH_TYPE_IP, 'eth_src': self.P1_V100_MAC, 'eth_dst': mac.BROADCAST_STR}
        self.assertTrue(table.is_output(match, port=1))
        self.assertTrue(table.is_output(match, port=3))
class FilterGreater(BasePyMongoFilter):
    """Admin list filter keeping documents whose column is strictly greater
    than the supplied value."""

    def apply(self, query, value):
        """Append a $gt clause for this column; non-numeric input falls back to 0."""
        try:
            threshold = float(value)
        except ValueError:
            threshold = 0
        query.append({self.column: {'$gt': threshold}})
        return query

    def operation(self):
        """Human-readable operation label."""
        return lazy_gettext('greater than')
class GroupList(ResourceList):
    """List/create JSON-API endpoint for Group resources."""

    def query(self, view_kwargs):
        """Base query; on GET with a user_id URL kwarg, restrict to that user's groups."""
        query_ = self.session.query(Group)
        if (view_kwargs.get('user_id') and ('GET' in request.method)):
            query_ = query_.filter_by(user_id=view_kwargs['user_id'])
        return query_

    # Forward the URL kwargs to the data-layer methods (needed by query above).
    view_kwargs = True
    # Only the owning user may modify or delete.
    decorators = (api.has_permission('is_user_itself', methods='PATCH,DELETE', fetch='user_id', model=Group),)
    schema = GroupSchema
    data_layer = {'session': db.session, 'model': Group, 'methods': {'query': query}}
def book() -> Struct:
    """Return the Struct describing a 96-bit 'Book' record: a char* title,
    an int32 page count and a char* author."""
    members = {
        0: ComplexTypeMember(size=32, name='title', offset=0, type=Pointer(Integer.char())),
        4: ComplexTypeMember(size=32, name='num_pages', offset=4, type=Integer.int32_t()),
        8: ComplexTypeMember(size=32, name='author', offset=8, type=Pointer(Integer.char())),
    }
    return Struct(name='Book', members=members, size=96)
class Bits(object):
    """Flip-bit-to-win: longest run of 1s obtainable by flipping one 0 bit of a
    32-bit integer."""

    MAX_BITS = 32

    def _build_seen_list(self, num):
        """Return alternating run lengths scanned from the LSB, always starting
        with a (possibly zero-length) run of 0s."""
        runs = []
        expecting = 0
        run_len = 0
        for _ in range(self.MAX_BITS):
            if (num & 1) != expecting:
                # Current run ended: record it and start counting the other bit.
                runs.append(run_len)
                expecting = not expecting
                run_len = 0
            run_len += 1
            num >>= 1
        runs.append(run_len)
        return runs

    def flip_bit(self, num):
        """Return the longest 1-run achievable by flipping exactly one 0 bit.

        Raises TypeError if num is None.
        """
        if num is None:
            raise TypeError('num cannot be None')
        if num == -1:
            # All bits already set.
            return self.MAX_BITS
        if num == 0:
            return 1
        runs = self._build_seen_list(num)
        best = 0
        expecting = 0
        for index, run_len in enumerate(runs):
            if expecting == 1:
                # 1-runs are handled via their neighbouring 0-runs.
                expecting = not expecting
                continue
            if index == 0:
                candidate = 0
                if run_len != 0:
                    # Leading zeros: flip the one adjacent to the first 1-run.
                    try:
                        candidate = runs[index + 1] + 1
                    except IndexError:
                        candidate = 1
            elif index == (len(runs) - 1):
                # Trailing zeros: extend the preceding 1-run by one.
                candidate = 1 + runs[index - 1]
            elif run_len == 1:
                # A single 0 between two 1-runs joins them.
                candidate = runs[index + 1] + runs[index - 1] + 1
            else:
                # A wider 0-run: extend the longer neighbouring 1-run.
                candidate = max(runs[index + 1], runs[index - 1]) + 1
            best = max(best, candidate)
            expecting = not expecting
        return best
@celery.task(base=RequestContextTask, name='sponsor.logo.urls', bind=True)
def sponsor_logos_url_task(self, event_id):
    """Regenerate and persist the logo URL of every non-deleted sponsor of an event.

    NOTE(review): the decorator line was mangled in the original source
    ('(base=RequestContextTask, ...)' is not valid Python); it has been
    reconstructed as a celery task registration — confirm against upstream.
    """
    sponsors = Sponsor.query.filter_by(event_id=event_id, deleted_at=None).all()
    for sponsor in sponsors:
        try:
            logging.info(f'Sponsor logo url generation task started {sponsor.logo_url}')
            new_logo_url = create_save_resized_image(image_file=sponsor.logo_url, resize=False)
            sponsor.logo_url = new_logo_url
            save_to_db(sponsor)
            logging.info('Sponsor logo url successfully generated')
        except (requests.exceptions.HTTPError, requests.exceptions.InvalidURL):
            # Best effort: one bad/unreachable logo URL must not abort the batch.
            logging.exception('Error encountered while logo generation')
class Integrations():
    """Factory for alert integrations.

    NOTE(review): get_integration takes no self/cls, so it behaves as a static
    factory; the @staticmethod decorator was presumably lost upstream and is
    restored here — calling it on an instance previously passed the instance
    as 'config'.
    """

    @staticmethod
    def get_integration(config: Config, tracking: Optional[Tracking]=None, override_config_defaults: bool=False) -> BaseIntegration:
        """Return the integration matching *config*; currently only Slack.

        Raises UnsupportedAlertIntegrationError when no supported integration
        is configured.
        """
        if config.has_slack:
            return SlackIntegration(config=config, tracking=tracking, override_config_defaults=override_config_defaults)
        raise UnsupportedAlertIntegrationError
def downgrade():
    """Recreate the unique constraints with their previous column sets."""
    for name, table, columns in (
        ('email_role_event_uc', 'role_invites', ['email', 'role_id', 'event_id']),
        ('uq_event_discount_code', 'discount_codes', ['event_id', 'code']),
    ):
        op.drop_constraint(name, table, type_='unique')
        op.create_unique_constraint(name, table, columns)
@app.route('/calls', methods=['GET'])
def calls():
    """Render the paginated call-log page, optionally filtered by phone number
    or by caller-name substring.

    NOTE(review): the route decorator was mangled in the original source and
    has been reconstructed — confirm the path against the app's URL map.
    """
    number = request.args.get('number')
    search_text = request.args.get('search')
    search_type = request.args.get('submit')
    # Security fix: the user-supplied search text was previously interpolated
    # directly into the SQL (SQL injection); it is now bound as a parameter.
    search_criteria = ''
    search_params = []
    if search_text:
        if search_type == 'phone':
            number = transform_number(search_text)
            search_criteria = 'WHERE Number=?'
            search_params = [number]
        else:
            search_criteria = 'WHERE Caller LIKE ?'
            search_params = ['%{}%'.format(search_text)]
    sql = 'SELECT COUNT(*), Number, Name Caller FROM CallLog {}'.format(search_criteria)
    g.cur.execute(sql, search_params)
    total = g.cur.fetchone()[0]
    (page, per_page, offset) = get_page_args(page_parameter='page', per_page_parameter='per_page')
    sql = "SELECT\n a.CallLogID,\n CASE\n WHEN b.PhoneNo is not null then b.Name\n WHEN c.PhoneNo is not null then c.Name\n ELSE a.Name\n END Caller,\n a.Number Number,\n a.Date,\n a.Time,\n a.Action,\n a.Reason,\n CASE WHEN b.PhoneNo is null THEN 'N' ELSE 'Y' END Whitelisted,\n CASE WHEN c.PhoneNo is null THEN 'N' ELSE 'Y' end Blacklisted,\n d.MessageID,\n d.Played,\n d.Filename,\n a.SystemDateTime\n FROM CallLog as a\n LEFT JOIN Whitelist AS b ON a.Number = b.PhoneNo\n LEFT JOIN Blacklist AS c ON a.Number = c.PhoneNo\n LEFT JOIN Message AS d ON a.CallLogID = d.CallLogID\n {}\n ORDER BY a.SystemDateTime DESC\n LIMIT ?, ?".format(search_criteria)
    g.cur.execute(sql, search_params + [offset, per_page])
    result_set = g.cur.fetchall()
    calls = []
    for row in result_set:
        number = row[2]
        phone_no = format_phone_no(number)
        filepath = row[11]
        if filepath is not None:
            # Re-root the recorded-message file under the static messages dir.
            basename = os.path.basename(filepath)
            filepath = os.path.join('../static/messages', basename)
        date_time = datetime.strptime(row[12][:19], '%Y-%m-%d %H:%M:%S')
        calls.append(dict(call_no=row[0], phone_no=phone_no, name=row[1], date=date_time.strftime('%d-%b-%y'), time=date_time.strftime('%I:%M %p'), action=row[5], reason=row[6], whitelisted=row[7], blacklisted=row[8], msg_no=row[9], msg_played=row[10], wav_file=filepath))
    pagination = get_pagination(page=page, per_page=per_page, total=total, record_name='calls', format_total=True, format_number=True)
    # search_criteria is now the parameterized clause (no embedded user input).
    return render_template('calls.html', active_nav_item='calls', calls=calls, search_criteria=search_criteria, page=page, per_page=per_page, pagination=pagination)
def _parse_worker_params(model_name: str=None, model_path: str=None, **kwargs) -> ModelWorkerParameters:
    """Parse ModelWorkerParameters in two passes: first with the env prefix of
    the supplied model name, then with the prefix derived from the *resolved*
    model name, merging the second result into the first."""
    parser = EnvArgumentParser()
    prefix = EnvArgumentParser.get_env_prefix(model_name) if model_name else None
    params: ModelWorkerParameters = parser.parse_args_into_dataclass(
        ModelWorkerParameters, env_prefixes=[prefix], model_name=model_name,
        model_path=model_path, **kwargs)
    # Second pass using the prefix of the model name that was actually resolved.
    resolved_prefix = EnvArgumentParser.get_env_prefix(params.model_name)
    merged = parser.parse_args_into_dataclass(
        ModelWorkerParameters, env_prefixes=[resolved_prefix],
        model_name=params.model_name, model_path=params.model_path, **kwargs)
    params.update_from(merged)
    return params
@pytest.mark.parametrize(('test_input', 'expected_output'), [([], []), ([(1, 2), (2, 3), (1, 3)], [[1, 2, 3]]), ([(1, 2), (2, 3), (1, 3), (1, 4), (2, 4), (3, 4), (1, 5), (2, 5), (3, 5), (4, 5)], [[1, 2, 3, 4, 5]]), ([(1, 2), (2, 3), (1, 3), (1, 4)], [[1, 2, 3], [1, 4]]), ([(1, 2), (2, 3), (1, 3), (1, 4), (3, 4)], [[1, 2, 3], [1, 3, 4]]), ([(1, 4), (4, 5)], [[1, 4], [4, 5]])])
def test_generate_similarity_sets(test_input, expected_output):
    """generate_similarity_sets groups pairwise-similar ids into maximal cliques.

    NOTE(review): the decorator was mangled in the original source
    ('.parametrize(...)' is a syntax error); reconstructed as
    @pytest.mark.parametrize — confirm against upstream.
    """
    assert generate_similarity_sets(test_input) == expected_output
def test_oidcclient_login_retry(mocker, client):
    """login() keeps retrying token fetches until one succeeds, echoing a
    failure message per attempt and a success message at the end."""
    oauth2client = mocker.Mock()
    client.client = oauth2client
    client._tokens = None
    client.storage.load.side_effect = (lambda k, d: d)
    prompt = mocker.patch('bodhi.client.oidcclient.click.prompt')
    prompt.return_value = 'result-code'
    secho = mocker.patch('bodhi.client.oidcclient.click.secho')
    # NOTE(review): the endpoint URLs were garbled in the original source and
    # broke this dict literal; placeholder URLs restored — confirm against the
    # real test fixture.
    client.metadata = {
        'token_endpoint': 'https://id.example.com/openidc/Token',
        'authorization_endpoint': 'https://id.example.com/openidc/Authorization',
        'response_modes_supported': ['oob'],
    }
    oauth2client.create_authorization_url.return_value = ('auth-url', 'state')
    # Two failures followed by a success.
    oauth2client.fetch_token.side_effect = [
        OAuthError('nope', 'wrong'),
        OAuthError('nope', 'wrong again'),
        'result-token',
    ]
    client.login()
    assert prompt.call_count == 3
    assert oauth2client.fetch_token.call_count == 3
    assert client.tokens == 'result-token'
    assert secho.call_args_list == [
        mocker.call('Authenticating... Please open your browser to:', fg='yellow'),
        mocker.call('Login failed!: nope: wrong. Please try again.', fg='red'),
        mocker.call('Login failed!: nope: wrong again. Please try again.', fg='red'),
        mocker.call('Login successful!', fg='green'),
    ]
def main():
    """Correlate BUFHCE parameters from params.json with design bits and emit
    segmaker tag data."""
    segmk = Segmaker('design.bits')
    print('Loading tags')
    with open('params.json') as f:
        params = json.load(f)
    for row in params:
        site = row['site']
        prefix = 'BUFHCE_X{}Y{}'.format(row['x'], row['y'])
        segmk.add_site_tag(site, '{}.IN_USE'.format(prefix), row['IN_USE'])
        if not row['IN_USE']:
            # Unused sites contribute only the IN_USE tag.
            continue
        segmk.add_site_tag(site, '{}.INIT_OUT'.format(prefix), row['INIT_OUT'])
        # ZINV_CE is the inverse of the IS_CE_INVERTED flag.
        segmk.add_site_tag(site, '{}.ZINV_CE'.format(prefix), 1 ^ row['IS_CE_INVERTED'])
        for opt in ['ASYNC']:
            segmk.add_site_tag(site, '{}.CE_TYPE.'.format(prefix) + opt,
                               verilog.unquote(row['CE_TYPE']) == opt)
    segmk.compile()
    segmk.write()
class IObserver(abc.ABC):
    """Interface for observer objects in the observation framework.

    Implementations must be hashable and support equality so observers can be
    deduplicated and removed from notifier registries.
    """

    def __hash__(self):
        """Return the hash; must stay consistent with __eq__."""
        raise NotImplementedError('__hash__ must be implemented.')

    def __eq__(self, other):
        raise NotImplementedError('__eq__ must be implemented.')

    def notify(self):
        # NOTE(review): the message says 'notify property' although this is
        # declared as a method here — confirm whether it should be a property.
        raise NotImplementedError('notify property must be implemented.')

    def iter_observables(self, object):
        # 'object' shadows the builtin; parameter name kept for interface compatibility.
        raise NotImplementedError('iter_observables must be implemented.')

    def iter_objects(self, object):
        raise NotImplementedError('iter_objects must be implemented.')

    def get_notifier(self, handler, target, dispatcher):
        raise NotImplementedError('get_notifier must be implemented.')

    def get_maintainer(self, graph, handler, target, dispatcher):
        raise NotImplementedError('get_maintainer must be implemented.')

    def iter_extra_graphs(self, graph):
        raise NotImplementedError('iter_extra_graphs must be implemented.')
@mock.patch.object(source_manager.SourceManager, 'get_source_tree')
def test_find_all_images_from_specified_tags_fail(mock_source_tree):
    """find_all_images_from_specified_tags raises SourceManagerError when no
    source tree can be produced.

    NOTE(review): the decorator was mangled in the original source
    ('.object(...)' is a syntax error); reconstructed as @mock.patch.object —
    confirm against upstream.
    """
    job_control = proto_control.JobControl(remote=False, scavenging_benchmark=True)
    mock_source_tree.return_value = None
    manager = source_manager.SourceManager(job_control)
    hashes = []
    with pytest.raises(source_manager.SourceManagerError) as source_error:
        hashes = manager.find_all_images_from_specified_tags()
    # The call raised, so no hashes were collected.
    assert not hashes
    assert str(source_error.value) == 'No images are specified or able to be built from the control document'
def test_zeroed_in_1st_lr():
    """Loading a file whose first logical record is zeroed must fail with a
    'Too short logical record' error that reports the parse position."""
    with pytest.raises(RuntimeError) as excinfo:
        _ = dlis.load('data/chap2/zeroed-in-1st-lr.dlis')
    assert ('Too short logical record' in str(excinfo.value))
    # The error message should pinpoint where in the file parsing stopped.
    dbg = 'Physical tell: 188 (dec), Logical Record tell: 100 (dec), Logical Record Segment tell: 100 (dec)'
    assert (dbg in str(excinfo.value))
@mock.patch('pandas.DataFrame.to_parquet')
@mock.patch('pandas.read_parquet')
@mock.patch('flytekit.types.structured.basic_dfs.get_fsspec_storage_options')
def test_pandas_to_parquet_azure_storage_options(mock_get_fsspec_storage_options, mock_read_parquet, mock_to_parquet):
    """Encoding/decoding a pandas StructuredDataset on an abfs:// URI must
    forward the fsspec storage options to both to_parquet and read_parquet.

    NOTE(review): the three patch decorators were mangled in the original
    source; reconstructed from the parameter order (bottom-most patch maps to
    the first argument) — confirm against upstream.
    """
    df = pd.DataFrame({'Name': ['Tom', 'Joseph'], 'Age': [20, 22]})
    encoder = basic_dfs.PandasToParquetEncodingHandler()
    decoder = basic_dfs.ParquetToPandasDecodingHandler()
    mock_get_fsspec_storage_options.return_value = {'account_name': 'accountname_from_storage_options'}
    ctx = context_manager.FlyteContextManager.current_context()
    sd = StructuredDataset(dataframe=df, uri='abfs://container/parquet_df')
    sd_type = StructuredDatasetType(format='parquet')
    sd_lit = encoder.encode(ctx, sd, sd_type)
    mock_to_parquet.assert_called_once()
    write_storage_options = mock_to_parquet.call_args.kwargs['storage_options']
    assert write_storage_options == {'account_name': 'accountname_from_storage_options'}
    decoder.decode(ctx, sd_lit, StructuredDatasetMetadata(sd_type))
    mock_read_parquet.assert_called_once()
    read_storage_options = mock_read_parquet.call_args.kwargs['storage_options']
    # Bug fix: this comparison was previously a bare expression whose result
    # was discarded; it must be asserted.
    assert read_storage_options == {'account_name': 'accountname_from_storage_options'}
class OptionSeriesWordcloudSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    """Generated option accessors for the play-delay mapping of a wordcloud
    series' sonification default instrument.

    NOTE(review): each name below is defined twice (getter then setter);
    without @property / @<name>.setter decorators the second def shadows the
    first — the decorators were presumably lost in extraction, confirm against
    the upstream/generated source.
    """

    def mapFunction(self):
        # Getter: current mapFunction option value (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store the raw value (not serialized as JS).
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def extractAm23MtranslationBlogspotCom(item):
    """Map a parsed feed item to a release message.

    Returns None for previews or items with no chapter/volume, False when no
    known tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def compile(args):
    """Compile a stored markdown report into an HTML file and print its path.

    NOTE(review): shadows the builtin compile(); name kept for CLI compatibility.
    """
    project_path = args.path or os.getcwd()
    sys.path.append(project_path)
    reports_path = utils.get_report_path(project_path)
    # Bug fix: the report is read here, so it must be opened in read mode —
    # opening with 'w' truncated the file and made read() raise
    # io.UnsupportedOperation.
    with open(os.path.join(reports_path, '%s.md' % args.name)) as md:
        md_dta = md.read()
    page = Report()
    page.py.markdown.resolve(md_dta)
    try:
        settings = __import__('ui_settings', fromlist=['object'])
        if not os.path.exists(settings.VIEWS_FOLDER):
            settings.VIEWS_FOLDER = os.path.join(reports_path, '..', '..', settings.VIEWS_FOLDER)
        output = page.outs.html_file(path=settings.VIEWS_FOLDER, name=args.name, options={'split': split_files, 'css_route': '/css', 'js_route': '/js'})
    except Exception:
        # Fall back to a local, unsplit HTML file when ui_settings is absent
        # or broken (narrowed from a bare 'except:').
        output = page.outs.html_file(name=args.name, options={'split': False})
    print(output)
class requires(Decorator):
    """Pipeline decorator enforcing a condition, with a fallback action when
    the condition fails."""

    def __init__(self, condition=None, otherwise=None):
        # Both arguments are mandatory; defaults of None exist only so the
        # usage error below can be reported.
        if condition is None or otherwise is None:
            raise SyntaxError('requires usage: (condition, otherwise)')
        if not (callable(otherwise) or isinstance(otherwise, str)):
            raise SyntaxError("requires 'otherwise' param must be string or callable")
        self.condition = condition
        self.otherwise = otherwise

    def build_pipe(self):
        """Build the RequirePipe for this condition/fallback pair."""
        # Imported lazily to avoid a circular import with ..pipeline.
        from ..pipeline import RequirePipe
        return RequirePipe(self.condition, self.otherwise)
def get_path(path):
    """Resolve a directory path.

    Strings pass through unchanged. For a class or instance, return the
    directory of its defining module; for __main__, fall back to the script's
    directory or the current working directory.
    """
    # type() check (not isinstance) preserved: str subclasses take the class branch.
    if type(path) is not str:
        klass = path if inspect.isclass(path) else path.__class__
        module_name = klass.__module__
        module = sys.modules[module_name]
        if module_name == '__main__':
            # __main__ has no reliable __file__; probe the usual locations.
            for candidate in (os.path.dirname(sys.argv[0]), os.getcwd()):
                if os.path.exists(candidate):
                    path = candidate
                    break
        else:
            path = os.path.dirname(module.__file__)
    return path
class LinearHeatPerturbation(HeatPerturbation):
    """Heat perturbation varying linearly with temperature:
    ``coeff * (T - temperature_ref)``."""

    # Temperature at which the perturbation is exactly zero.
    temperature_ref: pd.NonNegativeFloat = pd.Field(..., title='Reference temperature', description='Temperature at which perturbation is zero.', units=KELVIN)
    # Thermo-optic sensitivity dP/dT; may be complex-valued.
    coeff: Union[(float, Complex)] = pd.Field(..., title='Thermo-optic Coefficient', description='Sensitivity (derivative) of perturbation with respect to temperature.', units=f'1/{KELVIN}')

    # NOTE(review): '_property' looks like a mangled decorator (probably
    # @cached_property or @property) — restore from the upstream source.
    _property
    def perturbation_range(self) -> Union[(Tuple[(float, float)], Tuple[(Complex, Complex)])]:
        """Min/max perturbation over the allowed temperature range."""
        return self._linear_range(self.temperature_range, self.temperature_ref, self.coeff)

    # NOTE(review): '_temp_in_range' also looks like a mangled decorator
    # (presumably a validator checking 'temperature' lies within range).
    _temp_in_range
    def sample(self, temperature: Union[(ArrayLike[float], SpatialDataArray)]) -> Union[(ArrayLike[float], ArrayLike[Complex], SpatialDataArray)]:
        """Evaluate the perturbation at the given temperature sample points."""
        t_vals = (np.array(temperature) if (self._array_type(temperature) == 'array') else temperature)
        return (self.coeff * (t_vals - self.temperature_ref))

    _property
    def is_complex(self) -> bool:
        """Whether the coefficient (and hence the perturbation) is complex-valued."""
        return np.iscomplex(self.coeff)
class OptionPlotoptionsBubbleSonificationContexttracksMappingVolume(Options):
    """Generated Highcharts option proxy for
    ``plotOptions.bubble.sonification.contextTracks.mapping.volume``.

    NOTE(review): every option is defined twice (getter-style then
    setter-style def with the same name), so the later definition shadows the
    earlier one; '@property'/setter decorators were presumably stripped --
    confirm against the generator output.
    """
    def mapFunction(self):
        # Getter form: no configured value by default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter form: store the raw value (js_type=False -> not JS code).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def mocked_dynamodb_scan():
    """Return a canned DynamoDB scan result for tests: one subscribed and
    one unsubscribed contact."""
    subscribed = {
        'ListId': '1-simplified',
        'SubscriberEmail': '',
        'CharacterSet': 'simplified',
        'DateSubscribed': '3/18/20',
        'Status': 'subscribed',
    }
    unsubscribed = {
        'ListId': '3-traditional',
        'SubscriberEmail': '',
        'CharacterSet': 'traditional',
        'DateSubscribed': '3/18/20',
        'Status': 'unsubscribed',
    }
    return [subscribed, unsubscribed]
class HistogramStatsRequestBadTLVs(base_tests.SimpleProtocol):
    """Malformed TLV lists in a histogram stats request must be rejected
    with a descriptive BSN error."""
    def runTest(self):
        # (tlv list, expected error text) pairs, exercised in order.
        cases = [
            ([], 'Expected name TLV, found empty list'),
            ([ofp.bsn_tlv.ipv4_src(), ofp.bsn_tlv.name('test')],
             'Expected name TLV, found of_bsn_tlv_ipv4_src'),
            ([ofp.bsn_tlv.name('test'), ofp.bsn_tlv.ipv4_src()],
             'Expected end of TLV list, found of_bsn_tlv_ipv4_src'),
        ]
        for tlvs, expected in cases:
            request = ofp.message.bsn_generic_stats_request(name='histogram', tlvs=tlvs)
            reply, _ = self.controller.transact(request)
            self.assertBsnError(reply, expected)
    def assertBsnError(self, msg, text):
        """Assert that *msg* is a bsn_error carrying exactly *text*."""
        self.assertIsInstance(msg, ofp.message.bsn_error)
        self.assertEqual(text, msg.err_msg)
def call(evm: Evm) -> None:
    """CALL opcode: message-call into an account, optionally transferring value.

    Pops (gas, to, value, in_start, in_size, out_start, out_size) from the
    stack, charges gas (memory extension plus new-account and value-transfer
    surcharges), then performs the call -- or pushes 0 and refunds the
    stipend when the caller's balance cannot cover ``value``.
    """
    # Stack pop order is fixed by the EVM specification.
    gas = Uint(pop(evm.stack))
    to = to_address(pop(evm.stack))
    value = pop(evm.stack)
    memory_input_start_position = pop(evm.stack)
    memory_input_size = pop(evm.stack)
    memory_output_start_position = pop(evm.stack)
    memory_output_size = pop(evm.stack)
    # Cost of growing memory to cover both the input and output ranges.
    extend_memory = calculate_gas_extend_memory(evm.memory, [(memory_input_start_position, memory_input_size), (memory_output_start_position, memory_output_size)])
    # Surcharge applies only when a nonzero transfer would touch a dead account.
    create_gas_cost = (Uint(0) if ((value == 0) or is_account_alive(evm.env.state, to)) else GAS_NEW_ACCOUNT)
    transfer_gas_cost = (Uint(0) if (value == 0) else GAS_CALL_VALUE)
    message_call_gas = calculate_message_call_gas(value, gas, Uint(evm.gas_left), extend_memory.cost, ((GAS_CALL + create_gas_cost) + transfer_gas_cost))
    charge_gas(evm, (message_call_gas.cost + extend_memory.cost))
    # Gas is charged before memory is actually expanded.
    evm.memory += (b'\x00' * extend_memory.expand_by)
    sender_balance = get_account(evm.env.state, evm.message.current_target).balance
    if (sender_balance < value):
        # Insufficient balance: push failure and return the stipend to the caller.
        push(evm.stack, U256(0))
        evm.gas_left += message_call_gas.stipend
    else:
        generic_call(evm, message_call_gas.stipend, value, evm.message.current_target, to, to, True, memory_input_start_position, memory_input_size, memory_output_start_position, memory_output_size)
    evm.pc += 1
def flatten_tree(tree, leaf='argmax'):
    """Flatten a fitted sklearn-style decision tree into a list of nodes.

    Each node becomes a 4-item list ``[feature, threshold, left, right]``;
    leaves (both children == -1) are encoded as ``[-1, val, -1, -1]`` where
    ``val`` is the argmax over class counts (``leaf='argmax'``) or the raw
    stored value (``leaf='value'``).

    Parameters
    ----------
    tree : object
        sklearn ``tree_``-like structure exposing ``node_count``,
        ``children_left``, ``children_right``, ``feature``, ``threshold``
        and ``value`` (single-output trees only: ``value.shape[1] == 1``).
    leaf : {'argmax', 'value'}
        Leaf encoding mode.

    Returns
    -------
    list
        One 4-item list per node, in node order.

    Raises
    ------
    ValueError
        If ``leaf`` is not a supported mode.  (The original code left ``val``
        unbound in that case and crashed with a confusing NameError at the
        first leaf.)
    """
    if leaf not in ('argmax', 'value'):
        # Fail fast with a clear message instead of an UnboundLocalError later.
        raise ValueError("leaf must be 'argmax' or 'value', got %r" % (leaf,))
    flat = []
    assert (tree.node_count == len(tree.value))
    assert (tree.value.shape[1] == 1)  # single-output trees only
    for (left, right, feature, th, value) in zip(tree.children_left, tree.children_right, tree.feature, tree.threshold, tree.value):
        if ((left == (- 1)) and (right == (- 1))):
            # Leaf node: children are both -1 by sklearn convention.
            if (leaf == 'argmax'):
                val = numpy.argmax(value[0])
            else:
                val = value[0][0]
            n = [(- 1), val, (- 1), (- 1)]
        else:
            # Internal node: keep the split description as-is.
            n = [feature, th, left, right]
        flat.append(n)
    return flat
class PrivateComputationMRPIDOnlyTestStageFlow(PrivateComputationBaseStageFlow):
    """Test-only stage flow running just creation, pre-validation and the
    MapReduce PID multikey union stage."""
    # Enum member ordering == execution order of the flow.
    _order_ = 'CREATED PC_PRE_VALIDATION UNION_PID_MR_MULTIKEY'
    CREATED = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.CREATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.CREATION_STARTED, completed_status=PrivateComputationInstanceStatus.CREATED, failed_status=PrivateComputationInstanceStatus.CREATION_FAILED, is_joint_stage=False)
    PC_PRE_VALIDATION = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_INITIALIZED, started_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_STARTED, completed_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_COMPLETED, failed_status=PrivateComputationInstanceStatus.PC_PRE_VALIDATION_FAILED, is_joint_stage=False)
    UNION_PID_MR_MULTIKEY = PrivateComputationStageFlowData(initialized_status=PrivateComputationInstanceStatus.PID_MR_INITIALIZED, started_status=PrivateComputationInstanceStatus.PID_MR_STARTED, completed_status=PrivateComputationInstanceStatus.PID_MR_COMPLETED, failed_status=PrivateComputationInstanceStatus.PID_MR_FAILED, is_joint_stage=False)
    def get_stage_service(self, args: PrivateComputationStageServiceArgs) -> PrivateComputationStageService:
        """Return the stage service for this stage; the MR-PID stage requires
        a workflow service and gets a dedicated service instance."""
        logging.info('Start MR-PID only test stage flow')
        if (self is self.UNION_PID_MR_MULTIKEY):
            # The MapReduce PID stage cannot run without a workflow service.
            if (args.workflow_svc is None):
                raise NotImplementedError('workflow_svc is None')
            return PIDMRStageService(args.workflow_svc)
        else:
            # All other stages use the default service resolution.
            return self.get_default_stage_service(args)
# NOTE(review): the bare ('/job-execution') expression below looks like a
# stripped route decorator (e.g. @app.route('/job-execution')) -- confirm
# against the original source.
('/job-execution')
def job_execution_metadata():
    """Return a paginated response of job executions for a workflow execution.

    Query params: ``workflow_execution_id``, ``pageSize``, ``pageNo``.
    Each job execution dict is augmented with its ``_job_type`` resolved from
    the workflow graph.
    """
    workflow_execution_id = request.args.get('workflow_execution_id')
    job_execution_list = (scheduler.list_job_executions(workflow_execution_id) if workflow_execution_id else None)
    page_job_executions = (Paginator(job_execution_list, int(request.args.get('pageSize'))).get_page(int(request.args.get('pageNo'))).object_list if job_execution_list else None)
    job_execution_objects = []
    if page_job_executions:
        # All executions on the page belong to one workflow; use the first to
        # look up the workflow graph and build a job-name -> job-type map.
        workflow_info = page_job_executions[0].workflow_execution.workflow_info
        workflow_graph = extract_graph(store.get_workflow_by_name(workflow_info.namespace, workflow_info.workflow_name))
        job_types = {}
        for node in workflow_graph.nodes.values():
            job_types.update({node.config.job_name: node.config.job_type})
        for page_job_execution in page_job_executions:
            # Expose the raw attribute dict, annotated with the job type.
            job_execution_object = page_job_execution.__dict__
            job_execution_object.update({'_job_type': job_types.get(page_job_execution.job_name)})
            job_execution_objects.append(job_execution_object)
    return pagination_response(page_no=int(request.args.get('pageNo')), total_count=(len(job_execution_list) if job_execution_list else 0), data=job_execution_objects)
class OptionPlotoptionsAreasplineSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Generated Highcharts option proxy for
    ``plotOptions.areaspline.sonification.defaultInstrumentOptions.mapping.highpass``."""
    def frequency(self) -> 'OptionPlotoptionsAreasplineSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        """Sub-option container for the highpass filter frequency mapping."""
        return self._config_sub_data('frequency', OptionPlotoptionsAreasplineSonificationDefaultinstrumentoptionsMappingHighpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsAreasplineSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        """Sub-option container for the highpass filter resonance mapping."""
        return self._config_sub_data('resonance', OptionPlotoptionsAreasplineSonificationDefaultinstrumentoptionsMappingHighpassResonance)
def test_pubsub_topic():
    """Events published through a topic proxy must reach registered
    subscribers with the originating interface method and its kwargs."""
    pubsub = Pubsub(EventService())
    listener = DummySubscriber()
    topic = pubsub.create_event_topic(DummyInterface)
    pubsub.register_subscriber(listener)
    topic.event1()
    topic.event2(param1=1, param2=2)
    expected = [
        (DummyInterface.event1, {}),
        (DummyInterface.event2, {'param1': 1, 'param2': 2}),
    ]
    assert len(listener.events) == len(expected)
    for record, (method, attrs) in zip(listener.events, expected):
        assert record.interface_method == method
        assert record.attributes == attrs
def _create_plot_component():
    """Build a demo segment plot: 500 random points joined to jittered
    copies of themselves, with pan and box-zoom tools attached."""
    num_points = 500
    x_start = random(num_points)
    y_start = random(num_points)
    # Segment end points are the start points plus small gaussian jitter.
    x_end = x_start + standard_normal(num_points) * 0.05
    y_end = y_start + standard_normal(num_points) * 0.05
    data = ArrayPlotData()
    # Interleave start/end coordinates: [s0, e0, s1, e1, ...]
    data.set_data('index', column_stack([x_start, x_end]).reshape(-1))
    data.set_data('value', column_stack([y_start, y_end]).reshape(-1))
    plot = Plot(data)
    plot.plot(('index', 'value'), type='segment', color='forestgreen', line_width=2, line_style='dash', alpha=0.7, bgcolor='white')
    plot.title = 'Segment Plot'
    plot.line_width = 0.5
    plot.padding = 50
    plot.tools.append(PanTool(plot, constrain_key='shift'))
    plot.overlays.append(ZoomTool(component=plot, tool_mode='box', always_on=False))
    return plot
def main():
    """Build an XLSX workbook comparing measured (truth) and computed timing
    values, with error columns, for each net in a 7-series timing JSON dump."""
    parser = argparse.ArgumentParser(description='Create timing worksheet for 7-series timing analysis.')
    util.db_root_arg(parser)
    util.part_arg(parser)
    parser.add_argument('--timing_json', required=True)
    parser.add_argument('--output_xlsx', required=True)
    parser.add_argument('--wire_filter', help='List of wires that must be present in a net to be output')
    args = parser.parse_args()
    with OpenSafeFile(args.timing_json) as f:
        timing = json.load(f)
    db = Database(args.db_root, args.part)
    # Index all nodes by name across every net for fast lookup.
    nodes = {}
    for net in timing:
        for node in net['nodes']:
            nodes[node['name']] = node
    timing_lookup = TimingLookup(db, nodes)
    wb = Workbook()
    summary_ws = wb[wb.sheetnames[0]]
    summary_ws.title = 'Summary'
    summary_ws['A1'] = 'Name'
    cols = ['FAST_MAX', 'FAST_MIN', 'SLOW_MAX', 'SLOW_MIN']
    # Header row: each timing corner occupies 4 columns (truth, computed,
    # error, relative error); only the first two get header labels here.
    cur_col = 'B'
    for col in cols:
        summary_ws['{}1'.format(cur_col)] = col
        cur_col = chr((ord(cur_col) + 1))
        summary_ws['{}1'.format(cur_col)] = ('Computed ' + col)
        cur_col = chr((ord(cur_col) + 3))
    if args.wire_filter:
        wire_filter = build_wire_filter(args.wire_filter)
    else:
        # No filter specified: accept every net.
        wire_filter = (lambda x: True)
    summary_row = 2
    timing = [net for net in timing if wire_filter(net)]
    for (idx, net) in enumerate(timing):
        # A '<' in the route marks a complicated route description this tool
        # does not handle.
        if ('<' in net['route']):
            print('WARNING: Skipping net {} because it has complicated route description.'.format(net['net']))
            continue
        print('Process net {} ({} / {})'.format(net['net'], idx, len(timing)))
        for summary_cells in add_net(wb, net, timing_lookup):
            summary_ws['A{}'.format(summary_row)] = summary_cells['Name']
            # Per corner: truth, computed, absolute error, relative error.
            # NOTE: the chr/ord column arithmetic stays within A-Z here
            # (4 corners * 4 columns ends at column Q).
            cur_col = 'B'
            for col in cols:
                truth_col = chr((ord(cur_col) + 0))
                computed_col = chr((ord(cur_col) + 1))
                error_col = chr((ord(cur_col) + 2))
                error_per_col = chr((ord(cur_col) + 3))
                # Truth/computed reference formulas produced by add_net.
                summary_ws['{}{}'.format(truth_col, summary_row)] = ('=' + summary_cells['truth'][col])
                summary_ws['{}{}'.format(computed_col, summary_row)] = ('=' + summary_cells['computed'][col])
                summary_ws['{}{}'.format(error_col, summary_row)] = '={truth}{row}-{comp}{row}'.format(truth=truth_col, comp=computed_col, row=summary_row)
                summary_ws['{}{}'.format(error_per_col, summary_row)] = '={error}{row}/{truth}{row}'.format(error=error_col, truth=truth_col, row=summary_row)
                cur_col = chr((ord(cur_col) + 4))
            summary_row += 1
    wb.save(filename=args.output_xlsx)
def test_grid_analytic_refinement():
    """Analytic multi-interval refinement must produce matching step sizes
    across interval boundaries for a periodic structure."""
    max_dls = np.array([0.5, 0.5, 0.4, 0.1, 0.4])
    interval_lengths = np.array([2.0, 0.5, 0.2, 0.1, 0.3])
    left_dl, right_dl = MESHER.grid_multiple_interval_analy_refinement(
        max_dls, interval_lengths, 1.5, True)
    # Step at the left edge of each interval matches the step at the right
    # edge of the previous one.
    assert np.all(np.isclose(left_dl[1:], right_dl[:-1]))
class bsn_vport_l2gre(bsn_vport):
    """BSN L2-GRE virtual-port descriptor (vport type 1).

    Generated loxigen-style message class: a plain attribute bag with
    pack/unpack serialization, value-based equality and pretty printing.

    NOTE(review): pack() joins struct.pack() results and a '\\x00' padding
    str with ''.join -- this is Python-2-era bytes handling; confirm the
    file targets Python 2 before reuse.
    """
    # TLV type discriminator for the L2-GRE vport.
    type = 1
    def __init__(self, flags=None, port_no=None, loopback_port_no=None, local_mac=None, nh_mac=None, src_ip=None, dst_ip=None, dscp=None, ttl=None, vpn=None, rate_limit=None, if_name=None):
        """Initialize all fields; unspecified fields default to zero values
        (0, all-zero MAC, or empty string)."""
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (port_no != None):
            self.port_no = port_no
        else:
            self.port_no = 0
        if (loopback_port_no != None):
            self.loopback_port_no = loopback_port_no
        else:
            self.loopback_port_no = 0
        if (local_mac != None):
            self.local_mac = local_mac
        else:
            self.local_mac = [0, 0, 0, 0, 0, 0]
        if (nh_mac != None):
            self.nh_mac = nh_mac
        else:
            self.nh_mac = [0, 0, 0, 0, 0, 0]
        if (src_ip != None):
            self.src_ip = src_ip
        else:
            self.src_ip = 0
        if (dst_ip != None):
            self.dst_ip = dst_ip
        else:
            self.dst_ip = 0
        if (dscp != None):
            self.dscp = dscp
        else:
            self.dscp = 0
        if (ttl != None):
            self.ttl = ttl
        else:
            self.ttl = 0
        if (vpn != None):
            self.vpn = vpn
        else:
            self.vpn = 0
        if (rate_limit != None):
            self.rate_limit = rate_limit
        else:
            self.rate_limit = 0
        if (if_name != None):
            self.if_name = if_name
        else:
            self.if_name = ''
        return
    def pack(self):
        """Serialize this vport to wire format; the length field (second
        item) is back-patched after all fields are packed."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        # Placeholder for the total length, patched below.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.flags))
        packed.append(util.pack_port_no(self.port_no))
        packed.append(util.pack_port_no(self.loopback_port_no))
        packed.append(struct.pack('!6B', *self.local_mac))
        packed.append(struct.pack('!6B', *self.nh_mac))
        packed.append(struct.pack('!L', self.src_ip))
        packed.append(struct.pack('!L', self.dst_ip))
        packed.append(struct.pack('!B', self.dscp))
        packed.append(struct.pack('!B', self.ttl))
        # 2 bytes of padding to align the following fields.
        packed.append(('\x00' * 2))
        packed.append(struct.pack('!L', self.vpn))
        packed.append(struct.pack('!L', self.rate_limit))
        packed.append(struct.pack('!16s', self.if_name))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)
    # NOTE(review): unpack takes only 'reader' and constructs the object
    # itself -- in loxigen output this is normally a @staticmethod; the
    # decorator appears to have been stripped. Confirm before calling via an
    # instance.
    def unpack(reader):
        """Deserialize a bsn_vport_l2gre from *reader* (wire format)."""
        obj = bsn_vport_l2gre()
        _type = reader.read('!H')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        # Constrain further reads to this vport's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.flags = reader.read('!L')[0]
        obj.port_no = util.unpack_port_no(reader)
        obj.loopback_port_no = util.unpack_port_no(reader)
        obj.local_mac = list(reader.read('!6B'))
        obj.nh_mac = list(reader.read('!6B'))
        obj.src_ip = reader.read('!L')[0]
        obj.dst_ip = reader.read('!L')[0]
        obj.dscp = reader.read('!B')[0]
        obj.ttl = reader.read('!B')[0]
        # Skip the 2 padding bytes written by pack().
        reader.skip(2)
        obj.vpn = reader.read('!L')[0]
        obj.rate_limit = reader.read('!L')[0]
        # Interface name is a fixed 16-byte field, NUL-padded on the wire.
        obj.if_name = reader.read('!16s')[0].rstrip('\x00')
        return obj
    def __eq__(self, other):
        """Field-by-field value equality (same concrete type required)."""
        if (type(self) != type(other)):
            return False
        if (self.flags != other.flags):
            return False
        if (self.port_no != other.port_no):
            return False
        if (self.loopback_port_no != other.loopback_port_no):
            return False
        if (self.local_mac != other.local_mac):
            return False
        if (self.nh_mac != other.nh_mac):
            return False
        if (self.src_ip != other.src_ip):
            return False
        if (self.dst_ip != other.dst_ip):
            return False
        if (self.dscp != other.dscp):
            return False
        if (self.ttl != other.ttl):
            return False
        if (self.vpn != other.vpn):
            return False
        if (self.rate_limit != other.rate_limit):
            return False
        if (self.if_name != other.if_name):
            return False
        return True
    def pretty_print(self, q):
        """Render a human-readable dump of all fields to pretty-printer *q*."""
        q.text('bsn_vport_l2gre {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('flags = ')
                # Decode the flags bitmask into symbolic names.
                value_name_map = {1: 'OF_BSN_VPORT_L2GRE_LOCAL_MAC_IS_VALID', 2: 'OF_BSN_VPORT_L2GRE_DSCP_ASSIGN', 4: 'OF_BSN_VPORT_L2GRE_DSCP_COPY', 8: 'OF_BSN_VPORT_L2GRE_LOOPBACK_IS_VALID', 16: 'OF_BSN_VPORT_L2GRE_RATE_LIMIT_IS_VALID'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('port_no = ')
                q.text(util.pretty_port(self.port_no))
                q.text(',')
                q.breakable()
                q.text('loopback_port_no = ')
                q.text(util.pretty_port(self.loopback_port_no))
                q.text(',')
                q.breakable()
                q.text('local_mac = ')
                q.text(util.pretty_mac(self.local_mac))
                q.text(',')
                q.breakable()
                q.text('nh_mac = ')
                q.text(util.pretty_mac(self.nh_mac))
                q.text(',')
                q.breakable()
                q.text('src_ip = ')
                q.text(util.pretty_ipv4(self.src_ip))
                q.text(',')
                q.breakable()
                q.text('dst_ip = ')
                q.text(util.pretty_ipv4(self.dst_ip))
                q.text(',')
                q.breakable()
                q.text('dscp = ')
                q.text(('%#x' % self.dscp))
                q.text(',')
                q.breakable()
                q.text('ttl = ')
                q.text(('%#x' % self.ttl))
                q.text(',')
                q.breakable()
                q.text('vpn = ')
                q.text(('%#x' % self.vpn))
                q.text(',')
                q.breakable()
                q.text('rate_limit = ')
                q.text(('%#x' % self.rate_limit))
                q.text(',')
                q.breakable()
                q.text('if_name = ')
                q.pp(self.if_name)
                q.breakable()
        q.text('}')
class TestELFHash(unittest.TestCase):
    """Tests for the classic SysV ELF hash table implementation."""

    def test_elf_hash(self):
        """elf_hash must match the canonical SysV ELF hash.

        Fix: three assertEqual calls were missing their expected value
        (trailing-comma single-argument calls -> TypeError at runtime); the
        constants below are the SysV ELF hash values computed with the
        standard algorithm (h = (h << 4) + byte, folding the top nibble).
        """
        self.assertEqual(ELFHashTable.elf_hash(''), 0)
        self.assertEqual(ELFHashTable.elf_hash('main'), 473086)
        self.assertEqual(ELFHashTable.elf_hash('printf'), 125371814)  # 0x077905a6
        self.assertEqual(ELFHashTable.elf_hash('exit'), 446212)
        # Unicode input is hashed over its UTF-8 encoding.
        self.assertEqual(ELFHashTable.elf_hash(u'io123'), 7349331)
        # Bytes input is hashed byte-by-byte.
        self.assertEqual(ELFHashTable.elf_hash(b'\xe4\xbd\xa0\xe5\xa5\xbd'), 252178445)

    def test_get_number_of_syms(self):
        """A DT_HASH table found via the dynamic segment reports the correct
        symbol count even in a fully stripped binary."""
        with open(os.path.join('test', 'testfiles_for_unittests', 'aarch64_super_stripped.elf'), 'rb') as f:
            elf = ELFFile(f)
            dynamic_segment = None
            for segment in elf.iter_segments():
                if (segment.header.p_type == 'PT_DYNAMIC'):
                    dynamic_segment = segment
                    break
            (_, hash_offset) = dynamic_segment.get_table_offset('DT_HASH')
            hash_section = ELFHashTable(elf, hash_offset, dynamic_segment)
            self.assertIsNotNone(hash_section)
            self.assertEqual(hash_section.get_number_of_symbols(), 4)

    def test_get_symbol(self):
        """Symbols are resolvable by name through the .hash section."""
        path = os.path.join('test', 'testfiles_for_unittests', 'simple_mipsel.elf')
        with open(path, 'rb') as f:
            elf = ELFFile(f)
            hash_section = elf.get_section_by_name('.hash')
            self.assertIsNotNone(hash_section)
            symbol_main = hash_section.get_symbol('main')
            self.assertIsNotNone(symbol_main)
            self.assertEqual(symbol_main['st_value'], int(4196240))
def add_transaction_to_group(group: Dict[(str, Any)], transaction: TransactionDict) -> Tuple[(Dict[(str, Any)], Dict[(str, int)])]:
    """Merge *transaction* into a deep copy of *group*.

    Scalar fields (gasPrice, nonce, secretKey, to) must match the group
    exactly; list fields (data, gasLimit, value) are extended when the
    transaction supplies a value not yet present.

    Returns the merged group and the per-field indexes ('data', 'gas',
    'value') locating this transaction's values inside it.

    Raises ValueError when a scalar field differs or a missing list field
    cannot be resolved unambiguously.
    """
    # Scalar fields must agree with the group before anything is merged.
    for key in ('gasPrice', 'nonce', 'secretKey', 'to'):
        if key in transaction and transaction[key] != group[key]:
            raise ValueError(f"Can't add transaction as it differs in {key}")
    new_group = copy.deepcopy(group)
    indexes = {}
    for key, index_key in (('data', 'data'), ('gasLimit', 'gas'), ('value', 'value')):
        if key not in group:
            # Field absent from the group: it must be absent from the
            # transaction too.
            assert key not in transaction
            continue
        if key not in transaction:
            # No explicit value: defaulting works only when the group's list
            # has exactly one entry.
            if len(new_group[key]) != 1:
                raise ValueError(f"Can't add transaction as {key} is ambiguous")
            index = 0
        else:
            if transaction[key] not in new_group[key]:
                new_group[key].append(transaction[key])
            index = new_group[key].index(transaction[key])
        indexes[index_key] = index
    return (new_group, indexes)
class TestLocalBlobManager(unittest.TestCase):
    """Integration tests for LocalBlobManager upload/download behaviour."""

    # Fix: the original setUpClass/tearDownClass took ``cls`` but were
    # missing @classmethod, so unittest would have passed an instance (or
    # failed) instead of the class.
    @classmethod
    def setUpClass(cls) -> None:
        os.mkdir(_TMP_FOLDER)
        os.mkdir(_UPLOAD_FOLDER)
        os.mkdir(_DOWNLOAD_FOLDER)

    @classmethod
    def tearDownClass(cls) -> None:
        if os.path.exists(_TMP_FOLDER):
            shutil.rmtree(_TMP_FOLDER)

    def setUp(self) -> None:
        # Fresh empty scratch file for every test.
        file = open(_TMP_FILE, 'w')
        file.close()

    def tearDown(self) -> None:
        if os.path.exists(_TMP_FILE):
            os.remove(_TMP_FILE)

    def test_without_root_directory_set(self):
        """Creating a blob manager without root_directory must fail."""
        conf = BlobManagerConfig({'blob_manager_class': 'ai_flow.blob_manager.impl.local_blob_manager.LocalBlobManager', 'blob_manager_config': {}})
        with self.assertRaisesRegex(Exception, '`root_directory` option of blob manager config is not configured'):
            BlobManagerFactory.create_blob_manager(conf)

    def test_project_upload_download_local(self):
        """Upload places the file under the upload folder; download places it
        in the requested target folder."""
        blob_manager = BlobManagerFactory.create_blob_manager(CONFIG)
        uploaded_path = blob_manager.upload(_TMP_FILE)
        # NOTE(review): joining uploaded_path onto the expected prefix makes
        # this assertion vacuous whenever uploaded_path is absolute; the
        # intended expectation is probably
        # os.path.join(_UPLOAD_FOLDER, os.path.basename(_TMP_FILE)) -- confirm.
        self.assertEqual(os.path.join(_UPLOAD_FOLDER, os.path.basename(_TMP_FILE), uploaded_path), uploaded_path)
        downloaded_path = blob_manager.download(uploaded_path, _DOWNLOAD_FOLDER)
        self.assertEqual(os.path.join(_DOWNLOAD_FOLDER, os.path.basename(_TMP_FILE)), downloaded_path)

    def test_project_upload_download_local_2(self):
        """Download without a target dir returns the stored path itself."""
        blob_manager = BlobManagerFactory.create_blob_manager(CONFIG)
        uploaded_path = blob_manager.upload(_TMP_FILE)
        downloaded_path = blob_manager.download(uploaded_path)
        self.assertEqual(uploaded_path, downloaded_path)

    def test_project_download_local_same_time(self):
        """Concurrent downloads of the same artifact must not conflict."""
        blob_manager = BlobManagerFactory.create_blob_manager(CONFIG)
        uploaded_path = blob_manager.upload(_TMP_FILE)

        def download_project():
            blob_manager.download(uploaded_path)
        threads = []
        for _ in range(2):
            t = threading.Thread(target=download_project, args=())
            # Fix: Thread.setDaemon() is deprecated; use the daemon attribute.
            t.daemon = True
            t.start()
            threads.append(t)
        for t in threads:
            t.join()
        downloaded_path = blob_manager.download(uploaded_path, _DOWNLOAD_FOLDER)
        self.assertEqual(os.path.join(_DOWNLOAD_FOLDER, os.path.basename(_TMP_FILE)), downloaded_path)
class LedgerApiDialogues(Model, BaseLedgerApiDialogues):
    """Dialogue manager for ledger API interactions owned by this skill."""
    def __init__(self, **kwargs: Any) -> None:
        """Initialize both the skill Model and the dialogues machinery."""
        Model.__init__(self, **kwargs)
        def role_from_first_message(message: Message, receiver_address: Address) -> Dialogue.Role:
            # This agent always plays the AGENT role, regardless of message.
            return BaseLedgerApiDialogue.Role.AGENT
        BaseLedgerApiDialogues.__init__(self, self_address=str(self.skill_id), role_from_first_message=role_from_first_message, dialogue_class=LedgerApiDialogue)
class OptionPlotoptionsSunburstSonification(Options):
    """Generated Highcharts option proxy for
    ``plotOptions.sunburst.sonification``.

    NOTE(review): ``enabled`` is defined twice (getter then setter form), so
    the later definition shadows the earlier one; '@property'/setter
    decorators were presumably stripped -- confirm against the generator
    output.
    """
    def contextTracks(self) -> 'OptionPlotoptionsSunburstSonificationContexttracks':
        """Sub-option container for context tracks."""
        return self._config_sub_data('contextTracks', OptionPlotoptionsSunburstSonificationContexttracks)
    def defaultInstrumentOptions(self) -> 'OptionPlotoptionsSunburstSonificationDefaultinstrumentoptions':
        """Sub-option container for default instrument options."""
        return self._config_sub_data('defaultInstrumentOptions', OptionPlotoptionsSunburstSonificationDefaultinstrumentoptions)
    def defaultSpeechOptions(self) -> 'OptionPlotoptionsSunburstSonificationDefaultspeechoptions':
        """Sub-option container for default speech options."""
        return self._config_sub_data('defaultSpeechOptions', OptionPlotoptionsSunburstSonificationDefaultspeechoptions)
    def enabled(self):
        # Getter form: sonification is enabled by default.
        return self._config_get(True)
    def enabled(self, flag: bool):
        # Setter form.
        self._config(flag, js_type=False)
    def pointGrouping(self) -> 'OptionPlotoptionsSunburstSonificationPointgrouping':
        """Sub-option container for point grouping."""
        return self._config_sub_data('pointGrouping', OptionPlotoptionsSunburstSonificationPointgrouping)
    def tracks(self) -> 'OptionPlotoptionsSunburstSonificationTracks':
        """Sub-option container for tracks."""
        return self._config_sub_data('tracks', OptionPlotoptionsSunburstSonificationTracks)
def test_get_included_sum(stats_update_db, backend_db):
    """The size sum over file objects includes parent and child files but
    excludes the firmware container itself (firmware=False)."""
    fw, parent, child = create_fw_with_parent_and_child()
    fw.size = 1337
    parent.size = 25
    child.size = 175
    for entry in (fw, parent, child):
        backend_db.add_object(entry)
    # 25 + 175 = 200; the firmware's 1337 bytes are excluded.
    assert stats_update_db.get_sum(FileObjectEntry.size, firmware=False) == 200
def extractGalaxytranslations97Com(item):
    """Parse a feed *item* from galaxytranslations97.com into a release
    message.

    Returns None for items with no chapter/volume info or preview posts,
    a release message for recognized tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume info, and preview posts.
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Recyclebin(BaseAligo):
    """Low-level recycle-bin operations: trash, restore and listing."""
    def _core_move_file_to_trash(self, body: MoveFileToTrashRequest) -> MoveFileToTrashResponse:
        """Move a single file to the recycle bin."""
        response = self.post(V2_RECYCLEBIN_TRASH, body=body)
        # Both 202 and 204 indicate success for this endpoint.
        return self._result(response, MoveFileToTrashResponse, [202, 204])
    def _core_batch_move_to_trash(self, body: BatchMoveToTrashRequest) -> Iterator[BatchSubResponse]:
        """Move a batch of files to the recycle bin, yielding per-file results."""
        if (body.drive_id is None):
            body.drive_id = self.default_drive_id
        (yield from self.batch_request(BatchRequest(requests=[BatchSubRequest(id=file_id, url='/recyclebin/trash', body=MoveFileToTrashRequest(drive_id=body.drive_id, file_id=file_id)) for file_id in body.file_id_list]), MoveFileToTrashResponse))
    def _core_restore_file(self, body: RestoreFileRequest) -> RestoreFileResponse:
        """Restore a single file from the recycle bin."""
        response = self.post(V2_RECYCLEBIN_RESTORE, body=body)
        return self._result(response, RestoreFileResponse, 204)
    def _core_batch_restore_files(self, body: BatchRestoreRequest) -> Iterator[BatchSubResponse]:
        """Restore a batch of files from the recycle bin, yielding per-file results."""
        if (body.drive_id is None):
            body.drive_id = self.default_drive_id
        (yield from self.batch_request(BatchRequest(requests=[BatchSubRequest(id=file_id, url='/recyclebin/restore', body=RestoreFileRequest(drive_id=body.drive_id, file_id=file_id)) for file_id in body.file_id_list]), RestoreFileResponse))
    def _core_get_recyclebin_list(self, body: GetRecycleBinListRequest) -> Iterator[BaseFile]:
        """Iterate files currently in the recycle bin."""
        (yield from self._list_file(V2_RECYCLEBIN_LIST, body, GetRecycleBinListResponse))
class OptionPlotoptionsSplineDragdrop(Options):
    """Generated Highcharts option proxy for
    ``plotOptions.spline.dragDrop``.

    NOTE(review): every option is defined twice (getter-style then
    setter-style def of the same name), so the later definition shadows the
    earlier one; '@property'/setter decorators were presumably stripped --
    confirm against the generator output.
    """
    def draggableX(self):
        # Getter form: no configured value by default.
        return self._config_get(None)
    def draggableX(self, flag: bool):
        # Setter form: store the raw value (js_type=False -> not JS code).
        self._config(flag, js_type=False)
    def draggableY(self):
        return self._config_get(None)
    def draggableY(self, flag: bool):
        self._config(flag, js_type=False)
    def dragHandle(self) -> 'OptionPlotoptionsSplineDragdropDraghandle':
        """Sub-option container for the drag handle."""
        return self._config_sub_data('dragHandle', OptionPlotoptionsSplineDragdropDraghandle)
    def dragMaxX(self):
        return self._config_get(None)
    def dragMaxX(self, num: float):
        self._config(num, js_type=False)
    def dragMaxY(self):
        return self._config_get(None)
    def dragMaxY(self, num: float):
        self._config(num, js_type=False)
    def dragMinX(self):
        return self._config_get(None)
    def dragMinX(self, num: float):
        self._config(num, js_type=False)
    def dragMinY(self):
        return self._config_get(None)
    def dragMinY(self, num: float):
        self._config(num, js_type=False)
    def dragPrecisionX(self):
        # Default precision is 0 (no snapping).
        return self._config_get(0)
    def dragPrecisionX(self, num: float):
        self._config(num, js_type=False)
    def dragPrecisionY(self):
        return self._config_get(0)
    def dragPrecisionY(self, num: float):
        self._config(num, js_type=False)
    def dragSensitivity(self):
        # Default sensitivity is 2 pixels.
        return self._config_get(2)
    def dragSensitivity(self, num: float):
        self._config(num, js_type=False)
    def groupBy(self):
        return self._config_get(None)
    def groupBy(self, text: str):
        self._config(text, js_type=False)
    def guideBox(self) -> 'OptionPlotoptionsSplineDragdropGuidebox':
        """Sub-option container for the guide box shown while dragging."""
        return self._config_sub_data('guideBox', OptionPlotoptionsSplineDragdropGuidebox)
    def liveRedraw(self):
        # Redraw live while dragging by default.
        return self._config_get(True)
    def liveRedraw(self, flag: bool):
        self._config(flag, js_type=False)
def format_suffix_patterns(urlpatterns, suffix_required=False, allowed=None):
    """Supplement *urlpatterns* with variants that accept a format suffix.

    urlpatterns -- URL patterns to extend
    suffix_required -- when True, only suffixed URLs match
    allowed -- optional list of permitted format suffixes
    """
    suffix_kwarg = api_settings.FORMAT_SUFFIX_KWARG
    if not allowed:
        # No restriction: accept any lowercase alphanumeric suffix.
        suffix_pattern = ('\\.(?P<%s>[a-z0-9]+)/?$' % suffix_kwarg)
    elif len(allowed) == 1:
        suffix_pattern = ('\\.(?P<%s>%s)/?$' % (suffix_kwarg, allowed[0]))
    else:
        # Multiple suffixes: alternation group.
        suffix_pattern = ('\\.(?P<%s>%s)/?$' % (suffix_kwarg, ('(%s)' % '|'.join(allowed))))
    # Register a path converter so path()-style routes get suffixes too.
    converter_name, suffix_converter = _get_format_path_converter(suffix_kwarg, allowed)
    register_converter(suffix_converter, converter_name)
    suffix_route = ('<%s:%s>' % (converter_name, suffix_kwarg))
    return apply_suffix_patterns(urlpatterns, suffix_pattern, suffix_required, suffix_route)
class TestChangePasswordForm(object):
    """Tests for forms.ChangePasswordForm validation and conversion."""

    def test_transforms_to_expected_change_object(self):
        """as_change() maps form fields onto a PasswordUpdate."""
        data = MultiDict({'old_password': 'old_password', 'new_password': 'password', 'confirm_new_password': 'password', 'submit': True})
        form = forms.ChangePasswordForm(formdata=data)
        expected = PasswordUpdate(old_password='old_password', new_password='password')
        assert (form.as_change() == expected)

    def test_valid_inputs(self):
        """A complete, matching submission validates."""
        data = MultiDict({'submit': True, 'old_password': 'old_password', 'new_password': 'password', 'confirm_new_password': 'password'})
        # CSRF disabled so the form validates outside a request context.
        form = forms.ChangePasswordForm(formdata=data, meta={'csrf': False})
        assert form.validate_on_submit()

    # Fix: the source contained a bare '.parametrize(...)' line (a syntax
    # error) -- evidently a mangled pytest mark, restored here.
    @pytest.mark.parametrize('formdata', [{'old_password': ''}, {'new_password': ''}, {'confirm_new_password': ''}, {'new_password': 'doesntmatch'}])
    def test_invalid_inputs(self, formdata):
        """Empty fields and mismatched confirmations must fail validation."""
        data = {'old_password': 'old_password', 'new_password': 'password', 'confirm_new_password': 'password', 'submit': True}
        data.update(formdata)
        form = forms.ChangePasswordForm(formdata=MultiDict(data))
        assert (not form.validate_on_submit())
def _gather_deleted_transaction_keys(config: dict, fabs_external_data_load_date_key: str='fabs', fpds_external_data_load_date_key: str='fpds') -> Optional[Dict[(Union[(str, Any)], Dict[(str, Any)])]]:
    """Collect the ids of transactions deleted since the last ES delete run.

    Scans the configured S3 bucket for delete CSVs modified between the last
    'es_deletes' load date and the latest FABS/FPDS load date, and returns a
    mapping of transaction id -> {'timestamp': last_modified}.  Returns None
    when delete processing is disabled in *config*.
    """
    if (not config['process_deletes']):
        logger.info(format_log(f'Skipping the S3 CSV fetch for deleted transactions', action='Delete'))
        return None
    logger.info(format_log(f'Gathering all deleted transactions from S3', action='Delete'))
    start = perf_counter()
    bucket_objects = retrieve_s3_bucket_object_list(bucket_name=config['s3_bucket'])
    logger.info(format_log(f"{len(bucket_objects):,} files found in bucket '{config['s3_bucket']}'", action='Delete'))
    # Only consider CSVs modified inside the [last es_deletes, latest load] window.
    start_date = get_last_load_date('es_deletes')
    end_date = get_latest_load_date([fabs_external_data_load_date_key, fpds_external_data_load_date_key])
    logger.info(format_log(f'CSV data from {start_date} to {end_date}', action='Delete'))
    filtered_csv_list = [x for x in bucket_objects if (x.key.endswith('.csv') and (not x.key.startswith('staging')) and (x.last_modified >= start_date) and (x.last_modified <= end_date))]
    if config['verbose']:
        logger.info(format_log(f'Found {len(filtered_csv_list)} csv files', action='Delete'))
    deleted_keys = {}
    for obj in filtered_csv_list:
        object_data = access_s3_object(bucket_name=config['s3_bucket'], obj=obj)
        data = pd.read_csv(object_data, dtype=str)
        # Column present determines the id prefix: FPDS (CONT) vs FABS (ASST).
        if ('detached_award_proc_unique' in data):
            new_ids = [('CONT_TX_' + x.upper()) for x in data['detached_award_proc_unique'].values]
        elif ('afa_generated_unique' in data):
            new_ids = [('ASST_TX_' + x.upper()) for x in data['afa_generated_unique'].values]
        else:
            msg = f'[Missing valid CSV col] in {obj.key}'
            logger.error(format_log(msg, action='Delete'))
            raise RuntimeError(msg)
        # Keep only the most recent deletion timestamp per id.
        for uid in new_ids:
            if (uid in deleted_keys):
                if (deleted_keys[uid]['timestamp'] < obj.last_modified):
                    deleted_keys[uid]['timestamp'] = obj.last_modified
            else:
                deleted_keys[uid] = {'timestamp': obj.last_modified}
    if config['verbose']:
        for (uid, deleted_dict) in deleted_keys.items():
            logger.info(format_log(f"id: {uid} last modified: {deleted_dict['timestamp']}", action='Delete'))
    logger.info(format_log(f'Gathered {len(deleted_keys):,} deleted transactions from {len(filtered_csv_list)} files in increment in {(perf_counter() - start):.2f}s', action='Delete'))
    return deleted_keys
class Command(BaseRevisionCommand):
    """Management command that purges stale django-reversion revisions."""
    help = 'Deletes revisions for a given app [and model].'
    def add_arguments(self, parser):
        super().add_arguments(parser)
        parser.add_argument('--days', default=0, type=int, help='Delete only revisions older than the specified number of days.')
        parser.add_argument('--keep', default=0, type=int, help='Keep the specified number of revisions (most recent) for each object.')
    def handle(self, *app_labels, **options):
        """Collect deletable revisions for the selected models and delete
        them in one transaction, honoring --days and --keep."""
        verbosity = options['verbosity']
        using = options['using']
        model_db = options['model_db']
        days = options['days']
        keep = options['keep']
        # Fall back to the default write database for Revision objects.
        using = (using or router.db_for_write(Revision))
        with transaction.atomic(using=using):
            revision_query = models.Q()
            keep_revision_ids = set()
            can_delete = False
            for model in self.get_models(options):
                if (verbosity >= 1):
                    self.stdout.write('Finding stale revisions for {name}'.format(name=model._meta.verbose_name))
                model_query = Version.objects.using(using).get_for_model(model, model_db=model_db)
                if keep:
                    # Only objects with more than `keep` versions can lose
                    # any; remember the newest `keep` revision ids per object
                    # so they are excluded from deletion below.
                    overflow_object_ids = list(Version.objects.using(using).get_for_model(model, model_db=model_db).order_by().values_list('object_id').annotate(count=models.Count('object_id')).filter(count__gt=keep).values_list('object_id', flat=True).iterator())
                    model_query = model_query.filter(object_id__in=overflow_object_ids)
                    for object_id in overflow_object_ids:
                        if (verbosity >= 2):
                            self.stdout.write('- Finding stale revisions for {name} #{object_id}'.format(name=model._meta.verbose_name, object_id=object_id))
                        keep_revision_ids.update(Version.objects.using(using).get_for_object_reference(model, object_id, model_db=model_db).values_list('revision_id', flat=True)[:keep].iterator())
                revision_query |= models.Q(pk__in=model_query.order_by().values_list('revision_id', flat=True))
                can_delete = True
            if can_delete:
                # Apply the age cutoff and protect the kept revisions.
                revisions_to_delete = Revision.objects.using(using).filter(revision_query, date_created__lt=(timezone.now() - timedelta(days=days))).exclude(pk__in=keep_revision_ids).order_by()
            else:
                revisions_to_delete = Revision.objects.using(using).none()
            if (verbosity >= 1):
                self.stdout.write('Deleting {total} revisions...'.format(total=revisions_to_delete.count()))
            revisions_to_delete.delete()
class ValveTestOrderedTunnel2DP(ValveTestBases.ValveTestTunnel):
    """Tunnel ACL tests over a 2-DP stack: source, destination, same-DP and
    no-destination tunnel variants, including recovery after a port goes
    down."""
    # Tunnel ids used by validate_tunnel for each ACL variant.
    SRC_ID = 6
    DST_ID = 2
    SAME_ID = 4
    NONE_ID = 3
    CONFIG = "\nacls:\n    src_acl:\n        - rule:\n            dl_type: 0x0800\n            ip_proto: 1\n            actions:\n                output:\n                    - tunnel: {dp: s2, port: 1}\n        - rule:\n            dl_type: 0x86dd\n            ip_proto: 56\n            actions:\n                output:\n                    - tunnel: {dp: s2, port: 1}\n    dst_acl:\n        - rule:\n            dl_type: 0x0800\n            ip_proto: 1\n            actions:\n                output:\n                    - tunnel: {dp: s1, port: 1}\n    same_acl:\n        - rule:\n            dl_type: 0x0800\n            ip_proto: 1\n            actions:\n                output:\n                    - tunnel: {dp: s1, port: 1}\n    none_acl:\n        - rule:\n            dl_type: 0x0800\n            ip_proto: 1\n            actions:\n                output:\n                    - tunnel: {dp: s2, port: 1}\nvlans:\n    vlan100:\n        vid: 1\ndps:\n    s1:\n        dp_id: 0x1\n        hardware: 'GenericTFM'\n        stack:\n            priority: 1\n        interfaces:\n            1:\n                name: src_tunnel_host\n                native_vlan: vlan100\n                acls_in: [src_acl]\n            2:\n                name: same_tunnel_host\n                native_vlan: vlan100\n                acls_in: [same_acl]\n            3:\n                stack: {dp: s2, port: 3}\n            4:\n                stack: {dp: s2, port: 4}\n    s2:\n        dp_id: 0x2\n        hardware: 'GenericTFM'\n        interfaces:\n            1:\n                name: dst_tunnel_host\n                native_vlan: vlan100\n                acls_in: [dst_acl]\n            2:\n                name: transit_tunnel_host\n                native_vlan: vlan100\n                acls_in: [none_acl]\n            3:\n                stack: {dp: s1, port: 3}\n            4:\n                stack: {dp: s1, port: 4}\n"
    def test_update_src_tunnel(self):
        """Source-side tunnel encapsulates/forwards, and re-routes out the
        alternate stack port after the primary port goes down."""
        valve = self.valves_manager.valves[1]
        port = valve.dp.ports[3]
        self.apply_ofmsgs(valve.stack_manager.add_tunnel_acls())
        self.validate_tunnel(self.DP_ID, self.DP_ID, 1, 0, 3, self.SRC_ID, True, 'Did not encapsulate and forward')
        # Also verify the IPv6 rule in src_acl.
        self.validate_tunnel(self.DP_ID, self.DP_ID, 1, 0, 3, self.SRC_ID, True, 'Did not encapsulate and forward', eth_type=34525, ip_proto=56)
        self.set_port_down(port.number)
        ofmsgs = valve.stack_manager.add_tunnel_acls()
        self.assertTrue(ofmsgs, 'No tunnel ofmsgs returned after a topology change')
        self.apply_ofmsgs(ofmsgs)
        self.validate_tunnel(self.DP_ID, self.DP_ID, 1, 0, 4, self.SRC_ID, True, 'Did not encapsulate and forward out re-calculated port')
    def test_update_same_tunnel(self):
        """Tunnel to a host on the same DP is forwarded directly."""
        valve = self.valves_manager.valves[1]
        self.apply_ofmsgs(valve.stack_manager.add_tunnel_acls())
        self.validate_tunnel(self.DP_ID, self.DP_ID, 2, 0, 1, 0, True, 'Did not forward to host on same DP')
    def test_update_dst_tunnel(self):
        """Destination-side tunnel outputs to the host, and re-routes after
        the primary stack port goes down."""
        valve = self.valves_manager.valves[1]
        port = valve.dp.ports[3]
        self.apply_ofmsgs(valve.stack_manager.add_tunnel_acls())
        self.validate_tunnel(self.DP_ID, self.DP_ID, 3, self.DST_ID, 1, 0, True, 'Did not output to host', pcp=valve_of.PCP_TUNNEL_FLAG)
        self.set_port_down(port.number)
        ofmsgs = valve.stack_manager.add_tunnel_acls()
        self.assertTrue(ofmsgs, 'No tunnel ofmsgs returned after a topology change')
        self.apply_ofmsgs(ofmsgs)
        self.validate_tunnel(self.DP_ID, self.DP_ID, 4, self.DST_ID, 1, 0, True, 'Did not output to host', pcp=valve_of.PCP_TUNNEL_FLAG)
    def test_update_none_tunnel(self):
        """Packets for a tunnel with no local destination must not be output."""
        valve = self.valves_manager.valves[1]
        self.apply_ofmsgs(valve.stack_manager.add_tunnel_acls())
        self.validate_tunnel(self.DP_ID, self.DP_ID, 5, self.NONE_ID, None, None, False, 'Should not output a packet')
        self.validate_tunnel(self.DP_ID, self.DP_ID, 6, self.NONE_ID, None, None, False, 'Should not output a packet')
class Markdown():
    """Convert Markdown-formatted text into HTML or XHTML.

    The conversion pipeline is: preprocessors -> block parser (to an
    ElementTree) -> treeprocessors -> serializer -> postprocessors.
    Extensions may hook into any of these stages.
    """
    # Tag the serializer wraps the whole document in; removed again in
    # convert() when stripTopLevelTags is True.
    doc_tag = 'div'
    # Known output formats mapped to their serializer callables.
    output_formats = {'html': to_html_string, 'xhtml': to_xhtml_string}
    def __init__(self, **kwargs):
        """Create and configure a converter.

        Keyword Args:
            tab_length (int): Spaces a tab expands to (default 4).
            extensions (list): Extension instances and/or dotted-name strings.
            extension_configs (dict): Per-extension configuration keyed by
                the same names used in ``extensions``.
            output_format (str): A key of ``output_formats`` (default 'xhtml').
        """
        self.tab_length = kwargs.get('tab_length', 4)
        # Characters a backslash may escape in source text.
        self.ESCAPED_CHARS = ['\\', '`', '*', '_', '{', '}', '[', ']', '(', ')', '>', '#', '+', '-', '.', '!']
        # Tag names treated as block-level by is_block_level().
        self.block_level_elements = ['address', 'article', 'aside', 'blockquote', 'details', 'div', 'dl', 'fieldset', 'figcaption', 'figure', 'footer', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'header', 'hr', 'main', 'menu', 'nav', 'ol', 'p', 'pre', 'section', 'table', 'ul', 'canvas', 'dd', 'dt', 'group', 'iframe', 'li', 'math', 'noscript', 'output', 'progress', 'script', 'style', 'tbody', 'td', 'th', 'thead', 'tr', 'video']
        self.registeredExtensions = []
        self.docType = ''
        self.stripTopLevelTags = True
        self.build_parser()
        self.references = {}
        self.htmlStash = util.HtmlStash()
        self.registerExtensions(extensions=kwargs.get('extensions', []), configs=kwargs.get('extension_configs', {}))
        self.set_output_format(kwargs.get('output_format', 'xhtml'))
        self.reset()
    def build_parser(self):
        """Instantiate the five processing stages; returns self for chaining."""
        self.preprocessors = build_preprocessors(self)
        self.parser = build_block_parser(self)
        self.inlinePatterns = build_inlinepatterns(self)
        self.treeprocessors = build_treeprocessors(self)
        self.postprocessors = build_postprocessors(self)
        return self
    def registerExtensions(self, extensions, configs):
        """Load *extensions* into this instance; returns self.

        Entries may be Extension instances or dotted-name strings (resolved
        via build_extension with the matching entry of *configs*).

        Raises:
            TypeError: if an entry is neither a string, an Extension, nor None.
        """
        for ext in extensions:
            if isinstance(ext, str):
                ext = self.build_extension(ext, configs.get(ext, {}))
            if isinstance(ext, Extension):
                ext._extendMarkdown(self)
                logger.debug(('Successfully loaded extension "%s.%s".' % (ext.__class__.__module__, ext.__class__.__name__)))
            elif (ext is not None):
                raise TypeError('Extension "{}.{}" must be of type: "{}.{}"'.format(ext.__class__.__module__, ext.__class__.__name__, Extension.__module__, Extension.__name__))
        return self
    def build_extension(self, ext_name, configs):
        """Resolve *ext_name* to an Extension instance.

        Resolution order: an installed entry point of that name, then
        ``module:ClassName`` notation, then a module exposing a
        ``makeExtension()`` factory.
        """
        configs = dict(configs)
        # First preference: a registered entry point with a matching name.
        entry_points = [ep for ep in util.INSTALLED_EXTENSIONS if (ep.name == ext_name)]
        if entry_points:
            ext = entry_points[0].load()
            return ext(**configs)
        # Otherwise treat the name as "module" or "module:ClassName".
        (ext_name, class_name) = (ext_name.split(':', 1) if (':' in ext_name) else (ext_name, ''))
        try:
            module = importlib.import_module(ext_name)
            logger.debug(('Successfully imported extension module "%s".' % ext_name))
        except ImportError as e:
            message = ('Failed loading extension "%s".' % ext_name)
            e.args = ((message,) + e.args[1:])
            raise
        if class_name:
            return getattr(module, class_name)(**configs)
        else:
            # No class given: the module must provide makeExtension().
            try:
                return module.makeExtension(**configs)
            except AttributeError as e:
                message = e.args[0]
                message = ("Failed to initiate extension '%s': %s" % (ext_name, message))
                e.args = ((message,) + e.args[1:])
                raise
    def registerExtension(self, extension):
        """Record an extension that wants reset() callbacks; returns self."""
        self.registeredExtensions.append(extension)
        return self
    def reset(self):
        """Clear per-document state so the instance can convert again; returns self."""
        self.htmlStash.reset()
        self.references.clear()
        for extension in self.registeredExtensions:
            if hasattr(extension, 'reset'):
                extension.reset()
        return self
    def set_output_format(self, format):
        """Select the serializer for *format*; returns self.

        Trailing version digits are dropped via rstrip('145'), so e.g.
        'html4'/'html5' select 'html' and 'xhtml1'/'xhtml5' select 'xhtml'.

        Raises:
            KeyError: (re-raised with a helpful message) for unknown formats.
        """
        self.output_format = format.lower().rstrip('145')
        try:
            self.serializer = self.output_formats[self.output_format]
        except KeyError as e:
            valid_formats = list(self.output_formats.keys())
            valid_formats.sort()
            message = ('Invalid Output Format: "%s". Use one of %s.' % (self.output_format, (('"' + '", "'.join(valid_formats)) + '"')))
            e.args = ((message,) + e.args[1:])
            raise
        return self
    def is_block_level(self, tag):
        """Return True if *tag* (string, optional trailing '/') is block-level."""
        if isinstance(tag, str):
            return (tag.lower().rstrip('/') in self.block_level_elements)
        return False
    def convert(self, source):
        """Convert *source* markdown text and return the serialized output.

        Runs preprocessors, parses to an ElementTree, applies
        treeprocessors, serializes, strips the wrapping doc_tag, then runs
        postprocessors. Empty/whitespace-only input yields ''.
        """
        if (not source.strip()):
            return ''
        try:
            source = str(source)
        except UnicodeDecodeError as e:
            e.reason += '. -- Note: Markdown only accepts unicode input!'
            raise
        self.lines = source.split('\n')
        for prep in self.preprocessors:
            self.lines = prep.run(self.lines)
        root = self.parser.parseDocument(self.lines).getroot()
        for treeprocessor in self.treeprocessors:
            newRoot = treeprocessor.run(root)
            if (newRoot is not None):
                root = newRoot
        output = self.serializer(root)
        if self.stripTopLevelTags:
            try:
                # +2 accounts for the '<' and '>' around doc_tag.
                start = ((output.index(('<%s>' % self.doc_tag)) + len(self.doc_tag)) + 2)
                end = output.rindex(('</%s>' % self.doc_tag))
                output = output[start:end].strip()
            except ValueError:
                if output.strip().endswith(('<%s />' % self.doc_tag)):
                    # Empty document serialized as a self-closing doc_tag.
                    output = ''
                else:
                    raise ValueError(('Markdown failed to strip top-level tags. Document=%r' % output.strip()))
        for pp in self.postprocessors:
            output = pp.run(output)
        return output.strip()
    def convertFile(self, input=None, output=None, encoding=None):
        """Convert a file (or stdin) and write the result to a file (or stdout).

        *input*/*output* may be path strings or open binary streams;
        *encoding* defaults to 'utf-8' for both ends. Returns self.
        """
        encoding = (encoding or 'utf-8')
        if input:
            if isinstance(input, str):
                input_file = codecs.open(input, mode='r', encoding=encoding)
            else:
                input_file = codecs.getreader(encoding)(input)
            text = input_file.read()
            input_file.close()
        else:
            text = sys.stdin.read()
            if (not isinstance(text, str)):
                text = text.decode(encoding)
        # Drop a leading BOM if present.
        text = text.lstrip('\ufeff')
        html = self.convert(text)
        if output:
            if isinstance(output, str):
                output_file = codecs.open(output, 'w', encoding=encoding, errors='xmlcharrefreplace')
                output_file.write(html)
                output_file.close()
            else:
                writer = codecs.getwriter(encoding)
                output_file = writer(output, errors='xmlcharrefreplace')
                output_file.write(html)
        else:
            html = html.encode(encoding, 'xmlcharrefreplace')
            try:
                # Prefer the binary buffer when stdout has one (Python 3).
                sys.stdout.buffer.write(html)
            except AttributeError:
                sys.stdout.write(html)
        return self
class FlyteDirToMultipartBlobTransformer(TypeTransformer[FlyteDirectory]):
    """Transformer between FlyteDirectory values and multipart-blob literals.

    Uploads local directories to remote storage on serialization and
    wires up a lazy downloader on deserialization.
    """
    def __init__(self):
        super().__init__(name='FlyteDirectory', t=FlyteDirectory)
    # FIX: get_format and _blob_type take no `self`/`cls` parameter but were
    # declared as plain methods; calls such as `self.get_format(python_type)`
    # (to_literal) and `self._blob_type(format=...)` (get_literal_type) would
    # bind the instance to the first parameter and raise TypeError. Declaring
    # them @staticmethod restores both call forms.
    @staticmethod
    def get_format(t: typing.Type[FlyteDirectory]) -> str:
        """Return the format (extension) declared on the FlyteDirectory type."""
        return t.extension()
    @staticmethod
    def _blob_type(format: str) -> _core_types.BlobType:
        """Build a MULTIPART blob-type descriptor for *format*."""
        return _core_types.BlobType(format=format, dimensionality=_core_types.BlobType.BlobDimensionality.MULTIPART)
    def assert_type(self, t: typing.Type[FlyteDirectory], v: typing.Union[(FlyteDirectory, os.PathLike, str)]):
        """Accept FlyteDirectory, str, or os.PathLike values; reject others."""
        if (isinstance(v, FlyteDirectory) or isinstance(v, str) or isinstance(v, os.PathLike)):
            return
        raise TypeError(f'No automatic conversion from {type(v)} declared type {t} to FlyteDirectory found. Use (FlyteDirectory, str, os.PathLike)')
    def get_literal_type(self, t: typing.Type[FlyteDirectory]) -> LiteralType:
        """Return the multipart-blob LiteralType for the directory type *t*."""
        return _type_models.LiteralType(blob=self._blob_type(format=FlyteDirToMultipartBlobTransformer.get_format(t)))
    def to_literal(self, ctx: FlyteContext, python_val: FlyteDirectory, python_type: typing.Type[FlyteDirectory], expected: LiteralType) -> Literal:
        """Serialize a directory value to a blob Literal, uploading if needed.

        Raises:
            ValueError: if a path value does not point at a directory.
            AssertionError: if *python_val* has an unsupported type.
        """
        remote_directory = None
        should_upload = True
        batch_size = get_batch_size(python_type)
        meta = BlobMetadata(type=self._blob_type(format=self.get_format(python_type)))
        if isinstance(python_val, FlyteDirectory):
            # Already backed by a remote source: reference it directly.
            if (python_val._remote_source is not None):
                return Literal(scalar=Scalar(blob=Blob(metadata=meta, uri=python_val._remote_source)))
            source_path = str(python_val.path)
            # Skip the upload when the user opted out (remote_directory=False),
            # the path is already remote, or we are in a local execution mode.
            if ((not isinstance(python_val.remote_directory, (pathlib.Path, str))) and ((python_val.remote_directory is False) or ctx.file_access.is_remote(source_path) or (ctx.execution_state.mode in {ctx.execution_state.Mode.LOCAL_WORKFLOW_EXECUTION, ctx.execution_state.Mode.LOCAL_TASK_EXECUTION}))):
                should_upload = False
            remote_directory = (python_val.remote_directory or None)
        elif isinstance(python_val, (pathlib.Path, str)):
            source_path = str(python_val)
            if ctx.file_access.is_remote(source_path):
                should_upload = False
            else:
                p = Path(source_path)
                if (not p.is_dir()):
                    raise ValueError(f'Expected a directory. {source_path} is not a directory')
        else:
            raise AssertionError(f'Expected FlyteDirectory or os.PathLike object, received {type(python_val)}')
        if should_upload:
            if (remote_directory is None):
                remote_directory = ctx.file_access.get_random_remote_directory()
            ctx.file_access.put_data(source_path, remote_directory, is_multipart=True, batch_size=batch_size)
            return Literal(scalar=Scalar(blob=Blob(metadata=meta, uri=remote_directory)))
        else:
            return Literal(scalar=Scalar(blob=Blob(metadata=meta, uri=source_path)))
    def to_python_value(self, ctx: FlyteContext, lv: Literal, expected_python_type: typing.Type[FlyteDirectory]) -> FlyteDirectory:
        """Deserialize a blob Literal to a FlyteDirectory with a lazy downloader."""
        uri = lv.scalar.blob.uri
        # Local URIs need no download machinery.
        if (not ctx.file_access.is_remote(uri)):
            return expected_python_type(uri, remote_directory=False)
        local_folder = ctx.file_access.get_random_local_directory()
        batch_size = get_batch_size(expected_python_type)
        def _downloader():
            # Deferred: data is fetched only when the directory is accessed.
            return ctx.file_access.get_data(uri, local_folder, is_multipart=True, batch_size=batch_size)
        expected_format = self.get_format(expected_python_type)
        fd = FlyteDirectory.__class_getitem__(expected_format)(local_folder, _downloader)
        fd._remote_source = uri
        return fd
    def guess_python_type(self, literal_type: LiteralType) -> typing.Type[FlyteDirectory[typing.Any]]:
        """Map a multipart-blob LiteralType back to a FlyteDirectory type.

        Raises:
            ValueError: for any non-multipart-blob literal type.
        """
        if ((literal_type.blob is not None) and (literal_type.blob.dimensionality == _core_types.BlobType.BlobDimensionality.MULTIPART)):
            return FlyteDirectory.__class_getitem__(literal_type.blob.format)
        raise ValueError(f'Transformer {self} cannot reverse {literal_type}')
class OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Generated accessor wrapper for the Highcharts
    ``plotOptions.scatter3d.sonification.defaultInstrumentOptions.mapping.highpassFrequency``
    configuration subtree.

    NOTE(review): each accessor name is defined twice (a zero-argument
    getter-style def followed by a setter-style def) with no
    @property/@x.setter decorators, so the second definition shadows the
    first and only the setter form is callable. Presumably the decorators
    were lost in generation — confirm against the generator output.
    """
    def mapFunction(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store a raw mapping-function value.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: name of the data property this parameter maps to.
        self._config(text, js_type=False)
    def max(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)
    def max(self, num: float):
        # Setter: upper bound of the mapped range.
        self._config(num, js_type=False)
    def min(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)
    def min(self, num: float):
        # Setter: lower bound of the mapped range.
        self._config(num, js_type=False)
    def within(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: scope the mapping is computed within.
        self._config(value, js_type=False)
class TagList(ResourceList):
    """Read-only list endpoint for tags, optionally scoped to one event."""

    def query(self, view_kwargs):
        """Build the tag query; filter and order by event when ``event_id`` is given."""
        tags = self.session.query(Tag)
        if view_kwargs.get('event_id'):
            event = safe_query_kwargs(Event, view_kwargs, 'event_id')
            tags = tags.join(Event).filter(Event.id == event.id).order_by(Tag.id.asc())
        return tags

    view_kwargs = True
    methods = ['GET']
    schema = TagSchema
    data_layer = {'session': db.session, 'model': Tag, 'methods': {'query': query}}
class MDTextEdit(QTextEdit):
    """QTextEdit that falls back to plain text when the installed Qt build
    lacks native markdown support, and records whether text was pasted."""

    def __init__(self, parent=None):
        # FIX: was `super(QTextEdit, self).__init__(parent)`, which starts the
        # MRO search *after* QTextEdit and therefore skips QTextEdit's own
        # initializer. Initialize the immediate base class instead.
        super(MDTextEdit, self).__init__(parent)
        # Set to True once insertFromMimeData receives textual content.
        self.text_was_pasted = False

    def setMarkdown(self, markdown):
        """Set content as markdown when supported, else as plain text."""
        if ('setMarkdown' in QTextEdit.__dict__):
            super(MDTextEdit, self).setMarkdown(markdown)
        else:
            self.setPlainText(markdown)

    def toMarkdown(self):
        """Return content as markdown when supported, else as plain text."""
        if ('toMarkdown' in QTextEdit.__dict__):
            return super(MDTextEdit, self).toMarkdown()
        else:
            return self.toPlainText()

    def insertFromMimeData(self, source):
        """Perform the default paste, then flag that text was pasted."""
        super(MDTextEdit, self).insertFromMimeData(source)
        if source.hasText():
            self.text_was_pasted = True
def extractRvstranslationsWordpressCom(item):
    """Map an rvstranslations.wordpress.com feed item to a release message.

    Returns None for previews/untagged chapters, a release message when a
    known series tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (vol or chp):
        return None
    known_series = (
        ('AUC', 'Appreciation of Unconventional Plants', 'translated'),
        ('HCTCTM', 'How Could This Cat Tease Me?', 'translated'),
        ('AUP', 'Appreciation of Unconventional Plants', 'translated'),
        ('LCU', 'The Last Cat in the Universe', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, tl_type in known_series:
        if tag not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def resize_volume_claim(volume_name, new_capacity):
    """PATCH the volume endpoint to resize *volume_name* to *new_capacity*.

    Returns the service's 'message' field on success; raises the mapped
    exception for any non-200 response.
    """
    url = base_volume_url + '/{}'.format(volume_name)
    response = get_session().patch(url, data={'capacity': new_capacity})
    if response.status_code == 200:
        return response.json()['message']
    raise get_exception(response)
class Ui_MainWindow(object):
    """Qt Designer-style UI definition for the settings window.

    Builds theme, network-interface, performance, and misc option groups
    plus Defaults/Apply/Update buttons.
    """
    def setupUi(self, MainWindow):
        """Construct the widget tree and layouts on *MainWindow*."""
        MainWindow.setObjectName('MainWindow')
        MainWindow.resize(400, 500)
        MainWindow.setMinimumSize(QtCore.QSize(400, 500))
        MainWindow.setMaximumSize(QtCore.QSize(400, 510))
        MainWindow.setToolTip('')
        MainWindow.setAnimated(True)
        MainWindow.setDocumentMode(False)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setMinimumSize(QtCore.QSize(100, 0))
        self.centralwidget.setObjectName('centralwidget')
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.centralwidget)
        self.horizontalLayout.setObjectName('horizontalLayout')
        self.gridLayout = QtWidgets.QGridLayout()
        self.gridLayout.setSizeConstraint(QtWidgets.QLayout.SetNoConstraint)
        self.gridLayout.setSpacing(6)
        self.gridLayout.setObjectName('gridLayout')
        # --- "Misc." group: four checkboxes ---
        self.groupBox_3 = QtWidgets.QGroupBox(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.groupBox_3.sizePolicy().hasHeightForWidth())
        self.groupBox_3.setSizePolicy(sizePolicy)
        self.groupBox_3.setMinimumSize(QtCore.QSize(0, 111))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.groupBox_3.setFont(font)
        self.groupBox_3.setObjectName('groupBox_3')
        self.gridLayoutWidget_2 = QtWidgets.QWidget(self.groupBox_3)
        self.gridLayoutWidget_2.setGeometry(QtCore.QRect(10, 30, 365, 71))
        self.gridLayoutWidget_2.setObjectName('gridLayoutWidget_2')
        self.gridLayout_3 = QtWidgets.QGridLayout(self.gridLayoutWidget_2)
        self.gridLayout_3.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_3.setObjectName('gridLayout_3')
        self.chkMinimized = QtWidgets.QCheckBox(self.gridLayoutWidget_2)
        font = QtGui.QFont()
        font.setPointSize(10)
        self.chkMinimized.setFont(font)
        self.chkMinimized.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chkMinimized.setObjectName('chkMinimized')
        self.gridLayout_3.addWidget(self.chkMinimized, 0, 1, 1, 1)
        self.chkRemember = QtWidgets.QCheckBox(self.gridLayoutWidget_2)
        font = QtGui.QFont()
        font.setPointSize(10)
        self.chkRemember.setFont(font)
        self.chkRemember.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.chkRemember.setObjectName('chkRemember')
        self.gridLayout_3.addWidget(self.chkRemember, 1, 0, 1, 1)
        self.chkAutoupdate = QtWidgets.QCheckBox(self.gridLayoutWidget_2)
        font = QtGui.QFont()
        font.setPointSize(10)
        self.chkAutoupdate.setFont(font)
        self.chkAutoupdate.setChecked(False)
        self.chkAutoupdate.setObjectName('chkAutoupdate')
        self.gridLayout_3.addWidget(self.chkAutoupdate, 1, 1, 1, 1)
        self.chkAutostart = QtWidgets.QCheckBox(self.gridLayoutWidget_2)
        font = QtGui.QFont()
        font.setPointSize(10)
        self.chkAutostart.setFont(font)
        self.chkAutostart.setObjectName('chkAutostart')
        self.gridLayout_3.addWidget(self.chkAutostart, 0, 0, 1, 1)
        self.gridLayout.addWidget(self.groupBox_3, 3, 0, 1, 4)
        # --- "Check for update" button ---
        self.btnUpdate = QtWidgets.QPushButton(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.btnUpdate.sizePolicy().hasHeightForWidth())
        self.btnUpdate.setSizePolicy(sizePolicy)
        self.btnUpdate.setMinimumSize(QtCore.QSize(0, 35))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.btnUpdate.setFont(font)
        self.btnUpdate.setObjectName('btnUpdate')
        self.gridLayout.addWidget(self.btnUpdate, 5, 0, 1, 4)
        # --- "Theme" group: light/dark radio buttons ---
        self.groupBox = QtWidgets.QGroupBox(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.groupBox.sizePolicy().hasHeightForWidth())
        self.groupBox.setSizePolicy(sizePolicy)
        self.groupBox.setMinimumSize(QtCore.QSize(0, 70))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.groupBox.setFont(font)
        self.groupBox.setObjectName('groupBox')
        self.horizontalLayoutWidget = QtWidgets.QWidget(self.groupBox)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(10, 30, 361, 21))
        self.horizontalLayoutWidget.setObjectName('horizontalLayoutWidget')
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget)
        self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_2.setObjectName('horizontalLayout_2')
        self.rdbLight = QtWidgets.QRadioButton(self.horizontalLayoutWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.rdbLight.sizePolicy().hasHeightForWidth())
        self.rdbLight.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setPointSize(10)
        self.rdbLight.setFont(font)
        self.rdbLight.setObjectName('rdbLight')
        self.horizontalLayout_2.addWidget(self.rdbLight)
        self.rdbDark = QtWidgets.QRadioButton(self.horizontalLayoutWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.rdbDark.sizePolicy().hasHeightForWidth())
        self.rdbDark.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setPointSize(10)
        self.rdbDark.setFont(font)
        self.rdbDark.setChecked(True)
        self.rdbDark.setObjectName('rdbDark')
        self.horizontalLayout_2.addWidget(self.rdbDark)
        self.gridLayout.addWidget(self.groupBox, 0, 0, 1, 4)
        # --- Defaults / Apply buttons ---
        self.btnDefaults = QtWidgets.QPushButton(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.btnDefaults.sizePolicy().hasHeightForWidth())
        self.btnDefaults.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setPointSize(10)
        self.btnDefaults.setFont(font)
        self.btnDefaults.setObjectName('btnDefaults')
        self.gridLayout.addWidget(self.btnDefaults, 4, 0, 1, 2)
        self.btnApply = QtWidgets.QPushButton(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.btnApply.sizePolicy().hasHeightForWidth())
        self.btnApply.setSizePolicy(sizePolicy)
        self.btnApply.setMinimumSize(QtCore.QSize(0, 35))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.btnApply.setFont(font)
        self.btnApply.setObjectName('btnApply')
        self.gridLayout.addWidget(self.btnApply, 4, 2, 1, 2)
        # --- "Network Interface" group: combo box ---
        self.groupBox_4 = QtWidgets.QGroupBox(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.groupBox_4.sizePolicy().hasHeightForWidth())
        self.groupBox_4.setSizePolicy(sizePolicy)
        self.groupBox_4.setMinimumSize(QtCore.QSize(0, 78))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.groupBox_4.setFont(font)
        self.groupBox_4.setObjectName('groupBox_4')
        self.horizontalLayoutWidget_2 = QtWidgets.QWidget(self.groupBox_4)
        self.horizontalLayoutWidget_2.setGeometry(QtCore.QRect(10, 30, 361, 31))
        self.horizontalLayoutWidget_2.setObjectName('horizontalLayoutWidget_2')
        self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget_2)
        self.horizontalLayout_3.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_3.setObjectName('horizontalLayout_3')
        self.comboInterface = QtWidgets.QComboBox(self.horizontalLayoutWidget_2)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(9)
        sizePolicy.setHeightForWidth(self.comboInterface.sizePolicy().hasHeightForWidth())
        self.comboInterface.setSizePolicy(sizePolicy)
        self.comboInterface.setMinimumSize(QtCore.QSize(0, 9))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.comboInterface.setFont(font)
        self.comboInterface.setObjectName('comboInterface')
        self.horizontalLayout_3.addWidget(self.comboInterface)
        self.gridLayout.addWidget(self.groupBox_4, 1, 0, 1, 4)
        # --- "Performance" group: device-count and thread sliders ---
        self.groupBox_2 = QtWidgets.QGroupBox(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.groupBox_2.sizePolicy().hasHeightForWidth())
        self.groupBox_2.setSizePolicy(sizePolicy)
        self.groupBox_2.setMinimumSize(QtCore.QSize(0, 115))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.groupBox_2.setFont(font)
        self.groupBox_2.setObjectName('groupBox_2')
        self.gridLayoutWidget = QtWidgets.QWidget(self.groupBox_2)
        self.gridLayoutWidget.setGeometry(QtCore.QRect(10, 30, 361, 31))
        self.gridLayoutWidget.setObjectName('gridLayoutWidget')
        self.gridLayout_2 = QtWidgets.QGridLayout(self.gridLayoutWidget)
        self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_2.setObjectName('gridLayout_2')
        self.spinCount = QtWidgets.QSpinBox(self.gridLayoutWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.spinCount.sizePolicy().hasHeightForWidth())
        self.spinCount.setSizePolicy(sizePolicy)
        self.spinCount.setMinimumSize(QtCore.QSize(55, 0))
        # FIX: was QSize(55, ) — QSize has no single-int constructor, so this
        # raised TypeError. 16777215 is Qt's QWIDGETSIZE_MAX (no height cap).
        self.spinCount.setMaximumSize(QtCore.QSize(55, 16777215))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.spinCount.setFont(font)
        self.spinCount.setMinimum(25)
        self.spinCount.setMaximum(255)
        self.spinCount.setObjectName('spinCount')
        self.gridLayout_2.addWidget(self.spinCount, 0, 2, 1, 1)
        self.sliderCount = QtWidgets.QSlider(self.gridLayoutWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.sliderCount.sizePolicy().hasHeightForWidth())
        self.sliderCount.setSizePolicy(sizePolicy)
        self.sliderCount.setMinimumSize(QtCore.QSize(179, 0))
        self.sliderCount.setMinimum(25)
        self.sliderCount.setMaximum(255)
        self.sliderCount.setOrientation(QtCore.Qt.Horizontal)
        self.sliderCount.setObjectName('sliderCount')
        self.gridLayout_2.addWidget(self.sliderCount, 0, 1, 1, 1)
        self.label = QtWidgets.QLabel(self.gridLayoutWidget)
        self.label.setMinimumSize(QtCore.QSize(90, 0))
        # FIX: same missing height argument as above.
        self.label.setMaximumSize(QtCore.QSize(90, 16777215))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.label.setFont(font)
        self.label.setObjectName('label')
        self.gridLayout_2.addWidget(self.label, 0, 0, 1, 1)
        self.gridLayoutWidget_3 = QtWidgets.QWidget(self.groupBox_2)
        self.gridLayoutWidget_3.setGeometry(QtCore.QRect(10, 70, 361, 31))
        self.gridLayoutWidget_3.setObjectName('gridLayoutWidget_3')
        self.gridLayout_4 = QtWidgets.QGridLayout(self.gridLayoutWidget_3)
        self.gridLayout_4.setContentsMargins(0, 0, 0, 0)
        self.gridLayout_4.setObjectName('gridLayout_4')
        self.spinThreads = QtWidgets.QSpinBox(self.gridLayoutWidget_3)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.spinThreads.sizePolicy().hasHeightForWidth())
        self.spinThreads.setSizePolicy(sizePolicy)
        self.spinThreads.setMinimumSize(QtCore.QSize(55, 0))
        # FIX: same missing height argument as above.
        self.spinThreads.setMaximumSize(QtCore.QSize(55, 16777215))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.spinThreads.setFont(font)
        self.spinThreads.setMinimum(5)
        self.spinThreads.setMaximum(255)
        self.spinThreads.setProperty('value', 5)
        self.spinThreads.setObjectName('spinThreads')
        self.gridLayout_4.addWidget(self.spinThreads, 0, 2, 1, 1)
        self.sliderThreads = QtWidgets.QSlider(self.gridLayoutWidget_3)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.sliderThreads.sizePolicy().hasHeightForWidth())
        self.sliderThreads.setSizePolicy(sizePolicy)
        self.sliderThreads.setMinimumSize(QtCore.QSize(179, 0))
        self.sliderThreads.setMinimum(5)
        self.sliderThreads.setMaximum(255)
        self.sliderThreads.setProperty('value', 5)
        self.sliderThreads.setOrientation(QtCore.Qt.Horizontal)
        self.sliderThreads.setObjectName('sliderThreads')
        self.gridLayout_4.addWidget(self.sliderThreads, 0, 1, 1, 1)
        self.label_2 = QtWidgets.QLabel(self.gridLayoutWidget_3)
        self.label_2.setMinimumSize(QtCore.QSize(90, 0))
        # FIX: same missing height argument as above.
        self.label_2.setMaximumSize(QtCore.QSize(90, 16777215))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.label_2.setFont(font)
        self.label_2.setObjectName('label_2')
        self.gridLayout_4.addWidget(self.label_2, 0, 0, 1, 1)
        self.gridLayout.addWidget(self.groupBox_2, 2, 0, 1, 4)
        self.horizontalLayout.addLayout(self.gridLayout)
        MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Apply translated user-visible strings to all widgets."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate('MainWindow', 'Settings'))
        self.groupBox_3.setTitle(_translate('MainWindow', 'Misc.'))
        self.chkMinimized.setText(_translate('MainWindow', 'Minimize on exit'))
        self.chkRemember.setText(_translate('MainWindow', 'Remember killed devices'))
        self.chkAutoupdate.setText(_translate('MainWindow', 'Check updates on start'))
        self.chkAutostart.setText(_translate('MainWindow', 'Start with Windows'))
        self.btnUpdate.setText(_translate('MainWindow', 'Check for update'))
        self.groupBox.setTitle(_translate('MainWindow', 'Theme'))
        self.rdbLight.setText(_translate('MainWindow', 'Light'))
        self.rdbDark.setText(_translate('MainWindow', 'Dark'))
        self.btnDefaults.setText(_translate('MainWindow', 'Defaults'))
        self.btnApply.setText(_translate('MainWindow', 'Apply'))
        self.groupBox_4.setTitle(_translate('MainWindow', 'Network Interface'))
        self.groupBox_2.setTitle(_translate('MainWindow', 'Performance'))
        self.label.setText(_translate('MainWindow', 'Device Count'))
        self.label_2.setText(_translate('MainWindow', 'Max Threads'))
def extractLadysheeptrslWordpressCom(item):
    """Map a ladysheeptrsl.wordpress.com feed item to a release message.

    Returns None for previews/untagged chapters, a release message when a
    known series tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (vol or chp):
        return None
    known_series = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series, tl_type in known_series:
        if tag not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesColumnrangeSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Generated accessor wrapper for the Highcharts
    ``series.columnrange.sonification.defaultInstrumentOptions.mapping.tremoloDepth``
    configuration subtree.

    NOTE(review): each accessor name is defined twice (a zero-argument
    getter-style def followed by a setter-style def) with no
    @property/@x.setter decorators, so the second definition shadows the
    first and only the setter form is callable. Presumably the decorators
    were lost in generation — confirm against the generator output.
    """
    def mapFunction(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store a raw mapping-function value.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)
    def mapTo(self, text: str):
        # Setter: name of the data property this parameter maps to.
        self._config(text, js_type=False)
    def max(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)
    def max(self, num: float):
        # Setter: upper bound of the mapped range.
        self._config(num, js_type=False)
    def min(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)
    def min(self, num: float):
        # Setter: lower bound of the mapped range.
        self._config(num, js_type=False)
    def within(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)
    def within(self, value: Any):
        # Setter: scope the mapping is computed within.
        self._config(value, js_type=False)
# FIX: the decorator was a bare `.parametrize(...)` (a syntax error); restore
# the full `@pytest.mark.parametrize` marker (pytest is already used in this
# test module).
@pytest.mark.parametrize('test_input,case_id,entity_id,expected', [('110', 1, 2, [{'text': '110', 'title': '11', 'type': 'regulation', 'url': '/regulations/110/CURRENT'}]), ('110.21', 1, 2, [{'text': '110.21', 'title': '11', 'type': 'regulation', 'url': '/regulations/110-21/CURRENT'}]), ('114.5(a)(3)', 1, 2, [{'text': '114.5(a)(3)', 'title': '11', 'type': 'regulation', 'url': '/regulations/114-5/CURRENT'}]), ('114.5(a)(3)-(5)', 1, 2, [{'text': '114.5(a)(3)-(5)', 'title': '11', 'type': 'regulation', 'url': '/regulations/114-5/CURRENT'}]), ('102.17(a)(l)(i), (b)(l), (b)(2), and (c)(3)', 1, 2, [{'text': '102.17(a)(l)(i), (b)(l), (b)(2), and (c)(3)', 'title': '11', 'type': 'regulation', 'url': '/regulations/102-17/CURRENT'}]), ('102.5(a)(2); 104.3(a)(4)(i); 114.5(a)(3)-(5); 114.5(g)(1)', 1, 2, [{'text': '102.5(a)(2)', 'title': '11', 'type': 'regulation', 'url': '/regulations/102-5/CURRENT'}, {'text': '104.3(a)(4)(i)', 'title': '11', 'type': 'regulation', 'url': '/regulations/104-3/CURRENT'}, {'text': '114.5(a)(3)-(5)', 'title': '11', 'type': 'regulation', 'url': '/regulations/114-5/CURRENT'}, {'text': '114.5(g)(1)', 'title': '11', 'type': 'regulation', 'url': '/regulations/114-5/CURRENT'}])])
def test_parse_regulatory_citations(test_input, case_id, entity_id, expected):
    """Each CFR-style citation in *test_input* is parsed to a regulation link."""
    assert (parse_regulatory_citations(test_input, case_id, entity_id) == expected)
def get_prompt(params, data_points, losses, hoped_for_loss):
    """Render the LLM prompt describing the black-box minimization problem.

    Args:
        params: mapping of parameter name -> (func, args, kwargs), where
            func is e.g. 'randrange' or 'choice' (per the comparisons below).
        data_points: iterable of {name: value} dicts, one per prior trial.
        losses: losses corresponding to data_points (zipped pairwise).
        hoped_for_loss: target loss; rendered as an int when integral.

    Returns:
        str: a prompt that shows a fake ``f`` signature/docstring built from
        the search space, lists observed ``loss == f(...)`` lines, and ends
        mid-expression (``<hoped_for_loss> == f(``) so the model completes
        the next argument list.
    """
    # NOTE: 'randrange' is rendered as 'range', and its trailing step of 1 is
    # omitted; 'choice' argument types decide the displayed parameter type.
    return '# black box function to be minimized\ndef f({func_params}) -> float:\n    """\n    parameters:\n{docstring}\n\n    returns:\n        float: the loss\n    """\n    return black_box_loss({names})\n\n# experimentally observed data\n# new experiments MUST stay within the bounds, SHOULD fully explore the bounds, and SHOULD converge to minimum\n# bounds: f({bounds})\n{values}{hoped_for_loss} == f('.format(func_params=', '.join(('{name}: {type}'.format(name=name, type=('int' if (func == 'randrange') else (type(args[0][0]).__name__ if ((func == 'choice') and all_equal(map(type, args[0]))) else ('typing.Any' if (func == 'choice') else 'float')))) for (name, (func, args, _)) in params.items())), docstring='\n'.join(('        {name}: in {func}({args})'.format(name=name, func=('range' if (func == 'randrange') else func), args=', '.join(map(repr, (args[:2] if ((func == 'randrange') and (args[(- 1)] == 1)) else args)))) for (name, (func, args, _)) in params.items())), names=', '.join(params), bounds=', '.join(('{name}: {func}({args})'.format(name=name, func=('range' if (func == 'randrange') else func), args=', '.join(map(repr, (args[:2] if ((func == 'randrange') and (args[(- 1)] == 1)) else args)))) for (name, (func, args, _)) in params.items())), values=''.join(('{loss} == f({named_args})\n'.format(named_args=', '.join(map((lambda name: '{_coconut_format_0}={_coconut_format_1!r}'.format(_coconut_format_0=name, _coconut_format_1=point[name])), params)), loss=loss) for (point, loss) in zip(data_points, losses))), hoped_for_loss=(int(hoped_for_loss) if (int(hoped_for_loss) == hoped_for_loss) else hoped_for_loss))
class _SecurityCenterOrganizationsFindingsRepository(repository_mixins.CreateQueryMixin, repository_mixins.ListQueryMixin, repository_mixins.PatchResourceMixin, _base_repository.GCPRepository):
    """GCP repository for the ``organizations.sources.findings`` component.

    Mixes in create/list/patch query behavior on top of the base
    GCP repository.
    """

    def __init__(self, **kwargs):
        """Forward *kwargs* to the base repository, fixing the component,
        key field ('name') and paging field ('pageSize')."""
        LOGGER.debug('Creating _SecurityCenterOrganizationsFindingsRepositoryClient')
        super(_SecurityCenterOrganizationsFindingsRepository, self).__init__(
            key_field='name',
            component='organizations.sources.findings',
            max_results_field='pageSize',
            **kwargs)
class AbstractExpiringValue():
    """Cached value with an expiry; subclasses provide storage and staleness.

    Subclasses must implement is_expired(), load() and save(); value()
    serves from the cache unless stale (or a refresh is forced).
    """

    def __init__(self, refresh_fn, max_age):
        self._refresh_fn = refresh_fn  # zero-arg callable producing a fresh value
        self._max_age = max_age        # maximum age before the cache is stale
        self._lock = threading.Lock()  # serializes value() across threads

    def value(self, refresh=False):
        """Return the cached value, refreshing it when forced or expired."""
        with self._lock:
            if refresh or self.is_expired():
                fresh = self._refresh_fn()
                self.save(fresh)
                LOG.info('refreshed data and saved in cache')
                return fresh
            return self.load()

    def is_expired(self):
        """Return True when the cached value is stale. Subclass hook."""
        raise NotImplementedError

    def load(self):
        """Return the cached value. Subclass hook."""
        raise NotImplementedError

    def save(self, value):
        """Persist a freshly computed value. Subclass hook."""
        raise NotImplementedError
def test_assert_log_level(log_capture):
    """Exercise AssertLogLevel / assert_log_level against captured records.

    Checks that the assertion passes when a matching record exists and
    raises when either the level or the contained substring mismatches.
    NOTE(review): relies on the module-level ``td`` helper for logging.
    """
    # Context-manager form: passes because a WARNING containing 'ABC' is emitted inside.
    with AssertLogLevel(log_capture, 'WARNING', contains_str='ABC'):
        td.log.warning('ABC')
    td.log.warning('ABC')
    # Substring mismatch: 'DEF' was never logged, so the check must raise.
    with pytest.raises(Exception):
        assert_log_level(log_capture, 'WARNING', contains_str='DEF')
    log_capture.clear()
    td.log.info('ABC')
    # Level mismatch: 'ABC' exists only at INFO, not WARNING.
    with pytest.raises(Exception):
        assert_log_level(log_capture, 'WARNING', contains_str='ABC')
    log_capture.clear()
    td.log.warning('ABC')
    # No INFO records at all, so asserting the INFO level must raise.
    with pytest.raises(Exception):
        assert_log_level(log_capture, 'INFO')
    log_capture.clear()
def extractRothramnWordpressCom(item):
    """Parse a feed item from rothramn.wordpress.com into a release message.

    Returns None for items with no chapter/volume info (or previews),
    a release message for recognized tags, and False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, series name, translation type)
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def dump_pe(process_controller: ProcessController, pe_file_path: str, image_base: int, oep: int, iat_addr: int, iat_size: int, add_new_iat: bool) -> bool:
    """Dump the target process's PE image, fix its IAT and rebuild it.

    Args:
        process_controller: controller attached to the process to dump.
        pe_file_path: path of the on-disk PE used as the dump reference.
        image_base: base address of the image in the target process.
        oep: entry point address to write into the dumped image.
        iat_addr: address of the recovered import address table.
        iat_size: size of the recovered IAT in bytes.
        add_new_iat: whether Scylla should build a brand-new IAT section.

    Returns:
        True on success; False when any Scylla step fails (error logged).
    """
    # Drop cached data and force a GC pass to minimize our footprint
    # before Scylla maps the remote process.
    process_controller.clear_cached_data()
    gc.collect()
    with TemporaryDirectory() as tmp_dir:
        # Two distinct intermediate files: the raw dump, then the IAT-fixed
        # dump. (Bug fix: both paths previously pointed at 'unlicense.tmp2',
        # so fix_iat read its own output file instead of the raw dump.)
        TMP_FILE_PATH1 = os.path.join(tmp_dir, 'unlicense.tmp1')
        TMP_FILE_PATH2 = os.path.join(tmp_dir, 'unlicense.tmp2')
        try:
            pyscylla.dump_pe(process_controller.pid, image_base, oep, TMP_FILE_PATH1, pe_file_path)
        except pyscylla.ScyllaException as scylla_exception:
            LOG.error('Failed to dump PE: %s', str(scylla_exception))
            return False
        LOG.info('Fixing dump ...')
        try:
            pyscylla.fix_iat(process_controller.pid, image_base, iat_addr, iat_size, add_new_iat, TMP_FILE_PATH1, TMP_FILE_PATH2)
        except pyscylla.ScyllaException as scylla_exception:
            LOG.error('Failed to fix IAT: %s', str(scylla_exception))
            return False
        # Bug fix: this progress message used to be logged only *after*
        # rebuild_pe had already completed.
        LOG.info('Rebuilding PE ...')
        try:
            pyscylla.rebuild_pe(TMP_FILE_PATH2, False, True, False)
        except pyscylla.ScyllaException as scylla_exception:
            LOG.error('Failed to rebuild PE: %s', str(scylla_exception))
            return False
        output_file_name = f'unpacked_{process_controller.main_module_name}'
        _fix_pe(TMP_FILE_PATH2, output_file_name)
        LOG.info("Output file has been saved at '%s'", output_file_name)
        return True
# Applies the shared deprecation decorator to the 'list' and 'retrieve'
# endpoints at import time (presumably patching the actions of the viewset
# registered by _decorator — confirm against _decorator's definition).
_decorator(deprecated, name='list')
_decorator(deprecated, name='retrieve')
class AwardAggregateViewSet(FilterQuerysetMixin, AggregateQuerysetMixin, CachedDetailViewSet):
    """Viewset returning Award records, filtered, aggregated and ordered
    according to the request parameters."""

    serializer_class = AggregateSerializer

    def get_queryset(self):
        """Build the queryset pipeline: filter, then aggregate, then order."""
        qs = Award.objects.all()
        for step in (self.filter_records, self.aggregate, self.order_records):
            qs = step(self.request, queryset=qs)
        return qs
class desc_stats_reply(stats_reply):
    """OFPST_DESC stats reply carrying the switch description strings
    (manufacturer, hardware, software, serial number, datapath).

    NOTE(review): generated protocol code; the str-based packing (NUL-string
    padding, '!256s' fed str values) indicates it targets Python 2.
    """
    # Wire constants: OpenFlow wire version 2 (OF 1.1), STATS_REPLY message
    # type, stats type 0 (description stats).
    version = 2
    type = 19
    stats_type = 0
    def __init__(self, xid=None, flags=None, mfr_desc=None, hw_desc=None, sw_desc=None, serial_num=None, dp_desc=None):
        """Initialize all fields; omitted ones get defaults (flags=0,
        description strings='', xid stays None)."""
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (mfr_desc != None):
            self.mfr_desc = mfr_desc
        else:
            self.mfr_desc = ''
        if (hw_desc != None):
            self.hw_desc = hw_desc
        else:
            self.hw_desc = ''
        if (sw_desc != None):
            self.sw_desc = sw_desc
        else:
            self.sw_desc = ''
        if (serial_num != None):
            self.serial_num = serial_num
        else:
            self.serial_num = ''
        if (dp_desc != None):
            self.dp_desc = dp_desc
        else:
            self.dp_desc = ''
        return
    def pack(self):
        """Serialize the message; returns the packed byte string."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, backpatched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # 4 bytes of padding
        packed.append(struct.pack('!256s', self.mfr_desc))
        packed.append(struct.pack('!256s', self.hw_desc))
        packed.append(struct.pack('!256s', self.sw_desc))
        packed.append(struct.pack('!32s', self.serial_num))
        packed.append(struct.pack('!256s', self.dp_desc))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)  # backpatch the real total length
        return ''.join(packed)
    def unpack(reader):
        """Deserialize from *reader* and return a populated desc_stats_reply.

        NOTE(review): defined without @staticmethod per the generator's
        convention; it is invoked through the class, not an instance.
        """
        obj = desc_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 2)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Bound subsequent reads to this message (presumably _length bytes
        # measured from 4 bytes back — confirm slice() semantics).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 0)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # padding
        # Fixed-width string fields: strip trailing NUL padding.
        obj.mfr_desc = reader.read('!256s')[0].rstrip('\x00')
        obj.hw_desc = reader.read('!256s')[0].rstrip('\x00')
        obj.sw_desc = reader.read('!256s')[0].rstrip('\x00')
        obj.serial_num = reader.read('!32s')[0].rstrip('\x00')
        obj.dp_desc = reader.read('!256s')[0].rstrip('\x00')
        return obj
    def __eq__(self, other):
        """Field-by-field equality with another desc_stats_reply."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.mfr_desc != other.mfr_desc):
            return False
        if (self.hw_desc != other.hw_desc):
            return False
        if (self.sw_desc != other.sw_desc):
            return False
        if (self.serial_num != other.serial_num):
            return False
        if (self.dp_desc != other.dp_desc):
            return False
        return True
    def pretty_print(self, q):
        """Write a human-readable rendering of the message to printer *q*."""
        q.text('desc_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('mfr_desc = ')
                q.pp(self.mfr_desc)
                q.text(',')
                q.breakable()
                q.text('hw_desc = ')
                q.pp(self.hw_desc)
                q.text(',')
                q.breakable()
                q.text('sw_desc = ')
                q.pp(self.sw_desc)
                q.text(',')
                q.breakable()
                q.text('serial_num = ')
                q.pp(self.serial_num)
                q.text(',')
                q.breakable()
                q.text('dp_desc = ')
                q.pp(self.dp_desc)
                q.breakable()
        q.text('}')
class OverworldMapProvider(wilderness.WildernessMapProvider):
    """Wilderness map provider bounded to the static overworld map; rooms
    get their description from a prototype and an optional landmark exit."""
    def is_valid_coordinates(self, wilderness, coordinates):
        """Return True iff *coordinates* lie inside the overworld bounds.

        The parent class enforces the lower bound; this adds the upper
        bounds from the overworld map's WIDTH/HEIGHT.
        """
        in_lower_bound = super().is_valid_coordinates(wilderness, coordinates)
        if (not in_lower_bound):
            return False
        (x, y) = coordinates
        if (x >= map.OverworldMap.WIDTH):
            return False
        if (y >= map.OverworldMap.HEIGHT):
            return False
        return True
    def get_location_name(self, coordinates):
        """Return the named location at *coordinates*, or a generic label."""
        location = map.OverworldMap.get(coordinates)
        name = (location.name if location else 'The wilderness')
        return name
    def at_prepare_room(self, coordinates, caller, room):
        """Refresh *room* for *coordinates*: set its description from the
        location's room prototype and attach/detach the landmark exit.

        NOTE(review): unlike get_location_name, this assumes
        map.OverworldMap.get() never returns None here — if it can, the
        attribute access below raises AttributeError, which the KeyError
        handler does not catch; confirm with callers.
        """
        location = map.OverworldMap.get(coordinates)
        prototype_name = location.room_prototype
        try:
            # KeyError covers both a missing prototype and a missing 'desc'.
            room_prototype = search_prototype(prototype_name, require_single=True)
            desc = room_prototype['desc']
        except KeyError:
            logger.error(f'No room prototype found with name {prototype_name} for overworld coordinates {coordinates}')
            desc = 'Unknown'
        landmark = OverworldLandmarks.get_by_coordinates(coordinates)
        # An existing 'enter' exit marks this room's landmark entrance.
        landmark_exit = next((re for re in room.exits if (re.key == 'enter')), None)
        if landmark:
            if (not landmark_exit):
                # Reuse a previously hidden exit before creating a new one.
                landmark_exit = getattr(room, '_hidden_landmark_exit', None)
                if (not landmark_exit):
                    landmark_exit = create_object(typeclass='typeclasses.exits.Exit', key='enter', location=room)
            # Pick a random tagged entry point into the landmark's area.
            possible_exits = search_object_by_tag([landmark.key, 'area_exit'], ['area_id', 'area_def'])
            if possible_exits:
                possible_exit = random.choice(possible_exits)
                landmark_exit.destination = possible_exit
        elif ((not landmark) and landmark_exit):
            # No landmark here: detach the exit but cache it on the room
            # so it can be cheaply reattached later.
            landmark_exit.location = None
            setattr(room, '_hidden_landmark_exit', landmark_exit)
        room.db.desc = desc
    def room_typeclass(self):
        """Return the room typeclass used for overworld rooms."""
        from typeclasses.rooms import OverworldRoom
        return OverworldRoom
    def exit_typeclass(self):
        """Return the exit typeclass used for overworld exits."""
        from typeclasses.exits import OverworldExit
        return OverworldExit
class OptionSeriesTreegraphSonificationContexttracksMappingHighpass(Options):
    """Generated option wrapper for the config node
    series.treegraph.sonification.contextTracks.mapping.highpass."""
    def frequency(self) -> 'OptionSeriesTreegraphSonificationContexttracksMappingHighpassFrequency':
        """Return the nested 'frequency' sub-configuration object."""
        return self._config_sub_data('frequency', OptionSeriesTreegraphSonificationContexttracksMappingHighpassFrequency)
    def resonance(self) -> 'OptionSeriesTreegraphSonificationContexttracksMappingHighpassResonance':
        """Return the nested 'resonance' sub-configuration object."""
        return self._config_sub_data('resonance', OptionSeriesTreegraphSonificationContexttracksMappingHighpassResonance)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.