code stringlengths 281 23.7M |
|---|
def test_minimal_renaming_basic_relation(graph_with_relations_easy, variable):
    """Minimal renaming on an easy relation graph maps both var_18 copies and
    the aliased var_10 onto the expected canonical variables."""
    task, interference_graph = graph_with_relations_easy
    renamer = MinimalVariableRenamer(task, interference_graph)
    expected_var_18 = [Variable('var_18', Integer(32, True), ssa_label, True, None) for ssa_label in range(4)]
    expected_var_10_1 = Variable('var_10', Pointer(Integer(32, True), 32), 1, False, None)
    variable[0].is_aliased = True
    variable[1]._type = Pointer(Integer(32, True), 32)
    expected_map = {
        expected_var_10_1: variable[1],
        expected_var_18[2]: variable[0],
        expected_var_18[3]: variable[0],
    }
    assert renamer.renaming_map == expected_map
class BodhiConfig(dict):
    """Bodhi's server configuration dictionary.

    Behaves like a plain dict, but lazily loads and validates the full
    configuration (defaults overlaid with the app config file or explicit
    settings) the first time any read accessor is used.
    """

    # True once load_config() has populated and validated this dict.
    loaded = False
    # Map of setting name -> {'value': default, 'validator': callable}.
    # NOTE(review): several 'value' string literals below appear garbled /
    # truncated (URL values seem to have been stripped in transit, e.g.
    # 'base_address', 'buglink', 'bz_server') — restore from upstream Bodhi
    # before relying on these defaults.
    _defaults = {'acl_system': {'value': 'dummy', 'validator': str}, 'acl_dummy_committer': {'value': None, 'validator': _validate_none_or(str)}, 'admin_groups': {'value': ['proventesters', 'security_respons', 'bodhiadmin', 'sysadmin-main'], 'validator': _generate_list_validator()}, 'admin_packager_groups': {'value': ['provenpackager', 'releng', 'security_respons'], 'validator': _generate_list_validator()}, 'authtkt.secret': {'value': 'CHANGEME', 'validator': _validate_secret}, 'authtkt.secure': {'value': True, 'validator': _validate_bool}, 'authtkt.timeout': {'value': 86400, 'validator': int}, 'automatic_updates_blacklist': {'value': ['releng'], 'validator': _generate_list_validator()}, 'base_address': {'value': ' 'validator': str}, 'bodhi_email': {'value': '', 'validator': str}, 'bodhi_password': {'value': None, 'validator': _validate_none_or(str)}, 'buglink': {'value': ' 'validator': str}, 'bugtracker': {'value': None, 'validator': _validate_none_or(str)}, 'bugzilla_api_key': {'value': None, 'validator': _validate_none_or(str)}, 'buildroot_limit': {'value': 31, 'validator': int}, 'buildsystem': {'value': 'dev', 'validator': str}, 'bz_exclude_rels': {'value': [], 'validator': _generate_list_validator(',')}, 'bz_products': {'value': ['Fedora', 'Fedora EPEL', 'Fedora Modules'], 'validator': _generate_list_validator(',')}, 'bz_regex': {'value': '(?:fix(?:es)?|close(?:s)?|resolve(?:s)?)(?:\\:)?\\s(?:fedora|epel|rh(?:bz)?)#(\\d{5,})', 'validator': str}, 'bz_server': {'value': ' 'validator': str}, 'bz_server_rest': {'value': ' 'validator': str}, 'cache_dir': {'value': None, 'validator': _validate_none_or(validate_path)}, 'celery_config': {'value': '/etc/bodhi/celeryconfig.py', 'validator': str}, 'check_signed_builds_delay': {'value': 2, 'validator': int}, 'clean_old_composes': {'value': True, 'validator': _validate_bool}, 'container.destination_registry': {'value': 'registry.fedoraproject.org', 'validator': str}, 'container.source_registry': {'value':
        'candidate-registry.fedoraproject.org', 'validator': str}, 'cors_connect_src': {'value': ' wss://hub.fedoraproject.org:9939/', 'validator': str}, 'cors_origins_ro': {'value': '*', 'validator': str}, 'cors_origins_rw': {'value': ' 'validator': str}, 'createrepo_c_config': {'value': '/etc/bodhi/createrepo_c.ini', 'validator': str}, 'critpath_pkgs': {'value': [], 'validator': _generate_list_validator()}, 'critpath.jsonpath': {'value': '/etc/bodhi/critpath', 'validator': str}, 'critpath.min_karma': {'value': 2, 'validator': int}, 'critpath.num_admin_approvals': {'value': 2, 'validator': int}, 'critpath.stable_after_days_without_negative_karma': {'value': 14, 'validator': int}, 'critpath.type': {'value': None, 'validator': _validate_none_or(str)}, 'default_email_domain': {'value': 'fedoraproject.org', 'validator': str}, 'disable_automatic_push_to_stable': {'value': 'Bodhi is disabling automatic push to stable due to negative karma. The maintainer may push manually if they determine that the issue is not severe.', 'validator': str}, 'dogpile.cache.arguments.filename': {'value': '/var/cache/bodhi-dogpile-cache.dbm', 'validator': str}, 'dogpile.cache.backend': {'value': 'dogpile.cache.dbm', 'validator': str}, 'dogpile.cache.expiration_time': {'value': 100, 'validator': int}, 'exclude_mail': {'value': [], 'validator': _generate_list_validator()}, 'file_url': {'value': ' 'validator': str}, 'fmn_url': {'value': ' 'validator': str}, 'important_groups': {'value': ['proventesters', 'provenpackager', 'releng', 'security_respons', 'packager', 'bodhiadmin'], 'validator': _generate_list_validator()}, 'initial_bug_msg': {'value': '{update_alias} ({update_beauty_title}) has been submitted as an update to {update_release}.\n{update_url}', 'validator': str}, 'greenwave_api_url': {'value': ' 'validator': _validate_rstripped_str}, 'greenwave_batch_size': {'value': 8, 'validator': int}, 'waiverdb_api_url': {'value': ' 'validator': _validate_rstripped_str}, 'waiverdb.access_token':
        {'value': None, 'validator': _validate_none_or(str)}, 'koji_web_url': {'value': ' 'validator': _validate_tls_url}, 'koji_hub': {'value': ' 'validator': str}, 'krb_ccache': {'value': None, 'validator': _validate_none_or(str)}, 'krb_keytab': {'value': None, 'validator': _validate_none_or(str)}, 'krb_principal': {'value': None, 'validator': _validate_none_or(str)}, 'legal_link': {'value': '', 'validator': str}, 'libravatar_dns': {'value': False, 'validator': _validate_bool}, 'libravatar_enabled': {'value': True, 'validator': _validate_bool}, 'libravatar_prefer_tls': {'value': True, 'validator': bool}, 'mail.templates_basepath': {'value': 'bodhi.server:email/templates/', 'validator': str}, 'mako.directories': {'value': 'bodhi.server:templates', 'validator': str}, 'mandatory_packager_groups': {'value': ['packager'], 'validator': _generate_list_validator()}, 'qa_groups': {'value': ['fedora-ci-users'], 'validator': _generate_list_validator()}, 'compose_dir': {'value': None, 'validator': _validate_none_or(str)}, 'compose_stage_dir': {'value': None, 'validator': _validate_none_or(str)}, 'max_concurrent_composes': {'value': 2, 'validator': int}, 'message_id_email_domain': {'value': 'admin.fedoraproject.org', 'validator': str}, 'not_yet_tested_epel_msg': {'value': 'This update has not yet met the minimum testing requirements defined in the <a href=" Update Policy</a>', 'validator': str}, 'not_yet_tested_msg': {'value': 'This update has not yet met the minimum testing requirements defined in the <a href=" Update Acceptance Criteria</a>', 'validator': str}, 'openid.provider': {'value': ' 'validator': str}, 'openid.sreg_required': {'value': 'email nickname', 'validator': str}, 'openid.success_callback': {'value': 'bodhi.server.auth.utils:remember_me', 'validator': str}, 'openid.url': {'value': ' 'validator': str}, 'openid_template': {'value': '{username}.id.fedoraproject.org', 'validator': str}, 'oidc.fedora.client_id': {'value': '', 'validator': str},
        'oidc.fedora.client_secret': {'value': '', 'validator': str}, 'oidc.fedora.server_metadata_url': {'value': ' 'validator': str}, 'pagure_namespaces': {'value': 'rpm:rpms, module:modules, container:container, flatpak:flatpaks', 'validator': _generate_dict_validator}, 'pagure_flatpak_main_branch': {'value': 'stable', 'validator': str}, 'pagure_module_main_branch': {'value': 'master', 'validator': str}, 'pagure_url': {'value': ' 'validator': _validate_tls_url}, 'privacy_link': {'value': '', 'validator': str}, 'pungi.basepath': {'value': '/etc/bodhi', 'validator': str}, 'pungi.cmd': {'value': '/usr/bin/pungi-koji', 'validator': str}, 'pungi.conf.module': {'value': 'pungi.module.conf', 'validator': str}, 'pungi.conf.rpm': {'value': 'pungi.rpm.conf', 'validator': str}, 'pungi.extracmdline': {'value': [], 'validator': _generate_list_validator()}, 'pungi.labeltype': {'value': 'Update', 'validator': str}, 'query_wiki_test_cases': {'value': False, 'validator': _validate_bool}, 'release_team_address': {'value': 'bodhiadmin-', 'validator': str}, 'session.secret': {'value': 'CHANGEME', 'validator': _validate_secret}, 'skopeo.cmd': {'value': '/usr/bin/skopeo', 'validator': str}, 'skopeo.extra_copy_flags': {'value': '', 'validator': str}, 'smtp_server': {'value': None, 'validator': _validate_none_or(str)}, 'sqlalchemy.url': {'value': 'postgresql://localhost/bodhi', 'validator': str}, 'stable_bug_msg': {'value': '{update_alias} ({update_beauty_title}) has been pushed to the {repo} repository.\nIf problem still persists, please make note of it in this bug report.', 'validator': str}, 'stats_blacklist': {'value': ['bodhi', 'anonymous'], 'validator': _generate_list_validator()}, 'system_users': {'value': ['bodhi'], 'validator': _generate_list_validator()}, 'test_case_base_url': {'value': ' 'validator': str}, 'testing_approval_msg': {'value': 'This update can be pushed to stable now if the maintainer wishes', 'validator': str}, 'testing_bug_epel_msg': {'value': '{update_alias} has been
        pushed to the {repo} repository.\n{install_instructions}\nYou can provide feedback for this update here: {update_url}\n\nSee also for more information on how to test updates.', 'validator': str}, 'testing_bug_msg': {'value': '{update_alias} has been pushed to the {repo} repository.\n{install_instructions}\nYou can provide feedback for this update here: {update_url}\n\nSee also for more information on how to test updates.', 'validator': str}, 'top_testers_timeframe': {'value': 7, 'validator': int}, 'test_gating.required': {'value': False, 'validator': _validate_bool}, 'test_gating.url': {'value': '', 'validator': str}, 'update_notes_maxlength': {'value': 10000, 'validator': int}, 'updateinfo_rights': {'value': 'Copyright (C) {} Red Hat, Inc. and others.'.format(datetime.now().year), 'validator': str}, 'wait_for_repo_sig': {'value': False, 'validator': _validate_bool}, 'warm_cache_on_start': {'value': True, 'validator': _validate_bool}, 'wiki_url': {'value': ' 'validator': str}, 'wiki_user_agent': {'value': 'FedoraLinuxBodhi-User-Agent', 'validator': str}}

    def __getitem__(self, key: typing.Hashable) -> typing.Any:
        """Load the config on first access, then defer to dict.__getitem__."""
        if (not self.loaded):
            self.load_config()
        return super(BodhiConfig, self).__getitem__(key)

    def get(self, *args, **kw) -> typing.Any:
        """Load the config on first access, then defer to dict.get."""
        if (not self.loaded):
            self.load_config()
        return super(BodhiConfig, self).get(*args, **kw)

    def pop(self, *args, **kw) -> typing.Any:
        """Load the config on first access, then defer to dict.pop."""
        if (not self.loaded):
            self.load_config()
        return super(BodhiConfig, self).pop(*args, **kw)

    def copy(self) -> typing.Any:
        """Load the config on first access, then defer to dict.copy."""
        if (not self.loaded):
            self.load_config()
        return super(BodhiConfig, self).copy()

    def load_config(self, settings: typing.Mapping=None):
        """Load defaults, overlay *settings* (or the app config file), and validate.

        Args:
            settings: optional mapping used instead of the on-disk config file.
        """
        self._load_defaults()
        configfile = get_configfile()
        if settings:
            self.update(settings)
        else:
            self.update(get_appsettings(configfile))
        # Mark loaded *before* validating so self[...] reads inside
        # _validate() do not recurse back into load_config().
        self.loaded = True
        self._validate()

    def clear(self):
        """Empty the dict and force a reload on the next read access."""
        super().clear()
        self.loaded = False

    def _load_defaults(self):
        """Copy every default value from _defaults into this dict."""
        for (k, v) in self._defaults.items():
            self[k] = v['value']

    def _validate(self):
        """Run every setting through its validator, collecting all failures.

        Raises:
            ValueError: listing every setting whose validator rejected its value.
        """
        errors = []
        for k in self._defaults.keys():
            try:
                # Validators normalize as well as check (e.g. str -> list).
                self[k] = self._defaults[k]['validator'](self[k])
            except ValueError as e:
                errors.append('\t{}: {}'.format(k, str(e)))
        if errors:
            raise ValueError('Invalid config values were set: \n{}'.format('\n'.join(errors)))
def update_chatroom(self, userName, detailedMember=False):
    """Fetch fresh chatroom info from the WeChat web API and update local storage.

    Args:
        userName: a chatroom UserName string, or a list of them.
        detailedMember: when True, additionally fetch full member details for
            every chatroom, batched 50 members per request.

    Returns:
        A ReturnValue error dict when no chatroom was found; otherwise the
        stored chatroom for a single query, or a list of them for several.
    """
    if (not isinstance(userName, list)):
        userName = [userName]
    url = ('%s/webwxbatchgetcontact?type=ex&r=%s' % (self.loginInfo['url'], int(time.time())))
    headers = {'ContentType': 'application/json; charset=UTF-8', 'User-Agent': self.user_agent}
    data = {'BaseRequest': self.loginInfo['BaseRequest'], 'Count': len(userName), 'List': [{'UserName': u, 'ChatRoomId': ''} for u in userName]}
    # Batch-contact endpoint returns a ContactList entry per requested chatroom.
    chatroomList = json.loads(self.s.post(url, data=json.dumps(data), headers=headers).content.decode('utf8', 'replace')).get('ContactList')
    if (not chatroomList):
        return ReturnValue({'BaseResponse': {'ErrMsg': 'No chatroom found', 'Ret': (- 1001)}})
    if detailedMember:
        def get_detailed_member_info(encryChatroomId, memberList):
            # Same endpoint, but keyed by EncryChatRoomId to resolve member detail.
            url = ('%s/webwxbatchgetcontact?type=ex&r=%s' % (self.loginInfo['url'], int(time.time())))
            headers = {'ContentType': 'application/json; charset=UTF-8', 'User-Agent': self.user_agent}
            data = {'BaseRequest': self.loginInfo['BaseRequest'], 'Count': len(memberList), 'List': [{'UserName': member['UserName'], 'EncryChatRoomId': encryChatroomId} for member in memberList]}
            return json.loads(self.s.post(url, data=json.dumps(data), headers=headers).content.decode('utf8', 'replace'))['ContactList']
        MAX_GET_NUMBER = 50
        for chatroom in chatroomList:
            totalMemberList = []
            # NOTE(review): when len(MemberList) is an exact multiple of 50 this
            # issues one extra request with an empty member slice — harmless but wasteful.
            for i in range(int(((len(chatroom['MemberList']) / MAX_GET_NUMBER) + 1))):
                memberList = chatroom['MemberList'][(i * MAX_GET_NUMBER):((i + 1) * MAX_GET_NUMBER)]
                totalMemberList += get_detailed_member_info(chatroom['EncryChatRoomId'], memberList)
            chatroom['MemberList'] = totalMemberList
    update_local_chatrooms(self, chatroomList)
    r = [self.storageClass.search_chatrooms(userName=c['UserName']) for c in chatroomList]
    return (r if (1 < len(r)) else r[0])
class BokehChart():
    """Multi-line Bokeh chart with live (notebook) update support.

    Accepts data as a list of line dicts, a 'Frames' object (project type), a
    tuple/list/dict/DataFrame on update, and renders them as line glyphs
    sharing one ColumnDataSource.
    """

    # JS tick formatters: 'clock' renders seconds as M:SS (sub-0.5s values get
    # m/n/p suffixes); 'metric' renders with SI magnitude suffixes (p..G).
    _FORMATTERS = {'clock': "var v=tick, n=-1;if (v>=60) return ((v/60)|0)+':'+(+(100+v%60).toFixed(2)+'').substring(1);for (; v && Math.abs(v)<0.5; ++n) v*=1e3;return v && +v.toPrecision(5)+('mnp'[n]||'');", 'metric': "var v=tick, n=0;for (; v && Math.abs(v)>=1e3; n++) v/=1e3;for (; v && Math.abs(v)<0.5; n--) v*=1e3;return +v.toPrecision(5) + (n ? 'pnm_kMG'[n+4] : '');"}

    def __init__(self, data, opts, rollover=None):
        """Build the figure, axes, legend and data source.

        Args:
            data: list of line dicts ({'x', 'y', optional 'xlim'/'ylim', 'name'})
                or a 'Frames' object (detected by type name).
            opts: figure/line options; consumed destructively via pop/setdefault.
            rollover: max retained points when streaming small batches.
        """
        import bokeh.io, bokeh.plotting, bokeh.models, bokeh.models.tools
        self.xy_mode = opts.pop('xy_mode', False)
        xscale = opts.pop('xscale', 'clock')
        yscale = opts.pop('yscale', 'metric')
        # Scales not backed by a custom formatter are passed through as axis types.
        if (xscale not in self._FORMATTERS):
            opts.setdefault('x_axis_type', xscale)
        if (yscale not in self._FORMATTERS):
            opts.setdefault('y_axis_type', yscale)
        if (type(data).__name__ == 'Frames'):
            if self.xy_mode:
                # XY mode: channel 1 vs channel 2, square-ish plot, both axes alike.
                xscale = yscale
                opts.setdefault('tools', 'pan,wheel_zoom,zoom_in,zoom_out,box_zoom,save,reset')
                opts.setdefault('active_multi', 'box_zoom')
                opts.setdefault('width', 250)
                opts.setdefault('height', 250)
                opts.setdefault('xlabel', data[0].name)
                opts.setdefault('ylabel', data[1].name)
                lines = [{'name': 'XY', 'x': data[0].y(), 'y': data[1].y(), 'xlim': data[0].ylim, 'ylim': data[1].ylim}]
            else:
                lines = data.to_dict()
        else:
            lines = data
        labels = opts.pop('label', [d.get('name', str(i)) for (i, d) in enumerate(lines)])
        opts.setdefault('frame_width', opts.pop('width', 600))
        opts.setdefault('frame_height', opts.pop('height', 250))
        opts.setdefault('lod_interval', 0)
        opts.setdefault('x_axis_label', opts.pop('xlabel', None))
        opts.setdefault('y_axis_label', opts.pop('ylabel', None))
        opts.setdefault('color', ('#1f77b4', '#ff7f0e', '#ff0e7f'))
        opts.setdefault('active_inspect', None)
        opts.setdefault('active_drag', None)
        opts.setdefault('active_multi', 'xbox_zoom')
        opts.setdefault('tools', 'xpan,xwheel_zoom,xzoom_in,xzoom_out,xbox_zoom,crosshair,save,reset')
        opts.setdefault('legend_label', labels)
        opts.setdefault('output_backend', 'canvas')
        # Per-glyph options (keyword in axe_kw, or any line_*) go to p.line();
        # everything else configures the Figure itself.
        axe_kw = set('alpha,color,muted,visible,legend_field,legend_group,legend_label'.split(','))
        fig_opts = {k: v for (k, v) in opts.items() if (not ((k in axe_kw) or k.startswith('line_')))}
        # Per-glyph options are indexed per line (value i for line i).
        axe_opts = [{k: v[i] for (k, v) in opts.items() if (k not in fig_opts)} for i in range(len(lines))]
        p = bokeh.plotting.Figure(**fig_opts)
        p.grid.grid_line_alpha = 0.5
        p.toolbar.logo = None
        ds = bokeh.models.ColumnDataSource(data={})
        y_range_name = 'default'
        y_range = p.y_range
        for (i, line) in enumerate(lines):
            xs = _items(line['x'])
            ys = _items(line['y'])
            xlim = line.get('xlim')
            ylim = line.get('ylim')
            # All lines share one x column; each line owns a y column named by its label.
            ds.data['x'] = xs
            ds.data[labels[i]] = ys
            if (not xlim):
                # Tight default bounds with a tiny epsilon so both ends stay visible.
                xlim = (((xs[0] - 1e-09), (xs[(- 1)] + 1e-09)) if (len(xs) > 1) else (0, None))
            (p.x_range.start, p.x_range.end) = p.x_range.bounds = xlim
            if ylim:
                # Lines whose y limits differ from the first get their own
                # extra y range plus a right-hand axis.
                if ((i > 0) and ((y_range.start != ylim[0]) or (y_range.end != ylim[1]))):
                    y_range_name = ('y_range_' + str(i))
                    y_range = p.extra_y_ranges[y_range_name] = bokeh.models.DataRange1d()
                    p.add_layout(bokeh.models.LinearAxis(y_range_name=y_range_name), 'right')
                (y_range.start, y_range.end) = ylim
            pl = p.line('x', labels[i], source=ds, y_range_name=y_range_name, **axe_opts[i])
            y_range.renderers += (pl,)
        for ax in p.xaxis:
            ax.ticker.desired_num_ticks = 10
            formatter = self._FORMATTERS.get(xscale)
            if formatter:
                ax.minor_tick_line_color = None
                ax.formatter = bokeh.models.FuncTickFormatter(code=formatter)
        for ax in p.yaxis:
            ax.ticker.desired_num_ticks = 10
            formatter = self._FORMATTERS.get(yscale)
            if formatter:
                ax.minor_tick_line_color = None
                ax.formatter = bokeh.models.FuncTickFormatter(code=formatter)
        # Move the legend into a compact horizontal strip above the plot.
        lg = p.legend[0]
        lg.location = 'left'
        lg.click_policy = 'hide'
        lg.orientation = 'horizontal'
        lg.margin = 0
        lg.padding = 2
        lg.spacing = 30
        lg.border_line_width = 0
        p.add_layout(lg, 'above')
        p.select(type=bokeh.models.ZoomInTool).factor = 0.5
        p.select(type=bokeh.models.ZoomOutTool).factor = 1
        p.select(type=bokeh.models.ZoomOutTool).maintain_focus = False
        p.select(type=bokeh.models.WheelZoomTool).maintain_focus = False
        self.figure = p
        self.handle = None
        self.labels = labels
        self.data_source = ds
        self.rollover = rollover

    def __call__(self, source):
        """Shorthand for update(source)."""
        self.update(source)

    def show(self):
        """Display the figure; in a notebook keep a handle for live pushes."""
        import bokeh.io
        if _is_notebook():
            bokeh.io.output_notebook(hide_banner=True)
            self.handle = bokeh.io.show(self.figure, notebook_handle=True)
        else:
            self.handle = bokeh.io.show(self.figure)
        return self

    def update(self, source):
        """Replace (or stream) the chart data and push it to the notebook.

        Accepts a tuple (x, y1, y2, ...), a list of frame dicts, a plain
        column dict, a 'Frames' object, or a pandas DataFrame.

        Raises:
            ValueError: for any other source type.
        """
        import bokeh.io
        source_cls = type(source)
        if (source_cls is tuple):
            # First element is x; the rest map onto self.labels in order.
            data = {(self.labels[(i - 1)] if i else 'x'): _items(item) for (i, item) in enumerate(source)}
        elif (source_cls is list):
            data = {}
            for (i, frame) in enumerate(source):
                data['x'] = _items(frame['x'])
                data[frame['name']] = _items(frame['y'])
        elif (source_cls is dict):
            data = source
        elif (source_cls.__name__ == 'Frames'):
            if self.xy_mode:
                data = {'x': source.ch1.y(), self.labels[0]: source.ch2.y()}
            else:
                data = {'x': source.x()}
                for (i, frame) in enumerate(source):
                    data[self.labels[i]] = frame.y()
        elif (source_cls.__name__ == 'DataFrame'):
            data = {'x': source.index.to_numpy(dtype=np.float32)}
            for (i, col) in enumerate(source):
                data[self.labels[i]] = source[col].to_numpy(dtype=np.float32)
        else:
            raise ValueError('Invalid argument source')
        if (0 < len(data['x']) < 10):
            # Small batches are appended (streamed) up to `rollover` points;
            # anything larger replaces the whole data source.
            self.data_source.stream(data, self.rollover)
        else:
            self.data_source.data = data
        # NOTE(review): assumes show() ran in a notebook — handle is None otherwise.
        assert (self.handle is not None)
        bokeh.io.push_notebook(handle=self.handle)
def install_parser(subparser):
    """Wire the `install` sub-command: set its handler and declare its arguments."""
    subparser.set_defaults(func=install)
    arguments = (
        (('-pkg', '--packages'), dict(required=True, help='The packages list comma separated')),
        (('-p', '--path'), dict(help='The UI project path')),
        (('-f', '--force'), dict(help='Y / N flag to Force the package update')),
    )
    for flags, options in arguments:
        subparser.add_argument(*flags, **options)
def stub_legacy(wordmap):
    """Trim the inflection stub of *wordmap* according to its Kotus declension
    class number (``kotus_tn``).

    Mutates and returns *wordmap*: shortens ``wordmap['stub']`` by a
    class-specific number of characters (some classes depend on consonant
    gradation ``kotus_av`` or on the ``extra_i`` flag), and for class 49 sets
    ``extra_e`` when the stub ends in 'e'.

    Cleanups vs. the legacy version (behavior unchanged):
    - removed dead no-op assignments (stub = stub) for classes 1-4, 8, 21, 22,
      32, 48 and the else-branch of classes 5/6;
    - removed unreachable duplicates: 16 in the rstrip('i') branch (already
      handled by the [7, 16] branch), 62 in the [62, 63, 65] branch, and the
      repeated 71;
    - merged branches that applied the identical trim.
    """
    tn = int(wordmap['kotus_tn'])
    stub = wordmap['stub']
    if tn in [5, 6]:
        # Only trim the final -i when the word actually carries one.
        if wordmap['extra_i']:
            stub = stub[:(- 1)]
    elif (tn in [7, 16]) or (tn in range(33, 38)) or (tn in range(39, 47)):
        stub = stub[:(- 1)]
    elif tn in range(9, 16):
        stub = stub[:(- 1)]
    elif tn in [23, 24, 26]:
        stub = stub.rstrip('i')
    elif (tn in range(17, 19)) or (tn == 20):
        stub = stub[:(- 1)]
    elif tn == 19:
        stub = stub[:(- 2)]
    elif (tn in [25, 27]) or (tn == 28) or (tn == 47):
        stub = stub[:(- 2)]
    elif tn in [29, 30, 31, 38]:
        stub = stub[:(- 3)]
    elif tn == 49:
        # Class 49 keeps its stub; just record a trailing -e for later rules.
        if stub.endswith('e'):
            wordmap['extra_e'] = True
    elif tn in [52, 78]:
        stub = stub[:(- 1)]
    elif tn in [53, 56, 77]:
        stub = stub[:(- 2)]
    elif tn in [54, 55, 57, 59, 76]:
        # Gradating words carry a longer marker to strip.
        stub = stub[:(- 7)] if wordmap['kotus_av'] else stub[:(- 3)]
    elif tn == 58:
        stub = stub[:(- 2)]
    elif tn == 60:
        stub = stub[:(- 8)] if wordmap['kotus_av'] else stub[:(- 4)]
    elif tn == 61:
        stub = stub[:(- 2)]
    elif tn in [62, 63, 65, 68]:
        stub = stub[:(- 3)]
    elif tn == 64:
        stub = stub[:(- 4)]
    elif tn in [66, 67, 1067, 69, 72, 73, 74, 75]:
        stub = stub[:(- 2)]
    elif tn in [70, 71]:
        stub = stub[:(- 3)]
    elif tn == 1007:
        stub = stub[:(- 1)]
    elif tn in [1009, 1010]:
        stub = stub[:(- 7)] if wordmap['kotus_av'] else stub[:(- 3)]
    elif tn in [1024, 1026]:
        stub = stub[:(- 1)]
    wordmap['stub'] = stub
    return wordmap
class vfp_class_id(bsn_tlv):
    """BSN TLV (type 107) carrying a 32-bit VFP class id.

    Generated-style OpenFlow serialization code: pack/unpack a
    type(2B)/length(2B)/value(4B) record.
    NOTE(review): pack() joins struct.pack results with '' — this is
    Python 2 bytes-as-str code; it would TypeError on Python 3.
    """
    # TLV type tag on the wire.
    type = 107

    def __init__(self, value=None):
        # Default the payload to 0 when no value is supplied.
        if (value != None):
            self.value = value
        else:
            self.value = 0
        return

    def pack(self):
        """Serialize to type/length/value; length is back-patched after packing."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        # Placeholder length, overwritten below once the total size is known.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.value))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a vfp_class_id from *reader*.

        NOTE(review): no `self` parameter — this is almost certainly a
        @staticmethod whose decorator was lost; call as vfp_class_id.unpack(reader).
        """
        obj = vfp_class_id()
        _type = reader.read('!H')[0]
        assert (_type == 107)
        _length = reader.read('!H')[0]
        # Constrain further reads to this TLV's payload (length minus 4-byte header).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.value = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        # Equal iff same concrete class and same payload value.
        if (type(self) != type(other)):
            return False
        if (self.value != other.value):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer *q*."""
        q.text('vfp_class_id {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('value = ')
                q.text(('%#x' % self.value))
        q.breakable()
        q.text('}')
def test_expanding_sum_single_var_periods(df_time):
    """Expanding-window 'sum' on one variable with a 2-period shift matches the
    precomputed reference column."""
    reference = [np.nan, np.nan, 31.31, 62.82, 94.97, 127.36, 159.98, 192.48, 225.0, 257.68, 291.44, 325.57, 359.65, 393.35, 427.24]
    expected_df = df_time.copy()
    expected_df['ambient_temp_expanding_sum'] = reference
    transformer = ExpandingWindowFeatures(variables=['ambient_temp'], functions='sum', periods=2)
    result = transformer.fit_transform(df_time)
    assert_frame_equal(result, expected_df)
def tile_title(tilename, tiledata, grid_x, grid_y, grid):
    """Build the list of title lines describing one FPGA tile.

    Includes the tile name, its segment (when present), its grid position,
    any sites, and the segment base address when known; warns on stdout when
    a segment lacks a base address.
    """
    lines = [tilename]
    segdata = None
    if 'segment' in tiledata:
        # Segment bits live on the grid entry; may legitimately be absent.
        segdata = grid[tilename]['bits'].get('CLB_IO_CLK', None)
        lines.append(tiledata['segment'])
    lines.append('GRID_POSITION: %d %d' % (grid_x, grid_y))
    for sitename, sitetype in tiledata.get('sites', {}).items():
        lines.append('%s site: %s' % (sitetype, sitename))
    if segdata:
        if 'baseaddr' in segdata:
            lines.append('Baseaddr: %s' % segdata['baseaddr'])
        else:
            print('Warning: no baseaddr in segment %s (via tile %s).' % (tiledata['segment'], tilename))
    return lines
def log_null_device_setting(data, fos):
    """Push the 'log.null-device setting' configuration to FortiOS.

    Filters the module parameters down to the API-relevant keys, converts
    underscores to hyphens, and issues the set() call against *fos*.
    """
    vdom = data['vdom']
    raw_settings = data['log_null_device_setting']
    payload = underscore_to_hyphen(filter_log_null_device_setting_data(raw_settings))
    return fos.set('log.null-device', 'setting', data=payload, vdom=vdom)
class Sensor(GenericSensor):
    """BME680 environmental sensor (temperature / humidity / pressure) on I2C."""

    # Per-sensor config schema: which reading to expose and optional oversampling.
    SENSOR_SCHEMA: CerberusSchemaType = {'type': dict(type='string', required=False, default='temperature', allowed=['temperature', 'humidity', 'pressure']), 'oversampling': dict(type='string', required=False, allowed=['none', '1x', '2x', '4x', '8x', '16x'])}

    def setup_module(self) -> None:
        """Open the I2C bus and initialise the BME680 driver."""
        from smbus2 import SMBus
        import bme680
        self.i2c_addr: int = self.config['chip_addr']
        self.i2c_device = SMBus(self.config['i2c_bus_num'])
        self.sensor = bme680.BME680(self.i2c_addr, self.i2c_device)
        # Config string -> driver oversampling constant.
        self.oversampling_map = {'none': bme680.OS_NONE, '1x': bme680.OS_1X, '2x': bme680.OS_2X, '4x': bme680.OS_4X, '8x': bme680.OS_8X, '16x': bme680.OS_16X}

    def setup_sensor(self, sens_conf: ConfigType) -> None:
        """Apply the configured oversampling for this reading type, if any."""
        sens_type: str = sens_conf['type']
        if ('oversampling' in sens_conf):
            # Resolves to e.g. set_temperature_oversample / set_humidity_oversample.
            set_oversampling = getattr(self.sensor, f'set_{sens_type}_oversample')
            set_oversampling(self.oversampling_map[sens_conf['oversampling']])

    def get_value(self, sens_conf: ConfigType) -> SensorValueType:
        """Return the configured reading, or None when no fresh data is available."""
        sens_type = sens_conf['type']
        if (not self.sensor.get_sensor_data()):
            return None
        return cast(float, dict(temperature=self.sensor.data.temperature, humidity=self.sensor.data.humidity, pressure=self.sensor.data.pressure)[sens_type])
# NOTE(review): the bare name below is almost certainly a decorator whose '@'
# was lost in transit (e.g. `@_admin_required` / `@admin_required`) — restore
# it before use; as written this line is a NameError/no-op.
_admin_required
def SubscriptionManageGenerateAllBills(request, location_slug, subscription_id):
    """Admin view: generate every bill up to the current period for a
    subscription, flash a confirmation, and redirect back to its detail page."""
    subscription = get_object_or_404(Subscription, pk=subscription_id)
    subscription.generate_all_bills()
    messages.add_message(request, messages.INFO, 'Bills up to the current period were generated.')
    return HttpResponseRedirect(reverse('subscription_manage_detail', args=(location_slug, subscription.id)))
class GptsPlanStorage(StorageItem):
    """Storage record for one sub-task of an agent conversation plan.

    NOTE(review): fields use dataclasses.field(), so a @dataclass decorator was
    almost certainly stripped from above this class — confirm against upstream.
    """
    # Conversation this plan row belongs to.
    conv_id: str
    # Ordinal of the sub-task within the plan.
    sub_task_num: int
    sub_task_content: Optional[str]
    sub_task_title: Optional[str] = None
    # Agent assigned to execute this sub-task.
    sub_task_agent: Optional[str] = None
    resource_name: Optional[str] = None
    # Dependency description on other sub-tasks.
    rely: Optional[str] = None
    agent_model: Optional[str] = None
    retry_times: Optional[int] = 0
    max_retry_times: Optional[int] = 5
    # Lifecycle state; starts as TODO.
    state: Optional[str] = Status.TODO.value
    result: Optional[str] = None
    # Derived identity; not part of the constructor.
    _identifier: GptsPlanIdentifier = dataclasses.field(init=False)

    def from_dict(d: Dict[(str, Any)]):
        """Build an instance from a plain dict.

        NOTE(review): no `cls`/`self` — likely a @staticmethod whose decorator
        was lost. Also note `sub_task_title` is never read from `d`, so it
        always keeps its default — confirm whether that is intentional.
        """
        return GptsPlanStorage(conv_id=d.get('conv_id'), sub_task_num=d['sub_task_num'], sub_task_content=d['sub_task_content'], sub_task_agent=d['sub_task_agent'], resource_name=d['resource_name'], rely=d['rely'], agent_model=d['agent_model'], retry_times=d['retry_times'], max_retry_times=d['max_retry_times'], state=d['state'], result=d['result'])

    def to_dict(self) -> Dict[(str, Any)]:
        """Serialize all dataclass fields to a plain dict."""
        return dataclasses.asdict(self)

    def _check(self):
        """Validate required fields; raises ValueError on any missing one."""
        if (self.conv_id is None):
            raise ValueError('conv_id cannot be None')
        if (self.sub_task_num is None):
            raise ValueError('sub_task_num cannot be None')
        if (self.sub_task_content is None):
            raise ValueError('sub_task_content cannot be None')
        if (self.state is None):
            raise ValueError('state cannot be None')

    def identifier(self) -> GptsPlanIdentifier:
        """Return the storage identifier.

        NOTE(review): likely declared as a @property upstream (decorator lost).
        """
        return self._identifier

    def merge(self, other: 'StorageItem') -> None:
        """Overwrite this record's fields from another GptsPlanStorage.

        Raises:
            ValueError: when *other* is not a GptsPlanStorage.
        """
        if (not isinstance(other, GptsPlanStorage)):
            raise ValueError(f'Cannot merge {type(other)} into {type(self)} because they are not the same type.')
        self.from_object(other)
def functionPreambleExpressionForSelf():
    """Return an LLDB expression that evaluates to `self` at a function
    preamble for the current architecture, or None when the architecture
    is unrecognised."""
    import re
    arch = currentArch()
    # Fixed register/stack locations per ABI.
    by_arch = {
        'i386': '*(id*)($esp+4)',
        'x86_64': '(id)$rdi',
        'arm64': '(id)$x0',
    }
    if arch in by_arch:
        return by_arch[arch]
    # 32-bit ARM variants (armv7, armv7s, ...) all pass self in r0.
    if re.match('^armv.*$', arch):
        return '(id)$r0'
    return None
class TestProjectForking(CoprsTestCase):
    """Tests for forking Copr projects, builds and packages.

    NOTE(review): several decorator lines in this class were mangled in
    transit (leading `@pytest.mark` / `@TransactionDecorator` fragments lost);
    they are flagged inline and must be restored for the file to parse.
    """

    def test_create_object(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """create_object copies attributes, honouring the exclude list."""
        forking = ProjectForking(self.u1)
        o1 = FooModel(x=1, y=2, z=3)
        o2 = forking.create_object(FooModel, o1)
        assert (o2.x == o1.x == 1)
        assert (o2.y == o1.y == 2)
        assert (o2.z == o1.z == 3)
        # Excluded attributes must not be copied over.
        o3 = forking.create_object(FooModel, o1, exclude=['z'])
        assert (o3.x == o1.x == 1)
        assert (o3.y == o1.y == 2)
        assert (o3.z != o1.z)
        assert (not o3.z)

    def test_fork_build(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """Forked build is a new row marked 'forked' with mirrored chroots."""
        forking = ProjectForking(self.u1)
        fb1 = forking.fork_build(self.b1, self.c2, self.p2, self.b1.build_chroots)
        assert (fb1.id != self.b1.id)
        assert (fb1.state == 'forked')
        assert (len(self.b1.build_chroots) == len(fb1.build_chroots))
        (ch, fch) = (self.b1.build_chroots[0], fb1.build_chroots[0])
        assert (ch.build_id != fch.build_id)
        assert (ch.git_hash == fch.git_hash)
        assert (ch.started_on == fch.started_on)
        assert (ch.ended_on == fch.ended_on)
        assert (ch.mock_chroot_id == fch.mock_chroot_id)

    # NOTE(review): mangled decorator — restore `@pytest.mark.usefixtures(...)`.
    .usefixtures('f_copr_chroots_assigned_finished')
    def test_fork_check_assigned_copr_chroot(self):
        """Forked build chroots attach to the destination copr's chroots."""
        _side_effects = self
        assert (len(self.c2.copr_chroots) == 2)
        assert (self.b1.copr != self.c2)
        new_cch = CoprChrootsLogic.create_chroot(user=self.u1, copr=self.c2, mock_chroot=self.b1_bc[0].mock_chroot)
        self.db.session.add(new_cch)
        forking = ProjectForking(self.u1)
        fork_b = forking.fork_build(self.b1, self.c2, self.p2, self.b1.build_chroots)
        assert (len(new_cch.build_chroots) == 1)
        assert (fork_b.build_chroots == new_cch.build_chroots)

    def test_fork_package(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """Forked package keeps the name but gets a new id."""
        forking = ProjectForking(self.u1)
        fp1 = forking.fork_package(self.p1, self.c2)
        assert (fp1.id != self.p1.id)
        assert (fp1.name == self.p1.name)

    def test_fork_copr(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """Forked copr records its origin and mirrors the chroot set."""
        with mock.patch('flask.g') as mc_flask_g:
            mc_flask_g.user.name = self.u2.name
            forking = ProjectForking(self.u1)
            fc1 = forking.fork_copr(self.c1, 'new-name')
            assert (fc1.id != self.c1.id)
            assert (fc1.name == 'new-name')
            assert (fc1.forked_from_id == self.c1.id)
            assert (fc1.mock_chroots == self.c1.mock_chroots)

    # NOTE(review): mangled decorators — likely `@TransactionDecorator('u2')`
    # plus `@pytest.mark.usefixtures(...)`; restore both.
    ('u2')
    .usefixtures('f_users', 'f_coprs', 'f_mock_chroots', 'f_builds', 'f_db')
    def test_forking_into_existing_project(self):
        """Fork into an existing project is a no-op until confirmed."""
        self.db.session.add_all([self.c1, self.c3, self.u1, self.u2])
        src_copr = self.c1
        src_user = self.u1
        dest_copr = self.c3
        dest_user = self.u2
        assert (len(dest_copr.builds) == 0)
        data = {'name': dest_copr.name, 'ownername': dest_user.name, 'source': '{0}/{1}'.format(dest_user.name, dest_copr.name)}
        # Without confirmation the fork request must not create builds.
        self.tc.post('/coprs/{0}/{1}/fork/'.format(src_user.name, src_copr.name), data=data)
        dest_copr = models.Copr.query.filter_by(id=dest_copr.id).one()
        assert (len(dest_copr.builds) == 0)
        data['confirm'] = 'y'
        self.tc.post('/coprs/{0}/{1}/fork/'.format(src_user.name, src_copr.name), data=data)
        dest_copr = models.Copr.query.filter_by(id=dest_copr.id).one()
        assert (len(dest_copr.builds) == 1)

    def test_copr_by_repo_safe(self, f_users, f_coprs, f_mock_chroots, f_builds, f_db):
        """get_copr_by_repo rejects malformed copr:// URLs instead of raising."""
        assert (ComplexLogic.get_copr_by_repo('xxx') is None)
        assert (ComplexLogic.get_copr_by_repo('copr://') is None)
        assert (ComplexLogic.get_copr_by_repo('copr://a/b/c') is None)
        assert (ComplexLogic.get_copr_by_repo('copr://user1/foocopr') is not None)
        assert (ComplexLogic.get_copr_by_repo('copr:///user1/foocopr') is None)
        assert (ComplexLogic.get_copr_by_repo('copr://user1//foocopr') is None)

    # NOTE(review): mangled decorator — restore `@pytest.mark.usefixtures(...)`.
    .usefixtures('f_users', 'f_coprs', 'f_mock_chroots', 'f_builds', 'f_db')
    def test_generate_build_config_with_dep_mistake(self):
        """A bogus copr:// dependency still yields a (placeholder) repo entry.

        NOTE(review): the `main_repo` literal below is garbled — its 'baseurl'
        string was truncated in transit; restore from upstream.
        """
        bcl = BuildConfigLogic
        main_repo = {'id': 'copr_base', 'name': 'Copr repository', 'baseurl': ' 'priority': None}
        build_config = bcl.generate_build_config(self.c1, 'fedora-18-x86_64')
        assert (build_config['repos'] == [main_repo])
        self.c1.repos = 'copr://non/existing'
        build_config = bcl.generate_build_config(self.c1, 'fedora-18-x86_64')
        assert (len(build_config['repos']) == 2)
        assert (build_config['repos'][1]['id'] == 'copr_non_existing')
def update_linker(linker, examples):
    """Feed reviewed record pairs back into a dedupe *linker*.

    Each example carries per-field 'a_value'/'b_value' sides and an 'answer';
    'accept' becomes a match pair, 'reject' a distinct pair, anything else is
    ignored. Returns the (mutated) linker.
    """
    pairs = {'distinct': [], 'match': []}
    for example in examples:
        left = {field['name']: field['a_value'] for field in example['fields']}
        right = {field['name']: field['b_value'] for field in example['fields']}
        answer = example['answer']
        if answer == 'accept':
            pairs['match'].append((left, right))
        elif answer == 'reject':
            pairs['distinct'].append((left, right))
    linker.markPairs(pairs)
    return linker
def get_trees(text, parser=None):
    """Extract all trees from the TREES block of a NEXUS document.

    Applies the optional TRANSLATE table to each newick string and returns a
    dict mapping tree name -> parsed/translated tree.

    Raises:
        NexusError: when the text lacks the #NEXUS header or has multiple
            TRANSLATE commands.
    """
    if not re.match('^#NEXUS\\s*\\n', text, flags=re.I):
        raise NexusError('text does not start with "#NEXUS"')
    commands = get_section(text, 'TREES')
    translate = {}
    if 'TRANSLATE' in commands:
        if len(commands['TRANSLATE']) != 1:
            raise NexusError('multiple TRANSLATE commands')
        # Comma-separated "token name" pairs.
        for pair in commands['TRANSLATE'][0].split(','):
            token, name = pair.split(maxsplit=1)
            translate[token] = name
    trees = {}
    for command in commands.get('TREE', []):
        raw_name, raw_newick = command.split('=', maxsplit=1)
        tree_name = raw_name.strip('\t\r\n "\'')
        newick = raw_newick.strip() + ';'
        # Drop a leading [...] comment (e.g. [&U]) before the newick proper.
        if newick.startswith('['):
            newick = newick[newick.find(']') + 1:].strip()
        trees[tree_name] = apply_translations(translate, newick, parser)
    return trees
class Plugin(plugin.PluginProto):
    """RPIEasy task plugin 522: grabs an RTSP video stream with OpenCV and
    serves the current frame as JPEG through the builtin webserver."""

    PLUGIN_ID = 522
    PLUGIN_NAME = 'Image - OpenCV RTSP stream To JPEG'
    PLUGIN_VALUENAME1 = 'jpeg'

    def __init__(self, taskindex):
        plugin.PluginProto.__init__(self, taskindex)
        # Dummy device type: produces no sensor values, only the image.
        self.dtype = rpieGlobals.DEVICE_TYPE_DUMMY
        self.vtype = rpieGlobals.SENSOR_TYPE_NONE
        self.readinprogress = 0
        self.valuecount = 0
        self.senddataoption = False
        self.timeroption = False
        self.timeroptional = True
        self.formulaoption = False
        self._nextdataservetime = 0
        self.lastread = 0
        # VideoGrab wrapper around the OpenCV capture; None until initialized.
        self.videostream = None
        # Timestamp of the last capture start, used to rate-limit restarts.
        self.lastinit = 0

    def plugin_init(self, enableplugin=None):
        """(Re)create the video stream; tear it down when the task is disabled.

        taskdevicepluginconfig: [0]=stream URL, [4]=force FFMPEG UDP transport.
        Returns False when no stream object could be created.
        """
        plugin.PluginProto.plugin_init(self, enableplugin)
        self.uservar[0] = 0
        if (enableplugin is None):
            # Full re-init (not an enable/disable toggle): drop any old stream.
            try:
                if self.videostream:
                    del self.videostream
            except:
                pass
        self.initialized = False
        self.readinprogress = 0
        if self.enabled:
            if self.taskdevicepluginconfig[4]:
                # OpenCV >=4 honours this env var to force UDP RTP transport.
                os.environ['OPENCV_FFMPEG_CAPTURE_OPTIONS'] = 'rtsp_transport;udp'
            rtsp_stream_link = str(self.taskdevicepluginconfig[0])
            try:
                self.videostream = VideoGrab(rtsp_stream_link)
                self.initialized = True
            except Exception as e:
                pass
            if (self.videostream is None):
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'Videostream can not be initialized! ')
                return False
            elif ((time.time() - self.lastinit) > 10):
                # Avoid hammering the camera: restart at most every 10 seconds.
                self.capture_start()
                self.lastinit = time.time()
        else:
            # Task disabled: best-effort teardown of the grabber.
            try:
                self.capture_stop()
                del self.videostream
            except:
                pass

    def webform_load(self):
        """Populate the task configuration web form."""
        webserver.addFormTextBox('RTSP stream', 'plugin_522_url', str(self.taskdevicepluginconfig[0]), 255)
        webserver.addFormNote('Specify the full URL to access stream, with password if needed')
        webserver.addFormCheckBox('Enable resize', 'plugin_522_resize', self.taskdevicepluginconfig[1])
        webserver.addFormNumericBox('Width to resize', 'plugin_522_w', self.taskdevicepluginconfig[2], 0, 4096)
        webserver.addFormNumericBox('Height to resize', 'plugin_522_h', self.taskdevicepluginconfig[3], 0, 2160)
        webserver.addFormNote('Resize is a bit resource hungry, use only if really needed')
        webserver.addFormCheckBox('Force FFMPEG UDP', 'plugin_522_udp', self.taskdevicepluginconfig[4])
        webserver.addFormNote('Certain cheap cameras only knows UDP, and OpenCV >3.0 defaults to TCP. OpenCV >4.0 accepts override.')
        webserver.addFormNote("In case the installed opencv is too old, upgrade manually: 'sudo apt remove python3-opencv && sudo pip3 install opencv-python'")
        try:
            if (self.initialized and self.enabled):
                # Show the URL under which the captured JPEG can be fetched.
                try:
                    pname = self.gettaskname()
                except:
                    pname = ''
                if (pname == ''):
                    pname = '[NAME]'
                url = ('image?name=' + str(pname))
                webserver.addHtml('<tr><td>Output image url:</td>')
                if (pname == '[NAME]'):
                    # NOTE(review): this literal looks garbled/truncated (URL
                    # scrubbed?) and is not valid Python — confirm upstream.
                    webserver.addHtml(('<td> + pname))
                else:
                    webserver.addHtml((((("<td><a href='" + url) + "'>/") + url) + '</a></td></tr>'))
        except:
            pass
        return True

    def webform_save(self, params):
        """Persist submitted form values and restart the capture."""
        self.capture_stop()
        self.taskdevicepluginconfig[0] = webserver.arg('plugin_522_url', params)
        self.taskdevicepluginconfig[1] = (webserver.arg('plugin_522_resize', params) == 'on')
        self.taskdevicepluginconfig[2] = int(webserver.arg('plugin_522_w', params))
        self.taskdevicepluginconfig[3] = int(webserver.arg('plugin_522_h', params))
        self.taskdevicepluginconfig[4] = (webserver.arg('plugin_522_udp', params) == 'on')
        self.capture_start(self.taskdevicepluginconfig[0])
        return True

    def capture_start(self, src=None):
        """Start (or restart) grabbing from *src*; logs instead of raising."""
        try:
            self.videostream.start(src)
        except Exception as e:
            misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('Videostream can not be initialized! ' + str(e)))

    def capture_stop(self):
        """Best-effort stop of the grabber; all errors are ignored."""
        try:
            self.videostream.stop()
        except:
            pass

    def plugin_exit(self):
        """Release the video stream when the task is removed or stopped."""
        plugin.PluginProto.plugin_exit(self)
        try:
            if (self.videostream is not None):
                self.videostream.__exit__()
                self.videostream = None
        except:
            pass
class OptionPlotoptionsTilemapSonificationTracksMappingHighpass(Options):
    """Accessor for the tilemap sonification track highpass-filter mapping."""

    def frequency(self) -> 'OptionPlotoptionsTilemapSonificationTracksMappingHighpassFrequency':
        """Sub-configuration controlling the highpass filter frequency."""
        sub_cls = OptionPlotoptionsTilemapSonificationTracksMappingHighpassFrequency
        return self._config_sub_data('frequency', sub_cls)

    def resonance(self) -> 'OptionPlotoptionsTilemapSonificationTracksMappingHighpassResonance':
        """Sub-configuration controlling the highpass filter resonance."""
        sub_cls = OptionPlotoptionsTilemapSonificationTracksMappingHighpassResonance
        return self._config_sub_data('resonance', sub_cls)
class AbstractObject(collections_abc.MutableMapping):
    """Dict-like base class for Facebook Ads API objects whose values are
    type-checked/coerced on assignment via TypeChecker.

    NOTE(review): several methods below take `cls` but show no @classmethod
    decorator — decorators appear stripped in this copy; confirm upstream.
    """

    # Fields requested by default when reading this object type from the API.
    _default_read_fields = []
    # Field name -> expected type; consumed by the TypeChecker.
    _field_types = {}

    class Field():
        # Placeholder namespace of field names; subclasses populate it.
        pass

    def __init__(self):
        self._data = {}
        self._field_checker = TypeChecker(self._field_types, self._get_field_enum_info())

    def __getitem__(self, key):
        # Keys are normalized to str on lookup.
        return self._data[str(key)]

    def __setitem__(self, key, value):
        if key.startswith('_'):
            # Underscore-prefixed keys are private attributes, not API fields.
            self.__setattr__(key, value)
        else:
            # Field values are coerced to their declared type on assignment.
            self._data[key] = self._field_checker.get_typed_value(key, value)
        return self

    def __eq__(self, other):
        # Two objects are equal when their exported payloads match.
        return ((other is not None) and hasattr(other, 'export_all_data') and (self.export_all_data() == other.export_all_data()))

    def __delitem__(self, key):
        del self._data[key]

    def __iter__(self):
        return iter(self._data)

    def __len__(self):
        return len(self._data)

    def __contains__(self, key):
        return (key in self._data)

    def __unicode__(self):
        # Python 2 relic: `unicode` is undefined on Python 3.
        return unicode(self._data)

    def __repr__(self):
        return ('<%s> %s' % (self.__class__.__name__, json.dumps(self.export_value(self._data), sort_keys=True, indent=4, separators=(',', ': '))))

    def _set_data(self, data):
        """Replace this object's fields from a mapping; keeps the raw payload in _json."""
        if hasattr(data, 'items'):
            for (key, value) in data.items():
                self[key] = value
        else:
            raise FacebookBadObjectError('Bad data to set object data')
        self._json = data

    def _get_field_enum_info(cls):
        """Enum metadata for fields; subclasses override. (Presumably a classmethod.)"""
        return {}

    def get_endpoint(cls):
        """API endpoint for this object type; must be provided by subclasses."""
        raise NotImplementedError(('%s must have implemented get_endpoint.' % cls.__name__))

    def get_default_read_fields(cls):
        return cls._default_read_fields

    def set_default_read_fields(cls, fields):
        cls._default_read_fields = fields

    def _assign_fields_to_params(cls, fields, params):
        """Fill params['fields'] from *fields*, falling back to the class default."""
        if (fields is None):
            fields = cls.get_default_read_fields()
        if fields:
            params['fields'] = ','.join(fields)

    def set_data(self, data):
        self._set_data(data)

    def export_value(self, data):
        """Recursively convert nested AbstractObjects/dicts/lists to plain data;
        None-valued dict entries are dropped."""
        if isinstance(data, AbstractObject):
            data = data.export_all_data()
        elif isinstance(data, dict):
            data = dict(((k, self.export_value(v)) for (k, v) in data.items() if (v is not None)))
        elif isinstance(data, list):
            data = [self.export_value(v) for v in data]
        return data

    def export_data(self):
        return self.export_all_data()

    def export_all_data(self):
        return self.export_value(self._data)

    def create_object(cls, api, data, target_class):
        """Build a *target_class* instance bound to *api* from raw *data*."""
        new_object = target_class(api=api)
        new_object._set_data(data)
        return new_object
def test_message_params(logger):
    """A %-style log call is captured with both formatted and raw message."""
    logger.info('This is a test of %s', 'args')
    assert len(logger.client.events) == 1
    event = logger.client.events[ERROR][0]
    # A plain log record carries no exception payload.
    assert 'exception' not in event
    log = event['log']
    assert 'param_message' in log
    assert log['message'] == 'This is a test of args'
    assert log['param_message'] == 'This is a test of %s'
class LaunchGraze(BaseUseGraze):
    """Use a Graze card by launching it onto its owner."""
    card_usage = 'launch'

    def process_card(self, card):
        target = self.target
        launch = LaunchCard(target, [target], card, GrazeAction(target, target))
        return self.game.process_action(launch)

    def ask_for_action_verify(self, p, cl, tl):
        target = self.target
        launch = LaunchCard(target, [target], cl[0], GrazeAction(target, target))
        return launch.can_fire()
class VariableRenamer():
    """Base class for renaming SSA variables after decompilation.

    Builds on the interference graph: variables that must share a final
    name (both sides of a Relation) are contracted into one node, and the
    function arguments are forced to interfere so each keeps its own name.
    Subclasses compute the actual coloring/renaming into `renaming_map`.
    """

    def __init__(self, task: DecompilerTask, interference_graph: InterferenceGraph):
        self.cfg = task.graph
        self.interference_graph = interference_graph
        # One representative (lowest SSA label) per function-argument name.
        self.variable_for_function_arg: Dict[(str, Variable)] = self._get_function_argument_variables(task.function_parameters)
        self._add_interference_for_function_args()
        # representative -> all variables merged into it by contraction.
        self._variables_contracted_to: Dict[(Variable, List[Variable])] = {var: [var] for var in self.interference_graph}
        self._contract_variables_that_need_same_name()
        # Final mapping old SSA variable -> renamed variable; filled later.
        self.renaming_map: Dict[(Variable, Variable)] = dict()
        self.new_variable_name = 'var_'
        self.check_variable_name()

    def check_variable_name(self):
        """Fail fast if the generated-name prefix collides with an argument name."""
        if (self.new_variable_name in self.variable_for_function_arg):
            error_message = f'We need to think of a different name than {self.new_variable_name} for the replacement variable, because an function argument has the same name.'
            logging.error(error_message)
            raise NameError(error_message)

    def rename(self):
        """Apply `renaming_map` to every instruction, then drop no-op assignments."""
        for instruction in self.cfg.instructions:
            for variable in (instruction.requirements + instruction.definitions):
                self._replace_variable_in_instruction(variable, instruction)
        self._remove_redundant_assignments()

    def _replace_variable_in_instruction(self, variable: Variable, instruction: Instruction) -> None:
        """Substitute one SSA variable by its renamed copy; non-SSA vars are skipped."""
        if (variable.ssa_label is None):
            return
        replacement_variable = self.renaming_map[variable].copy()
        # Keep a back-reference to the original SSA variable.
        replacement_variable.ssa_name = variable.copy()
        instruction.substitute(variable, replacement_variable)
        if isinstance(instruction, Relation):
            # Relations track their variables by name and need an explicit rename.
            instruction.rename(variable, replacement_variable)

    def _remove_redundant_assignments(self):
        """Drop assignments that became `x = x` after renaming.

        A Relation whose sides still differ indicates a renaming error.
        """
        for basic_block in self.cfg.nodes:
            new_instructions = list()
            for instruction in basic_block.instructions:
                if (not isinstance(instruction, BaseAssignment)):
                    new_instructions.append(instruction)
                elif (instruction.destination != instruction.value):
                    if isinstance(instruction, Relation):
                        raise ValueError(f'In Relation {instruction} not all variables have the same name after renaming!')
                    new_instructions.append(instruction)
            basic_block.instructions = new_instructions

    def _get_function_argument_variables(self, function_parameters: List[Variable]) -> Dict[(str, Variable)]:
        """For each function-parameter name, pick the interference-graph variable
        with the smallest SSA label as its representative."""
        function_argument_variables: Dict[(str, Variable)] = dict()
        for variable in self.interference_graph.nodes:
            if (variable.name in [var.name for var in function_parameters]):
                if ((variable.name not in function_argument_variables.keys()) or (function_argument_variables[variable.name].ssa_label > variable.ssa_label)):
                    function_argument_variables[variable.name] = variable
        return function_argument_variables

    def _add_interference_for_function_args(self):
        """Force pairwise interference so no two arguments share a final name."""
        for (arg1, arg2) in combinations(self.variable_for_function_arg.values(), 2):
            self.interference_graph.add_edge(arg1, arg2)

    def _contract_variables_that_need_same_name(self) -> None:
        """Merge variables connected by Relations into a single graph node."""
        dependency_graph = self.create_same_name_dependency_graph()
        for connected_component in connected_components(dependency_graph):
            connected_component = sorted(connected_component, key=attrgetter('ssa_label'))
            self.interference_graph.contract_independent_set(connected_component)
            # The smallest-label variable represents the whole component.
            self._variables_contracted_to[connected_component[0]] = connected_component

    def create_same_name_dependency_graph(self):
        """Graph with an edge between both sides of every Relation in the CFG."""
        graph = Graph()
        for relation in [instruction for instruction in self.cfg.instructions if isinstance(instruction, Relation)]:
            graph.add_edge(relation.destination, relation.value)
        return graph

    def compute_new_name_for_each_variable(self):
        """Fill `renaming_map` with one fresh variable per variable class.

        NOTE(review): `self._variable_classes_handler` is not set in this
        class's __init__ — presumably provided by subclasses; confirm.
        """
        counter: int = 0
        for variable_class in self._variable_classes_handler.variable_class.values():
            (new_variable, counter) = self._new_variable_name_for(variable_class, counter)
            for variable in variable_class:
                if isinstance(variable, GlobalVariable):
                    # Globals keep their own name; only the SSA label is dropped.
                    tmp = variable.copy()
                    tmp.ssa_label = None
                    self.renaming_map[variable] = tmp
                else:
                    self.renaming_map[variable] = new_variable

    def _new_variable_name_for(self, variable_class: Set[Variable], counter: int) -> Tuple[(Variable, int)]:
        """Name for one class: reuse the argument name when the class contains a
        function argument, otherwise mint `var_<counter>`."""
        if (argument_set := set(self.variable_for_function_arg.values()).intersection(variable_class)):
            if (len(argument_set) > 1):
                # Arguments interfere by construction, so two in one class is a bug.
                error_message = f'All input arguments should interfere, but the arguments in {argument_set} are in the same color class.'
                logging.error(error_message)
                raise ValueError(error_message)
            argument = argument_set.pop()
            new_variable = Variable(argument.name, argument.type)
        else:
            (variable, *_) = variable_class
            new_variable = Variable(f'{self.new_variable_name}{counter}', variable.type, is_aliased=variable.is_aliased)
            counter += 1
        return (new_variable, counter)
def report_atlas_overlap(df, label_data, atlas, surf_va_LR, min_percent_overlap=5):
    """Add a per-row atlas-overlap summary column to *df* and return it.

    Loads the atlas dlabel map, then for every row index computes the label
    overlap summary and writes it into the '<atlas name>_overlap' column.

    Parameters: df (pandas.DataFrame), label_data (label array for the input
    map), atlas (dict with 'path', 'map_number', 'name'), surf_va_LR (vertex
    areas), min_percent_overlap (threshold passed through to the summary).
    """
    (atlas_data, atlas_dict) = ciftify.niio.load_LR_label(atlas['path'], int(atlas['map_number']))
    o_col = '{}_overlap'.format(atlas['name'])
    df[o_col] = ''
    # Fix: Index.get_values() was deprecated in pandas 0.25 and removed in
    # 1.0; iterating the index directly is the supported equivalent.
    for pd_idx in df.index:
        df.loc[(pd_idx, o_col)] = ciftify.report.get_label_overlap_summary(
            pd_idx, label_data, atlas_data, atlas_dict, surf_va_LR,
            min_percent_overlap=min_percent_overlap)
    return df
def main():
    """Ansible module entry point for FortiOS firewall.ssh host-key config.

    Builds the argument spec from the versioned schema, connects over the
    httpapi socket, applies the configuration and exits with changed/diff
    information (or fails with version/connection errors).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # 'name' is the FortiOS mkey (primary key) for this table.
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_ssh_host_key': {'required': False, 'type': 'dict', 'default': None, 'no_log': True, 'options': {}}}
    # Copy the schema-derived options into the module spec; the mkey is
    # always required.
    for attribute_name in module_spec['options']:
        fields['firewall_ssh_host_key']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_ssh_host_key']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Warn (below) if the playbook schema does not match the device version.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_ssh_host_key')
        (is_error, has_changed, result, diff) = fortios_firewall_ssh(module.params, fos, module.check_mode)
    else:
        # No httpapi connection available.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class ConstFloatArg(ConstArg):
    """A constant floating-point argument rendered with 17 decimal places
    (enough to round-trip an IEEE-754 double)."""

    def __init__(self, value):
        super().__init__(value)

    def cformat(self):
        # printf-style placeholder used when emitting C source.
        return '%.17f'

    def format(self):
        return f'{self.value:.17f}'

    def __str__(self):
        return f'Float({self.format()})'

    def is_one(self):
        return self.value == 1.0

    def is_zero(self):
        return self.value == 0.0
# NOTE(review): the line below looks like a stripped decorator/registration
# call (e.g. @TCPOption.register(TCP_OPTION_KIND_SACK, 2)); confirm upstream.
(TCP_OPTION_KIND_SACK, 2)
class TCPOptionSACK(TCPOption):
    """TCP Selective Acknowledgment option (RFC 2018): kind/length header
    followed by a list of (left_edge, right_edge) SACK blocks."""
    _PACK_STR = '!BB'        # kind (1 byte) + length (1 byte)
    _BLOCK_PACK_STR = '!II'  # one SACK block: two 32-bit sequence edges

    def __init__(self, blocks, kind=None, length=None):
        super(TCPOptionSACK, self).__init__(kind, length)
        self.blocks = blocks

    # NOTE(review): takes `cls` — presumably @classmethod in the original.
    def parse(cls, buf):
        """Parse one SACK option from *buf*; returns (option, remaining bytes)."""
        (_, length) = struct.unpack_from(cls._PACK_STR, buf)
        # The block list occupies bytes 2..length of the option.
        blocks_buf = buf[2:length]
        blocks = []
        while blocks_buf:
            lr_block = struct.unpack_from(cls._BLOCK_PACK_STR, blocks_buf)
            blocks.append(lr_block)
            # Each block is 8 bytes (two 32-bit integers).
            blocks_buf = blocks_buf[8:]
        return (cls(blocks, cls.cls_kind, length), buf[length:])

    def serialize(self):
        """Serialize header plus all blocks; recomputes self.length first."""
        buf = bytearray()
        for (left, right) in self.blocks:
            buf += struct.pack(self._BLOCK_PACK_STR, left, right)
        self.length = (self.cls_length + len(buf))
        return (struct.pack(self._PACK_STR, self.kind, self.length) + buf)
class TalkPEApi(Api):
    """Zenpy API wrapper for the Zendesk Talk Partner Edition endpoints.

    NOTE(review): the bare `_id(...)` lines below look like stripped
    decorators (argument-to-id extraction); confirm against the original.
    """

    def __init__(self, config):
        super(TalkPEApi, self).__init__(config, endpoint=EndpointFactory('talk_pe'), object_type='talk_pe')

    def __call__(self, *args, **kwargs):
        # Talk PE has no collection endpoint; only the named methods work.
        raise ZenpyException('You cannot call this endpoint directly!')

    _id(User)
    def display_user(self, agent, user):
        """Pop the given user's profile in the agent's browser."""
        url = self._build_url(self.endpoint.display_user(agent, user))
        return self._post(url, payload='')

    _id(User, Ticket)
    def display_ticket(self, agent, ticket):
        """Pop the given ticket in the agent's browser."""
        url = self._build_url(self.endpoint.display_ticket(agent, ticket))
        return self._post(url, payload='')

    _id(User)
    def create_ticket(self, agent, ticket):
        """Create a Talk ticket, optionally displayed to *agent*."""
        url = self._build_url(self.endpoint.create_ticket())
        payload = {'display_to_agent': (agent if agent else ''), 'ticket': ticket}
        return self._post(url, payload=payload)
class TestInfo():
    """Tests for the bodhi `info_release` CLI command."""

    def test_url_flag(self, mocked_client_class):
        """--url: release info is fetched from the given server and printed."""
        mocked_client_class.send_request.return_value = client_test_data.EXAMPLE_RELEASE_MUNCH
        runner = testing.CliRunner()
        # NOTE(review): the argument list below looks garbled (a URL literal
        # appears to have been scrubbed) and is not valid Python — confirm.
        result = runner.invoke(cli.info_release, ['--url', ' 'F27'])
        assert (result.exit_code == 0)
        assert (result.output == client_test_data.EXPECTED_RELEASE_OUTPUT.replace('Saved r', 'R'))
        mocked_client_class.send_request.assert_called_once_with('releases/F27', verb='GET', auth=False)

    def test_info_with_errors(self, mocked_client_class):
        """Server-side errors are printed and the command exits with code 1."""
        mocked_client_class.send_request.return_value = {'errors': [{'description': 'an error was encountered... :('}]}
        runner = testing.CliRunner()
        # NOTE(review): same scrubbed URL literal as above — confirm upstream.
        result = runner.invoke(cli.info_release, ['--url', ' 'F27'])
        assert (result.exit_code == 1)
        assert (result.output == 'ERROR: an error was encountered... :(\n')
def compress_G2(pt: G2Uncompressed) -> G2Compressed:
    """Compress a BLS12-381 G2 point into two ints (ZCash serialization).

    z1 packs x_im together with the compression flag c_flag (bit 383) and
    the y-sign flag a_flag1 (bit 381); z2 carries x_re.  The point at
    infinity sets the infinity flag b_flag (bit 382) with z2 == 0.

    Raises ValueError when *pt* is not on the twisted curve over FQ2.
    """
    if (not is_on_curve(pt, b2)):
        raise ValueError('The given point is not on the twisted curve over FQ**2')
    if is_inf(pt):
        # c_flag (2**383) and b_flag (2**382) set, all coordinate bits zero.
        return G2Compressed(((POW_2_383 + POW_2_382), 0))
    (x, y) = normalize(pt)
    (x_re, x_im) = x.coeffs
    (y_re, y_im) = y.coeffs
    # a_flag1 encodes the "sign" of y: 1 iff the deciding coordinate > q/2.
    # The imaginary part decides unless it is zero, then the real part does.
    a_flag1 = (((y_im * 2) // q) if (y_im > 0) else ((y_re * 2) // q))
    z1 = ((x_im + (a_flag1 * POW_2_381)) + POW_2_383)
    z2 = x_re
    return G2Compressed((z1, z2))
def is_homogeneous_tuple_type(t: Type[Tuple]) -> bool:
    """Return True when *t* is a tuple type whose items all share one type.

    Bare tuples (no type arguments) and the variable-length form
    Tuple[X, ...] both count as homogeneous.
    """
    if not is_tuple(t):
        return False
    args = get_type_arguments(t)
    if not args:
        # Bare `tuple` / `Tuple`: no constraints, trivially homogeneous.
        return True
    assert isinstance(args, tuple), args
    if len(args) == 2 and args[1] is Ellipsis:
        # Tuple[X, ...] — homogeneous by definition.
        return True
    return len(set(args)) == 1
def prepare_bytes_decimal(data, schema):
    """Serialize a decimal.Decimal as Avro "bytes" (big-endian two's complement).

    Non-Decimal inputs are returned untouched.  Raises ValueError when the
    value has more digits than the schema's precision allows, or when its
    exponent is finer than the schema's scale.
    """
    if not isinstance(data, decimal.Decimal):
        return data
    scale = schema.get('scale', 0)
    sign, digits, exp = data.as_tuple()
    if len(digits) > schema['precision']:
        raise ValueError('The decimal precision is bigger than allowed by schema')
    # Slack between the value's exponent and the schema scale; negative
    # means the value is finer-grained than the schema permits.
    delta = exp + scale
    if delta < 0:
        raise ValueError('Scale provided in schema does not match the decimal')
    # Fold the digit tuple into a single unscaled integer, then shift it
    # up so it is expressed at exactly the schema's scale.
    unscaled = 0
    for digit in digits:
        unscaled = unscaled * 10 + digit
    unscaled *= 10 ** delta
    # Size in bytes, with one spare bit for the two's-complement sign.
    size = (unscaled.bit_length() + 8) // 8
    if sign:
        unscaled = -unscaled
    return unscaled.to_bytes(size, byteorder='big', signed=True)
def upgrade():
    """Migrate all timestamp columns to timezone-aware TIMESTAMP WITH TIME ZONE.

    Plain created/updated style columns are assumed to hold UTC; event and
    CFP start/end columns are converted using each row's own timezone column
    (after first normalizing unknown timezone names to 'UTC').
    """
    op.execute("SET TIME ZONE 'utc'")
    op.alter_column('access_codes', 'created_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('access_codes', 'valid_from', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('access_codes', 'valid_till', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('activities', 'time', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    # Normalize bogus timezone names first so the AT TIME ZONE casts below
    # cannot fail on rows with unknown zones.
    op.execute("UPDATE call_for_papers SET timezone = 'UTC' where timezone not in (SELECT name from pg_timezone_names)", execution_options=None)
    op.execute("UPDATE events SET timezone = 'UTC' where timezone not in (SELECT name from pg_timezone_names)", execution_options=None)
    op.execute("UPDATE events_version SET timezone = 'UTC' where timezone not in (SELECT name from pg_timezone_names)", execution_options=None)
    op.execute('ALTER TABLE call_for_papers ALTER COLUMN start_date TYPE TIMESTAMP WITH TIME ZONE USING start_date AT TIME ZONE timezone', execution_options=None)
    # Fix: the end_date conversion previously read `USING start_date`,
    # overwriting every CFP end date with its start date.
    op.execute('ALTER TABLE call_for_papers ALTER COLUMN end_date TYPE TIMESTAMP WITH TIME ZONE USING end_date AT TIME ZONE timezone', execution_options=None)
    op.alter_column('discount_codes', 'created_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('discount_codes', 'valid_from', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('discount_codes', 'valid_till', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('event_invoices', 'completed_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('event_invoices', 'created_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('events', 'created_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('events', 'deleted_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.execute('ALTER TABLE events ALTER COLUMN ends_at TYPE TIMESTAMP WITH TIME ZONE USING ends_at AT TIME ZONE timezone', execution_options=None)
    op.alter_column('events', 'schedule_published_on', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.execute('ALTER TABLE events ALTER COLUMN starts_at TYPE TIMESTAMP WITH TIME ZONE USING starts_at AT TIME ZONE timezone', execution_options=None)
    op.alter_column('events_version', 'deleted_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.execute('ALTER TABLE events_version ALTER COLUMN ends_at TYPE TIMESTAMP WITH TIME ZONE USING ends_at AT TIME ZONE timezone', execution_options=None)
    op.execute('ALTER TABLE events_version ALTER COLUMN starts_at TYPE TIMESTAMP WITH TIME ZONE USING starts_at AT TIME ZONE timezone', execution_options=None)
    op.alter_column('export_jobs', 'starts_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('import_jobs', 'starts_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('mails', 'time', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('notification', 'received_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('orders', 'completed_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('orders', 'created_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('orders', 'trashed_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('role_invites', 'created_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('sessions', 'created_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('sessions', 'deleted_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('sessions', 'ends_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=False)
    op.alter_column('sessions', 'starts_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=False)
    op.alter_column('sessions', 'submitted_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('sessions_version', 'created_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('sessions_version', 'deleted_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('sessions_version', 'ends_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('sessions_version', 'starts_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('sessions_version', 'submitted_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('tickets', 'sales_ends_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('tickets', 'sales_starts_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('users', 'created_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('users', 'deleted_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('users', 'last_accessed_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
    op.alter_column('users', 'signup_at', existing_type=postgresql.TIMESTAMP(), type_=sa.DateTime(timezone=True), existing_nullable=True)
class Recommendation(RecommendsBaseModel):
    """A scored recommendation of a content object for a single user."""

    user = models.PositiveIntegerField()
    score = models.FloatField(null=True, blank=True, default=None)

    objects = RecommendationManager()

    class Meta:
        # At most one recommendation per (object, user) pair, best first.
        unique_together = ('object_ctype', 'object_id', 'user')
        ordering = ['-score']

    def __str__(self):
        return f'Recommendation for user {self.user}'
def test_package_with_symlink(flyte_project, tmp_path):
    """Fast-packaging with deref_symlinks follows symlinks into the archive."""
    archive = fast_package(source=flyte_project / 'src', output_dir=tmp_path, deref_symlinks=True)
    expected_members = ['', 'util', 'workflows', 'workflows/hello_world.py']
    with tarfile.open(archive, dereference=True) as tar:
        assert tar.getnames() == expected_members
        # The symlinked 'util' entry must be materialized as a regular file.
        assert tar.getmember('util').isfile()
    archive_name = str(os.path.basename(archive))
    assert archive_name.startswith(FAST_PREFIX)
    assert str(archive).endswith(FAST_FILEENDING)
def extractTimelesstranslationsHomeBlog(item):
    """Map a Timeless Translations blog post to a release message.

    Returns None for previews or posts without a chapter/volume, False when
    no known series tag matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag, series name, translation type)
    tagmap = [
        ('RM', 'Rivers and Mountains', 'translated'),
        ('eow', 'The End of the World', 'translated'),
        ('slg', 'Suspended Life Game', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    tags = item['tags']
    for tagname, name, tl_type in tagmap:
        if tagname in tags:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def ttfont():
    """Build an empty TTFont fixture whose glyph order covers the test glyph
    names plus the synthetic CID glyphs cid00800..cid01001."""
    glyphs = '\n .notdef space slash fraction semicolon period comma ampersand\n quotedblleft quotedblright quoteleft quoteright\n zero one two three four five six seven eight nine\n zero.oldstyle one.oldstyle two.oldstyle three.oldstyle\n four.oldstyle five.oldstyle six.oldstyle seven.oldstyle\n eight.oldstyle nine.oldstyle onequarter onehalf threequarters\n onesuperior twosuperior threesuperior ordfeminine ordmasculine\n A B C D E F G H I J K L M N O P Q R S T U V W X Y Z\n a b c d e f g h i j k l m n o p q r s t u v w x y z\n A.sc B.sc C.sc D.sc E.sc F.sc G.sc H.sc I.sc J.sc K.sc L.sc M.sc\n N.sc O.sc P.sc Q.sc R.sc S.sc T.sc U.sc V.sc W.sc X.sc Y.sc Z.sc\n A.alt1 A.alt2 A.alt3 B.alt1 B.alt2 B.alt3 C.alt1 C.alt2 C.alt3\n a.alt1 a.alt2 a.alt3 a.end b.alt c.mid d.alt d.mid\n e.begin e.mid e.end m.begin n.end s.end z.end\n Eng Eng.alt1 Eng.alt2 Eng.alt3\n A.swash B.swash C.swash D.swash E.swash F.swash G.swash H.swash\n I.swash J.swash K.swash L.swash M.swash N.swash O.swash P.swash\n Q.swash R.swash S.swash T.swash U.swash V.swash W.swash X.swash\n Y.swash Z.swash\n f_l c_h c_k c_s c_t f_f f_f_i f_f_l f_i o_f_f_i s_t f_i.begin\n a_n_d T_h T_h.swash germandbls ydieresis yacute breve\n grave acute dieresis macron circumflex cedilla umlaut ogonek caron\n damma hamza sukun kasratan lam_meem_jeem noon.final noon.initial\n by feature lookup sub table uni0327 uni0328 e.fina\n '.split()
    glyphs += ['cid{:05d}'.format(cid) for cid in range(800, 1001 + 1)]
    font = TTFont()
    font.setGlyphOrder(glyphs)
    return font
class Plugin(plugin.PluginProto):
    """RPIEasy task plugin 6: BMP085/BMP180 I2C barometric sensor, reporting
    temperature and pressure."""

    PLUGIN_ID = 6
    PLUGIN_NAME = 'Environment - BMP085/180 (TESTING)'
    PLUGIN_VALUENAME1 = 'Temperature'
    PLUGIN_VALUENAME2 = 'Pressure'

    def __init__(self, taskindex):
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_I2C
        self.vtype = rpieGlobals.SENSOR_TYPE_TEMP_BARO
        self.readinprogress = 0
        self.valuecount = 2
        self.senddataoption = True
        self.timeroption = True
        self.timeroptional = False
        self.formulaoption = True
        self._nextdataservetime = 0
        self.lastread = 0
        # Bmp180 driver instance; None until plugin_init succeeds.
        self.bmp = None

    def plugin_init(self, enableplugin=None):
        """Open the I2C bus and probe the BMP180 at the configured address.

        taskdevicepluginconfig[0] holds the sensor's I2C address.
        """
        plugin.PluginProto.plugin_init(self, enableplugin)
        self.uservar[0] = 0
        self.uservar[1] = 0
        self.initialized = False
        if self.enabled:
            try:
                self.bmp = None
                # Use the task's own I2C line when set, otherwise the default bus.
                try:
                    i2cl = self.i2c
                except:
                    i2cl = (- 1)
                if (i2cl == (- 1)):
                    i2cbus = gpios.HWPorts.i2cbus
                else:
                    i2cbus = gpios.HWPorts.i2c_init(i2cl)
                sensoraddress = int(self.taskdevicepluginconfig[0])
                self.bmp = Bmp180(i2c_bus=i2cbus, sensor_address=sensoraddress)
                if ((self.bmp is not None) and (i2cbus is not None) and self.bmp.init):
                    # Schedule the first reading ~2 seconds from now when possible.
                    if (self.interval > 2):
                        nextr = (self.interval - 2)
                    else:
                        nextr = self.interval
                    self._lastdataservetime = (rpieTime.millis() - (nextr * 1000))
                    self.lastread = 0
                    self.initialized = True
                else:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'BMP180 can not be initialized!')
                    self.initialized = False
                self.readinprogress = 0
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, str(e))
                self.initialized = False
                self.bmp = None
                self.readinprogress = 0

    def webform_load(self):
        """Populate the task configuration form (I2C address selector)."""
        choice1 = self.taskdevicepluginconfig[0]
        # The BMP085/180 has a single fixed address, 0x77.
        options = ['0x77']
        optionvalues = [119]
        webserver.addFormSelector('I2C address', 'plugin_6_addr', len(options), options, optionvalues, None, int(choice1))
        webserver.addFormNote("Enable <a href='pinout'>I2C bus</a> first, than <a href='i2cscanner'>search for the used address</a>!")
        return True

    def webform_save(self, params):
        """Persist the submitted I2C address (defaults to 0x77)."""
        par = webserver.arg('plugin_6_addr', params)
        if (par == ''):
            par = 119
        self.taskdevicepluginconfig[0] = int(par)
        return True

    def plugin_read(self):
        """Read temperature and pressure and publish both values.

        Guarded by readinprogress so overlapping timer calls do not collide.
        Returns True when a reading was taken.
        """
        result = False
        if (self.initialized and (self.readinprogress == 0) and self.enabled):
            self.readinprogress = 1
            (temp, press) = self.bmp.get_data()
            self.set_value(1, temp, False)
            self.set_value(2, press, False)
            self.plugin_senddata()
            self._lastdataservetime = rpieTime.millis()
            result = True
            self.readinprogress = 0
        return result
class SuffixPostprocessPlugin():
    """mdformat plugin that appends 'Suffixed!' to every rendered text node."""
    # The plugin changes the rendered output, so mdformat's AST equality
    # check must be told the modification is intentional.
    CHANGES_AST = True

    # NOTE(review): takes no self — presumably @staticmethod in the original.
    def update_mdit(mdit: MarkdownIt):
        """Parser-configuration hook; this plugin needs no parser changes."""
        pass

    # NOTE(review): takes no self — presumably @staticmethod in the original.
    def _text_postprocess(text: str, tree: RenderTreeNode, context: RenderContext) -> str:
        return (text + 'Suffixed!')

    RENDERERS: dict = {}
    # Only 'text' nodes are post-processed.
    POSTPROCESSORS = {'text': _text_postprocess}
class CellAPI(JsPackage):
    """JavaScript fragment builder for a DataTables cell() object."""
    lib_alias = {'js': 'datatables', 'css': 'datatables'}
    lib_selector = 'cell'
    lib_set_var = False

    def deselect(self):
        return self.fnc('deselect()')

    def select(self):
        return self.fnc('select()')

    def render(self):
        return self.fnc('render()')

    def node(self):
        # NOTE(review): unlike the sibling methods this does not return the
        # fnc() result — possibly an omitted `return`; confirm intent.
        self.fnc('node()')

    def jquery_nodes(self):
        """Wrap the cell nodes in a jQuery object."""
        self.nodes()
        self._js.append('nodes().to$()')
        return JsQuery.JQuery(page=self.page, component=self.component, js_code=self.toStr())

    def invalidate(self):
        return self.fnc('invalidate()')

    def index(self):
        """Cell index as a JsNumber expression."""
        return JsObjects.JsNumber.JsNumber(('%s.index()' % self.getStr()))

    def cache(self):
        # NOTE(review): `data` appears nested here only because the original
        # indentation was lost; `cache` looks truncated (it has no body of
        # its own) — confirm against the upstream source.
        def data(self):
            return self.fnc('data()')

    def focus(self):
        return self.fnc('focus()')

    def blur(self):
        return self.fnc('blur()')
# NOTE(review): presumably @pytest.mark.django_db in the original; the
# decorator appears truncated.
.django_db
def test_defc_filter(subaward_data):
    """Subaward search: filtering by DEF codes returns the expected row counts.

    Counts rely on the subaward_data fixture: two awards under code 'A',
    one each under 'Z' and 'L', and two when both are requested.
    """
    filters = {'def_codes': ['A']}
    results = subaward_filter(filters).all()
    assert (len(results) == 2)
    filters = {'def_codes': ['Z']}
    results = subaward_filter(filters).all()
    assert (len(results) == 1)
    filters = {'def_codes': ['L']}
    results = subaward_filter(filters).all()
    assert (len(results) == 1)
    filters = {'def_codes': ['L', 'Z']}
    results = subaward_filter(filters).all()
    assert (len(results) == 2)
# NOTE(review): presumably @attr.s(auto_attribs=True) in the original; the
# decorator name appears stripped.
(auto_attribs=True)
class BodyUploadFile():
    """Request body wrapping a single uploaded file plus arbitrary extra
    properties (kept separately so unknown keys round-trip)."""
    file: File
    additional_properties: Dict[(str, Any)] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[(str, Any)]:
        """Serialize to a plain dict; 'file' becomes its tuple form."""
        file = self.file.to_tuple()
        field_dict: Dict[(str, Any)] = {}
        field_dict.update(self.additional_properties)
        field_dict.update({'file': file})
        return field_dict

    def to_multipart(self) -> Dict[(str, Any)]:
        """Serialize for multipart encoding: extra properties become
        (None, utf-8 bytes, 'text/plain') triples."""
        file = self.file.to_tuple()
        field_dict: Dict[(str, Any)] = {}
        field_dict.update({key: (None, str(value).encode(), 'text/plain') for (key, value) in self.additional_properties.items()})
        field_dict.update({'file': file})
        return field_dict

    # NOTE(review): takes `cls` — presumably @classmethod in the original.
    def from_dict(cls: Type[T], src_dict: Dict[(str, Any)]) -> T:
        """Build an instance from a dict; leftover keys become extra properties."""
        d = src_dict.copy()
        file = File(payload=BytesIO(d.pop('file')))
        body_upload_file = cls(file=file)
        body_upload_file.additional_properties = d
        return body_upload_file

    def additional_keys(self) -> List[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return (key in self.additional_properties)
class MainModule(pl.LightningModule):
    """ResNet-18 image classifier wrapped as a LightningModule.

    Logs per-step and per-epoch training loss, plus validation loss and
    accuracy; optimizes every parameter with Adam at lr=1e-3.
    """

    def __init__(self, num_classes):
        super().__init__()
        self.model = models.resnet18(num_classes=num_classes)

    def forward(self, x):
        return self.model(x)

    def training_step(self, batch, batch_idx):
        inputs, targets = batch
        logits = self(inputs)
        loss = F.cross_entropy(logits, targets)
        self.mylog('train_loss', loss, train=True)
        return loss

    def validation_step(self, batch, batch_idx):
        inputs, targets = batch
        logits = self(inputs)
        loss = F.cross_entropy(logits, targets)
        # Fraction of samples whose top-1 prediction matches the label.
        accuracy = logits.argmax(-1).eq(targets).float().mean()
        self.mylog('valid_loss', loss)
        self.mylog('valid_acc', accuracy)
        return loss

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=0.001)

    def mylog(self, name, value, train=False):
        # sync_dist is skipped during training to avoid per-step cross-device
        # synchronization overhead.
        self.log(name, value, prog_bar=True, on_step=True, on_epoch=True, sync_dist=(not train))
def curlClient(filename, blksize=1400, port=1069):
    """Fetch *filename* from a local TFTP server using the curl binary.

    FIX: the original never used the ``filename`` parameter -- the URL
    contained a literal ``(unknown)`` placeholder (almost certainly the
    redaction of an f-string ``{filename}``).

    Returns (stdout, stderr, returncode) of the curl process.
    Raises subprocess.TimeoutExpired if the transfer takes longer than 1s.
    """
    p = subprocess.Popen(
        [find_executable('curl'), '--tftp-blksize', str(blksize),
         f'tftp://localhost:{port}/{filename}'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (stdout, stderr) = p.communicate(timeout=1)
    return (stdout, stderr, p.returncode)
class FuncEnum(Enum):
    """Identifiers for elementwise operations, grouped roughly by kind."""
    # Binary arithmetic.
    ADD = 1
    SUB = 2
    MUL = 3
    DIV = 4
    # Trigonometric / transcendental.
    TANH = 5
    COS = 6
    SIN = 7
    SIGN = 8
    ABS = 9
    LOGE = 10
    EXP = 11
    SQRT = 12
    MAX = 13
    MIN = 14
    # Activation functions.
    SIGMOID = 15
    LRELU = 16
    HARDTANH = 17
    RELU = 18
    NAN_TO_NUM = 19
    CLAMP_NAN_TO_NUM = 20
    SILU = 21
    POW = 22
    GELU = 23
    FASTGELU = 24
    SOFTPLUS = 25
    ELU = 26
    SOFTSIGN = 27
    FLOOR_DIV = 28
    CELU = 29
.parametrize('val, expected', ((AttributeDict({'one': HexBytes('0x1')}), '{"one": "0x01"}'), (AttributeDict({'two': HexBytes(2)}), '{"two": "0x02"}'), (AttributeDict({'three': AttributeDict({'four': 4})}), '{"three": {"four": 4}}'), ({'three': 3}, '{"three": 3}')))
# NOTE(review): the ".parametrize(...)" above is residue of a stripped
# "@pytest.mark.parametrize" decorator -- restore before running under pytest.
def test_to_json(val, expected):
    # Web3.to_json must hex-encode HexBytes and recurse into AttributeDicts.
    assert (Web3.to_json(val) == expected)
def bytes_io() -> Callable[[bytes], Callable[[], Tuple[Tuple[BytesIO], Dict]]]:
    """Factory of setup callables for benchmark/test harnesses.

    Calling the returned function with raw bytes yields a zero-argument
    ``setup`` that produces ``((BytesIO(bytes_),), {})`` -- i.e. the
    positional-args/kwargs pair a harness passes to the function under test.
    """
    def _bind(raw: bytes) -> Callable[[], Tuple[Tuple[BytesIO], Dict]]:
        def _setup() -> Tuple[Tuple[BytesIO], Dict]:
            positional = (BytesIO(raw),)
            return (positional, {})
        return _setup
    return _bind
class OptionPlotoptionsXrangeSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
    """Highcharts sonification highpass-resonance mapping options.

    NOTE(review): each name appears twice (getter then setter); the
    @property / @name.setter decorators were stripped in extraction, so as
    written the second definition shadows the first -- restore decorators.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMapping(Options):
    """Highcharts bar-series sonification mapping options.

    Each accessor lazily materializes a typed sub-options object under the
    corresponding configuration key. NOTE(review): these were presumably
    @property accessors; decorators appear stripped -- confirm.
    """
    def frequency(self) -> 'OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingFrequency)
    def gapBetweenNotes(self) -> 'OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingGapbetweennotes)
    def highpass(self) -> 'OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingHighpass)
    def lowpass(self) -> 'OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingLowpass)
    def noteDuration(self) -> 'OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingNoteduration)
    def pan(self) -> 'OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingPan)
    def pitch(self) -> 'OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingPitch)
    def playDelay(self) -> 'OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingPlaydelay)
    def time(self) -> 'OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingTime)
    def tremolo(self) -> 'OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingTremolo)
    def volume(self) -> 'OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsBarSonificationDefaultinstrumentoptionsMappingVolume)
class MetaMapping():
    """Bidirectional key mapping with optional get/set value transforms.

    _mappings maps a wire key (e.g. WebDAV 'D:displayname') to a tuple
    (local_name, get_transform, set_transform).
    """
    _mappings = {'D:displayname': ('displayName', None, None)}
    # NOTE(review): takes `cls` -- most likely a stripped @classmethod.
    def _reverse_mapping(cls, mappings):
        # Add local_name -> (wire_key, transforms) entries so lookups work
        # in both directions. The comprehension is fully built before
        # update(), so mutating `mappings` here is safe.
        mappings.update({i[1][0]: (i[0], i[1][1], i[1][2]) for i in mappings.items()})
    def _mapping_get(self, key):
        # Unknown keys pass through unchanged with no transforms.
        return self.__class__._mappings.get(key, (key, None, None))
    def map_get(self, info, key):
        """Read `key` from `info`, applying the mapped name and get-transform."""
        (key, get_transform, set_transform) = self._mapping_get(key)
        value = info.get(key, None)
        if (get_transform is not None):
            value = get_transform(value)
        # Special case: calendar component set comes from an instance
        # attribute rather than the info dict.
        if (key == 'C:supported-calendar-component-set'):
            return (key, getattr(self, 'supported_calendar_component', 'none'))
        return (key, value)
    def map_set(self, key, value):
        """Translate `key`/`value` for writing, applying the set-transform."""
        (key, get_transform, set_transform) = self._mapping_get(key)
        if (set_transform is not None):
            value = set_transform(value)
        return (key, value)
.parametrize('ops', XP_OPS)
.parametrize('dtype', FLOAT_TYPES)
# NOTE(review): the two ".parametrize(...)" lines above are residue of
# stripped "@pytest.mark.parametrize" decorators -- restore for pytest.
def test_seq2col_window_two(ops, dtype):
    # 4-element sequence; window 2 concatenates each element with its two
    # neighbours on each side, zero-padded at the edges.
    seq = ops.asarray([[1.0], [2.0], [3.0], [4]], dtype=dtype)
    cols = ops.seq2col(seq, 2)
    # GPU backends return device arrays; pull to host for comparison.
    if (not isinstance(cols, numpy.ndarray)):
        cols = cols.get()
    assert_allclose(cols[0], [0.0, 0.0, 1.0, 2.0, 3.0])
    assert_allclose(cols[1], [0.0, 1.0, 2.0, 3.0, 4.0])
    assert_allclose(cols[2], [1.0, 2.0, 3.0, 4.0, 0.0])
    assert_allclose(cols[3], [2.0, 3.0, 4.0, 0.0, 0.0])
class OptionPlotoptionsDependencywheelLevelsDatalabelsFilter(Options):
    """Highcharts data-label filter options (operator + property).

    NOTE(review): getter/setter pairs share a name; @property / @x.setter
    decorators were stripped in extraction -- restore them.
    """
    def operator(self):
        return self._config_get(None)
    def operator(self, value: Any):
        self._config(value, js_type=False)
    def property(self):
        return self._config_get(None)
    def property(self, text: str):
        self._config(text, js_type=False)
class OkLCh(LCh):
    """OkLCh color space: cylindrical form of Oklab (lightness, chroma, hue)."""
    # Conversions route through the rectangular Oklab space.
    BASE = 'oklab'
    NAME = 'oklch'
    SERIALIZE = ('--oklch',)
    # l in [0, 1], c in [0, 0.4] (both accept percentages), h is an angle in degrees.
    CHANNELS = (Channel('l', 0.0, 1.0, flags=FLG_OPT_PERCENT), Channel('c', 0.0, 0.4, flags=FLG_OPT_PERCENT), Channel('h', 0.0, 360.0, flags=FLG_ANGLE))
    CHANNEL_ALIASES = {'lightness': 'l', 'chroma': 'c', 'hue': 'h'}
    # Assumes the D65 (2 degree observer) white point.
    WHITE = WHITES['2deg']['D65']
def test_content_length_set_on_head_with_no_body(asgi):
    """A HEAD request to a resource whose GET writes no body must still
    succeed and advertise a content-length of zero."""
    class EmptyResource():
        def on_get(self, req, resp):
            pass
        # HEAD is handled identically to GET.
        on_head = on_get

    app = create_app(asgi)
    app.add_route('/', EmptyResource())
    response = testing.simulate_head(app, '/')
    assert response.status_code == 200
    assert response.headers['content-length'] == '0'
class TestDeprecatedAllAndNotOneValidator(unittest.TestCase):
    """Exercises a compound validator (3 <= x <= 5, x != 4) built on a
    deprecated API, checking both its behavior and the deprecation warning."""
    def setUp(self):
        self.validator = DeprecatedAllAndNotOneValidator(validators=[Int(min=3), Int(max=5)], number=4)
    def test_1_warnings(self):
        # Named test_1_* so it runs first: Python emits each deprecation
        # warning only once per location, so later tests may not see it.
        with warnings.catch_warnings(record=True) as runtime_warnings:
            warnings.simplefilter('default')
            self.validator.to_python('3')
        # The warning may surface at runtime or have been captured at class
        # definition time (all_and_not_one_warnings) -- accept either.
        for output in (runtime_warnings, all_and_not_one_warnings):
            output = '\n'.join(map(str, output))
            msg = 'attempt_convert is deprecated; use _attempt_convert instead'
            self.assertIn(msg, output, (output or 'no warnings'))
    def test_is_validator(self):
        self.assertTrue(is_validator(DeprecatedAllAndNotOneValidator))
        self.assertTrue(is_validator(self.validator))
    def test_to_python(self):
        cav = self.validator
        with warnings.catch_warnings(record=True):
            # Below minimum.
            self.assertRaises(Invalid, cav.to_python, '1')
            self.assertRaises(Invalid, cav.to_python, '2')
            self.assertEqual(cav.to_python('3'), 3)
            # 4 is in range but explicitly excluded by number=4.
            try:
                cav.to_python('4')
            except Invalid as e:
                self.assertIn('must not be 4', str(e), e)
            else:
                self.fail('4 should be invalid')
            self.assertEqual(cav.to_python('5'), 5)
            # Above maximum.
            self.assertRaises(Invalid, cav.to_python, '6')
            self.assertRaises(Invalid, cav.to_python, '7')
def reboot_evennia(pprofiler=False, sprofiler=False):
    """Restart a running Evennia install: stop Server, stop Portal, start fresh.

    The shutdown sequence is driven by nested callbacks fired by the AMP
    status/instruction machinery, so the steps below execute asynchronously
    in reverse declaration order.

    Args:
        pprofiler: start the Portal under a profiler after reboot.
        sprofiler: start the Server under a profiler after reboot.
    """
    global AMP_CONNECTION
    def _portal_stopped(*args):
        # Final step: everything is down -- clear the stale AMP connection
        # and boot a fresh instance (propagating the profiler flags).
        print('... Portal stopped. Evennia shut down. Rebooting ...')
        global AMP_CONNECTION
        AMP_CONNECTION = None
        start_evennia(pprofiler, sprofiler)
    def _server_stopped(*args):
        # Server is down; now take the Portal down and wait for it.
        print('... Server stopped.\nStopping Portal ...')
        send_instruction(PSHUTD, {})
        wait_for_status(False, None, _portal_stopped)
    def _portal_running(response):
        # Portal answered the status query; shut down Server first if it
        # is running, otherwise go straight to stopping the Portal.
        (prun, srun, ppid, spid, _, _) = _parse_status(response)
        if srun:
            print('Server stopping ...')
            send_instruction(SSHUTD, {})
            wait_for_status_reply(_server_stopped)
        else:
            print('Server already stopped.\nStopping Portal ...')
            send_instruction(PSHUTD, {})
            wait_for_status(False, None, _portal_stopped)
    def _portal_not_running(fail):
        # Nothing to reboot -- just start Evennia.
        # NOTE(review): profiler flags are not forwarded on this path,
        # unlike in _portal_stopped -- confirm whether intentional.
        print('Evennia not running. Starting ...')
        start_evennia()
    collectstatic()
    send_instruction(PSTATUS, None, _portal_running, _portal_not_running)
def test_plan_from_pull(session):
    """PlanData._from_pull must parse the raw pull payload (flat string fields
    plus a JSON-encoded guest list) into a fully-typed PlanData."""
    # NOTE(review): 'event_time' is empty here yet the expected PlanData has a
    # concrete UTC timestamp -- presumably _from_pull derives it elsewhere
    # (e.g. from the fixture session); confirm.
    data = {'event_timezone': '', 'event_creator_id': '1234', 'event_id': '1111', 'event_type': 'EVENT', 'event_track_rsvp': '1', 'event_title': 'abc', 'event_time': '', 'event_seconds_to_notify_before': '3600', 'guest_state_list': '[{"guest_list_state":"INVITED","node":{"id":"1234"}},{"guest_list_state":"INVITED","node":{"id":"2356"}},{"guest_list_state":"DECLINED","node":{"id":"3456"}},{"guest_list_state":"GOING","node":{"id":"4567"}}]'}
    assert (PlanData(session=session, id='1111', time=datetime.datetime(2017, 7, 14, 2, 40, tzinfo=datetime.timezone.utc), title='abc', author_id='1234', guests={'1234': GuestStatus.INVITED, '2356': GuestStatus.INVITED, '3456': GuestStatus.DECLINED, '4567': GuestStatus.GOING}) == PlanData._from_pull(session, data))
class OptionSeriesPackedbubbleSonificationContexttracksMappingTremoloDepth(Options):
    """Highcharts sonification tremolo-depth mapping options.

    NOTE(review): each name appears twice (getter then setter); the
    @property / @name.setter decorators were stripped in extraction, so as
    written the second definition shadows the first -- restore decorators.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestPyQtColor(unittest.TestCase):
    """Checks that the PyQtColor trait coerces many input forms (tuples,
    name strings, hex strings, ints, QColor, PyfaceColor) to QColor, for
    both a defaulted and an explicitly-assigned trait (color / color_)."""
    def test_default(self):
        obj = ObjectWithColor()
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (255, 255, 255, 255))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (255, 255, 255, 255))
    def test_tuple_rgb(self):
        obj = ObjectWithColor(color=(0, 128, 255))
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (0, 128, 255, 255))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (0, 128, 255, 255))
    def test_tuple_rgba(self):
        obj = ObjectWithColor(color=(0, 128, 255, 64))
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (0, 128, 255, 64))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (0, 128, 255, 64))
    def test_name_string(self):
        obj = ObjectWithColor(color='rebeccapurple')
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (102, 51, 153, 255))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (102, 51, 153, 255))
    def test_name_string_with_space(self):
        obj = ObjectWithColor(color='rebecca purple')
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (102, 51, 153, 255))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (102, 51, 153, 255))
    def test_rgb_string(self):
        obj = ObjectWithColor(color='(0, 128, 255)')
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (0, 128, 255, 255))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (0, 128, 255, 255))
    def test_rgba_string(self):
        obj = ObjectWithColor(color='(0, 128, 255, 64)')
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (0, 128, 255, 64))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (0, 128, 255, 64))
    def test_rgb_int_string_3(self):
        obj = ObjectWithColor(color='#08f')
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (0, 136, 255, 255))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (0, 136, 255, 255))
    def test_rgb_int_string_6(self):
        obj = ObjectWithColor(color='#0088ff')
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (0, 136, 255, 255))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (0, 136, 255, 255))
    def test_rgb_int_string_12(self):
        # NOTE(review): '#ffff' is 4 hex digits, not the 12 the test name
        # implies, and it does not obviously map to (0, 136, 255) -- the
        # literal was likely truncated/garbled in extraction; confirm
        # against upstream before trusting this case.
        obj = ObjectWithColor(color='#ffff')
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (0, 136, 255, 255))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (0, 136, 255, 255))
    def test_rgb_int(self):
        # 35071 == 0x0088FF.
        obj = ObjectWithColor(color=35071)
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (0, 136, 255, 255))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (0, 136, 255, 255))
    def test_qcolor(self):
        obj = ObjectWithColor(color=QtGui.QColor(0, 128, 255, 64))
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (0, 128, 255, 64))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (0, 128, 255, 64))
    def test_pyface_color(self):
        obj = ObjectWithColor(color=PyfaceColor(rgba=(0.0, 0.5, 1.0, 0.25)))
        self.assertIsInstance(obj.color, QtGui.QColor)
        self.assertEqual(obj.color.getRgb(), (0, 128, 255, 64))
        self.assertIsInstance(obj.color_, QtGui.QColor)
        self.assertEqual(obj.color_.getRgb(), (0, 128, 255, 64))
    def test_default_none(self):
        obj = ObjectWithColorAllowsNone(color=None)
        self.assertIsNone(obj.color)
        self.assertIsNone(obj.color_)
    def test_bad_color(self):
        with self.assertRaises(TraitError):
            ObjectWithColor(color='not a color')
    def test_bad_tuple(self):
        with self.assertRaises(TraitError):
            ObjectWithColor(color=(255, 255))
    def test_bad_tuple_not_int(self):
        with self.assertRaises(TraitError):
            ObjectWithColor(color=('not an int', 255, 255))
    def test_bad_tuple_string(self):
        with self.assertRaises(TraitError):
            ObjectWithColor(color='(0xff, 0xff)')
(short_help='Generate python stubs for OpenAPI Dispatch and Event.')
('--dispatch-class-name', metavar='NAME', help='Name for class with Dispatch functions.', default=default_dispatch_class_name, show_default=True)
('--dispatch-file-path', type=click.Path(), help='Path for python-stub with Dispatch functions.', default=default_dispatch_file_path, show_default=True)
('--event-class-name', metavar='NAME', help='Name for class with Event functions.', default=default_event_class_name, show_default=True)
('--event-file-path', type=click.Path(), help='Path for python-stub with Event functions.', default=default_event_file_path, show_default=True)
('--encoding', metavar='ENCODING', help='Encoding for stub files.', default=default_encoding, show_default=True)
('--force-overwrite', help='Force overwrite even if target file already exists.', is_flag=True, show_default=True)
# NOTE(review): the bare call tuples above are residue of stripped click
# decorators (a @xxx.command(short_help=...) followed by @click.option(...)
# lines) -- restore the decorator names before use.
def python_stubs(dispatch_class_name, dispatch_file_path, event_class_name, event_file_path, encoding, force_overwrite):
    # Thin CLI wrapper: forwards every option verbatim to the generator.
    generate_python_stubs(dispatch_class_name=dispatch_class_name, dispatch_file_path=dispatch_file_path, event_class_name=event_class_name, event_file_path=event_file_path, encoding=encoding, force_overwrite=force_overwrite)
class ImageMarker(Object):
    """Map marker rendered from a PNG file, anchored at (origin_x, origin_y)
    within the image, placed at a geographic lat/lng.

    The PNG is loaded lazily: width/height/image_data trigger
    load_image_data() on first access.
    """
    def __init__(self, latlng: s2sphere.LatLng, png_file: str, origin_x: int, origin_y: int) -> None:
        Object.__init__(self)
        self._latlng = latlng
        self._png_file = png_file
        # Pixel offset of the anchor point inside the image.
        self._origin_x = origin_x
        self._origin_y = origin_y
        # Filled in by load_image_data() on first use.
        self._width = 0
        self._height = 0
        self._image_data: typing.Optional[bytes] = None
    def origin_x(self) -> int:
        return self._origin_x
    def origin_y(self) -> int:
        return self._origin_y
    def width(self) -> int:
        # Lazy-load so markers can be constructed without touching disk.
        if (self._image_data is None):
            self.load_image_data()
        return self._width
    def height(self) -> int:
        if (self._image_data is None):
            self.load_image_data()
        return self._height
    def image_data(self) -> bytes:
        """Raw PNG bytes, loading the file on first access."""
        if (self._image_data is None):
            self.load_image_data()
        assert self._image_data
        return self._image_data
    def latlng(self) -> s2sphere.LatLng:
        return self._latlng
    def bounds(self) -> s2sphere.LatLngRect:
        # The geographic bound is the single anchor point.
        return s2sphere.LatLngRect.from_point(self._latlng)
    def extra_pixel_bounds(self) -> PixelBoundsT:
        # Pixel extents (left, top, right, bottom) around the anchor.
        return (max(0, self._origin_x), max(0, self._origin_y), max(0, (self.width() - self._origin_x)), max(0, (self.height() - self._origin_y)))
    def render_pillow(self, renderer: PillowRenderer) -> None:
        (x, y) = renderer.transformer().ll2pixel(self.latlng())
        image = renderer.create_image(self.image_data())
        # Paste onto a transparent overlay, then alpha-compose, so the
        # marker's own alpha channel is respected.
        overlay = PIL_Image.new('RGBA', renderer.image().size, (255, 255, 255, 0))
        overlay.paste(image, (int(((x - self.origin_x()) + renderer.offset_x())), int((y - self.origin_y()))), mask=image)
        renderer.alpha_compose(overlay)
    def render_svg(self, renderer: SvgRenderer) -> None:
        (x, y) = renderer.transformer().ll2pixel(self.latlng())
        # Inline (base64) image so the SVG is self-contained.
        image = renderer.create_inline_image(self.image_data())
        renderer.group().add(renderer.drawing().image(image, insert=((x - self.origin_x()), (y - self.origin_y())), size=(self.width(), self.height())))
    def render_cairo(self, renderer: CairoRenderer) -> None:
        (x, y) = renderer.transformer().ll2pixel(self.latlng())
        image = renderer.create_image(self.image_data())
        # NOTE(review): the translation is not restored afterwards (no
        # save/restore around it) -- confirm the renderer resets state.
        renderer.context().translate((x - self.origin_x()), (y - self.origin_y()))
        renderer.context().set_source_surface(image)
        renderer.context().paint()
    def load_image_data(self) -> None:
        """Read the PNG from disk and cache its bytes and dimensions."""
        with open(self._png_file, 'rb') as f:
            self._image_data = f.read()
        image = PIL_Image.open(io.BytesIO(self._image_data))
        (self._width, self._height) = image.size
def mix_model(network1, network2, output_path, voice_ratio=(0.5, 0.5), tone_ratio=(0.5, 0.5)):
    """Blend the weights of two networks and save the result as a checkpoint.

    Parameters shared by both networks are mixed as a weighted sum:
    'enc_p' (text-encoder) tensors use tone_ratio, everything else uses
    voice_ratio. Parameters present only in network2 are copied as-is.
    The blended state dict is written to output_path with reset training
    metadata (iteration 0, no optimizer state).
    """
    # Unwrap DataParallel/DistributedDataParallel wrappers if present.
    if hasattr(network1, 'module'):
        state_dict1 = network1.module.state_dict()
        state_dict2 = network2.module.state_dict()
    else:
        state_dict1 = network1.state_dict()
        state_dict2 = network2.state_dict()
    # Blend every parameter both networks share.
    for key in state_dict1.keys():
        if key not in state_dict2:
            continue
        ratio = tone_ratio if 'enc_p' in key else voice_ratio
        state_dict1[key] = (state_dict1[key].clone() * ratio[0]) + (state_dict2[key].clone() * ratio[1])
    # Carry over parameters that exist only in network2.
    for key in state_dict2.keys():
        if key not in state_dict1:
            state_dict1[key] = state_dict2[key].clone()
    torch.save({'model': state_dict1, 'iteration': 0, 'optimizer': None, 'learning_rate': 0}, output_path)
def test_correct_propagation_relation():
    """Expression propagation must leave this block unchanged: the Relation
    instructions mark var_14 versions as aliased across the scanf calls, so
    none of the copies may be propagated."""
    var_14 = vars('var_14', 6, Integer(32, True), True)
    var_28 = vars('var_28', 2, Pointer(Integer(32, True), 32), False)
    eax = Variable('eax', Integer(32, True), 1, False, None)
    # NOTE(review): the three "Constant(," occurrences below are missing their
    # first argument -- the format-string literals (e.g. scanf/printf formats)
    # were lost in extraction; as written this line is a syntax error and must
    # be restored from upstream.
    instructions = [_assign(var_14[1], var_14[0]), _assign(var_28[0], UnaryOperation(OperationType.address, [var_14[1]], Pointer(Integer(32, True), 32), None, False)), _assign(ListOperation([]), Call(Constant('__isoc99_scanf', UnknownType()), [Constant(, Integer(32, True)), var_28[0]], Pointer(CustomType('void', 0), 32), 2)), Relation(var_14[2], var_14[1]), _assign(eax, var_14[2]), _assign(ListOperation([]), Call(Constant('printf', UnknownType()), [Constant(, Pointer(Integer(8, True), 32)), eax], Pointer(CustomType('void', 0), 32), 3)), _assign(var_14[3], var_14[2]), _assign(var_14[4], _add(var_14[3], Constant(2))), _assign(var_28[1], UnaryOperation(OperationType.address, [var_14[4]], Pointer(Integer(32, True), 32), None, False)), _assign(ListOperation([]), Call(Constant('__isoc99_scanf', UnknownType()), [Constant(, Integer(32, True)), var_28[1]], Pointer(CustomType('void', 0), 32), 5)), Relation(var_14[5], var_14[4])]
    cfg = ControlFlowGraph()
    # Run propagation on a copy and check the instructions are untouched.
    cfg.add_node(BasicBlock(0, [i.copy() for i in instructions]))
    _run_expression_propagation(cfg)
    assert (list(cfg.instructions) == instructions)
class SpeedTestWidget(ui.Widget):
    """Flexo/Flexx browser widget that measures Python<->JS round-trip
    throughput by echoing 1/5 MiB ArrayBuffers through the Python component.

    Runs client-side (transpiled to JS): `window` and `perf_counter` are
    browser globals at runtime.
    """
    def init(self, pycomp):
        # Python-side companion component exposing echo().
        self.pycomp = pycomp
        self._start_time = 0
        # Per-message send timestamps, popped FIFO as echoes return.
        self._start_times = []
        with ui.VBox():
            with ui.HBox() as self.buttons:
                ui.Button(text='1 x 1 MiB roundtrip')
                ui.Button(text='1 x 5 MiB roundtrip')
                ui.Button(text='10 x 1 MiB roundtrip')
                ui.Button(text='10 x 5 MiB roundtrip')
                ui.Button(text='100 x 1 MiB roundtrip')
                ui.Button(text='100 x 5 MiB roundtrip')
            self.progress = ui.ProgressBar()
            self.status = ui.Label(text='Status: waiting for button press ...', wrap=1, flex=1, style='overflow-y:scroll;')
    # NOTE(review): the bare tuple below is residue of a stripped event
    # decorator (likely @event.reaction('buttons.children*.pointer_down')).
    ('buttons.children*.pointer_down')
    def run_test(self, *events):
        global window, perf_counter
        self.status.set_text('Test results: ')
        self.progress.set_value(0)
        # Parse each clicked button's label: "<count> x <size> MiB roundtrip".
        tests = []
        for ev in events:
            if isinstance(ev.source, ui.Button):
                sze = (5 if ('5' in ev.source.text) else 1)
                n = int(ev.source.text.split(' ')[0])
                for i in range(n):
                    tests.append(sze)
        self.progress.set_max(len(tests))
        self._start_time = perf_counter()
        for n in tests:
            # Allocate an n-MiB ArrayBuffer in the browser and send it.
            data = window.Uint8Array(((n * 1024) * 1024)).buffer
            self.send_data(data)
    def send_data(self, data):
        # Record the send time, then round-trip the buffer through Python.
        global perf_counter
        self._start_times.append(perf_counter())
        self.pycomp.echo(data)
    def receive_data(self, data):
        # Echo arrived: compute per-message latency and update the UI.
        global perf_counter
        t = (perf_counter() - self._start_times.pop(0))
        mib = ((data.byteLength / 1024) / 1024)
        text = ('Received %i MiB in %s seconds.' % (mib, str(t)[:5]))
        self.status.set_html(((self.status.html + ' ') + text))
        self.progress.set_value((self.progress.value + 1))
        # Last echo received: report the total wall-clock time.
        if (len(self._start_times) == 0):
            t = (perf_counter() - self._start_time)
            text = ('Total time %s.' % str(t)[:5])
            self.status.set_html(((self.status.html + ' ') + text))
def _get_variable_initialisation(ast: AbstractSyntaxTree, variable: Variable) -> Optional[AstInstruction]:
    """Find the first instruction (walking code nodes in topological order)
    that defines *variable*; return it wrapped with its position and node,
    or None when no definition exists."""
    for node in ast.get_code_nodes_topological_order():
        for idx, instr in enumerate(node.instructions):
            if variable in instr.definitions:
                return AstInstruction(instr, idx, node)
    return None
class OptionSeriesOrganizationSonificationTracksMappingHighpass(Options):
    """Highcharts highpass-filter mapping options (frequency + resonance).

    Each accessor lazily materializes a typed sub-options object.
    NOTE(review): presumably @property accessors; decorators stripped.
    """
    def frequency(self) -> 'OptionSeriesOrganizationSonificationTracksMappingHighpassFrequency':
        return self._config_sub_data('frequency', OptionSeriesOrganizationSonificationTracksMappingHighpassFrequency)
    def resonance(self) -> 'OptionSeriesOrganizationSonificationTracksMappingHighpassResonance':
        return self._config_sub_data('resonance', OptionSeriesOrganizationSonificationTracksMappingHighpassResonance)
class OptionsCalMonth(Options):
    """Month-view options for the TOAST UI calendar wrapper.

    NOTE(review): getter/setter pairs share a name and a stray ".setter"
    survives at isAlways6Week -- the @property / @name.setter decorators
    were stripped in extraction; restore them.
    """
    def daynames(self):
        return self._config_get()
    def daynames(self, values):
        self._config(values)
    def startDayOfWeek(self):
        # Default 0 = Sunday.
        return self._config_get(0)
    def startDayOfWeek(self, num):
        self._config(num)
    def narrowWeekend(self):
        return self._config_get(True)
    def narrowWeekend(self, flag):
        self._config(flag)
    def visibleWeeksCount(self):
        return self._config_get(0)
    def visibleWeeksCount(self, num):
        self._config(num)
    def isAlways6Week(self):
        return self._config_get(0)
    .setter
    def isAlways6Week(self, flag):
        self._config(flag)
    def workweek(self):
        return self._config_get(False)
    def workweek(self, flag):
        self._config(flag)
    def visibleScheduleCount(self):
        return self._config_get()
    def visibleScheduleCount(self, num):
        self._config(num)
    def moreLayerSize(self):
        return self._config_get()
    def moreLayerSize(self, value):
        self._config(value)
    def grid(self):
        return self._config_get()
    def grid(self, value):
        self._config(value)
    def scheduleFilter(self, js_funcs, profile):
        # Installs a JS filter function; js_funcs is converted to a JS body.
        self._config(('function (value){%s}' % JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)), js_type=True)
class OptionPlotoptionsVennSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Highcharts sonification pan mapping options.

    NOTE(review): each name appears twice (getter then setter); the
    @property / @name.setter decorators were stripped in extraction, so as
    written the second definition shadows the first -- restore decorators.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class zalesak_disk(object):
    """Signed-distance-like initial condition for the Zalesak slotted-disk
    advection benchmark: a circle of radius 0.15 centred at (0.5, 0.75)
    with a vertical slot of half-width 0.025 cut up to y = 0.85.

    uOfXT returns a scaled distance, positive inside the slotted disk and
    negative outside.
    """

    def __init__(self, L, scaling=0.75):
        # L is accepted for interface compatibility with sibling initial
        # conditions but is not used by this shape.
        self.radius = 0.15
        self.xc = 0.5
        self.yc = 0.75
        self.scaling = scaling

    def uOfXT(self, X, t):
        import numpy as np
        x, y = X[0], X[1]
        # Signed distance to the circle (positive inside).
        r = math.sqrt(((x - self.xc) ** 2) + ((y - self.yc) ** 2))
        d_circle = self.radius - r
        # Slot geometry: vertical walls at x_left/x_right, floor where the
        # walls meet the circle, ceiling at y_top.
        x_left = self.xc - 0.025
        x_right = self.xc + 0.025
        y_floor = 0.75 - np.sqrt((self.radius ** 2) - (0.025 ** 2))
        y_top = 0.85
        # Unsigned distances to the four slot edges.
        d_left = np.abs(x - x_left)
        d_right = np.abs(x - x_right)
        d_top = np.abs(y - y_top)
        d_floor = np.abs(y - y_floor)
        if y > y_floor:
            if (x_left < x) and (x <= x_right) and (y <= y_top):
                # Inside the slot: negative distance to the nearest slot wall.
                dist = -np.min([d_left, d_right, d_top])
            elif x <= x_left:
                if y <= y_top:
                    dist = np.min([d_circle, d_left])
                else:
                    # Nearest feature may be the slot's top-left corner.
                    dist = np.min([d_circle, np.sqrt((d_left ** 2) + (d_top ** 2))])
            elif x >= x_right:
                if y <= y_top:
                    dist = np.min([d_circle, d_right])
                else:
                    dist = np.min([d_circle, np.sqrt((d_right ** 2) + (d_top ** 2))])
            else:
                # Directly above the slot opening, inside the disk region.
                dist = np.min([d_circle, d_top])
        elif (x > x_left) and (x < x_right):
            # Below the slot floor, within the slot's x-range: nearest
            # features are the slot's bottom corners.
            dist = -np.min([np.sqrt((d_left ** 2) + (d_floor ** 2)), np.sqrt((d_right ** 2) + (d_floor ** 2))])
        else:
            dist = d_circle
        return self.scaling * dist
class TestTemplateRender():
    """Exercises the Jinja-based track template renderer: internal variables,
    external variables with defaults, file globbing, and conditionals."""
    # Deterministic internal vars (fixed clock, test flavor) so rendered
    # output is stable across runs.
    unittest_template_internal_vars = loader.default_internal_template_vars(clock=StaticClock, build_flavor='test', serverless_operator=False)
    def test_render_simple_template(self):
        # days_ago uses the StaticClock, so the numeric result is fixed.
        template = '\n    {\n        "key": {{\'01-01-2000\' | days_ago(now)}},\n        "key2": "static value",\n        "key3": "{{build_flavor}}",\n        "key4": {{serverless_operator}}\n    }\n    '
        rendered = loader.render_template(template, template_internal_vars=self.unittest_template_internal_vars)
        expected = '\n    {\n        "key": 5864,\n        "key2": "static value",\n        "key3": "test",\n        "key4": False\n    }\n    '
        assert (rendered == expected)
    def test_render_template_with_external_variables(self):
        # Supplied vars override the Jinja default() fallbacks.
        template = '\n    {\n        "greeting": "{{greeting | default("Aloha")}}",\n        "name": "{{name | default("stranger")}}"\n    }\n    '
        rendered = loader.render_template(template, template_vars={'greeting': 'Hi'}, template_internal_vars=self.unittest_template_internal_vars)
        expected = '\n    {\n        "greeting": "Hi",\n        "name": "stranger"\n    }\n    '
        assert (rendered == expected)
    def test_render_template_with_globbing(self):
        # Fake globber resolves the collect(parts=...) pattern to three parts.
        def key_globber(e):
            if (e == 'dynamic-key-*'):
                return ['dynamic-key-1', 'dynamic-key-2', 'dynamic-key-3']
            else:
                return []
        template = '\n    {% import "rally.helpers" as rally %}\n    {\n        "key1": "static value",\n        {{ rally.collect(parts="dynamic-key-*") }}\n\n    }\n    '
        source = io.DictStringFileSourceFactory({'dynamic-key-1': [textwrap.dedent('"dkey1": "value1"')], 'dynamic-key-2': [textwrap.dedent('"dkey2": "value2"')], 'dynamic-key-3': [textwrap.dedent('"dkey3": "value3"')]})
        template_source = loader.TemplateSource('', 'track.json', source=source, fileglobber=key_globber)
        template_source.load_template_from_string(template)
        rendered = loader.render_template(template_source.assembled_source, template_internal_vars=self.unittest_template_internal_vars)
        expected = '\n    {\n        "key1": "static value",\n        "dkey1": "value1",\n        "dkey2": "value2",\n        "dkey3": "value3"\n\n    }\n    '
        assert_equal_ignore_whitespace(expected, rendered)
    def test_render_template_with_variables(self):
        # clients is overridden; bulk_size falls back to its template default.
        template = '\n    {% set _clients = clients if clients is defined else 16 %}\n    {% set _bulk_size = bulk_size if bulk_size is defined else 100 %}\n    {% import "rally.helpers" as rally with context %}\n    {\n        "key1": "static value",\n        "dkey1": {{ _clients }},\n        "dkey2": {{ _bulk_size }}\n    }\n    '
        rendered = loader.render_template(template, template_vars={'clients': 8}, template_internal_vars=self.unittest_template_internal_vars)
        expected = '\n    {\n        "key1": "static value",\n        "dkey1": 8,\n        "dkey2": 100\n    }\n    '
        assert_equal_ignore_whitespace(expected, rendered)
    def test_render_template_with_conditions(self):
        # build_flavor == 'test' and serverless_operator is False, so the
        # lifecycle=='ilm' branch is the first to match.
        template = '\n    {\n    {%- if build_flavor != "test" %}\n        "key1": "build_flavor"\n    {%- elif serverless_operator %}\n        "key1": "serverless_operator"\n    {%- elif lifecycle == "ilm" %}\n        "key1": "ilm"\n    {%- else %}\n        "key1": "else"\n    {%- endif %}\n    }\n    '
        rendered = loader.render_template(template, template_vars={'lifecycle': 'ilm'}, template_internal_vars=self.unittest_template_internal_vars)
        expected = '\n    {\n        "key1": "ilm"\n    }\n    '
        assert_equal_ignore_whitespace(expected, rendered)
class OptionSeriesVectorMarker(Options):
    """Highcharts vector-series marker options.

    NOTE(review): getter/setter pairs share a name; the @property /
    @name.setter decorators were stripped in extraction, so as written the
    second definition shadows the first -- restore decorators.
    """
    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def enabledThreshold(self):
        return self._config_get(2)
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)
    def fillColor(self):
        return self._config_get(None)
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def height(self):
        return self._config_get(None)
    def height(self, num: float):
        self._config(num, js_type=False)
    def lineColor(self):
        return self._config_get('#ffffff')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(0)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        return self._config_get(4)
    def radius(self, num: float):
        self._config(num, js_type=False)
    def states(self) -> 'OptionSeriesVectorMarkerStates':
        # Lazily materialized typed sub-options object.
        return self._config_sub_data('states', OptionSeriesVectorMarkerStates)
    def symbol(self):
        return self._config_get(None)
    def symbol(self, text: str):
        self._config(text, js_type=False)
    def width(self):
        return self._config_get(None)
    def width(self, num: float):
        self._config(num, js_type=False)
def create_rto_return(package_info, client: UnicommerceAPIClient):
    """Create a credit note for an RTO (courier-returned) shipping package.

    Looks up the Sales Invoice for the package, verifies with Unicommerce
    that the package was actually courier-returned, and creates (saves) a
    credit note. No-ops when the invoice is missing or a return already
    exists for the package.
    """
    package_code = package_info['code']
    invoice = frappe.db.get_value('Sales Invoice', {SHIPPING_PACKAGE_CODE_FIELD: package_code}, ['name', ORDER_CODE_FIELD, CHANNEL_ID_FIELD], as_dict=True)
    # Guard against duplicate credit notes for the same package.
    already_returned = frappe.db.get_value('Sales Invoice', {SHIPPING_PACKAGE_CODE_FIELD: package_code, 'is_return': 1})
    if ((not invoice) or already_returned):
        return
    # Confirm the return with Unicommerce before creating local documents.
    so_data = client.get_sales_order(invoice.get(ORDER_CODE_FIELD))
    rto_returns = [r for r in so_data['returns'] if ((r['type'] == 'Courier Returned') and (r['code'] == package_code))]
    if rto_returns:
        # NOTE(review): the credit note is saved but not submitted -- confirm
        # whether downstream processing submits it.
        credit_note = create_credit_note(invoice.name)
        credit_note.save()
def get_vm_snapdefs(vm, sr=('vm', 'periodic_task', 'periodic_task__crontab')):
    """Return snapshot definitions for *vm*, newest first.

    A VM that is not yet created falls back to the snapshot definitions
    stored on its server template; otherwise the definitions are fetched
    from the database with the related objects in *sr* pre-selected.
    The returned object is tagged with ``definition = False``.
    """
    if vm.is_notcreated() and vm.template:
        # Not-yet-deployed VM: use the template's predefined web data.
        snapdefs = vm.template.vm_define_snapshot_web_data
    else:
        snapdefs = SnapshotDefine.objects.select_related(*sr).filter(vm=vm).order_by('-id')
    snapdefs.definition = False
    return snapdefs
# NOTE(review): the three lines below look like pytest marks whose leading
# "@pytest.mark" prefix was stripped (e.g. "@pytest.mark.long_test") — as
# written this is not valid Python; confirm against the original source.
.long_test
.download
.skipif(NO_CDS, reason='No access to CDS')
def test_multi():
    """Download two single-day ERA5 requests from CDS and merge them via the 'multi' source."""
    s1 = load_source('cds', 'reanalysis-era5-single-levels', product_type='reanalysis', param='2t', date='2021-03-01', format='netcdf')
    # Force the download/conversion of each source before combining them.
    s1.to_xarray()
    s2 = load_source('cds', 'reanalysis-era5-single-levels', product_type='reanalysis', param='2t', date='2021-03-02', format='netcdf')
    s2.to_xarray()
    source = load_source('multi', s1, s2)
    for s in source:
        print(s)
    # The combined source must still be convertible to a single dataset.
    source.to_xarray()
class Attribute(BaseDescriptor):
    """Read-only descriptor for event attributes.

    Instance access returns the value stored privately under ``_<name>``;
    assigning through the descriptor always raises ``AttributeError``.
    """

    def __init__(self, doc=''):
        if not isinstance(doc, str):
            raise TypeError('event.Attribute() doc must be a string.')
        self._doc = doc
        self._set_name('anonymous_attribute')

    def _set_name(self, name):
        # Record the attribute name and rebuild the docstring for it.
        self._name = name
        self.__doc__ = self._format_doc('attribute', name, self._doc)

    def __set__(self, instance, value):
        message = 'Cannot set attribute %r.'
        raise AttributeError(message % self._name)

    def __get__(self, instance, owner):
        # Class-level access yields the descriptor itself; instance access
        # yields the privately stored value.
        if instance is None:
            return self
        return getattr(instance, '_' + self._name)
class PreviousButtonControl(ButtonControl):
    """Toolbar button that skips back to the previous track in the queue."""

    name = 'previous'
    title = _('Previous')
    description = _('Go to the previous track')

    def __init__(self):
        super().__init__()
        self.set_image_from_icon_name('media-skip-backward')
        self.set_tooltip_text(_('Previous track'))

    def do_clicked(self):
        # Delegate the action to the global play queue.
        player.QUEUE.prev()
class LiteEthIPTX(LiteXModule):
    """IPv4 transmit path: packetizes user data, computes the header checksum,
    and resolves the destination MAC (broadcast / multicast / ARP lookup)
    before handing frames to the MAC layer.

    Parameters: mac_address / ip_address are this host's addresses, arp_table
    is the ARP resolver interface, dw the datapath width in bits.
    """

    def __init__(self, mac_address, ip_address, arp_table, dw=8):
        # Stream endpoints: user IPv4 payload in, Ethernet MAC frames out.
        self.sink = sink = stream.Endpoint(eth_ipv4_user_description(dw))
        self.source = source = stream.Endpoint(eth_mac_description(dw))
        # Pulsed when ARP resolution fails for the requested destination.
        self.target_unreachable = Signal()
        # Header checksum engine; the checksum field itself is skipped while summing.
        self.checksum = checksum = LiteEthIPV4Checksum(skip_checksum=True)
        self.comb += checksum.ce.eq(sink.valid)
        # Restart the checksum once the outgoing packet's last word is accepted.
        self.comb += checksum.reset.eq(((source.valid & source.last) & source.ready))
        self.packetizer = packetizer = LiteEthIPV4Packetizer(dw)
        # Fill the IPv4 header fields; data flow is gated on checksum.done so the
        # computed checksum is valid before the header leaves the packetizer.
        self.comb += [sink.connect(packetizer.sink, keep={'last', 'last_be', 'protocol', 'data'}), packetizer.sink.valid.eq((sink.valid & checksum.done)), sink.ready.eq((packetizer.sink.ready & checksum.done)), packetizer.sink.target_ip.eq(sink.ip_address), packetizer.sink.total_length.eq((ipv4_header.length + sink.length)), packetizer.sink.version.eq(4), packetizer.sink.ihl.eq((ipv4_header.length // 4)), packetizer.sink.identification.eq(0), packetizer.sink.ttl.eq(128), packetizer.sink.sender_ip.eq(ip_address), checksum.header.eq(packetizer.header), packetizer.sink.checksum.eq(checksum.value)]
        # Destination MAC resolved below (broadcast / multicast / ARP).
        target_mac = Signal(48, reset_less=True)
        self.fsm = fsm = FSM(reset_state='IDLE')
        # IDLE: pick the MAC resolution strategy from the destination IP:
        # broadcast mask -> broadcast MAC, multicast prefix -> derived multicast
        # MAC (OUI + low 23 IP bits), otherwise ask the ARP table.
        fsm.act('IDLE', If(packetizer.source.valid, If((sink.ip_address[0:8] == bcast_ip_mask), NextValue(target_mac, bcast_mac_address), NextState('SEND')).Elif((sink.ip_address[28:] == mcast_ip_mask), NextValue(target_mac, Cat(sink.ip_address[:23], 0, mcast_oui)), NextState('SEND')).Else(NextState('SEND_MAC_ADDRESS_REQUEST'))))
        self.comb += arp_table.request.ip_address.eq(sink.ip_address)
        fsm.act('SEND_MAC_ADDRESS_REQUEST', arp_table.request.valid.eq(1), If((arp_table.request.valid & arp_table.request.ready), NextState('WAIT_MAC_ADDRESS_RESPONSE')))
        # On ARP failure the packet is dropped and target_unreachable pulsed.
        fsm.act('WAIT_MAC_ADDRESS_RESPONSE', If(arp_table.response.valid, NextValue(target_mac, arp_table.response.mac_address), arp_table.response.ready.eq(1), If(arp_table.response.failed, self.target_unreachable.eq(1), NextState('DROP')).Else(NextState('SEND'))))
        fsm.act('SEND', packetizer.source.connect(source), source.ethernet_type.eq(ethernet_type_ip), source.target_mac.eq(target_mac), source.sender_mac.eq(mac_address), If(((source.valid & source.last) & source.ready), NextState('IDLE')))
        # DROP: sink the remainder of the packet without forwarding it.
        fsm.act('DROP', packetizer.source.ready.eq(1), If(((packetizer.source.valid & packetizer.source.last) & packetizer.source.ready), NextState('IDLE')))
class WafFirewallVersionDataAttributes(ModelNormal):
    """Auto-generated OpenAPI model for WAF firewall-version attributes.

    NOTE(review): this copy appears corrupted — the bare `_property` /
    `_js_args_to_python_args` lines look like stripped `@cached_property` /
    `@convert_js_args_to_python_args` decorators, `_from_openapi_data` has
    lost its `@classmethod` decorator, and several dict keys in
    `openapi_types` / `attribute_map` are truncated (e.g. "'allowed_ ").
    Confirm against the generated original before relying on this file.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        # Types accepted for properties not declared in openapi_types.
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        # Attribute name -> tuple of accepted Python types.
        return {'allowed_ (str,), 'allowed_methods': (str,), 'allowed_request_content_type': (str,), 'allowed_request_content_type_charset': (str,), 'arg_name_length': (int,), 'arg_length': (int,), 'combined_file_sizes': (int,), 'comment': (str, none_type), 'critical_anomaly_score': (int,), 'crs_validate_utf8_encoding': (bool,), 'error_anomaly_score': (int,), 'high_risk_country_codes': (str,), ' (int,), 'inbound_anomaly_score_threshold': (int,), 'lfi_score_threshold': (int,), 'locked': (bool,), 'max_file_size': (int,), 'max_num_args': (int,), 'notice_anomaly_score': (int,), 'number': (int,), 'paranoia_level': (int,), 'php_injection_score_threshold': (int,), 'rce_score_threshold': (int,), 'restricted_extensions': (str,), 'restricted_headers': (str,), 'rfi_score_threshold': (int,), 'session_fixation_score_threshold': (int,), 'sql_injection_score_threshold': (int,), 'total_arg_length': (int,), 'warning_anomaly_score': (int,), 'xss_score_threshold': (int,)}
    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None
    # Python attribute name -> JSON key name.
    attribute_map = {'allowed_ 'allowed_ 'allowed_methods': 'allowed_methods', 'allowed_request_content_type': 'allowed_request_content_type', 'allowed_request_content_type_charset': 'allowed_request_content_type_charset', 'arg_name_length': 'arg_name_length', 'arg_length': 'arg_length', 'combined_file_sizes': 'combined_file_sizes', 'comment': 'comment', 'critical_anomaly_score': 'critical_anomaly_score', 'crs_validate_utf8_encoding': 'crs_validate_utf8_encoding', 'error_anomaly_score': 'error_anomaly_score', 'high_risk_country_codes': 'high_risk_country_codes', ' ' 'inbound_anomaly_score_threshold': 'inbound_anomaly_score_threshold', 'lfi_score_threshold': 'lfi_score_threshold', 'locked': 'locked', 'max_file_size': 'max_file_size', 'max_num_args': 'max_num_args', 'notice_anomaly_score': 'notice_anomaly_score', 'number': 'number', 'paranoia_level': 'paranoia_level', 'php_injection_score_threshold': 'php_injection_score_threshold', 'rce_score_threshold': 'rce_score_threshold', 'restricted_extensions': 'restricted_extensions', 'restricted_headers': 'restricted_headers', 'rfi_score_threshold': 'rfi_score_threshold', 'session_fixation_score_threshold': 'session_fixation_score_threshold', 'sql_injection_score_threshold': 'sql_injection_score_threshold', 'total_arg_length': 'total_arg_length', 'warning_anomaly_score': 'warning_anomaly_score', 'xss_score_threshold': 'xss_score_threshold'}
    read_only_vars = {'number'}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate the model from raw API response data (server naming).

        Pops the framework control kwargs, rejects positional args, and sets
        every remaining kwarg as an attribute (unknown keys may be discarded
        depending on configuration).
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass __init__ (and its read-only checks) — API data may set read-only vars.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys unknown to the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct the model from user-supplied keyword arguments.

        Unlike `_from_openapi_data`, assigning a read-only attribute here
        raises ApiAttributeError.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): the read-only check runs after setattr, so the
            # value is assigned before the error is raised — matches other
            # generated models, but worth confirming.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class TestWorksheet(unittest.TestCase):
    """Unit tests for Workbook/Worksheet tab-separated data handling."""

    def setUp(self):
        self.wb = Workbook()

    def test_too_long_title(self):
        """Sheet titles longer than MAX_LEN_WORKSHEET_TITLE are truncated."""
        long_title = 'This is a very long title indeed for a worksheet'
        trunc_title = long_title[0:MAX_LEN_WORKSHEET_TITLE]
        ws = self.wb.addSheet(long_title)
        self.assertEqual(ws.title, trunc_title)

    def test_add_tab_data(self):
        """addTabData stores each row and tracks the column count."""
        ws = self.wb.addSheet('test sheet')
        self.assertEqual(len(ws.data), 0)
        self.assertEqual(ws.ncols, 0)
        data = ['1\t2\t3', '4\t5\t6']
        ws.addTabData(data)
        self.assertEqual(len(ws.data), 2)
        self.assertEqual(ws.ncols, 3)
        for i, row in enumerate(data):
            self.assertEqual(row, ws.data[i])

    def test_add_text(self):
        """addText splits newline-separated text into rows."""
        ws = self.wb.addSheet('test sheet')
        self.assertEqual(len(ws.data), 0)
        self.assertEqual(ws.ncols, 0)
        data = ['1\t2\t3', '4\t5\t6']
        text = '\n'.join(data)
        ws.addText(text)
        self.assertEqual(len(ws.data), 2)
        self.assertEqual(ws.ncols, 3)
        for i, row in enumerate(data):
            self.assertEqual(row, ws.data[i])

    def test_set_cell_value(self):
        """setCellValue overwrites a single cell, leaving the rest intact."""
        ws = self.wb.addSheet('test sheet')
        self.assertEqual(len(ws.data), 0)
        self.assertEqual(ws.ncols, 0)
        data = ['1\t2\t3', '4\t5\t6']
        ws.addTabData(data)
        ws.setCellValue(0, 1, '7')
        self.assertEqual(len(ws.data), 2)
        self.assertEqual(ws.ncols, 3)
        new_data = ['1\t7\t3', '4\t5\t6']
        for i, row in enumerate(new_data):
            self.assertEqual(row, ws.data[i])

    def test_set_cell_value_in_empty_sheet(self):
        """setCellValue grows an empty sheet to fit the target cell."""
        ws = self.wb.addSheet('test sheet')
        self.assertEqual(len(ws.data), 0)
        self.assertEqual(ws.ncols, 0)
        ws.setCellValue(2, 1, '7')
        self.assertEqual(len(ws.data), 3)
        self.assertEqual(ws.ncols, 2)
        new_data = ['', '', '\t7']
        # BUG FIX: the original looped over range(1) and only compared the
        # first (empty) padding row — the row actually holding the value was
        # never checked.  Compare every expected row.
        for i, row in enumerate(new_data):
            self.assertEqual(row, ws.data[i])

    def test_get_column_id_from_index(self):
        """Column indices map to spreadsheet-style letter ids (A..Z, AA..)."""
        ws = self.wb.addSheet('test sheet')
        self.assertEqual(ws.column_id_from_index(0), 'A')
        self.assertEqual(ws.column_id_from_index(25), 'Z')
        self.assertEqual(ws.column_id_from_index(26), 'AA')
        self.assertEqual(ws.column_id_from_index(27), 'AB')
        self.assertEqual(ws.column_id_from_index(32), 'AG')
def match_attribute_docs(attr_name, attr_docs, attr_type_str, attr_default=NOTHING):
    """Find the documentation line for *attr_name* among *attr_docs* and
    build its rendering metadata.

    Scans every doc line for a case-insensitive ``name:`` prefix and keeps
    the text after the colon of the LAST matching line (preserved from the
    original: no early break).  Returns a one-entry dict mapping the
    attribute name to its type, description, formatted default (omitted when
    the default is attrs' NOTHING sentinel), and name/type lengths used for
    column alignment.
    """
    a_str = None
    # BUG FIX: escape attr_name — names containing regex metacharacters
    # previously leaked into the pattern verbatim.  The pattern is also
    # hoisted out of the loop (it is loop-invariant).
    # NOTE(review): the trailing '?' makes the final character of the name
    # optional (matches both "attr:" and "att:"); preserved from the
    # original — confirm intent.
    pattern = ('(?i)^' + re.escape(attr_name)) + '?:'
    for a_doc in attr_docs:
        match_re = re.search(pattern, a_doc)
        if match_re:
            a_str = a_doc[match_re.end():].strip()
    return {attr_name: {'type': attr_type_str, 'desc': (a_str if (a_str is not None) else ''), 'default': ((('(default: ' + repr(attr_default)) + ')') if (type(attr_default).__name__ != '_Nothing') else ''), 'len': {'name': len(attr_name), 'type': len(attr_type_str)}}}
def process_cancel_build(build):
    """Attempt to cancel *build* on behalf of the logged-in user.

    Flashes an error when the user lacks rights or the request conflicts,
    otherwise commits the cancellation and flashes a success message.
    Always redirects back to the copr's builds page.
    """
    cancelled = True
    try:
        builds_logic.BuildsLogic.cancel_build(flask.g.user, build)
    except (InsufficientRightsException, ConflictingRequest) as exc:
        cancelled = False
        flask.flash(str(exc), 'error')
    if cancelled:
        db.session.commit()
        flask.flash('Build {} has been canceled successfully.'.format(build.id))
    return flask.redirect(helpers.url_for_copr_builds(build.copr))
def test_synchronizer_check_prints_exception_with_function_raising_exception_and_catch_exceptions_to_true(sample_directory, capsys, output_filename):
    """check(catch_exceptions=True) must print the raised error rather than propagate it."""
    # The callback name must stay `sync_function`: it appears verbatim in the
    # asserted output below.
    def sync_function(trace_object):
        raise scared.ResynchroError('Error.')

    traces = estraces.read_ths_from_ets_file(f'{sample_directory}/synchronization/ets_file.ets')[:10]
    checker = scared.Synchronizer(traces, output_filename, sync_function)
    checker.check(nb_traces=1, catch_exceptions=True)
    printed = capsys.readouterr().out
    assert printed.startswith('Raised scared.synchronization.ResynchroError: Error. in sync_function line')
    traces.close()
class ImagePreviewCollection():
    """Wrapper around ``bpy.utils.previews`` that can load image previews
    lazily in a worker pool, delivering pixel data back to the main thread
    through a queue drained by a ``bpy.app.timers`` callback.
    """

    def __init__(self, max_size: tuple=(128, 128), lazy_load: bool=True):
        self._collection = bpy.utils.previews.new()
        self._max_size = max_size
        self._lazy_load = lazy_load
        if self._lazy_load:
            # Worker pool decodes image files off the main thread; results are
            # queued and applied on the main thread via the registered timer.
            self._pool = Pool(processes=cpu_count())
            # Cancellation token shared with in-flight loads (created lazily).
            self._event = None
            self._queue = Queue()
            if (not bpy.app.timers.is_registered(self._timer)):
                bpy.app.timers.register(self._timer, persistent=True)

    # Mapping-style access delegates straight to the underlying collection.
    def __len__(self) -> int:
        return len(self._collection)

    def __iter__(self) -> Iterator[str]:
        return iter(self._collection)

    def __contains__(self, key) -> bool:
        return (key in self._collection)

    def __getitem__(self, key) -> ImagePreview:
        return self._collection[key]

    def pop(self, key: str) -> ImagePreview:
        return self._collection.pop(key)

    def get(self, key: str, default=None) -> ImagePreview:
        return self._collection.get(key, default)

    def keys(self) -> KeysView[str]:
        return self._collection.keys()

    def values(self) -> ValuesView[ImagePreview]:
        return self._collection.values()

    def items(self) -> ItemsView[(str, ImagePreview)]:
        return self._collection.items()

    def new_safe(self, name: str) -> ImagePreview:
        """Like new(), but returns the existing preview instead of erroring on duplicates."""
        if (name in self):
            return self[name]
        return self.new(name)

    def new(self, name: str) -> ImagePreview:
        """Create an empty preview slot named *name*."""
        return self._collection.new(name)

    def load_safe(self, name: str, filepath: str, filetype: str) -> ImagePreview:
        """Like load(), but returns the existing preview instead of erroring on duplicates."""
        if (name in self):
            return self[name]
        return self.load(name, filepath, filetype)

    def load(self, name: str, filepath: str, filetype: str) -> ImagePreview:
        """Load a preview for *filepath*; images we can decode ourselves are
        loaded eagerly or queued for async loading, anything else falls back
        to Blender's built-in loader."""
        if ((filetype != 'IMAGE') or (not can_load(filepath))):
            return self._load_fallback(name, filepath, filetype)
        if (not self._lazy_load):
            return self._load_eager(name, filepath)
        # Lazy path: return the empty slot now, fill pixels when the worker is done.
        preview = self.new(name)
        self._pool.apply_async(func=self._load_async, args=(name, filepath, self._get_event()), error_callback=print)
        return preview

    def _load_fallback(self, name: str, filepath: str, filetype: str) -> ImagePreview:
        """Delegate to Blender's own preview loader."""
        preview = self._collection.load(name, filepath, filetype)
        if (not self._lazy_load):
            # Accessing the sizes forces Blender to actually read the file now
            # — presumably to defeat its internal lazy loading; confirm.
            preview.icon_size[:]
            preview.image_size[:]
        return preview

    def _load_eager(self, name: str, filepath: str) -> ImagePreview:
        """Decode *filepath* synchronously and copy its pixels into a new preview."""
        data = load_file(filepath)
        preview = self.new(name)
        preview.icon_size = data['icon_size']
        preview.icon_pixels = data['icon_pixels']
        preview.image_size = data['image_size']
        preview.image_pixels = data['image_pixels']
        return preview

    def _load_async(self, name: str, filepath: str, event: Event):
        """Worker-side load: decode the file and queue the result unless cancelled."""
        if (not event.is_set()):
            data = load_file(filepath)
            # Re-check: the collection may have been cleared while decoding.
            if (not event.is_set()):
                self._queue.put((name, data, event))

    def _timer(self):
        """Main-thread timer callback: apply queued results for up to 0.1s.

        Returns the delay until the next invocation — 0.0 when the queue still
        has items (run again immediately), 0.1 once it has been drained.
        """
        now = time()
        redraw = False
        delay = 0.1
        while ((time() - now) < 0.1):
            try:
                args = self._queue.get(block=False)
            # NOTE(review): bare except — presumably meant to catch
            # queue.Empty only; as written it swallows everything.
            except:
                break
            try:
                self._load_queued(*args)
            # NOTE(review): bare except here too; errors are printed but
            # otherwise ignored so one bad preview cannot stop the drain.
            except:
                print_exc()
            else:
                redraw = True
        else:
            # while/else: the time budget ran out without the queue emptying,
            # so reschedule immediately.
            delay = 0.0
        if redraw:
            tag_redraw()
        return delay

    def _load_queued(self, name: str, data: dict, event: Event):
        """Copy worker-produced pixel data into the (still existing) preview."""
        if (not event.is_set()):
            if (name in self):
                preview = self[name]
                preview.icon_size = data['icon_size']
                preview.icon_pixels = data['icon_pixels']
                preview.image_size = data['image_size']
                preview.image_pixels = data['image_pixels']

    def clear(self):
        """Drop all previews and cancel every in-flight async load."""
        if self._lazy_load:
            self._set_event()
            # Drain pending results under the queue's own lock.
            with self._queue.mutex:
                self._queue.queue.clear()
        self._collection.clear()

    def close(self):
        """Tear down the collection, the timer, and outstanding loads."""
        if self._lazy_load:
            self._set_event()
            if bpy.app.timers.is_registered(self._timer):
                bpy.app.timers.unregister(self._timer)
        self._collection.close()

    def _get_event(self) -> Event:
        # Lazily create the shared cancellation token for current loads.
        if (self._event is None):
            self._event = Event()
        return self._event

    def _set_event(self):
        # Fire the current token (cancelling its loads) and start a fresh epoch.
        if (self._event is not None):
            self._event.set()
            self._event = None
_filter('timestamp_diff')
def timestamp_diff(time_in, until=None):
    """Return the whole number of seconds between the unix timestamp
    *time_in* and *until* (another unix timestamp, defaulting to now).

    A ``None`` *time_in* yields 0.
    """
    if time_in is None:
        return 0
    reference = datetime.datetime.fromtimestamp(until) if until is not None else datetime.datetime.now()
    elapsed = reference - datetime.datetime.fromtimestamp(time_in)
    return int(elapsed.total_seconds())
class Character():
    """A D&D-style character whose six ability scores are rolled randomly."""

    def __init__(self):
        self.strength = self.ability()
        self.dexterity = self.ability()
        self.constitution = self.ability()
        self.intelligence = self.ability()
        self.wisdom = self.ability()
        self.charisma = self.ability()
        # Base 10 hit points adjusted by the constitution modifier.
        self.hitpoints = 10 + modifier(self.constitution)

    def ability(self):
        """Roll 4d6 and keep the sum of the highest three dice."""
        rolls = sorted(random.randint(1, 6) for _ in range(4))
        return sum(rolls) - rolls[0]
class PayPalPaymentsManager():
    """Helpers for configuring and talking to the PayPal REST SDK.

    NOTE(review): none of these methods takes ``self`` — the ``@staticmethod``
    decorators appear to have been stripped from this copy; confirm against
    the original source.
    """

    def configure_paypal():
        """Configure paypalrestsdk from platform settings.

        Picks sandbox vs live credentials based on the `paypal_mode` setting
        (defaulting to live only in production) and raises ConflictError when
        the mode or the credentials are missing.
        """
        settings = get_settings()
        paypal_mode = settings.get('paypal_mode', ('live' if (settings['app_environment'] == Environment.PRODUCTION) else 'sandbox'))
        paypal_key = None
        # Settings key prefix depends on the mode.
        if (paypal_mode == 'sandbox'):
            paypal_key = 'paypal_sandbox'
        elif (paypal_mode == 'live'):
            paypal_key = 'paypal'
        if (not paypal_key):
            raise ConflictError({'pointer': ''}, "Paypal Mode must be 'live' or 'sandbox'")
        paypal_client = settings.get(f'{paypal_key}_client', None)
        paypal_secret = settings.get(f'{paypal_key}_secret', None)
        if ((not paypal_client) or (not paypal_secret)):
            raise ConflictError({'pointer': ''}, 'Payments through Paypal have not been configured on the platform')
        return paypalrestsdk.configure({'mode': paypal_mode, 'client_id': paypal_client, 'client_secret': paypal_secret})

    def create_payment(order, return_url, cancel_url, payee_email=None):
        """Create a PayPal payment for *order*.

        Returns (True, payment_id) on success or (False, error) on failure.
        Raises ConflictError when no payee email is configured.
        """
        payee_email = (payee_email or order.event.paypal_email)
        if (not payee_email):
            raise ConflictError({'pointer': ''}, "Payments through Paypal hasn't been configured for the billing")
        PayPalPaymentsManager.configure_paypal()
        payment = paypalrestsdk.Payment({'intent': 'sale', 'payer': {'payment_method': 'paypal'}, 'redirect_urls': {'return_url': return_url, 'cancel_url': cancel_url}, 'transactions': [{'amount': {'total': float(round_money(order.amount)), 'currency': order.event.payment_currency}, 'payee': {'email': payee_email}}]})
        if payment.create():
            return (True, payment.id)
        return (False, payment.error)

    def verify_payment(payment_id, order):
        """Verify a PayPal payment against *order*.

        Checks approval state, amount, currency, sale completion, and that the
        payment id has not been used before.  Returns (ok, error_message).
        """
        PayPalPaymentsManager.configure_paypal()
        try:
            payment_server = paypalrestsdk.Payment.find(payment_id)
            if (payment_server.state != 'approved'):
                return (False, (('Payment has not been approved yet. Status is ' + payment_server.state) + '.'))
            # Cross-check the server-side transaction against our order.
            transaction = payment_server.transactions[0]
            amount_server = transaction.amount.total
            currency_server = transaction.amount.currency
            sale_state = transaction.related_resources[0].sale.state
            if (float(amount_server) != order.amount):
                return (False, 'Payment amount does not match order')
            if (currency_server != order.event.payment_currency):
                return (False, 'Payment currency does not match order')
            if (sale_state != 'completed'):
                return (False, 'Sale not completed')
            if PayPalPaymentsManager.used_payment(payment_id, order):
                return (False, 'Payment already been verified')
            return (True, None)
        except paypalrestsdk.ResourceNotFound:
            return (False, 'Payment Not Found')

    def used_payment(payment_id, order):
        """Return True when *payment_id* was already consumed by some order;
        otherwise record it on *order* and return False."""
        if (Order.query.filter((Order.paypal_token == payment_id)).first() is None):
            order.paypal_token = payment_id
            save_to_db(order)
            return False
        return True

    def execute_payment(paypal_payer_id, paypal_payment_id):
        """Execute an approved payment for the given payer.

        Returns (True, message) on success or (False, error) on failure.
        """
        PayPalPaymentsManager.configure_paypal()
        payment = paypalrestsdk.Payment.find(paypal_payment_id)
        if payment.execute({'payer_id': paypal_payer_id}):
            return (True, 'Successfully Executed')
        return (False, payment.error)
# NOTE(review): the line below looks like a pytest mark whose leading
# "@pytest.mark" prefix was stripped (i.e. "@pytest.mark.django_db") — as
# written this is not valid Python; confirm against the original source.
.django_db
def test_award_update_contract_executive_comp():
    """update_procurement_awards copies executive compensation from the
    latest FPDS transaction and keeps it when a later non-FPDS transaction
    (with no officer data) arrives."""
    award = baker.make('search.AwardSearch', award_id=1, generated_unique_award_id='AWARD_CONT_IDV')
    baker.make('search.TransactionSearch', transaction_id=13, is_fpds=True, award=award, action_date='2011-10-01', officer_1_name='Professor Plum', officer_1_amount=1, officer_2_name='Mrs. White', officer_2_amount=2, officer_3_name='Mrs. Peacock', officer_3_amount=3, officer_4_name='Mr. Green', officer_4_amount=4, officer_5_name='Colonel Mustard', officer_5_amount=5, generated_unique_award_id='AWARD_CONT_IDV')
    # The 2012 transaction is the most recent FPDS record, so its officers win.
    baker.make('search.TransactionSearch', transaction_id=14, is_fpds=True, award=award, action_date='2012-10-01', officer_1_name='Jack Mustard', officer_1_amount=100, officer_2_name='Jacob Green', officer_2_amount=200, officer_3_name='Diane White', officer_3_amount=300, officer_4_name='Kasandra Scarlet', officer_4_amount=400, officer_5_name='Victor Plum', officer_5_amount=500, generated_unique_award_id='AWARD_CONT_IDV')
    update_procurement_awards()
    award.refresh_from_db()
    assert (award.officer_1_name == 'Jack Mustard')
    assert (award.officer_5_amount == 500)
    # A newer transaction without officer data must not clobber the values.
    baker.make('search.TransactionSearch', transaction_id=26, award=award, action_date='2013-10-01', generated_unique_award_id='AWARD_CONT_IDV')
    update_procurement_awards()
    award.refresh_from_db()
    assert (award.officer_1_name == 'Jack Mustard')
    assert (award.officer_5_amount == 500)
def test_different_seed_alters_traversal() -> None:
    """The traversal root's child count tracks which identities are seeded."""
    resources = generate_fully_connected_resources(5)
    # Tag one field per resource with an identity; two resources share 'email'.
    field(resources, 'dr_1', 'ds_1', 'f1').identity = 'email'
    field(resources, 'dr_2', 'ds_2', 'f1').identity = 'user_id'
    field(resources, 'dr_3', 'ds_3', 'f1').identity = 'ssn'
    field(resources, 'dr_4', 'ds_4', 'f1').identity = 'email'
    graph = DatasetGraph(*resources)
    expectations = [
        ({'email': '1'}, 2),
        ({'user_id': '1'}, 1),
        ({'ssn': '1'}, 1),
        ({'ssn': '1', 'email': '1'}, 3),
        ({'ssn': '1', 'email': 1, 'user_id': 1}, 4),
    ]
    for seed, expected_children in expectations:
        assert len(Traversal(graph, seed).root_node.children) == expected_children
def _restore_integer_pk(table_name):
    """Revert *table_name*'s string primary key back to an integer id.

    Drops the existing string PK (cascading to dependent constraints),
    recreates the table's backing sequence, retypes the id column, and
    reinstates the primary key.
    """
    op.execute(f'ALTER TABLE {table_name} DROP CONSTRAINT {table_name}_pkey CASCADE')
    # BUG FIX: the original re-created 'policies_id_seq' for organizations and
    # evaluations, and 'data_uses_id_seq' for data_subjects and
    # data_qualifiers (copy-paste) — duplicate CREATE SEQUENCE fails on
    # PostgreSQL.  Each table now gets its own correctly named sequence.
    op.execute(f'CREATE SEQUENCE {table_name}_id_seq')
    op.alter_column(table_name=table_name, column_name='id', existing_type=sa.String(255), type_=sa.Integer, nullable=False)
    op.create_primary_key(table_name=table_name, constraint_name=f'{table_name}_pkey', columns=['id'])


def downgrade():
    """Revert string primary keys to integers across the core tables and
    drop the user/client/audit-log tables added by the upgrade."""
    for table in ('systems', 'registries', 'policies', 'organizations', 'evaluations'):
        _restore_integer_pk(table)
    # Evaluations also lose their timestamp columns.
    op.drop_column('evaluations', 'updated_at')
    op.drop_column('evaluations', 'created_at')
    for table in ('datasets', 'data_uses', 'data_subjects', 'data_qualifiers', 'data_categories'):
        _restore_integer_pk(table)
    # Drop tables introduced by the corresponding upgrade (indexes first).
    op.drop_index(op.f('ix_fidesuserpermissions_id'), table_name='fidesuserpermissions')
    op.drop_table('fidesuserpermissions')
    op.drop_index(op.f('ix_client_id'), table_name='client')
    op.drop_index(op.f('ix_client_fides_key'), table_name='client')
    op.drop_table('client')
    op.drop_index(op.f('ix_fidesuser_username'), table_name='fidesuser')
    op.drop_index(op.f('ix_fidesuser_id'), table_name='fidesuser')
    op.drop_table('fidesuser')
    op.drop_index(op.f('ix_auditlog_user_id'), table_name='auditlog')
    op.drop_index(op.f('ix_auditlog_privacy_request_id'), table_name='auditlog')
    op.drop_index(op.f('ix_auditlog_id'), table_name='auditlog')
    op.drop_index(op.f('ix_auditlog_action'), table_name='auditlog')
    op.drop_table('auditlog')
# NOTE(review): the line below looks like a DRF decorator whose leading "@"
# and prefix were stripped (presumably "@api_view(['GET'])") — as written it
# is a bare call expression; confirm against the original source.
_view(['GET'])
def spending_by_org(request, format=None, org_type=None):
    """API endpoint: prescribing spend aggregated by organisation.

    Query params: `code` (BNF codes/number strings), `org` (organisation
    ids), `org_type` (practice/ccg/pcn/stp/regional_team, may also come from
    the URL route), and `date`.  Either a date or a list of org ids is
    required.  Responds with JSON, or CSV (with an attachment filename) when
    the CSV renderer was negotiated.
    """
    codes = utils.param_to_list(request.query_params.get('code', []))
    codes = utils.get_bnf_codes_from_number_str(codes)
    org_ids = utils.param_to_list(request.query_params.get('org', []))
    org_type = request.query_params.get('org_type', org_type)
    date = request.query_params.get('date', None)
    # Accept legacy upper-case 'CCG'.
    if (org_type == 'CCG'):
        org_type = 'ccg'
    if (org_type == 'practice'):
        # Supplied ids may be parent orgs; expand them to practice ids.
        org_ids = utils.get_practice_ids_from_org(org_ids)
    if ((not date) and (not org_ids)):
        return Response('Error: You must supply either a list of practice IDs or a date parameter, e.g. date=2015-04-01', status=400)
    if (org_type == 'pcn'):
        # Supplied ids may be CCG ids; include the PCNs of their practices.
        extra_ids = Practice.objects.filter(ccg_id__in=org_ids).values_list('pcn', flat=True)
        org_ids = set(org_ids).union(extra_ids)
    # Select the queryset matching the requested organisation type.
    if (org_type == 'practice'):
        orgs = Practice.objects.all()
    elif (org_type == 'ccg'):
        orgs = PCT.objects.filter(org_type='CCG')
    elif (org_type == 'pcn'):
        orgs = PCN.objects.all()
    elif (org_type == 'stp'):
        orgs = STP.objects.all()
    elif (org_type == 'regional_team'):
        orgs = RegionalTeam.objects.all()
    else:
        return Response('Error: unrecognised org_type parameter', status=400)
    if org_ids:
        orgs = orgs.filter(code__in=org_ids)
    orgs = orgs.order_by('code')
    if (org_type != 'practice'):
        # Non-practice orgs only need code and name for the response.
        orgs = orgs.only('code', 'name')
    data = list(_get_prescribing_entries(codes, orgs, org_type, date=date))
    response = Response(data)
    if (request.accepted_renderer.format == 'csv'):
        filename = 'spending-by-{}-{}.csv'.format(org_type, '-'.join(codes))
        response['content-disposition'] = 'attachment; filename={}'.format(filename)
    return response
class group_modify(group_mod):
    """Auto-generated OpenFlow 1.3 OFPT_GROUP_MOD message with the MODIFY
    command (loxigen-style serializer).

    NOTE(review): `unpack` takes `reader` with no `self` — the
    `@staticmethod` decorator loxigen normally emits appears to have been
    stripped.  `pack` also joins str and struct-packed pieces with
    ``''.join`` and appends a ``'\\x00'`` pad as str, which only works on
    Python 2; confirm the original target version.
    """
    version = 4
    type = 15
    command = 1

    def __init__(self, xid=None, group_type=None, group_id=None, buckets=None):
        # Each field defaults to a neutral value when not supplied.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (group_type != None):
            self.group_type = group_type
        else:
            self.group_type = 0
        if (group_id != None):
            self.group_id = group_id
        else:
            self.group_id = 0
        if (buckets != None):
            self.buckets = buckets
        else:
            self.buckets = []
        return

    def pack(self):
        """Serialize the message; the length field (index 2) is patched in
        after all parts are assembled."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.command))
        packed.append(struct.pack('!B', self.group_type))
        packed.append(('\x00' * 1))  # pad byte
        packed.append(struct.pack('!L', self.group_id))
        packed.append(loxi.generic_util.pack_list(self.buckets))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a group_modify from *reader*, asserting the fixed header
        fields match this message type."""
        obj = group_modify()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 15)
        _length = reader.read('!H')[0]
        # Constrain further reads to this message's byte range.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _command = reader.read('!H')[0]
        assert (_command == 1)
        obj.group_type = reader.read('!B')[0]
        reader.skip(1)  # pad byte
        obj.group_id = reader.read('!L')[0]
        obj.buckets = loxi.generic_util.unpack_list(reader, ofp.common.bucket.unpack)
        return obj

    def __eq__(self, other):
        # Field-by-field equality against another group_modify.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.group_type != other.group_type):
            return False
        if (self.group_id != other.group_id):
            return False
        if (self.buckets != other.buckets):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via the pretty-printer *q*."""
        q.text('group_modify {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('group_type = ')
                # Print the symbolic OFPGT_* name when the value is known.
                value_name_map = {0: 'OFPGT_ALL', 1: 'OFPGT_SELECT', 2: 'OFPGT_INDIRECT', 3: 'OFPGT_FF'}
                if (self.group_type in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.group_type], self.group_type)))
                else:
                    q.text(('%#x' % self.group_type))
                q.text(',')
                q.breakable()
                q.text('group_id = ')
                q.text(('%#x' % self.group_id))
                q.text(',')
                q.breakable()
                q.text('buckets = ')
                q.pp(self.buckets)
            q.breakable()
        q.text('}')
class ApplicationWindow(Gtk.ApplicationWindow, ApplicationDialogs):
def __init__(self, application):
    """Build the main Gnofract4D window: fractal view, toolbar, settings
    pane, status bar, and CSS theming, sized from the user preferences."""
    Gtk.ApplicationWindow.__init__(self, application=application, default_width=application.userPrefs.getint('main_window', 'width'), default_height=application.userPrefs.getint('main_window', 'height'), name='main_window', show_menubar=True)
    self.application = application
    self.quit_when_done = False
    self.save_filename = None
    self.compress_saves = True
    self.use_preview = True
    # Remembered sizes for toggling full-screen/normal display modes.
    self.normal_display_size = None
    self.normal_window_size = None
    # File-chooser dialogs, created lazily elsewhere.
    self.saveas_fs = None
    self.saveimage_fs = None
    self.hires_image_fs = None
    self.open_fs = None
    # Widgets that are only enabled for 4D-capable fractals.
    self.four_d_sensitives = []
    # The main fractal; frozen so setup below doesn't trigger redraws.
    self.f = gtkfractal.T(application.compiler, self)
    self.f.freeze()
    self.filename = application_widgets.FractalFilename(self.f)
    self.preview = gtkfractal.Preview(application.compiler, TOOLITEM_SIZE, TOOLITEM_SIZE)
    self.preview.widget.set_tooltip_text(_('Preview'))
    # Apply the application's CSS theme to this display.
    theme_provider = Gtk.CssProvider()
    theme_provider.load_from_resource('/io/github/fract4d/gnofract4d.css')
    Gtk.StyleContext.add_provider_for_display(self.get_display(), theme_provider, Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION)
    self.vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
    self.set_child(self.vbox)
    self.create_toolbar()
    # Horizontal split: fractal view (start) | settings pane (end).
    panes = Gtk.Paned(vexpand=True, resize_end_child=False, shrink_end_child=False)
    self.vbox.append(panes)
    self.statusbar = Gtk.ProgressBar(show_text=True)
    self.vbox.append(self.statusbar)
    try:
        self.f.set_cmap(fractconfig.T.find_resource('basic.map', 'maps'))
    except Exception:
        # Best-effort: missing default colormap is not fatal.
        pass
    self.fractalWindow = application_widgets.FractalWindow(self.f, application.compiler)
    panes.set_start_child(self.fractalWindow)
    self.settingsPane = settings.SettingsPane(self, self.f)
    panes.set_end_child(self.settingsPane)
def add_fourway(self, name, tip, axis, is4dsensitive):
my_fourway = fourway.T(name, tip, axis, TOOLITEM_SIZE)
self.toolbar.append(my_fourway)
my_fourway.connect('value-slightly-changed', self.on_drag_fourway)
my_fourway.connect('value-changed', self.on_release_fourway)
if is4dsensitive:
self.four_d_sensitives.append(my_fourway)
def add_warpmenu(self, tip):
self.warpmenu = utils.combo_box_text_with_items(['None'], tip)
self.warpmenu.set_id_column(0)
def update_warp_param(menu, f):
param = menu.get_active_id()
if (param == 'None'):
param = None
f.set_warp_param(param)
self.on_formula_change(f)
self.f.connect('formula-changed', self.populate_warpmenu)
self.warpmenu.connect('changed', update_warp_param, self.f)
self.toolbar.append(self.warpmenu)
def add_angle(self, name, tip, axis, is4dsensitive):
my_angle = angle.T(name, tip, axis, round((TOOLITEM_SIZE * 0.75)))
my_angle.connect('value-slightly-changed', self.on_angle_slightly_changed)
my_angle.connect('value-changed', self.on_angle_changed)
self.f.connect('parameters-changed', self.update_angle_widget, my_angle)
self.toolbar.append(my_angle)
if is4dsensitive:
self.four_d_sensitives.append(my_angle)
def create_resolution_menu(self):
self.resolutions = [(320, 240), (640, 480), (800, 600), (1024, 768), (1280, 800), (1280, 960), (1280, 1024), (1400, 1050), (1440, 900), (1600, 1200), (1680, 1050), (1920, 1200), (2560, 1600), (3840, 2160)]
res_names = [('%dx%d' % (w, h)) for (w, h) in self.resolutions]
res_menu = utils.dropdown_with_items(res_names, _('Resolution'))
def set_selected_resolution(prefs):
res = (w, h) = (prefs.getint('display', 'width'), prefs.getint('display', 'height'))
try:
index = self.resolutions.index(res)
except ValueError:
self.resolutions.append(res)
item = ('%dx%d' % (w, h))
res_menu.get_model().append(item)
index = (len(self.resolutions) - 1)
res_menu.set_selected(int(index))
def set_resolution(*args):
index = res_menu.get_selected()
if (index != (- 1)):
(w, h) = self.resolutions[index]
self.userPrefs.set_size(w, h)
self.update_subfracts()
self.set_focus()
set_selected_resolution(self.application.userPrefs)
res_menu.connect('notify::selected-item', set_resolution)
self.application.userPrefs.connect('preferences-changed', set_selected_resolution)
return res_menu
def create_toolbar(self):
self.toolbar = application_widgets.Toolbar()
self.vbox.append(self.toolbar)
self.toolbar.add_space()
self.toolbar.append(self.preview.widget)
self.toolbar.add_space()
self.add_angle(_('xy'), _('Angle in the XY plane'), fractal.T.XYANGLE, False)
self.add_angle(_('xz'), _('Angle in the XZ plane'), fractal.T.XZANGLE, True)
self.add_angle(_('xw'), _('Angle in the XW plane'), fractal.T.XWANGLE, True)
self.add_angle(_('yz'), _('Angle in the YZ plane'), fractal.T.YZANGLE, True)
self.add_angle(_('yw'), _('Angle in the YW plane'), fractal.T.YWANGLE, True)
self.add_angle(_('zw'), _('Angle in the ZW plane'), fractal.T.ZWANGLE, True)
self.toolbar.add_space()
self.add_fourway(_('pan'), _('Pan around the image'), 0, False)
self.add_fourway(_('warp'), _('Mutate the image by moving along the other 2 axes'), 2, True)
self.add_warpmenu(_('Which parameter is being warped'))
self.toolbar.add_space()
self.toolbar.add_button('improve_now', _('Double the maximum number of iterations and tighten periodicity. This will fill in some black areas but increase drawing time'), 'app.ImproveNow')
res_menu = self.create_resolution_menu()
self.toolbar.append(res_menu)
self.toolbar.add_space()
self.toolbar.add_button('edit-undo', _('Undo the last change'), 'app.EditUndoAction')
self.toolbar.add_button('edit-redo', _('Redo the last undone change'), 'app.EditRedoAction')
self.toolbar.add_space()
self.explorer_toolbar_button = self.toolbar.add_toggle('explorer_mode', _('Toggle Explorer Mode'), 'app.ToolsExplorerAction')
self.weirdbox = Gtk.Grid(name='weirdbox', tooltip_text=_('Weirdness'), column_homogeneous=False, row_spacing=5)
self.weirdness_adjustment = Gtk.Adjustment.new(20.0, 0.0, 100.0, 5.0, 5.0, 0.0)
weirdness = Gtk.Scale(adjustment=self.weirdness_adjustment, width_request=120, value_pos=Gtk.PositionType.RIGHT)
shape_label = Gtk.Label(label=_('Shape:'), xalign=0.0)
self.weirdbox.attach(shape_label, 0, 0, 1, 1)
self.weirdbox.attach(weirdness, 1, 0, 1, 1)
self.color_weirdness_adjustment = Gtk.Adjustment.new(20.0, 0.0, 100.0, 5.0, 5.0, 0.0)
color_weirdness = Gtk.Scale(adjustment=self.color_weirdness_adjustment, value_pos=Gtk.PositionType.RIGHT)
color_label = Gtk.Label(label=_('Color:'), xalign=0.0)
self.weirdbox.attach(color_label, 0, 1, 1, 1)
self.weirdbox.attach(color_weirdness, 1, 1, 1, 1)
self.toolbar.append(self.weirdbox)
def on_weirdness_changed(adjustment):
self.update_subfracts()
self.weirdness_adjustment.connect('value-changed', on_weirdness_changed)
self.color_weirdness_adjustment.connect('value-changed', on_weirdness_changed) |
# NOTE(review): the decorator was garbled to ".gui()" in this copy; restored
# as the pytest custom marker form used by GUI test suites -- confirm the
# marker name against the project's pytest configuration.
@pytest.mark.gui()
def test_window_esc_key_pressed(qtbot, temp_settings):
    """Pressing <Esc> inside the capture window emits on_esc_key_pressed."""
    image = QtGui.QImage(600, 400, QtGui.QImage.Format.Format_RGB32)
    screen = models.Screen(device_pixel_ratio=1.0, left=0, top=0, right=600, bottom=400, index=0, screenshot=image)
    win = window.Window(screen=screen, settings=temp_settings, parent=None)
    qtbot.add_widget(win)
    # The signal must fire within 1s of the synthetic key press, else the test fails.
    with qtbot.waitSignal(win.com.on_esc_key_pressed, timeout=1000):
        qtbot.keyPress(win, QtCore.Qt.Key.Key_Escape)
class OptionSeriesPieDataDatalabelsFilter(Options):
    """Filter options for pie-series data labels: a pair of config-backed
    getter/setter properties.

    NOTE(review): in this copy the property decorators had been stripped,
    leaving two bare defs per name so the second silently shadowed the first
    (the getters were unreachable). Restored the standard getter/setter pair.
    """

    @property
    def operator(self):
        # Comparison operator used by the filter; None when unset.
        return self._config_get(None)

    @operator.setter
    def operator(self, value: Any):
        self._config(value, js_type=False)

    @property
    def property(self):
        # Name of the point property the filter applies to; None when unset.
        return self._config_get(None)

    @property.setter
    def property(self, text: str):
        self._config(text, js_type=False)
def __start_flet_server(host, port, upload_dir, web_renderer: Optional[WebRenderer], use_color_emoji, route_url_strategy):
    """Launch a local Fletd web-server process and return its base URL.

    Locates the fletd binary (package dir, then PATH, then download),
    builds its environment from the given options, spawns it detached or
    attached depending on FLET_DETACH_FLETD, and returns the address a
    client should connect to.
    """
    # "*" / empty host: bind broadly but advertise the loopback address to the client.
    server_ip = ('127.0.0.1' if (host in [None, '', '*']) else host)
    if (port == 0):
        port = get_free_tcp_port()
    logger.info(f'Starting local Flet Server on port {port}...')
    fletd_exe = ('fletd.exe' if is_windows() else 'fletd')
    # Prefer the binary bundled with the package, then PATH, then a fresh download.
    fletd_path = os.path.join(get_package_bin_dir(), fletd_exe)
    if os.path.exists(fletd_path):
        logger.info(f'Flet Server found in: {fletd_path}')
    else:
        fletd_path = which(fletd_exe)
        if (not fletd_path):
            fletd_path = __download_fletd()
        else:
            logger.info('Flet Server found in PATH')
    fletd_env = {**os.environ}
    if upload_dir:
        # Relative upload paths are resolved against the running script's directory.
        if (not Path(upload_dir).is_absolute()):
            upload_dir = str(Path(get_current_script_dir()).joinpath(upload_dir).resolve())
        logger.info(f'Upload path configured: {upload_dir}')
        fletd_env['FLET_UPLOAD_ROOT_DIR'] = upload_dir
    if (host not in [None, '']):
        logger.info(f'Host binding configured: {host}')
        # An empty FLET_SERVER_IP tells fletd to bind all interfaces.
        fletd_env['FLET_SERVER_IP'] = (host if (host != '*') else '')
        if (host != '127.0.0.1'):
            fletd_env['FLET_ALLOW_REMOTE_HOST_CLIENTS'] = 'true'
    if (web_renderer and (web_renderer not in [WebRenderer.AUTO])):
        logger.info(f'Web renderer configured: {web_renderer.value}')
        fletd_env['FLET_WEB_RENDERER'] = web_renderer.value
    logger.info(f'Use color emoji: {use_color_emoji}')
    fletd_env['FLET_USE_COLOR_EMOJI'] = str(use_color_emoji).lower()
    if (route_url_strategy is not None):
        logger.info(f'Route URL strategy configured: {route_url_strategy}')
        fletd_env['FLET_ROUTE_URL_STRATEGY'] = route_url_strategy
    web_root_dir = get_package_web_dir()
    if (not os.path.exists(web_root_dir)):
        raise Exception(f'Web root path not found: {web_root_dir}')
    args = [fletd_path, '--content-dir', web_root_dir, '--port', str(port)]
    creationflags = 0
    start_new_session = False
    if (os.getenv('FLET_DETACH_FLETD') is None):
        # Attached mode: fletd exits when this process does.
        args.append('--attached')
    elif is_windows():
        creationflags = subprocess.CREATE_NEW_PROCESS_GROUP
    else:
        start_new_session = True
    log_level = logging.getLogger(flet_runtime.__name__).getEffectiveLevel()
    if (log_level == logging.CRITICAL):
        log_level = logging.FATAL
    if (log_level != logging.NOTSET):
        log_level_name = logging.getLevelName(log_level).lower()
        args.extend(['--log-level', log_level_name])
    startupinfo = None
    if is_windows():
        # Hide the console window of the spawned server on Windows.
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    subprocess.Popen(args, env=fletd_env, creationflags=creationflags, start_new_session=start_new_session, stdout=(subprocess.DEVNULL if (log_level >= logging.WARNING) else None), stderr=(subprocess.DEVNULL if (log_level >= logging.WARNING) else None), startupinfo=startupinfo)
    # NOTE(review): the original return statement was truncated in this copy
    # ("return f'"); reconstructed from server_ip/port, which are otherwise
    # unused past this point -- confirm against upstream.
    return f'http://{server_ip}:{port}'
class EventManager(models.Manager):
    """Manager exposing queries over Event objects."""

    def upcoming(self, upto=None, current_user=None, location=None):
        """Return upcoming, non-canceled events visible to *current_user*.

        Events are ordered by start time. If *location* is given, only
        events there are considered; if *upto* is given, at most that many
        events are returned.
        """
        now = timezone.now()
        logger.debug(now)
        candidates = (
            super().get_queryset()
            .filter(end__gte=now)
            .exclude(status=Event.CANCELED)
            .order_by('start')
        )
        if location:
            candidates = candidates.filter(location=location)
        visible = []
        for event in candidates:
            # Per-event visibility check cannot be expressed in the queryset.
            if not event.is_viewable(current_user):
                continue
            visible.append(event)
            if upto and len(visible) == upto:
                break
        logger.debug(visible)
        return visible

    class Meta():
        app_label = 'gather'
class VideoUploadTransferRequestManager(VideoUploadRequestManager):
    """Handles the 'transfer' phase of a chunked video upload."""

    def send_request(self, context):
        """Upload the video file in chunks until the API reports completion.

        Repeatedly sends [start_offset, end_offset) chunks, following the
        offsets the API returns. On error subcode 1363037 it resumes from
        the offsets in the error payload (bounded by a retry budget); on
        transient errors it sleeps and retries. Returns the last JSON
        response (None if the initial offsets are already equal).
        Raises FacebookRequestError for unrecoverable API errors.
        """
        request = VideoUploadRequest(self._api)
        self._start_offset = context.start_offset
        self._end_offset = context.end_offset
        filepath = context.file_path
        file_size = os.path.getsize(filepath)
        # Roughly one retry per 10 MB of file, but at least 2.
        retry = max((file_size / ((1024 * 1024) * 10)), 2)
        response = None
        # Context manager ensures the file is closed even if an unexpected
        # (non-Facebook) exception escapes the loop; the original leaked it.
        with open(filepath, 'rb') as f:
            while (self._start_offset != self._end_offset):
                f.seek(self._start_offset)
                chunk = f.read((self._end_offset - self._start_offset))
                context.start_offset = self._start_offset
                context.end_offset = self._end_offset
                request.setParams(self.getParamsFromContext(context), {'video_file_chunk': (os.path.basename(context.file_path), chunk, 'multipart/form-data')})
                try:
                    response = request.send((context.account_id, 'advideos')).json()
                    self._start_offset = int(response['start_offset'])
                    self._end_offset = int(response['end_offset'])
                except FacebookRequestError as e:
                    subcode = e.api_error_subcode()
                    body = e.body()
                    if (subcode == 1363037):
                        # API tells us where to resume from; honor it while
                        # the retry budget lasts.
                        if (body and ('error' in body) and ('error_data' in body['error']) and ('start_offset' in body['error']['error_data']) and (retry > 0)):
                            self._start_offset = int(body['error']['error_data']['start_offset'])
                            self._end_offset = int(body['error']['error_data']['end_offset'])
                            retry = max((retry - 1), 0)
                            continue
                    # Guard against a None body (the original crashed here).
                    elif (body and ('error' in body) and ('is_transient' in body['error'])):
                        if body['error']['is_transient']:
                            time.sleep(1)
                            continue
                    raise e
        return response

    def getParamsFromContext(self, context):
        """Build the transfer-phase request parameters from the upload context."""
        return {'upload_phase': 'transfer', 'start_offset': context.start_offset, 'upload_session_id': context.session_id}
# NOTE(review): the decorator was garbled to ".parametrize(" in this copy;
# restored as the standard pytest parametrization form.
@pytest.mark.parametrize('name, expected', [('parameter1', 1.0), ('parameter2', 2.0)])
def test_that_correct_named_parameter_is_fetched_when_name_comes_last(name, expected):
    """A roff 'parameter' tag whose name char follows its data array must
    still be matched to the requested parameter name."""
    content = dedent('roff-asc\n #ROFF file#\n #Creator: Ert#\n tag dimensions\n int nX 1\n int nY 1\n int nZ 1\n endtag\n tag parameter\n array float data 1\n 1.0\n char name "parameter1"\n endtag\n tag parameter\n array float data 1\n 2.0\n char name "parameter2"\n endtag\n tag eof\n endtag\n ')
    values = import_roff(StringIO(content), name)
    assert (values[(0, 0, 0)] == expected)
def test_multiplexer_transform_wildcard(dash_duo):
    """Two wildcard callbacks targeting the same Output must both work
    when MultiplexerTransform is active."""

    def make_callback(i):
        # NOTE(review): the callback decorator was stripped in this copy,
        # leaving a bare (Output, Input) tuple that never registered func;
        # restored as app.callback ('app' resolves via closure at call time).
        @app.callback(Output({'type': 'div', 'id': ALL}, 'children'), Input({'type': f'button{i}', 'id': ALL}, 'n_clicks'))
        def func(n):
            return ([f'Hello from group {i}'] * len(n))

    def make_block(i):
        layout = [html.Button(f'Button 0 in group {i}', id={'type': f'button{i}', 'id': 'x'}), html.Button(f'Button 1 in group {i}', id={'type': f'button{i}', 'id': 'y'}), html.Div(f'Div {i}', id={'type': 'div', 'id': i})]
        make_callback(i)
        return layout
    app = DashProxy(transforms=[MultiplexerTransform()], prevent_initial_callbacks=True)
    app.layout = html.Div((make_block('0') + make_block('1')))
    dash_duo.start_server(app)
    # Each group's button should route its message into the shared divs.
    dash_duo.find_element(_cssid(id='x', type='button0')).click()
    assert (dash_duo.find_element(_cssid(id='0', type='div')).text == 'Hello from group 0')
    dash_duo.find_element(_cssid(id='y', type='button1')).click()
    assert (dash_duo.find_element(_cssid(id='1', type='div')).text == 'Hello from group 1')
class FunctionSpaceData(object):
    """Mesh- and element-dependent data backing a function space.

    Bundles the node set, entity-node maps, DOF offsets and boundary masks
    computed from a mesh topology plus a UFL element, with caching keyed on
    the element's entity DOFs/permutations so equivalent spaces share data.
    """
    # Slots both save memory and define the attribute set compared in __eq__.
    __slots__ = ('real_tensorproduct', 'map_cache', 'entity_node_lists', 'node_set', 'cell_boundary_masks', 'interior_facet_boundary_masks', 'offset', 'offset_quotient', 'extruded', 'mesh', 'global_numbering')

    # NOTE(review): decorator truncated in this copy -- presumably
    # "@PETSc.Log.EventDecorator()"; confirm against the original source.
    .EventDecorator()
    def __init__(self, mesh, ufl_element):
        if (type(ufl_element) is finat.ufl.MixedElement):
            raise ValueError("Can't create FunctionSpace for MixedElement")
        finat_element = create_element(ufl_element)
        real_tensorproduct = eutils.is_real_tensor_product_element(finat_element)
        entity_dofs = finat_element.entity_dofs()
        nodes_per_entity = tuple(mesh.make_dofs_per_plex_entity(entity_dofs))
        # Not every element implements entity permutations; treat as absent.
        try:
            entity_permutations = finat_element.entity_permutations
        except NotImplementedError:
            entity_permutations = None
        # Cache key for numbering/node-set lookups on this mesh.
        key = (nodes_per_entity, real_tensorproduct)
        global_numbering = get_global_numbering(mesh, key)
        node_set = get_node_set(mesh, key)
        edofs_key = entity_dofs_key(entity_dofs)
        eperm_key = (entity_permutations_key(entity_permutations) if entity_permutations else None)
        self.real_tensorproduct = real_tensorproduct
        self.map_cache = get_map_cache(mesh, (edofs_key, real_tensorproduct, eperm_key))
        # DOF offsets only apply to extruded meshes (layered numbering).
        if isinstance(mesh, mesh_mod.ExtrudedMeshTopology):
            self.offset = eutils.calculate_dof_offset(finat_element)
        else:
            self.offset = None
        # Offset quotients additionally require periodic extrusion.
        if (isinstance(mesh, mesh_mod.ExtrudedMeshTopology) and mesh.extruded_periodic):
            self.offset_quotient = eutils.calculate_dof_offset_quotient(finat_element)
        else:
            self.offset_quotient = None
        self.entity_node_lists = get_entity_node_lists(mesh, (edofs_key, real_tensorproduct, eperm_key), entity_dofs, entity_permutations, global_numbering, self.offset)
        self.node_set = node_set
        self.cell_boundary_masks = get_boundary_masks(mesh, (edofs_key, 'cell'), finat_element)
        self.interior_facet_boundary_masks = get_boundary_masks(mesh, (edofs_key, 'interior_facet'), finat_element)
        self.extruded = mesh.cell_set._extruded
        self.mesh = mesh
        self.global_numbering = global_numbering

    def __eq__(self, other):
        # Equal iff every slot attribute is the *same object* (identity,
        # not value equality) -- relies on the caching above.
        if (type(self) is not type(other)):
            return False
        return all(((getattr(self, s) is getattr(other, s)) for s in FunctionSpaceData.__slots__))

    def __ne__(self, other):
        return (not self.__eq__(other))

    def __repr__(self):
        return ('FunctionSpaceData(%r, %r)' % (self.mesh, self.node_set))

    def __str__(self):
        return ('FunctionSpaceData(%s, %s)' % (self.mesh, self.node_set))

    # NOTE(review): decorator truncated in this copy (see __init__).
    .EventDecorator()
    def boundary_nodes(self, V, sub_domain):
        """Return the boundary nodes of V on the given subdomain.

        'bottom'/'top' are only valid on extruded meshes; any other value
        is treated as facet subdomain id(s) or the 'on_boundary' marker.
        """
        if (sub_domain in ['bottom', 'top']):
            if (not V.extruded):
                # NOTE(review): two-arg ValueError -- the '%s' is never
                # interpolated; looks like a logging-style call slipped in.
                raise ValueError("Invalid subdomain '%s' for non-extruded mesh", sub_domain)
            entity_dofs = eutils.flat_entity_dofs(V.finat_element.entity_dofs())
            key = (entity_dofs_key(entity_dofs), sub_domain)
            return get_top_bottom_boundary_nodes(V.mesh(), key, V)
        else:
            if (sub_domain == 'on_boundary'):
                sdkey = sub_domain
            else:
                sdkey = as_tuple(sub_domain)
            key = (entity_dofs_key(V.finat_element.entity_dofs()), sdkey)
            return get_facet_closure_nodes(V.mesh(), key, V)

    # NOTE(review): decorator truncated in this copy (see __init__).
    .EventDecorator()
    def get_map(self, V, entity_set, map_arity, name, offset, offset_quotient):
        """Return (building and caching on first use) the op2.Map from
        entity_set to this space's node set."""
        assert (len(V) == 1), 'get_map should not be called on MixedFunctionSpace'
        entity_node_list = self.entity_node_lists[entity_set]
        val = self.map_cache[entity_set]
        if (val is None):
            val = op2.Map(entity_set, self.node_set, map_arity, entity_node_list, (('%s_' + name) % V.name), offset=offset, offset_quotient=offset_quotient)
            self.map_cache[entity_set] = val
        return val
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.