code stringlengths 281 23.7M |
|---|
class _Menu(QtGui.QMenu):
    """A QMenu that mirrors the contents and state of an action manager.

    The menu rebuilds itself whenever the manager fires 'changed' and keeps
    its enabled/visible/name/icon state in sync with the corresponding
    manager traits.  Call dispose() before discarding the menu so the trait
    observers are detached.
    """

    def __init__(self, manager, parent, controller):
        QtGui.QMenu.__init__(self, parent)
        self._parent = parent
        self._manager = manager
        self._controller = controller
        # Wrappers for the manager's items; rebuilt by refresh().
        self.menu_items = []
        # Populate once, then track subsequent manager changes.
        self.refresh()
        self._manager.observe(self.refresh, 'changed')
        self._manager.observe(self._on_enabled_changed, 'enabled')
        self._manager.observe(self._on_visible_changed, 'visible')
        self._manager.observe(self._on_name_changed, 'name')
        self._manager.observe(self._on_image_changed, 'action:image')
        # Seed the widget state from the manager's current values.
        self.setEnabled(self._manager.enabled)
        self.menuAction().setVisible(self._manager.visible)
        return

    def dispose(self):
        """Detach all trait observers and drop the menu's contents."""
        self._manager.observe(self.refresh, 'changed', remove=True)
        self._manager.observe(self._on_enabled_changed, 'enabled', remove=True)
        self._manager.observe(self._on_visible_changed, 'visible', remove=True)
        self._manager.observe(self._on_name_changed, 'name', remove=True)
        self._manager.observe(self._on_image_changed, 'action:image', remove=True)
        self.clear()

    def clear(self):
        """Dispose every child item wrapper, then clear the Qt menu itself."""
        for item in self.menu_items:
            item.dispose()
        self.menu_items = []
        super().clear()

    def is_empty(self):
        """Return True if the menu currently has no actions."""
        return self.isEmpty()

    def refresh(self, event=None):
        """Rebuild the menu from the manager's groups (observer for 'changed')."""
        self.clear()
        manager = self._manager
        parent = self._parent
        # Tracks the last group that contributed items, so separators are
        # only inserted between non-empty groups.
        previous_non_empty_group = None
        for group in manager.groups:
            previous_non_empty_group = self._add_group(parent, group, previous_non_empty_group)
        self.setEnabled(manager.enabled)

    def show(self, x=None, y=None):
        """Pop the menu up at (x, y), or at the mouse cursor when omitted."""
        if ((x is None) or (y is None)):
            point = QtGui.QCursor.pos()
        else:
            point = QtCore.QPoint(x, y)
        self.popup(point)

    def _on_enabled_changed(self, event):
        # Trait observer: mirror the manager's enabled flag on the widget.
        self.setEnabled(event.new)

    def _on_visible_changed(self, event):
        # Trait observer: mirror the manager's visibility on the menu action.
        self.menuAction().setVisible(event.new)

    def _on_name_changed(self, event):
        # Trait observer: mirror the manager's name as the menu text.
        self.menuAction().setText(event.new)

    def _on_image_changed(self, event):
        # Trait observer: rebuild the menu icon from the new image resource.
        self.menuAction().setIcon(event.new.create_icon())

    def _add_group(self, parent, group, previous_non_empty_group=None):
        """Append one group's items (recursing into sub-groups), inserting
        separators between non-empty groups that request them.

        Returns the group to treat as the 'previous non-empty group' for
        the next call.
        """
        if (len(group.items) > 0):
            if ((previous_non_empty_group is not None) and group.separator):
                # Divide this group from the one added before it.
                self.addSeparator()
            for item in group.items:
                if isinstance(item, Group):
                    if (len(item.items) > 0):
                        # NOTE(review): the recursive result is discarded here;
                        # the sub-group itself becomes the separator anchor
                        # below — confirm this is intentional.
                        self._add_group(parent, item, previous_non_empty_group)
                        if ((previous_non_empty_group is not None) and previous_non_empty_group.separator and item.separator):
                            self.addSeparator()
                        previous_non_empty_group = item
                else:
                    item.add_to_menu(parent, self, self._controller)
            previous_non_empty_group = group
        return previous_non_empty_group
def _get_kwargs(*, client: Client, multipart_data: BodyUploadFile) -> Dict[(str, Any)]:
    """Assemble the httpx keyword arguments for the storage-upload POST."""
    endpoint = '{}/storage/upload'.format(client.base_url)
    request_headers: Dict[(str, str)] = client.get_headers()
    request_cookies: Dict[(str, Any)] = client.get_cookies()
    body = multipart_data.to_multipart()
    return {
        'method': 'post',
        'url': endpoint,
        'headers': request_headers,
        'cookies': request_cookies,
        'timeout': client.get_timeout(),
        'follow_redirects': client.follow_redirects,
        'files': body,
    }
def lambda_handler(event, context):
    """Return the calling user's profile keeping only active subscriptions."""
    cognito_id = event['requestContext']['authorizer']['claims']['sub']
    print('user id', cognito_id)
    user = user_service.get_single_user(cognito_id)
    # Keep only the subscriptions still in the 'subscribed' state.
    user.subscriptions = [s for s in user.subscriptions if (s.status == 'subscribed')]
    print('subscribed ', user)
    return {
        'statusCode': 200,
        'headers': {'Access-Control-Allow-Methods': 'GET,OPTIONS', 'Access-Control-Allow-Origin': '*'},
        'body': json.dumps(asdict(user)),
    }
def b2i(binaryStringIn):
    """Convert a 64-character binary string to a signed 64-bit integer.

    The string is interpreted as a two's-complement bit pattern, so
    '1' * 64 yields -1 and '1' + '0' * 63 yields -2**63.

    Raises:
        ValueError: if the string is not exactly 64 characters, or contains
            characters other than '0'/'1'.
    """
    if (len(binaryStringIn) != 64):
        # Keep the original stdout diagnostics before failing loudly.
        print(('ERROR: Passed string not 64 characters. String length = %s' % len(binaryStringIn)))
        print(("ERROR: String value '%s'" % binaryStringIn))
        raise ValueError('Input strings must be 64 chars long!')
    # Two's-complement decode with the standard library (previously done via
    # bitstring's Bits(bin=...).int): parse as unsigned, then fold values with
    # the sign bit set into the negative range.
    val = int(binaryStringIn, 2)
    if (val >= (1 << 63)):
        val -= (1 << 64)
    return val
class OptionSeriesErrorbarSonificationDefaultspeechoptionsMappingRate(Options):
    """Generated wrapper for the Highcharts
    ``series.errorbar.sonification.defaultSpeechOptions.mapping.rate`` options.

    NOTE(review): every accessor below is defined twice — a getter-style
    ``def`` immediately followed by a setter-style ``def`` of the same name.
    Without ``@property`` / ``@<name>.setter`` decorators the second
    definition silently replaces the first, leaving only the setters.  The
    decorators were most likely lost when this code was extracted — confirm
    against the generator's original output.
    """

    def mapFunction(self):
        # Getter (shadowed by the setter below): current value, default None.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: forwarded verbatim to the option config.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter (shadowed): data property to map to, default None.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter (shadowed): upper mapping bound, default None.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter (shadowed): lower mapping bound, default None.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter (shadowed): unit to map within, default None.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_unnamed_typing_tuple():
    """An unnamed typing.Tuple return maps to positional outputs o0/o1."""
    def z(a: int, b: str) -> typing.Tuple[(int, str)]:
        return (5, 'hello world')
    hints = typing.get_type_hints(z)
    annotation = extract_return_annotation(hints.get('return', None))
    result = transform_variable_map(annotation)
    # int -> simple type 1, str -> simple type 3.
    assert (result['o0'].type.simple == 1)
    assert (result['o1'].type.simple == 3)
class OptionSeriesFunnel3dSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Generated wrapper for the Highcharts
    ``series.funnel3d.sonification.defaultInstrumentOptions.mapping.lowpass.frequency``
    options.

    NOTE(review): every accessor below is defined twice — a getter-style
    ``def`` immediately followed by a setter-style ``def`` of the same name.
    Without ``@property`` / ``@<name>.setter`` decorators the second
    definition silently replaces the first, leaving only the setters.  The
    decorators were most likely lost when this code was extracted — confirm
    against the generator's original output.
    """

    def mapFunction(self):
        # Getter (shadowed by the setter below): current value, default None.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: forwarded verbatim to the option config.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter (shadowed): data property to map to, default None.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter (shadowed): upper mapping bound, default None.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter (shadowed): lower mapping bound, default None.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter (shadowed): unit to map within, default None.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def parse_info(wininfo_name, egginfo_name):
    """Extract name/version/arch/pyver from a wininst installer filename.

    When *egginfo_name* is provided it overrides the name and version.
    Raises ValueError if either filename is malformed.
    """
    egginfo = egg_info_re.search(egginfo_name) if egginfo_name else None
    if egginfo_name and egginfo is None:
        raise ValueError('Egg info filename %s is not valid' % (egginfo_name,))
    dist_name, sep, remainder = wininfo_name.partition('-')
    if not sep:
        raise ValueError('Installer filename %s is not valid' % (wininfo_name,))
    # Drop the trailing '.exe' extension.
    remainder = remainder[:-4]
    before_tag, sep, pyver_tag = remainder.rpartition('-')
    if sep and pyver_tag.startswith('py'):
        # Trailing '-pyX.Y' component: normalise 'py2.7' -> 'py27'.
        remainder = before_tag
        pyver_tag = pyver_tag.replace('.', '')
    else:
        # No python tag present: assume a universal installer.
        pyver_tag = 'py2.py3'
    version, sep, arch = remainder.rpartition('.')
    if not sep:
        raise ValueError('Installer filename %s is not valid' % (wininfo_name,))
    if egginfo:
        dist_name = egginfo.group('name')
        version = egginfo.group('ver')
    return {'name': dist_name, 'ver': version, 'arch': arch, 'pyver': pyver_tag}
class Luhn():
    """Luhn checksum validator for card-number strings.

    Spaces are ignored; any other non-digit character, or fewer than two
    digits, makes the number invalid.
    """

    def __init__(self, card_num):
        self.card_num = card_num
        # -1 marks an un-checkable input; (-1 % 10) != 0, so valid() is False.
        self.checksum = (- 1)
        stripped = card_num.replace(' ', '')
        size = len(stripped)
        if stripped.isdigit() and (size > 1):
            parity = size % 2
            total = 0
            for position, ch in enumerate(stripped):
                value = int(ch)
                # Double every second digit counting from the right; digits
                # above 9 are reduced by 9 (equivalent to summing digits).
                if (position % 2) == parity:
                    value *= 2
                    if value > 9:
                        value -= 9
                total += value
            self.checksum = total

    def valid(self):
        """Return True when the Luhn checksum is a multiple of ten."""
        return (self.checksum % 10) == 0
class ListEventTestCase(unittest.TestCase):
    """Exercises the change-event protocol of MyClass's observable list ``l``.

    Each mutation of ``foo.l`` is expected to append exactly one event to
    ``foo.l_events`` with ``added``, ``removed`` and ``index`` attributes
    (``index`` is an int for contiguous changes and a slice object for
    stepped slice operations).  Mutations that change nothing must emit no
    event at all.  The list starts as [1, 2, 3].
    """

    def test_initialization(self):
        foo = MyClass()
        self.assertEqual(foo.l, [1, 2, 3])
        self.assertEqual(len(foo.l_events), 0)

    def test_append(self):
        foo = MyClass()
        foo.l.append(4)
        self.assertEqual(foo.l, [1, 2, 3, 4])
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.added, [4])
        self.assertEqual(event.removed, [])
        self.assertEqual(event.index, 3)

    def test_extend(self):
        foo = MyClass()
        foo.l.extend([4, 5, 6])
        self.assertEqual(foo.l, [1, 2, 3, 4, 5, 6])
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.added, [4, 5, 6])
        self.assertEqual(event.removed, [])
        self.assertEqual(event.index, 3)

    def test_extend_via_inplace_addition(self):
        # += must behave exactly like extend().
        foo = MyClass()
        foo.l += [4, 5, 6]
        self.assertEqual(foo.l, [1, 2, 3, 4, 5, 6])
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.added, [4, 5, 6])
        self.assertEqual(event.removed, [])
        self.assertEqual(event.index, 3)

    def test_insert(self):
        foo = MyClass()
        foo.l.insert(1, 99)
        self.assertEqual(foo.l, [1, 99, 2, 3])
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.added, [99])
        self.assertEqual(event.removed, [])
        self.assertEqual(event.index, 1)

    def test_insert_with_negative_argument(self):
        # Negative insert positions must be reported as normalised indices.
        foo = MyClass()
        foo.l.insert((- 1), 99)
        self.assertEqual(foo.l, [1, 2, 99, 3])
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.added, [99])
        self.assertEqual(event.removed, [])
        self.assertEqual(event.index, 2)

    def test_insert_index_invariants(self):
        # For any insert position (even out-of-range ones, which list.insert
        # clamps), the reported index must be non-negative and point at the
        # inserted element.
        for index in range((- 10), 10):
            foo = MyClass()
            foo.l.insert(index, 1729)
            self.assertEqual(len(foo.l_events), 1)
            event = foo.l_events[0]
            self.assertEqual(event.added, [1729])
            self.assertEqual(event.removed, [])
            self.assertGreaterEqual(event.index, 0)
            self.assertEqual(foo.l[event.index], 1729)

    def test_pop_with_no_argument(self):
        foo = MyClass()
        item = foo.l.pop()
        self.assertEqual(item, 3)
        self.assertEqual(foo.l, [1, 2])
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.added, [])
        self.assertEqual(event.removed, [3])
        self.assertEqual(event.index, 2)

    def test_pop(self):
        foo = MyClass()
        item = foo.l.pop(0)
        self.assertEqual(item, 1)
        self.assertEqual(foo.l, [2, 3])
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.added, [])
        self.assertEqual(event.removed, [1])
        self.assertEqual(event.index, 0)

    def test_pop_with_negative_argument(self):
        # Negative pop positions must also be normalised in the event.
        foo = MyClass()
        item = foo.l.pop((- 2))
        self.assertEqual(item, 2)
        self.assertEqual(foo.l, [1, 3])
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.added, [])
        self.assertEqual(event.removed, [2])
        self.assertEqual(event.index, 1)

    def test_pop_out_of_range(self):
        # A failing pop must leave the list untouched and emit no event.
        foo = MyClass()
        with self.assertRaises(IndexError):
            foo.l.pop((- 4))
        with self.assertRaises(IndexError):
            foo.l.pop(3)
        self.assertEqual(foo.l, [1, 2, 3])
        self.assertEqual(len(foo.l_events), 0)

    def test_remove(self):
        foo = MyClass()
        foo.l.remove(2)
        self.assertEqual(foo.l, [1, 3])
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.added, [])
        self.assertEqual(event.removed, [2])
        self.assertEqual(event.index, 1)

    def test_remove_item_not_present(self):
        # A failing remove must leave the list untouched and emit no event.
        foo = MyClass()
        with self.assertRaises(ValueError):
            foo.l.remove(1729)
        self.assertEqual(foo.l, [1, 2, 3])
        self.assertEqual(len(foo.l_events), 0)

    def test_inplace_multiply(self):
        # *= n reports only the appended copies, anchored at the old length.
        foo = MyClass()
        foo.l *= 2
        self.assertEqual(foo.l, [1, 2, 3, 1, 2, 3])
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.added, [1, 2, 3])
        self.assertEqual(event.removed, [])
        self.assertEqual(event.index, 3)

    def test_inplace_multiply_by_zero(self):
        # *= 0 empties the list and reports everything as removed.
        foo = MyClass()
        foo.l *= 0
        self.assertEqual(foo.l, [])
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.added, [])
        self.assertEqual(event.removed, [1, 2, 3])
        self.assertEqual(event.index, 0)

    def test_remove_empty_slices(self):
        foo = MyClass()
        # Deleting an empty tail slice is a no-op: no event.
        del foo.l[3:]
        self.assertEqual(foo.l, [1, 2, 3])
        self.assertEqual(len(foo.l_events), 0)
        # Deleting the full slice removes everything in one event.
        del foo.l[:]
        self.assertEqual(foo.l, [])
        self.assertEqual(len(foo.l_events), 1)
        (event,) = foo.l_events
        self.assertEqual(event.added, [])
        self.assertEqual(event.removed, [1, 2, 3])
        self.assertEqual(event.index, 0)
        # Deleting the (now empty) full slice again emits nothing new.
        del foo.l[:]
        self.assertEqual(foo.l, [])
        self.assertEqual(len(foo.l_events), 1)

    def test_remove_empty_slices_steps(self):
        # An empty stepped delete is a no-op: no event.
        foo = MyClass()
        del foo.l[3::2]
        self.assertEqual(foo.l, [1, 2, 3])
        self.assertEqual(len(foo.l_events), 0)

    def test_clear(self):
        foo = MyClass()
        foo.l.clear()
        self.assertEqual(len(foo.l_events), 1)
        event = foo.l_events[0]
        self.assertEqual(event.index, 0)
        self.assertEqual(event.removed, [1, 2, 3])
        self.assertEqual(event.added, [])

    def test_clear_empty_list(self):
        # Clearing an already-empty list emits no event.
        foo = MyClass()
        foo.l = []
        foo.l.clear()
        self.assertEqual(len(foo.l_events), 0)

    def test_delete_step_slice(self):
        # Stepped slice deletes report the slice object itself as the index.
        foo = MyClass()
        foo.l = [0, 1, 2, 3, 4]
        del foo.l[0:5:2]
        self.assertEqual(len(foo.l_events), 1)
        (event,) = foo.l_events
        self.assertEqual(event.index, slice(0, 5, 2))
        self.assertEqual(event.removed, [0, 2, 4])
        self.assertEqual(event.added, [])

    def test_delete_step_slice_empty_list(self):
        foo = MyClass()
        foo.l = []
        del foo.l[::(- 1)]
        self.assertEqual(len(foo.l_events), 0)

    def test_assignment_step_slice(self):
        # Stepped slice assignment also reports a slice index, with the new
        # values in ``added`` and the displaced ones in ``removed``.
        foo = MyClass()
        foo.l = [1, 2, 3]
        foo.l[::2] = [3, 4]
        self.assertEqual(len(foo.l_events), 1)
        (event,) = foo.l_events
        self.assertEqual(event.index, slice(0, 3, 2))
        self.assertEqual(event.added, [3, 4])
        self.assertEqual(event.removed, [1, 3])
class Instance(atom):
    """AST atom representing an instantiation: ``name(args)``.

    Attributes:
        name: identifier of the thing being instantiated.
        args: positional argument nodes (always stored as a fresh list).
        lineno / col_offset: source location, coerced to int.
    """
    _fields = ('name', 'args')
    _attributes = ('lineno', 'col_offset')

    def __init__(self, name, args=(), lineno=0, col_offset=0, **ARGS):
        # Fix: args previously defaulted to a mutable [] (shared across
        # calls); an immutable () default is behaviour-identical because the
        # value is copied into a fresh list below either way.
        atom.__init__(self, **ARGS)
        self.name = name
        self.args = list(args)
        self.lineno = int(lineno)
        self.col_offset = int(col_offset)
def test_custom_graph_data(dashboard_user, custom_graph, custom_graph_data):
    """The custom-graph endpoint returns the stored datapoint for the last day."""
    end = datetime.utcnow()
    start = end - timedelta(days=1)
    url = 'dashboard/api/custom_graph/{id}/{start}/{end}'.format(
        id=custom_graph.graph_id,
        start=start.strftime('%Y-%m-%d'),
        end=end.strftime('%Y-%m-%d'),
    )
    response = dashboard_user.get(url)
    assert (response.status_code == 200)
    # Exactly one datapoint is expected; everything is serialised as strings.
    [data] = response.json
    assert (data['graph_id'] == str(custom_graph.graph_id))
    assert (data['id'] == str(custom_graph_data.id))
    assert (data['time'] == str(custom_graph_data.time))
    assert (data['value'] == str(custom_graph_data.value))
def get_task_results(container):
    """Collect the JSON task results stored inside *container*.

    Copies /srv/celery-results out of the container into a scratch
    directory, parses every file found there as JSON, and returns the
    parsed objects as a list.
    """
    collected = []
    with tempfile.TemporaryDirectory() as scratch:
        container.copy_from('/srv/celery-results', scratch)
        for dirpath, _dirnames, filenames in os.walk(scratch):
            for name in filenames:
                with open(os.path.join(dirpath, name)) as handle:
                    collected.append(json.load(handle))
    return collected
class TestConfigValidatorComputeZone():
    """E2E check: the Config Validator compute-zone denylist scanner flags
    the Forseti server VM exactly once.

    NOTE(review): the three bare ``.e2e`` / ``.scanner`` / ``.server`` lines
    below are not valid Python — they read like ``@pytest.mark.*`` decorators
    whose ``@pytest.mark`` prefix was lost in extraction; restore the
    decorators before running this test.
    """
    .e2e
    .scanner
    .server
    def test_cv_compute_zone(self, cloudsql_connection, forseti_scan_readonly, forseti_server_vm_name):
        # Scanner index id produced by the shared read-only scan fixture.
        (scanner_id, scanner_result) = forseti_scan_readonly
        violation_type = 'CV_GCPComputeZoneConstraintV1.compute-zone-denylist'
        query = text("SELECT COUNT(*) FROM forseti_security.violations WHERE scanner_index_id = :scanner_id AND resource_id = :forseti_server_vm_name AND resource_type = 'compute.googleapis.com/Instance' AND violation_type = :violation_type")
        violation_count = cloudsql_connection.execute(query, forseti_server_vm_name=forseti_server_vm_name, scanner_id=scanner_id, violation_type=violation_type).fetchone()
        # Exactly one violation row is expected for the server VM instance.
        assert (1 == violation_count[0])
def bar_chart():
    """Build a four-fruit demo bar chart wrapped in a padded container."""
    def rod(to_y, color, tooltip):
        # All rods share the same width/origin/corner styling.
        return ft.BarChartRod(from_y=0, to_y=to_y, width=40, color=color, tooltip=tooltip, border_radius=0)

    groups = [
        ft.BarChartGroup(x=0, bar_rods=[rod(40, ft.colors.AMBER, 'Apple')]),
        ft.BarChartGroup(x=1, bar_rods=[rod(100, ft.colors.BLUE, 'Blueberry')]),
        ft.BarChartGroup(x=2, bar_rods=[rod(30, ft.colors.RED, 'Cherry')]),
        ft.BarChartGroup(x=3, bar_rods=[rod(60, ft.colors.ORANGE, 'Orange')]),
    ]
    bottom_labels = [
        ft.ChartAxisLabel(value=0, label=ft.Container(ft.Text('Apple'), padding=10)),
        ft.ChartAxisLabel(value=1, label=ft.Container(ft.Text('Blueberry'), padding=10)),
        ft.ChartAxisLabel(value=2, label=ft.Container(ft.Text('Cherry'), padding=10)),
        ft.ChartAxisLabel(value=3, label=ft.Container(ft.Text('Orange'), padding=10)),
    ]
    chart = ft.BarChart(
        bar_groups=groups,
        border=ft.border.all(1, ft.colors.GREY_400),
        left_axis=ft.ChartAxis(labels_size=40, title=ft.Text('Fruit supply'), title_size=40),
        bottom_axis=ft.ChartAxis(labels=bottom_labels, labels_size=40),
        horizontal_grid_lines=ft.ChartGridLines(color=ft.colors.GREY_300, width=1, dash_pattern=[3, 3]),
        tooltip_bgcolor=ft.colors.with_opacity(0.5, ft.colors.GREY_300),
        max_y=110,
        interactive=True,
        aspect_ratio=1,
    )
    return ft.Column(controls=[ft.Container(content=chart, padding=10)])
class RemoveElementwiseNoOpsIntegrationTest(unittest.TestCase):
    """Integration test: identity elementwise ops (x+0, x-0, x*1, x/1) are
    removed from the compiled graph, and the compiled output still matches
    PyTorch numerics."""

    def __init__(self, *args, **kwargs):
        super(RemoveElementwiseNoOpsIntegrationTest, self).__init__(*args, **kwargs)
        torch.manual_seed(0)
        # Dynamic batch-dimension extremes exercised at runtime.
        self.BATCH_SIZES = [1, 218]
        self.M = 10

    def test_remove_elementwise_op(self) -> None:
        # Pairs of (graph-builder no-op, PyTorch reference op).
        # NOTE(review): the DIV case's reference lambda is (x * 1), not
        # (x / 1) — equivalent numerically, but confirm it is intentional.
        test_cases = [((lambda x: ops.elementwise(FuncEnum.ADD)(x, 0)), (lambda x: (x + 0))), ((lambda x: ops.elementwise(FuncEnum.SUB)(x, 0)), (lambda x: (x - 0))), ((lambda x: ops.elementwise(FuncEnum.MUL)(x, 1)), (lambda x: (x * 1))), ((lambda x: ops.elementwise(FuncEnum.DIV)(x, 1)), (lambda x: (x * 1)))]
        for (test_no, test) in enumerate(test_cases):
            with self.subTest(test_no=test_no):
                self._test_remove_elementwise_no_ops_impl(elementwise_op_getter=test[0], expected_op=test[1])

    def _test_remove_elementwise_no_ops_impl(self, elementwise_op_getter: Callable[([Tensor], Tensor)], expected_op: Callable[([Tensor], Tensor)]):
        """Compile mean(no_op(x)) and verify the no-op was elided and the
        runtime result matches the PyTorch reference."""
        dtype = 'float16'
        batch_dim = shape_utils.gen_int_var_min_max(self.BATCH_SIZES, 'batch_0')
        reduce_dim = 0
        X0 = gen_input_tensor([batch_dim, IntImm(self.M)], name='x0', dtype=dtype)
        elementwise_op_0 = elementwise_op_getter(X0)
        Y = ops.reduce_mean(reduce_dim)(elementwise_op_0)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, detect_target(), './tmp', 'test_remove_elementwise_no_ops')
        sorted_graph = module.debug_sorted_graph
        sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
        # Only the reduce_mean should survive; the no-op must be removed.
        self.assertEqual(len(sorted_ops), 1)
        for batch in self.BATCH_SIZES:
            x0_pt = get_random_torch_tensor([batch, self.M], dtype)
            add_0_pt = expected_op(x0_pt)
            y_pt = torch.mean(add_0_pt, dim=reduce_dim)
            y = get_torch_empty_tensor(y_pt.size(), dtype)
            inputs = {'x0': x0_pt}
            module.run_with_tensors(inputs, [y])
            # Loose tolerances: float16 accumulation.
            torch.testing.assert_close(y_pt, y, atol=0.01, rtol=0.01)
class TagInline(admin.TabularInline):
    """Reusable tag inline for the admin; subclasses must set ``model`` and
    ``related_field`` to wire the tags to their owning object."""
    model = None
    verbose_name = 'Tag'
    verbose_name_plural = 'Tags'
    form = InlineTagForm
    formset = TagFormSet
    related_field = None
    extra = 0

    def get_formset(self, request, obj=None, **kwargs):
        """Return the base formset subclassed with this inline's related_field."""
        base_formset = super().get_formset(request, obj, **kwargs)

        class ProxyFormset(base_formset):
            # Carry the inline's related_field on the formset class so the
            # formset can reach it without a back-reference to the inline.
            related_field = self.related_field

        return ProxyFormset
def test_edit_session_only_state(db, client, user, jwt):
    """An admin can PATCH only a session's state (to 'withdrawn')."""
    user.is_admin = True
    session = get_simple_custom_form_session(db, user)
    payload = {'data': {'type': 'session', 'id': str(session.id), 'attributes': {'state': 'withdrawn'}}}
    response = client.patch(
        f'/v1/sessions/{session.id}',
        content_type='application/vnd.api+json',
        headers=jwt,
        data=json.dumps(payload),
    )
    # Re-read the row so the assertion sees the persisted state.
    db.session.refresh(session)
    assert (response.status_code == 200)
    assert (session.state == 'withdrawn')
class _COSERVERINFO(Structure):
    """ctypes mirror of the Win32 COSERVERINFO structure, used to identify a
    remote machine (and its auth settings) for COM activation."""
    _fields_ = [('dwReserved1', c_ulong), ('pwszName', c_wchar_p), ('pAuthInfo', POINTER(_COAUTHINFO)), ('dwReserved2', c_ulong)]
    if TYPE_CHECKING:
        # Static-typing stand-ins for the attributes ctypes generates from
        # _fields_ at runtime; never executed.
        dwReserved1 = hints.AnnoField()
        pwszName = hints.AnnoField()
        pAuthInfo = hints.AnnoField()
        dwReserved2 = hints.AnnoField()
class DiagonalTensor(UnaryOp):
    """Symbolic diagonal of a rank-2 square tensor expression."""
    diagonal = True

    def __init__(self, A):
        """Wrap *A*; it must be rank 2 and square."""
        assert (A.rank == 2), 'The tensor must be rank 2.'
        assert (A.shape[0] == A.shape[1]), 'The diagonal can only be computed on square tensors.'
        super(DiagonalTensor, self).__init__(A)

    # Fix: the original had a bare statement ``_property`` here — a mangled
    # ``@property`` decorator that would raise NameError at class creation
    # and leave arg_function_spaces as an ordinary method.
    @property
    def arg_function_spaces(self):
        """Function spaces of the operand's arguments, in order."""
        (tensor,) = self.operands
        return tuple((arg.function_space() for arg in tensor.arguments()))

    def arguments(self):
        """Delegate to the single operand's arguments."""
        (tensor,) = self.operands
        return tensor.arguments()

    def _output_string(self, prec=None):
        """Pretty-print as '(<operand>).diag'."""
        (tensor,) = self.operands
        return ('(%s).diag' % tensor)
def main():
    """Check (with --check) or add the license notice atop every tracked .py file."""
    parser = ArgumentParser(description=' Add license notice to every source file if not present.')
    parser.add_argument('--check', action='store_true', dest='check', default=False, help=CHECK_HELP)
    args = parser.parse_args()
    gitignore = get_gitignore(Path('.'))
    # All tracked .py files: skip hidden paths and anything gitignored.
    python_files = [
        path
        for path in Path('.').glob('**/*.py')
        if (not str(path).startswith('.'))
        if (not gitignore.match_file(path))
    ]
    missing_notice_files = [pyfile for pyfile in python_files if (not pyfile.read_text().startswith(NOTICE))]
    if args.check:
        # Report-only mode: exit non-zero when any file lacks the notice.
        if not missing_notice_files:
            print('All source files have the license notice! ')
            sys.exit(0)
        print('License notice is missing in some source files! ')
        for pyfile in missing_notice_files:
            print(f' {pyfile}')
        sys.exit(1)
    # Fix mode: prepend the notice to every file that was missing it.
    print('Successfully added license notice to:')
    for pyfile in missing_notice_files:
        pyfile.write_text('\n'.join([NOTICE, pyfile.read_text()]))
        print(f' {pyfile}')
    sys.exit(0)
class OptionPlotoptionsScatterStatesHoverHalo(Options):
    """Generated wrapper for the Highcharts
    ``plotOptions.scatter.states.hover.halo`` options.

    NOTE(review): each accessor is defined twice (getter-style then
    setter-style).  Without ``@property`` / ``@<name>.setter`` decorators the
    second def replaces the first, leaving only the setters; the decorators
    were probably lost in extraction — confirm against the generator output.
    """

    def attributes(self):
        # Getter (shadowed by the setter below): SVG attributes, default None.
        return self._config_get(None)
    def attributes(self, value: Any):
        self._config(value, js_type=False)
    def opacity(self):
        # Getter (shadowed): halo opacity, Highcharts default 0.25.
        return self._config_get(0.25)
    def opacity(self, num: float):
        self._config(num, js_type=False)
    def size(self):
        # Getter (shadowed): halo size in pixels, Highcharts default 10.
        return self._config_get(10)
    def size(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): the bare '.django_db' below is not valid Python — it reads
# like a '@pytest.mark.django_db' decorator whose '@pytest.mark' prefix was
# lost in extraction; restore the decorator before running.
.django_db
def test_match_from_ata_tas(client, monkeypatch, elasticsearch_award_index, subaward_with_ata_tas):
    """A subaward filtered by its ATA TAS path is returned by the TAS query."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    resp = query_by_tas_subaward(client, {'require': [_tas_path(ATA_TAS)]})
    # Exactly the first fixture subaward should match.
    assert (resp.json()['results'] == [_subaward1()])
def install():
    """(Re)install Xtream Codes from the tarball at ``rDownloadURL``.

    When an existing install is present, kills the running php/nginx/ffmpeg
    processes and clears temp/stream state first.  Downloads the archive to
    /tmp and unpacks it over /home/xtreamcodes.

    Returns:
        True when the archive downloaded and was extracted, False otherwise.
    """
    global rDownloadURL
    rURL = rDownloadURL
    if os.path.exists('/home/xtreamcodes'):
        # The '[p]hp'-style bracket patterns stop grep from matching its own
        # process in the ps output.
        os.system("kill $(ps aux | grep '[p]hp' | awk '{print $2}')")
        # Fix: the pattern was '[n]nginx', which matches the literal text
        # 'nnginx' and therefore never killed nginx.
        os.system("kill $(ps aux | grep '[n]ginx' | awk '{print $2}')")
        os.system("kill $(ps aux | grep '[f]fmpeg' | awk '{print $2}')")
        # Drop the immutable flag so the GeoIP database can be replaced.
        os.system('chattr -i /home/xtreamcodes/iptv_xtream_codes/GeoLite2.mmdb > /dev/null')
        os.system('rm -rf /home/xtreamcodes/tmp/* > /dev/null')
        os.system('rm -rf /home/xtreamcodes/streams/* > /dev/null')
    os.system(('wget -q -O "/tmp/xtreamcodes.tar.gz" "%s"' % rURL))
    if os.path.exists('/tmp/xtreamcodes.tar.gz'):
        os.system('tar -zxvf "/tmp/xtreamcodes.tar.gz" -C "/home/xtreamcodes/" > /dev/null')
        try:
            os.remove('/tmp/xtreamcodes.tar.gz')
        except OSError:
            # Best-effort cleanup (narrowed from a bare except).
            pass
        return True
    return False
class MultiToolbarWindow(ApplicationWindow):
    """An application window that can host several toolbars docked at the
    top, bottom, left or right edge of the content area.

    Toolbars are registered via add_tool_bar() before the window contents
    are created; each toolbar nests the next one's sizer inside its own.
    """

    # The toolbars to add, in registration (and creation) order.
    _tool_bar_managers = List(Instance(ToolBarManager))
    # The edge each toolbar docks to.
    _tool_bar_locations = Dict(Instance(ToolBarManager), Enum('top', 'bottom', 'left', 'right'))

    def _create_contents(self, parent):
        panel = super()._create_contents(parent)
        self._create_trim_widgets(parent)
        return panel

    def _create_trim_widgets(self, parent):
        # Standard window trim plus this window's extra toolbars.
        self._set_window_icon()
        self._create_menu_bar(parent)
        self._create_status_bar(parent)
        self.sizer = self._create_tool_bars(parent)

    def _create_tool_bars(self, parent):
        """Create every registered toolbar.

        Returns the innermost sizer (where the window content should go),
        or None when no toolbars are registered.
        """
        if (len(self._tool_bar_managers) > 0):
            self.main_sizer = sizer = wx.BoxSizer(wx.VERTICAL)
            parent.SetSizer(sizer)
            parent.SetAutoLayout(True)
            for tool_bar_manager in self._tool_bar_managers:
                location = self._tool_bar_locations[tool_bar_manager]
                # Each call returns the sizer the *next* toolbar (or the
                # content) should be placed into.
                sizer = self._create_tool_bar(parent, sizer, tool_bar_manager, location)
            return sizer
        return None

    def _create_tool_bar(self, parent, sizer, tool_bar_manager, location):
        """Add one toolbar docked at *location* inside *sizer*.

        Returns the child sizer that should receive subsequent content.
        NOTE(review): the four branches are independent ``if``s sharing
        ``child_sizer``; exactly one is expected to run per call.
        """
        tool_bar = tool_bar_manager.create_tool_bar(parent)
        if (location == 'top'):
            child_sizer = wx.BoxSizer(wx.VERTICAL)
            child_sizer.Add(tool_bar, 0, ((wx.ALL | wx.ALIGN_LEFT) | wx.EXPAND))
            sizer.Add(child_sizer, 1, (wx.ALL | wx.EXPAND))
        if (location == 'bottom'):
            # Spacer goes in first so the toolbar ends up below the content.
            toolbar_sizer = wx.BoxSizer(wx.VERTICAL)
            child_sizer = self._create_content_spacer(toolbar_sizer)
            toolbar_sizer.Add(tool_bar, 0, ((wx.ALL | wx.ALIGN_TOP) | wx.EXPAND))
            sizer.Add(toolbar_sizer, 1, (wx.ALL | wx.EXPAND))
        if (location == 'left'):
            child_sizer = wx.BoxSizer(wx.HORIZONTAL)
            child_sizer.Add(tool_bar, 0, ((wx.ALL | wx.ALIGN_TOP) | wx.EXPAND))
            sizer.Add(child_sizer, 1, (wx.ALL | wx.EXPAND))
        if (location == 'right'):
            # Spacer goes in first so the toolbar ends up right of the content.
            toolbar_sizer = wx.BoxSizer(wx.HORIZONTAL)
            child_sizer = self._create_content_spacer(toolbar_sizer)
            toolbar_sizer.Add(tool_bar, 0, ((wx.ALL | wx.ALIGN_TOP) | wx.EXPAND))
            sizer.Add(toolbar_sizer, 1, (wx.ALL | wx.EXPAND))
        return child_sizer

    def _create_content_spacer(self, sizer):
        # Placeholder sizer that the window content will later fill.
        spacer = wx.BoxSizer(wx.VERTICAL)
        sizer.Add(spacer, 1, (wx.ALL | wx.EXPAND))
        return spacer

    def add_tool_bar(self, tool_bar_manager, location='top'):
        """Register *tool_bar_manager* to be docked at *location*.

        Must be called before the window's contents are created.
        """
        self._tool_bar_managers.append(tool_bar_manager)
        self._tool_bar_locations[tool_bar_manager] = location
def upgrade():
    """Create user_favourite_sessions (FKs to sessions/users, unique per pair)."""
    columns = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('session_id', sa.Integer(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('modified_at', sa.DateTime(timezone=True), nullable=True),
    ]
    op.create_table(
        'user_favourite_sessions',
        *columns,
        sa.ForeignKeyConstraint(['session_id'], ['sessions.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
    )
    # Each user may favourite a given session at most once.
    op.create_unique_constraint('uq_session_user', 'user_favourite_sessions', ['session_id', 'user_id'])
class European(GameItem):
    """Game item 'european': usable only outside the THBattleRole mode, and
    only when no other player already holds one in the same game."""
    key = 'european'
    title = ''
    description = 'Roll'

    def __init__(self):
        # Intentionally empty: this item carries no per-instance state.
        # NOTE(review): GameItem.__init__ is not called — confirm the base
        # class tolerates that.
        pass

    def should_usable(self, core: ServerCore, g: ServerGame, u: Client):
        """Raise if this item cannot be used in game *g* (never returns a value)."""
        cls = self.__class__
        from thb.thbrole import THBattleRole
        # Not allowed in role mode.
        if isinstance(g, THBattleRole):
            raise exceptions.IncorrectGameMode
        # Only one 'european' item per game, across all players.
        items = core.item.items_of(g)
        for (pid, l) in items.items():
            if any((isinstance(i, cls) for i in l)):
                raise exceptions.EuropeanConflict

    # NOTE(review): takes ``cls`` as its first parameter but carries no
    # ``@classmethod`` decorator — the decorator was likely lost in
    # extraction; as written it only works when called on the class with an
    # explicit cls argument.  Confirm against the original source.
    def get_european(cls, g: Game, items: Dict[(Player, List[GameItem])]) -> Optional[Player]:
        """Return the first player holding a *cls* item, or None."""
        for (p, l) in items.items():
            for i in l:
                if isinstance(i, cls):
                    return p
        return None
def test_curr_score():
    """PopularityBias over score-type recommendations yields known metrics."""
    curr = pd.DataFrame({
        'user_id': [1, 2, 2, 3, 3],
        'item_id': [3, 2, 3, 1, 2],
        'prediction': [3, 3, 2, 3, 2],
    })
    train = pd.DataFrame({'user_id': [1, 1, 2, 3], 'item_id': [1, 2, 1, 1]})
    metric = PopularityBias(k=3)
    report = Report(metrics=[metric])
    mapping = ColumnMapping(recommendations_type=RecomType.SCORE)
    report.run(
        reference_data=None,
        current_data=curr,
        column_mapping=mapping,
        additional_data={'current_train_data': train},
    )
    outcome = metric.get_result()
    assert (outcome.current_apr == 1.5)
    assert (outcome.current_coverage == 1.0)
    assert (outcome.current_gini == 0.2)
def test_angle2azimuth():
    """angle2azimuth converts trig angles to azimuths, in degrees and radians."""
    # Degrees (default mode).
    assert (xcalc.angle2azimuth(30) == 60.0)
    # Radians mode: same arithmetic form as the implementation so the float
    # comparison stays exact.
    angle_rad = ((30 * math.pi) / 180)
    expected_rad = ((60 * math.pi) / 180)
    assert (xcalc.angle2azimuth(angle_rad, mode='radians') == expected_rad)
    # Negative angles wrap into [0, 360).
    assert (xcalc.angle2azimuth((- 30)) == 120.0)
    assert (xcalc.angle2azimuth((- 300)) == 30)
class OptionPlotoptionsArearangeSonificationDefaultinstrumentoptionsMappingVolume(Options):
    """Generated wrapper for the Highcharts
    ``plotOptions.arearange.sonification.defaultInstrumentOptions.mapping.volume``
    options.

    NOTE(review): every accessor below is defined twice — a getter-style
    ``def`` immediately followed by a setter-style ``def`` of the same name.
    Without ``@property`` / ``@<name>.setter`` decorators the second
    definition silently replaces the first, leaving only the setters.  The
    decorators were most likely lost when this code was extracted — confirm
    against the generator's original output.
    """

    def mapFunction(self):
        # Getter (shadowed by the setter below): current value, default None.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: forwarded verbatim to the option config.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter (shadowed): data property to map to, default None.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter (shadowed): upper mapping bound, default None.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter (shadowed): lower mapping bound, default None.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter (shadowed): unit to map within, default None.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def build_exp(**kwargs):
    """Construct the expression node named by ``kwargs['t']``.

    All keyword arguments (including 't' itself) are forwarded to the
    matching expression constructor; 'Var' delegates to
    ``bap.vars.build_var``.

    Raises:
        KeyError: if 't' is missing from kwargs.
        ValueError: for an unrecognised tag (previously an unknown tag
            silently returned None, hiding parser drift).
    """
    t = kwargs['t']
    if (t == 'Load'):
        return LoadExp(**kwargs)
    elif (t == 'Store'):
        return StoreExp(**kwargs)
    elif (t == 'BinOp'):
        return BinOpExp(**kwargs)
    elif (t == 'UnOp'):
        return UnOpExp(**kwargs)
    elif (t == 'Int'):
        return IntExp(**kwargs)
    elif (t == 'Cast'):
        return CastExp(**kwargs)
    elif (t == 'Let'):
        return LetExp(**kwargs)
    elif (t == 'Unknown'):
        return UnknownExp(**kwargs)
    elif (t == 'Ite'):
        return IteExp(**kwargs)
    elif (t == 'Extract'):
        return ExtractExp(**kwargs)
    elif (t == 'Concat'):
        return ConcatExp(**kwargs)
    elif (t == 'Var'):
        return bap.vars.build_var(**kwargs)
    raise ValueError('Unknown expression type: %r' % (t,))
def find_common_bits_for_tag_groups(lines, tag_groups):
    """For each tag group, union the '1'-polarity bits seen on matching lines.

    Returns a list of bit sets, parallel to *tag_groups*.
    """
    bit_groups = []
    for tag_group in tag_groups:
        ones_union = set()
        for line in lines:
            tag, bits, mode, _ = util.parse_db_line(line)
            # Lines without bit data contribute nothing.
            if not bits:
                continue
            parsed = {util.parse_tagbit(b) for b in bits}
            if (tag in tag_group) and len(parsed):
                # Keep only the bits whose polarity flag is set.
                ones_union |= {b[1] for b in parsed if b[0]}
        bit_groups.append(ones_union)
    return bit_groups
def test_reaction_event_removed(session):
    """A reaction delta with action=1 parses to a ReactionEvent with reaction=None."""
    data = {'threadKey': {'threadFbId': 1234}, 'messageId': 'mid.$XYZ', 'action': 1, 'userId': 4321, 'senderId': 4321, 'offlineThreadingId': ''}
    thread = Group(session=session, id='1234')
    author = User(session=session, id='4321')
    message = Message(thread=thread, id='mid.$XYZ')
    expected = ReactionEvent(author=author, thread=thread, message=message, reaction=None)
    assert (expected == parse_client_delta(session, {'deltaMessageReaction': data}))
def run_action(args):
    """Monitor a process's syscall activity and flag anomalous behaviour.

    Attaches an eBPF probe to ``args.pid``, polls the per-syscall counter
    histogram, converts each changed snapshot into per-syscall
    rate-of-change deltas, and feeds those to a pre-trained autoencoder.
    When the reconstruction error exceeds ``args.max_error`` the three
    syscalls with the largest per-feature error are printed.  Runs until
    interrupted; polls every ``args.time`` milliseconds.
    """
    import time
    from collections import Counter
    from lib import MAX_SYSCALLS
    from lib.ebpf import Probe
    from lib.ml import AutoEncoder
    from lib.platform import SYSCALLS
    probe = Probe(args.pid)
    # Load the previously trained model from disk.
    ae = AutoEncoder(args.model, load=True)
    print(('monitoring process %d (%s) ...' % (args.pid, probe.comm)))
    histo_map = probe.start()
    # Previous histogram snapshot, used to compute relative deltas.
    prev = ([0.0] * MAX_SYSCALLS)
    while 1:
        histogram = [histo_map[s] for s in range(0, MAX_SYSCALLS)]
        if (histogram != prev):
            # Relative growth per syscall since the last snapshot; 0.0 when
            # the counter is still zero (avoids division by zero).
            deltas = [((1.0 - (prev[s] / histogram[s])) if (histogram[s] != 0.0) else 0.0) for s in range(0, MAX_SYSCALLS)]
            prev = histogram
            (_, feat_errors, error) = ae.predict([deltas])
            if (error > args.max_error):
                print(('error = %f - max = %f - top 3:' % (error, args.max_error)))
                # Rank syscalls by their individual reconstruction error.
                errors = {idx: err for (idx, err) in enumerate(feat_errors)}
                k = Counter(errors)
                top3 = k.most_common(3)
                for (idx, err) in top3:
                    name = SYSCALLS.get(idx, ('syscall_%d' % idx))
                    print((' %s = %f' % (name, err)))
        time.sleep((args.time / 1000.0))
class NmapScan():
    """Builds an nmap command line for a target host.

    ``full_scan`` enables both -sV and -sC; otherwise ``scripts`` and
    ``services`` toggle -sC / -sV individually.  Scan output is logged to
    <target>/nmap_scan.txt.
    """

    def __init__(self, host, port_range, full_scan=None, scripts=None, services=None):
        self.target = host.target
        self.full_scan = full_scan
        self.scripts = scripts
        self.services = services
        self.port_range = port_range
        self.path = HelpUtilities.get_output_path('{}/nmap_scan.txt'.format(self.target))
        self.logger = Logger(self.path)

    def build_script(self):
        """Return the nmap argv list reflecting this scan's options."""
        command = ['nmap', '-Pn', self.target]
        if self.port_range:
            # Validate before trusting the user-supplied range.
            HelpUtilities.validate_port_range(self.port_range)
            command.append('-p')
            command.append(self.port_range)
            self.logger.info('{} Added port range {} to Nmap script'.format(COLORED_COMBOS.NOTIFY, self.port_range))
        if self.full_scan:
            # Full scan implies both version detection and default scripts.
            command.append('-sV')
            command.append('-sC')
            self.logger.info('{} Added scripts and services to Nmap script'.format(COLORED_COMBOS.NOTIFY))
            return command
        if self.scripts:
            self.logger.info('{} Added safe-scripts scan to Nmap script'.format(COLORED_COMBOS.NOTIFY))
            command.append('-sC')
        if self.services:
            self.logger.info('{} Added service scan to Nmap script'.format(COLORED_COMBOS.NOTIFY))
            command.append('-sV')
        return command
class QueryStub(object):
    """gRPC client stub for the ``ibc.core.client.v1.Query`` service.

    Generated-style code: each attribute is a unary-unary callable bound to
    one RPC method, wired with the request serializer and response
    deserializer from the compiled protobuf module.
    """

    def __init__(self, channel):
        """Bind every Query RPC on *channel* (a grpc.Channel)."""
        self.ClientState = channel.unary_unary('/ibc.core.client.v1.Query/ClientState', request_serializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryClientStateRequest.SerializeToString, response_deserializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryClientStateResponse.FromString)
        self.ClientStates = channel.unary_unary('/ibc.core.client.v1.Query/ClientStates', request_serializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryClientStatesRequest.SerializeToString, response_deserializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryClientStatesResponse.FromString)
        self.ConsensusState = channel.unary_unary('/ibc.core.client.v1.Query/ConsensusState', request_serializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryConsensusStateRequest.SerializeToString, response_deserializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryConsensusStateResponse.FromString)
        self.ConsensusStates = channel.unary_unary('/ibc.core.client.v1.Query/ConsensusStates', request_serializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryConsensusStatesRequest.SerializeToString, response_deserializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryConsensusStatesResponse.FromString)
        self.ClientStatus = channel.unary_unary('/ibc.core.client.v1.Query/ClientStatus', request_serializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryClientStatusRequest.SerializeToString, response_deserializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryClientStatusResponse.FromString)
        self.ClientParams = channel.unary_unary('/ibc.core.client.v1.Query/ClientParams', request_serializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryClientParamsRequest.SerializeToString, response_deserializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryClientParamsResponse.FromString)
        self.UpgradedClientState = channel.unary_unary('/ibc.core.client.v1.Query/UpgradedClientState', request_serializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryUpgradedClientStateRequest.SerializeToString, response_deserializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryUpgradedClientStateResponse.FromString)
        self.UpgradedConsensusState = channel.unary_unary('/ibc.core.client.v1.Query/UpgradedConsensusState', request_serializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryUpgradedConsensusStateRequest.SerializeToString, response_deserializer=ibc_dot_core_dot_client_dot_v1_dot_query__pb2.QueryUpgradedConsensusStateResponse.FromString)
def get_source_link(module_path: str, xpath: str, title: str) -> str:
    """Return a Markdown link to the GitHub source of *xpath* inside *module_path*.

    The URL anchors to the line number reported by ``module_source`` for the
    current HEAD commit, and the link text is suffixed with ``:`` so it can
    head a list item.
    """
    module = import_module(module_path)
    source_file = pathlib.Path(module.__file__)
    _, attrib = module_source(module, xpath)[0]
    repo_relative = source_file.relative_to(WORKSPACE_DIR)
    url = f"{GITHUB_REPO}/blob/{HEAD.commit.hexsha}/{repo_relative}#L{attrib['lineno']}"
    return f'[{title}]({url}):'
class Component():
    """Book-keeping for a single indexed component slot.

    A component is considered *used* when it was reserved up front or when it
    currently holds an encoding.  ``clear`` returns the slot to its pristine
    (unencoded, unprepared) state.
    """

    def __init__(self, index, reserved=False):
        self._index = index
        self._reserved = reserved
        self._encoding = None
        # Flag flipped by external code once the component has been prepared.
        self.prepared = False

    def index(self):
        """Return the slot index."""
        return self._index

    def component(self):
        """Alias for :meth:`index`."""
        return self._index

    def reserved(self):
        """Return True when the slot was reserved at construction time."""
        return self._reserved

    def used(self):
        """Return True when the slot is reserved or holds an encoding."""
        if self._reserved:
            return True
        return self._encoding is not None

    def encoding(self):
        """Return the stored encoding, or None when empty."""
        return self._encoding

    def store(self, encoding):
        """Attach *encoding* to this slot."""
        self._encoding = encoding

    def clear(self):
        """Drop any stored encoding and reset the prepared flag."""
        self._encoding = None
        self.prepared = False
def bench_one(name_module='cmorph', func=None, total_duration=2):
    """Benchmark the first registered function of *name_module* across backends.

    Looks up the module's first entry in the global ``statements`` table,
    reads its setup code from ``setup_codes/``, then times the statement with
    the ``pyx`` Cython build (used as the norm) and with the cython / pythran
    / numba Transonic backends, each executed in a subprocess via ``tmp.py``.

    Parameters
    ----------
    name_module : str
        Key into the global ``statements`` mapping.
    func : None
        Selecting an explicit function is not implemented yet.
    total_duration : int
        Target duration in seconds handed to ``timeit``.
    """
    # Explicit function selection is not supported yet.
    if (func is not None):
        raise NotImplementedError
    functions = [(mod, func_) for (mod, func_) in statements.keys() if (mod == name_module)]
    if (not functions):
        raise ValueError(f'bad name_module: {name_module}')
    name_function = functions[0][1]
    print(f'module: {name_module}')
    stmt = statements[(name_module, name_function)]
    print(stmt)
    path_setup = (Path('setup_codes') / f'{name_module}_{name_function}.py')
    if (not path_setup.exists()):
        raise RuntimeError
    with open(path_setup) as file:
        setup = file.read()
    # NOTE(review): this skimage timing is repeated at the end of the
    # function; here it runs before the pyx norm is known so it is printed
    # without a ratio — confirm the duplication is intentional.
    if ((name_module, name_function) in import_from_skimage):
        setup_from_skimage = setup.replace(f'from future.{name_module} import {name_function}', import_from_skimage[(name_module, name_function)])
        time = timeit(stmt, setup_from_skimage, total_duration=total_duration)
        print(f"{'from skimage':18s} {time:.2e} s")
    setup_pyx = setup.replace(f'from future.{name_module} import', f'from pyx.{name_module} import')
    # Script body written to tmp.py and run in subprocesses; its stdout is
    # the measured time.  Content must start at column 0 to be valid Python.
    code = f"""
from transonic.util import timeit
setup = '''{setup}'''
stmt = '''{stmt}'''
print(timeit(stmt, setup, total_duration={total_duration}))
"""
    time_old = timeit(stmt, setup_pyx, total_duration=total_duration)
    print(f'cython pyx skimage {time_old:.2e} s (= norm)')
    with open('tmp.py', 'w') as file:
        file.write(code)
    # Each backend is exercised in a fresh interpreter so the Transonic
    # backend selection (env var) takes effect.
    for backend in ('cython', 'pythran', 'numba'):
        time = float(getoutput(f"TRANSONIC_BACKEND='{backend}' python tmp.py"))
        print(f'{backend:18s} {time:.2e} s (= {(time / time_old):.2f} * norm)')
    if ((name_module, name_function) not in import_from_skimage):
        return
    # Final reference timing against the released skimage implementation.
    setup_from_skimage = setup.replace(f'from future.{name_module} import {name_function}', import_from_skimage[(name_module, name_function)])
    time = timeit(stmt, setup_from_skimage, total_duration=total_duration)
    print(f"{'from skimage':18s} {time:.2e} s (= {(time / time_old):.2f} * norm)")
def test_activate_reload_and_deactivate(testbot):
    """Exercise the plugin activate / reload / deactivate / blacklist commands.

    The bot's replies are strictly ordered, so each ``push_message`` is paired
    with the ``pop_message`` it is expected to produce.
    """
    # Each command without an argument must prompt for a plugin name, and an
    # unknown name must be rejected with the list of valid plugins.
    for command in ('activate', 'reload', 'deactivate'):
        testbot.push_message(f'!plugin {command}')
        m = testbot.pop_message()
        assert ('Please tell me which of the following plugins to' in m)
        assert ('ChatRoom' in m)
        testbot.push_message(f'!plugin {command} nosuchplugin')
        m = testbot.pop_message()
        assert ("nosuchplugin isn't a valid plugin name. The current plugins are" in m)
        assert ('ChatRoom' in m)
    # Reloading an active plugin keeps it active ('A' in the status listing).
    testbot.push_message('!plugin reload ChatRoom')
    assert ('Plugin ChatRoom reloaded.' == testbot.pop_message())
    testbot.push_message('!status plugins')
    assert ('A ChatRoom' in testbot.pop_message())
    # Deactivate: status flips to 'D'; a second deactivate is a no-op warning.
    testbot.push_message('!plugin deactivate ChatRoom')
    assert ('Plugin ChatRoom deactivated.' == testbot.pop_message())
    testbot.push_message('!status plugins')
    assert ('D ChatRoom' in testbot.pop_message())
    testbot.push_message('!plugin deactivate ChatRoom')
    assert ('ChatRoom is already deactivated.' in testbot.pop_message())
    # Activate: status flips back to 'A'; double activate warns.
    testbot.push_message('!plugin activate ChatRoom')
    assert ('Plugin ChatRoom activated.' in testbot.pop_message())
    testbot.push_message('!status plugins')
    assert ('A ChatRoom' in testbot.pop_message())
    testbot.push_message('!plugin activate ChatRoom')
    assert ('ChatRoom is already activated.' == testbot.pop_message())
    # Reloading a deactivated plugin first warns, then reloads.
    testbot.push_message('!plugin deactivate ChatRoom')
    assert ('Plugin ChatRoom deactivated.' == testbot.pop_message())
    testbot.push_message('!plugin reload ChatRoom')
    assert ('Warning: plugin ChatRoom is currently not activated. Use !plugin activate ChatRoom to activate it.' == testbot.pop_message())
    assert ('Plugin ChatRoom reloaded.' == testbot.pop_message())
    # Blacklisting shows up as 'B,D' in the status listing.
    testbot.push_message('!plugin blacklist ChatRoom')
    assert ('Plugin ChatRoom is now blacklisted.' == testbot.pop_message())
    testbot.push_message('!status plugins')
    assert ('B,D ChatRoom' in testbot.pop_message())
    # Unblacklist to leave the bot in a clean state for other tests.
    testbot.push_message('!plugin unblacklist ChatRoom')
    testbot.pop_message()
# NOTE(review): the line below looks like a truncated decorator — presumably
# ``@pytest.mark.integration``; the '@' and attribute prefix appear to have
# been stripped from this file.  Confirm against the original source.
.integration
class TestListServerResources():
    """Integration tests for ``_api_helpers.list_server_resources``."""

    def test_list_server_resources_passing(self, test_config: FidesConfig) -> None:
        """The server must return a non-empty listing for ``data_category``."""
        resource_type = 'data_category'
        result = _api_helpers.list_server_resources(url=test_config.cli.server_url, resource_type=resource_type, headers=test_config.user.auth_header, exclude_keys=[])
        assert result

    def test_list_server_resources_none(self, test_config: FidesConfig) -> None:
        """After deleting every ``system`` resource the listing is an empty list."""
        resource_type = 'system'
        delete_resource_type(test_config, resource_type)
        result: List[FidesModel] = _api_helpers.list_server_resources(url=test_config.cli.server_url, resource_type=resource_type, headers=test_config.user.auth_header, exclude_keys=[])
        assert (result == [])
def make_classification_dataset(n_features=10, n_classes=10):
    """Build a small, fully reproducible integer classification dataset.

    A 50-sample problem with ``n_features`` informative features is generated,
    perturbed with uniform noise, standardized, then scaled by 1000 and
    clipped to the int16 range so the values suit fixed-point / quantized
    pipelines.

    Parameters
    ----------
    n_features : int
        Number of (all informative) features.
    n_classes : int
        Number of target classes.

    Returns
    -------
    tuple
        ``(X_train, X_test, y_train, y_test)`` — an 80/20 train/test split.
    """
    rng = numpy.random.RandomState(0)
    (X, y) = make_classification(n_features=n_features, n_classes=n_classes, n_redundant=0, n_informative=n_features, random_state=rng, n_clusters_per_class=3, n_samples=50)
    # Add uniform noise so the classes are not perfectly separable.
    X += (2 * rng.uniform(size=X.shape))
    X = StandardScaler().fit_transform(X)
    # Quantize into the int16 range [-32768, 32767].
    X = numpy.clip((1000 * X), (- 32768), 32767).astype(int)
    # Fix: seed the split as well — without random_state the "reproducible"
    # dataset differed between runs despite the seeded generator above.
    (X_train, X_test, y_train, y_test) = train_test_split(X, y, test_size=0.2, random_state=rng)
    return (X_train, X_test, y_train, y_test)
def update_trace_rank(file_path: str, rank: int) -> None:
    """Stamp *rank* into the ``distributedInfo`` section of a trace file.

    The trace at *file_path* is read, its ``distributedInfo`` mapping is
    created when absent, the rank is recorded, and the trace is written back
    to the same path.
    """
    trace = read_trace(file_path)
    # setdefault covers both the existing-mapping and missing-key cases.
    trace.setdefault('distributedInfo', {})['rank'] = rank
    write_trace(trace, file_path)
def before_uninstall():
    """Remove Frappe Health customizations before the app is uninstalled.

    Prints progress, and on failure points the user at the issue tracker
    before re-raising the original exception.
    """
    try:
        print('Removing customizations created by Frappe Health...')
        remove_customizations()
    except Exception as e:
        # Fix: the URL literal was truncated (unterminated string) in this
        # file; restored to the project's issue tracker.
        # NOTE(review): confirm the exact URL against the upstream source.
        BUG_REPORT_URL = 'https://github.com/frappe/health/issues'
        click.secho(f'Removing Customizations for Frappe Health failed due to an error. Please try again or report the issue on {BUG_REPORT_URL} if not resolved.', fg='bright_red')
        raise e
    click.secho('Frappe Health app customizations have been removed successfully...', fg='green')
class AltRadioController(AltGenericController):
    """Alternative-toolbar controller for Rhythmbox iRadio sources."""

    __gtype_name = 'AltRadioController'

    def __init__(self, header):
        super(AltRadioController, self).__init__(header)
        # Shared icon instance used for every radio source.
        self._gicon = Gio.ThemedIcon(name='audio-radio-symbolic')

    def valid_source(self, source):
        """Accept only Rhythmbox iRadio sources."""
        source_type_name = type(source).__name__
        return 'RBIRadioSource' in source_type_name

    def get_gicon(self, source):
        """Return the themed radio icon for *source*."""
        return self._gicon

    def get_category(self):
        """Radio sources belong to the ONLINE category."""
        return AltControllerCategory.ONLINE

    def set_library_labels(self):
        """Relabel the song view as 'Stations'."""
        self.header.set_library_labels(song_label=_('Stations'))

    def toolbar_visibility(self):
        """The toolbar is always visible for radio sources."""
        return True
class OptionSeriesPackedbubbleSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Pitch-mapping options for packed-bubble sonification default instruments.

    NOTE(review): every option below appears twice — a getter returning a
    default via ``_config_get`` followed by a setter writing via ``_config``.
    The ``@property`` / ``@<name>.setter`` decorators seem to have been
    stripped from this file (the '@' characters are missing file-wide), so as
    written the later (setter) definition shadows the getter.  Confirm
    against the generated upstream source before editing.
    """
    def mapFunction(self):
        # Getter: no default mapping function.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: pitch maps to the 'y' value by default.
        return self._config_get('y')
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter: highest note defaults to c6.
        return self._config_get('c6')
    def max(self, text: str):
        self._config(text, js_type=False)
    def min(self):
        # Getter: lowest note defaults to c2.
        return self._config_get('c2')
    def min(self, text: str):
        self._config(text, js_type=False)
    def scale(self):
        # Getter: no musical scale restriction by default.
        return self._config_get(None)
    def scale(self, value: Any):
        self._config(value, js_type=False)
    def within(self):
        # Getter: values are interpreted within the 'yAxis' extent by default.
        return self._config_get('yAxis')
    def within(self, text: str):
        self._config(text, js_type=False)
def extractBllovetranslationsWordpressCom(item):
    """Map a bllovetranslations.wordpress.com feed item to a release message.

    Returns None for previews or items without a chapter/volume, a release
    message when a known tag matches, and False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to match, canonical series name, translation type) — order matters,
    # the first matching tag wins.
    tagmap = (
        ('CDAW', 'Continuation of the Dream in Another World', 'translated'),
        ('aak', 'ai wo ataeru kemono-tachi', 'translated'),
        ('ai wo ataeru kemono-tachi', 'ai wo ataeru kemono-tachi', 'translated'),
        ('Sweet Daydream', 'Sweet Daydream', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the line below looks like a truncated decorator — presumably
# ``@pytest.fixture(autouse=True)``; the '@' and name appear stripped.
(autouse=True)
def xdg_trinity_root(monkeypatch, tmpdir):
    """Redirect XDG_TRINITY_ROOT to a throwaway directory for the test run."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        xdg_root_dir = (Path(tmp_dir) / 'trinity')
        monkeypatch.setenv('XDG_TRINITY_ROOT', str(xdg_root_dir))
        # Sanity check: the redirected root must not live under $HOME.
        assert (not is_under_path(os.path.expandvars('$HOME'), get_xdg_trinity_root()))
        (yield xdg_root_dir)
class Station(ServiceInterface):
    """Mock of the iwd ``Station`` D-Bus interface.

    NOTE(review): the bare ``_property(...)`` and ``()`` lines below look like
    decorators whose '@' (and module prefix) were stripped from this file —
    presumably ``@dbus_property(access=PropertyAccess.READ)`` and
    ``@method()``.  Confirm against the original source.
    """
    def __init__(self, *args, state='', connected_network='', **kwargs):
        # Register under the IWD_STATION interface name.
        ServiceInterface.__init__(self, IWD_STATION, *args, **kwargs)
        self._state = state
        self._connected_network = connected_network
        self._scanning = False
    _property(access=PropertyAccess.READ)
    def State(self) -> 's':
        # D-Bus string property: current station state.
        return self._state
    _property(access=PropertyAccess.READ)
    def ConnectedNetwork(self) -> 'o':
        # D-Bus object-path property: the connected network object.
        return self._connected_network
    _property(access=PropertyAccess.READ)
    def Scanning(self) -> 'b':
        # D-Bus boolean property: whether a scan is in progress.
        return self._scanning
    ()
    def Scan(self) -> None:
        # Toggle the scanning flag and announce the change over D-Bus.
        self._scanning = (not self._scanning)
        self.emit_properties_changed({'Scanning': self._scanning})
def test_pickle_dump_load(assertion, source, target=None, protocol=(0, HIGHEST_PROTOCOL)):
(start, stop) = protocol
failures = []
for protocol in range(start, (stop + 1)):
try:
if (target is None):
assertion(loads(dumps(source, protocol=protocol)), source)
else:
assertion(loads(dumps(source, protocol=protocol)), target)
except Exception:
(exc, tb) = sys.exc_info()[1:]
failures.append(('%2d: %s' % (protocol, exc)))
if failures:
raise ValueError(('Failed with protocols: %s' % ', '.join(failures))) |
def test_json_writer_with_validation():
    """``json_writer`` must raise ValidationError when records violate the schema."""
    schema = {'doc': 'A weather reading.', 'name': 'Weather', 'namespace': 'test', 'type': 'record', 'fields': [{'name': 'station', 'type': 'string'}, {'name': 'time', 'type': 'long'}, {'name': 'temp', 'type': 'int'}]}
    # The second record has a string 'time' and the last a float 'temp' —
    # both deliberately violate the schema above.
    # NOTE(review): several literal values on the original records line were
    # lost ("'time': }" — empty values); restored from the upstream fastavro
    # test data — confirm against project history.
    records = [
        {'station': '011990-99999', 'temp': 0, 'time': 1433269388},
        {'station': '011990-99999', 'temp': 22, 'time': 'last day'},
        {'station': '011990-99999', 'temp': (- 11), 'time': 1433273379},
        {'station': '012650-99999', 'temp': 111.9, 'time': 1433275478},
    ]
    new_file = StringIO()
    with pytest.raises(ValidationError):
        json_writer(new_file, schema, records, validator=True)
class CacheDoc(Directive):
    """Sphinx directive that documents every public method of a Zenpy client."""

    required_arguments = 0
    has_content = True

    def run(self):
        """Render one paragraph node per public Zenpy method, plus a container."""
        # Credentials are dummies — the client is only introspected, not used.
        zenpy_client = Zenpy(subdomain='party', email='', password='Yer')
        node_list = []
        cache_node = container()
        cache_sections = self.generate_cache_sections(zenpy_client)
        for cache_section in cache_sections:
            node = paragraph()
            self.state.nested_parse(StringList(cache_section.split('\n')), 0, node)
            node_list.append(node)
        node_list.append(cache_node)
        return node_list

    def generate_cache_sections(self, zenpy):
        """Build a reST ``py:method`` section for each public method of *zenpy*."""
        cache_sections = []
        # Fix: pass the predicate directly instead of wrapping it in a lambda.
        for (method_name, method) in inspect.getmembers(zenpy, inspect.ismethod):
            if method_name.startswith('_'):
                continue
            output = ('.. py:method:: %s%s\n' % (method_name, inspect.signature(method)))
            output += (' %s\n\n' % method.__doc__)
            cache_sections.append(output)
        return cache_sections
class Dependency():
    """One project dependency: a PyPI name plus version / index / git / ref data.

    NOTE(review): several definitions below appear to have lost their
    decorators (this file seems to have had its '@' characters stripped):
    ``name`` / ``version`` / ``index`` / ``git`` / ``ref`` read like
    ``@property`` accessors (``to_json`` uses ``self.version`` without calling
    it), ``_parse_version`` takes no ``self`` (likely ``@staticmethod``) and
    ``from_json`` takes ``cls`` (likely ``@classmethod``).  Confirm against
    the original source before relying on this transcription.
    """
    __slots__ = ('_name', '_version', '_index', '_git', '_ref')

    def __init__(self, name: Union[(PyPIPackageName, str)], version: Union[(str, SpecifierSet)]='', index: Optional[str]=None, git: Optional[str]=None, ref: Optional[Union[(GitRef, str)]]=None) -> None:
        # Normalize every field to its validated wrapper type.
        self._name: PyPIPackageName = PyPIPackageName(name)
        self._version: SpecifierSet = self._parse_version(version)
        self._index: Optional[str] = index
        self._git: Optional[str] = git
        self._ref: Optional[GitRef] = (GitRef(ref) if (ref is not None) else None)

    def name(self) -> str:
        """Package name as a plain string."""
        return str(self._name)

    def version(self) -> str:
        """Version specifier as a plain string ('' when unconstrained)."""
        return str(self._version)

    def index(self) -> Optional[str]:
        """Package index URL, or None."""
        return (str(self._index) if self._index else None)

    def git(self) -> Optional[str]:
        """Git repository URL, or None."""
        return (str(self._git) if self._git else None)

    def ref(self) -> Optional[str]:
        """Git reference (branch/tag/sha), or None."""
        return (str(self._ref) if self._ref else None)

    def _parse_version(version: Union[(str, SpecifierSet)]) -> SpecifierSet:
        # Accept either a raw specifier string or an already-parsed set.
        return (version if isinstance(version, SpecifierSet) else SpecifierSet(version))

    def from_json(cls, obj: Dict[(str, Dict[(str, str)])]) -> 'Dependency':
        """Build a Dependency from a one-key mapping {name: {attributes}}.

        Raises ValueError when the mapping has more than one key or contains
        attributes outside {version, index, git, ref}.
        """
        if (len(obj) != 1):
            raise ValueError(f'Only one key allowed, found {set(obj.keys())}')
        (name, attributes) = list(obj.items())[0]
        allowed_keys = {'version', 'index', 'git', 'ref'}
        not_allowed_keys = set(attributes.keys()).difference(allowed_keys)
        if (len(not_allowed_keys) > 0):
            raise ValueError(f'Not allowed keys: {not_allowed_keys}')
        version = attributes.get('version', '')
        index = attributes.get('index', None)
        git = attributes.get('git', None)
        ref = attributes.get('ref', None)
        return Dependency(name=name, version=version, index=index, git=git, ref=ref)

    def to_json(self) -> Dict[(str, Dict[(str, str)])]:
        """Serialize back to the {name: {attributes}} mapping, omitting defaults."""
        result = {}
        if (self.version != ''):
            result['version'] = self.version
        if (self.index is not None):
            result['index'] = self.index
        if (self.git is not None):
            result['git'] = cast(str, self.git)
        if (self.ref is not None):
            result['ref'] = cast(str, self.ref)
        return {self.name: result}

    def get_pip_install_args(self) -> List[str]:
        """Return the pip CLI arguments that install this dependency."""
        name = self.name
        index = self.index
        git_url = self.git
        revision = (self.ref if (self.ref is not None) else DEFAULT_GIT_REF)
        version_constraint = str(self.version)
        command: List[str] = []
        if (index is not None):
            command += ['-i', index]
        if (git_url is not None):
            # NOTE(review): the empty '' between the URL and the revision was
            # most likely '@' ("git+<url>@<rev>#egg=<name>") before the
            # file-wide '@' stripping — confirm against upstream.
            command += [((((('git+' + git_url) + '') + revision) + '#egg=') + name)]
        else:
            command += [(name + version_constraint)]
        return command

    def __str__(self) -> str:
        return f"{self.__class__.__name__}(name='{self.name}', version='{self.version}', index='{self.index}', git='{self.git}', ref='{self.ref}')"

    def __eq__(self, other: Any) -> bool:
        # Field-wise equality on the private attributes.
        return (isinstance(other, Dependency) and (self._name == other._name) and (self._version == other._version) and (self._index == other._index) and (self._git == other._git) and (self._ref == other._ref))
# NOTE(review): the two lines below look like truncated click decorators —
# presumably ``@<cli>.command('guess')`` and ``@click.option('--only', ...)``;
# the '@' and callee prefixes appear to have been stripped from this file.
('guess')
('--only', multiple=True, help='Only guess actions with the given prefix, e.g. Describe (can be passed multiple times)')
def guess(only):
    """Read an IAM policy document from stdin, guess actions, print it as JSON."""
    stdin = click.get_text_stream('stdin')
    policy = parse_policy_document(stdin)
    # --only arrives as a tuple of strings; normalize each to Title case.
    allowed_prefixes = [s.title() for s in only]
    policy = guess_statements(policy, allowed_prefixes)
    click.echo(policy.to_json())
def check_git_status():
    """Report whether the working tree is a clean git checkout.

    Returns a ``(ok, message)`` tuple: False with a diagnostic when the
    directory is not a git repository or has uncommitted/untracked changes,
    True otherwise.
    """
    try:
        repo = Repo(Path.cwd().resolve(), search_parent_directories=True)
    except InvalidGitRepositoryError:
        return (False, 'Does not appear to be a git repository')
    if repo.is_dirty(untracked_files=True):
        changedFiles = [entry.a_path for entry in repo.index.diff(None)]
        return (False, f'Found uncommitted changes: {(changedFiles + repo.untracked_files)}')
    return (True, 'Git repo looks good.')
class ClientFilesTransmitterGroupbox(QGroupBox):
    """Group box hosting the client-side file transmitter UI.

    Wires a ``ClientFilesTransmitter`` to checkboxes (allow connections /
    require manual confirmation), buttons (disconnect all, copy connection
    string, find server) and a scroll area holding one ``ClientConnectionbox``
    per connected peer, each backed by a ``ClientListenThread``.

    NOTE(review): most user-visible strings in this transcription are empty
    ('') or bare punctuation — they appear to have been stripped from the
    file; confirm against the original source.
    """
    # Forwarded to the owner so it can display status messages.
    showm_signal = pyqtSignal(str)

    def __init__(self, text='', parent=None):
        super(ClientFilesTransmitterGroupbox, self).__init__(title=text, parent=parent)
        self.Transmitter = ClientFilesTransmitter(self)
        self.resize(560, 220)
        # Master switch: accept incoming connections at all (persisted).
        self.allowCheckBox = QCheckBox('', self)
        self.allowCheckBox.setToolTip('')
        self.allowCheckBox.setGeometry(10, 18, 80, 25)
        self.allowCheckBox.stateChanged.connect(self.allowconnectchange)
        self.allowCheckBox.setChecked(QSettings('Fandes', 'jamtools').value('clientfilestransmittertest/allowconnect', True, type=bool))
        # When checked, every incoming connection must be confirmed manually.
        self.needallowconnection = QCheckBox('', self)
        self.needallowconnection.setToolTip(',')
        self.needallowconnection.setGeometry(((self.allowCheckBox.x() + self.allowCheckBox.width()) + 12), self.allowCheckBox.y(), (self.allowCheckBox.width() + 10), self.allowCheckBox.height())
        self.needallowconnection.setChecked(QSettings('Fandes', 'jamtools').value('clientfilestransmittertest/needallow', True, type=bool))
        self.needallowconnection.stateChanged.connect(self.autoAllowchange)
        self.disconnectallbtn = QPushButton('', self)
        self.disconnectallbtn.setToolTip(',,')
        self.disconnectallbtn.move(self.allowCheckBox.x(), ((self.allowCheckBox.height() + self.allowCheckBox.y()) + 5))
        self.disconnectallbtn.clicked.connect(self.killallconnection)
        # Read-only display of this client's own connection string.
        self.connectionstredit = QLineEdit(self.Transmitter.connectstr, self)
        self.connectionstredit.setReadOnly(True)
        self.connectionstredit.setToolTip('')
        self.connectionstredit.setGeometry(self.disconnectallbtn.x(), ((self.disconnectallbtn.height() + self.disconnectallbtn.y()) + 10), 70, 22)
        self.connectionstrupdate = QPushButton(QIcon(':/update.png'), '', self)
        self.connectionstrupdate.setGeometry(((self.connectionstredit.x() + self.connectionstredit.width()) + 3), self.connectionstredit.y(), 22, 22)
        self.connectionstrupdate.clicked.connect(self.Transmitter.update_connectionstr)
        self.connectionstrupdate.setToolTip('')
        self.connectionstrupdate.setStatusTip('')
        copyconnectionstrbtn = QPushButton('', self)
        copyconnectionstrbtn.setGeometry(((self.connectionstrupdate.x() + self.connectionstrupdate.width()) + 3), self.connectionstrupdate.y(), 90, 22)
        copyconnectionstrbtn.clicked.connect(self.copyconnectionstr)
        # Target connection-string entry plus the "find server" trigger.
        self.targetconnectionedit = QLineEdit(self)
        self.targetconnectionedit.setPlaceholderText('')
        self.targetconnectionedit.setGeometry(self.connectionstredit.x(), (copyconnectionstrbtn.y() + 28), self.connectionstredit.width(), self.connectionstredit.height())
        findserverbtn = QPushButton('', self)
        findserverbtn.setGeometry(((self.targetconnectionedit.x() + self.targetconnectionedit.width()) + 5), self.targetconnectionedit.y(), 50, 22)
        findserverbtn.clicked.connect((lambda : self.Transmitter.findandconnectserver(self.targetconnectionedit.text())))
        findserverbtn.setToolTip('')
        # Scroll area listing one widget per live connection.
        connection_ScrollArea = QScrollArea(self)
        connection_ScrollArea.setGeometry(200, 8, (self.width() - 210), (self.height() - 10))
        self.connection_ScrollArea_widget = QWidget()
        self.connection_ScrollArea_widget.setGeometry(connection_ScrollArea.geometry())
        connection_ScrollArea.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        connection_ScrollArea.setWidget(self.connection_ScrollArea_widget)
        # Placeholder label shown until the first connection arrives.
        self.labeltest = QLabel(',\n', self.connection_ScrollArea_widget)
        self.labeltest.setGeometry(10, 30, 120, 50)
        self.setStyleSheet('QScrollBar{width: 5px;}')
        self.connectionwidgetslist = []
        self.autoallow = (not self.needallowconnection.isChecked())
        self.connectall()

    def copyconnectionstr(self):
        """Copy this client's connection string to the system clipboard."""
        clipboard = QApplication.clipboard()
        clipboard.setText(self.Transmitter.connectstr)

    def connectall(self):
        """Wire all transmitter signals to their UI handlers."""
        self.Transmitter.aconnectionsignal.connect(self.aconnectionsignalhandle)
        self.Transmitter.reconnectionsignal.connect(self.reconnectionhandle)
        self.Transmitter.foundserverMSGsignal.connect(self.foundserverMSGsignalhandle)
        self.Transmitter.update_connectionstrchangesignal.connect(self.connectionstredit.setText)
        self.Transmitter.show_warningsignal.connect(self.show_a_message)
        self.Transmitter.showm_signal.connect(self.showm_signal.emit)

    def foundserverMSGsignalhandle(self, foundresult: str):
        """Show the result of a server lookup: not found, outdated, or found."""
        if (foundresult == 'notfound'):
            QMessageBox.warning(self, 'notfound!', ',!', QMessageBox.Yes)
        elif (foundresult == 'outofdate'):
            QMessageBox.warning(self, 'out of date!', ',!', QMessageBox.Yes)
        else:
            # Auto-dismissing info box for a successful lookup.
            infoBox = QMessageBox()
            infoBox.setIcon(QMessageBox.Information)
            infoBox.setText('{}\n...'.format(foundresult))
            infoBox.setStandardButtons(QMessageBox.Ok)
            infoBox.button(QMessageBox.Ok).animateClick(3000)
            infoBox.exec_()

    def reconnectionhandle(self, client_socket: socket.socket, client_address, fail=False):
        """Handle a peer reconnect; delegate failures to reconnectFail."""
        if fail:
            self.reconnectFail(client_address)
            return
        self.create_a_connection(client_socket, client_address)
        self.showm_signal.emit('{}!'.format(client_address))
        self.show_a_message('{}!'.format(client_address))

    def aconnectionsignalhandle(self, client_socket: socket.socket, client_address, connectport):
        """Accept or refuse an incoming connection, prompting unless auto-allow."""
        if self.autoallow:
            client_socket.send('allow'.encode())
            self.create_a_connection(client_socket, client_address, connectport)
            self.showm_signal.emit('{}!'.format(client_address))
            self.show_a_message('{}!'.format(client_address))
            return
        self.activateWindow()
        result = QMessageBox.warning(self, '?', '{}\n?'.format(client_address), (QMessageBox.Yes | QMessageBox.No), QMessageBox.Yes)
        if (result == QMessageBox.Yes):
            client_socket.send('allow'.encode())
            self.create_a_connection(client_socket, client_address, connectport)
        else:
            client_socket.send('refuse'.encode())
            client_socket.close()

    def show_a_message(self, message, delay=3000):
        """Show an info box that auto-dismisses after *delay* milliseconds."""
        self.activateWindow()
        infoBox = QMessageBox(self)
        infoBox.setIcon(QMessageBox.Information)
        infoBox.setText(message)
        infoBox.setStandardButtons(QMessageBox.Ok)
        infoBox.button(QMessageBox.Ok).animateClick(delay)
        infoBox.exec_()

    def create_a_connection(self, client_socket, client_address, connectport=0):
        """Create (or refresh) the connection widget and listener thread for a peer."""
        for aconnectionbox in self.connectionwidgetslist:
            if (aconnectionbox.ip == client_address):
                # Known peer: just refresh its state display.
                aconnectionbox.update_state('')
                print('')
                break
        else:
            # New peer: build a connection box plus its listener thread.
            self.labeltest.hide()
            if connectport:
                self.Transmitter.targetdict[client_address] = connectport
            aconnectionbox = ClientConnectionbox(client_address, self.connection_ScrollArea_widget)
            aconnectionbox.move(5, (10 + ((aconnectionbox.height() + 5) * len(self.connectionwidgetslist))))
            aconnectionbox.show()
            th = ClientListenThread(client_socket, self.Transmitter, client_address)
            aconnectionbox.sendfilessignal.connect(th.sendfiles)
            aconnectionbox.senddirssignal.connect(th.senddirs)
            aconnectionbox.resetsignal.connect(self.reset)
            aconnectionbox.rootpathchangesignal.connect(th.rootpathchange)
            aconnectionbox.threadnum.valueChanged.connect(th.changethreadnum)
            aconnectionbox.pausebtn.clicked.connect(th.pause)
            aconnectionbox.cannelbtn.clicked.connect(th.cancel)
            th.resetsignal.connect(self.beReset)
            th.showm_signal.connect(self.showm_signal.emit)
            th.update_state_signal.connect(aconnectionbox.update_state)
            th.pausebtntext_signal.connect(aconnectionbox.pausebtn.setText)
            th.reconnectsignal.connect(self.reconnectionhandle)
            th.start()
            self.connectionwidgetslist.append(aconnectionbox)
            self.connection_ScrollArea_widget.resize(self.connection_ScrollArea_widget.width(), (20 + ((aconnectionbox.height() + 5) * len(self.connectionwidgetslist))))
            self.Transmitter.clientthreads.append(th)
        self.Transmitter.connectionips.add(client_address)
        self.Transmitter.allowip.add(client_address)
        print(client_address, '')

    def reset(self, ip):
        """Tear down the listener thread and tracking entries for *ip*."""
        for th in self.Transmitter.clientthreads:
            if (th.ip == ip):
                th.quit()
                try:
                    th.sendthreadmanager.quit()
                except:
                    print(sys.exc_info())
                self.Transmitter.clientthreads.remove(th)
                break
        try:
            self.Transmitter.connectionips.remove(ip)
            self.Transmitter.allowip.remove(ip)
        except:
            pass
        self.showm_signal.emit('{}'.format(ip))
        print('')

    def beReset(self, ip):
        """Peer-initiated reset: reconnect the matching thread in the background."""
        print('bereset ')
        for th in self.Transmitter.clientthreads:
            if (th.ip == ip):
                self.reconnectthrea = Commen_Thread(th.reconnect)
                self.reconnectthrea.start()
                break

    def reconnectFail(self, ip):
        """Handle a failed reconnect: drop the thread and reset the peer widget."""
        for th in self.Transmitter.clientthreads:
            if (th.ip == ip):
                try:
                    self.Transmitter.clientthreads.remove(th)
                except:
                    print(sys.exc_info(), 295)
                print('')
        for wid in self.connectionwidgetslist:
            if (wid.ip == ip):
                wid.resetwidget()
        self.showm_signal.emit('{}'.format(ip))
        print(ip, self.Transmitter.connectionips)
        try:
            self.Transmitter.connectionips.remove(ip)
        except:
            print(sys.exc_info(), 284)

    def autoAllowchange(self, e):
        """Persist the confirm-each-connection checkbox and derive autoallow."""
        if e:
            QSettings('Fandes', 'jamtools').setValue('clientfilestransmittertest/needallow', True)
            self.autoallow = False
        else:
            QSettings('Fandes', 'jamtools').setValue('clientfilestransmittertest/needallow', False)
            self.autoallow = True

    def allowconnectchange(self, e):
        """Persist the allow-connections checkbox and start/stop the transmitter."""
        if e:
            QSettings('Fandes', 'jamtools').setValue('clientfilestransmittertest/allowconnect', True)
            self.Transmitter.canconnect = True
            self.Transmitter.start()
        else:
            QSettings('Fandes', 'jamtools').setValue('clientfilestransmittertest/allowconnect', False)
            self.Transmitter.canconnect = False

    def killallconnection(self):
        """Close every connection and clear all tracking state."""
        if self.Transmitter.closeall():
            return
        for wid in self.connectionwidgetslist:
            wid.resetwidget()
        self.allowCheckBox.setChecked(False)
        self.Transmitter.connectionips = set()
        self.Transmitter.allowip = set()
def extractDlazarturead_WordpressCom_release(item):
    pass
def load_menu(menu_path, menu_file, filters=None):
    """Load a ``key = id`` menu file into the global proto/method tables.

    Clears ``proto_to_id`` and ``method_to_id``, reads each ``name = number``
    line from *menu_file* (skipping keys not in *filters* when given), then
    registers the method ids for every loaded proto via ``LoadMethod2Id``.
    """
    proto_to_id.clear()
    method_to_id.clear()
    with open(f'{menu_path}/{menu_file}', 'r', encoding='UTF-8') as f:
        for raw_line in f.readlines():
            parts = raw_line.split('=')
            if len(parts) < 2:
                continue
            str_key = parts[0].strip()
            str_val = parts[1].strip()
            if filters is not None and str_key not in filters:
                continue
            proto_to_id[str_key] = int(str_val)
    for proto, start_id in proto_to_id.items():
        LoadMethod2Id(menu_path, proto, start_id)
def verify_asset_tracking_details(fledge_url, south_service_name, south_asset_name, south_plugin, north_service_name, north_plugin, skip_verify_north_interface):
    """Assert that the asset tracker recorded the expected Ingest/Egress events.

    Always checks the first Ingest entry against the south service/asset/
    plugin; additionally checks the first Egress entry against the north
    service/plugin unless *skip_verify_north_interface* is truthy.
    """
    ingest = utils.get_asset_tracking_details(fledge_url, 'Ingest')
    assert len(ingest['track']), 'Failed to track Ingest event'
    entry = ingest['track'][0]
    assert entry['service'] == south_service_name
    assert entry['asset'] == south_asset_name
    assert entry['plugin'] == south_plugin
    if skip_verify_north_interface:
        return
    egress = utils.get_asset_tracking_details(fledge_url, 'Egress')
    assert len(egress['track']), 'Failed to track Egress event'
    entry = egress['track'][0]
    assert entry['service'] == north_service_name
    assert entry['asset'] == south_asset_name
    assert entry['plugin'] == north_plugin
def compile_from_input_json(input_json: Dict, silent: bool=True, allow_paths: Optional[str]=None) -> Dict:
    """Dispatch a standard-JSON compile request to the matching backend.

    Vyper input goes straight through; Solidity input first augments
    *allow_paths* from the remappings in the settings section.  Any other
    language raises ``UnsupportedLanguage``.
    """
    language = input_json['language']
    if language == 'Vyper':
        return vyper.compile_from_input_json(input_json, silent, allow_paths)
    if language == 'Solidity':
        remappings = input_json['settings']['remappings']
        solc_allow_paths = _get_allow_paths(allow_paths, remappings)
        return solidity.compile_from_input_json(input_json, silent, solc_allow_paths)
    raise UnsupportedLanguage(f'{language}')
class ConfigRepository(IConfigRepository):
config_search_path: ConfigSearchPath
sources: List[ConfigSource]
def __init__(self, config_search_path: ConfigSearchPath) -> None:
self.initialize_sources(config_search_path)
def initialize_sources(self, config_search_path: ConfigSearchPath) -> None:
self.sources = []
for search_path in config_search_path.get_path():
assert (search_path.path is not None)
assert (search_path.provider is not None)
scheme = self._get_scheme(search_path.path)
source_type = SourcesRegistry.instance().resolve(scheme)
source = source_type(search_path.provider, search_path.path)
self.sources.append(source)
def get_schema_source(self) -> ConfigSource:
source = self.sources[(- 1)]
assert ((source.__class__.__name__ == 'StructuredConfigSource') and (source.provider == 'schema')), 'schema config source must be last'
return source
def load_config(self, config_path: str) -> Optional[ConfigResult]:
source = self._find_object_source(config_path=config_path, object_type=ObjectType.CONFIG)
ret = None
if (source is not None):
ret = source.load_config(config_path=config_path)
ret.is_schema_source = ((source.__class__.__name__ == 'StructuredConfigSource') and (source.provider == 'schema'))
if (ret is not None):
raw_defaults = self._extract_defaults_list(config_path, ret.config)
ret.defaults_list = self._create_defaults_list(config_path, raw_defaults)
return ret
def group_exists(self, config_path: str) -> bool:
return (self._find_object_source(config_path, ObjectType.GROUP) is not None)
def config_exists(self, config_path: str) -> bool:
return (self._find_object_source(config_path, ObjectType.CONFIG) is not None)
def get_group_options(self, group_name: str, results_filter: Optional[ObjectType]=ObjectType.CONFIG) -> List[str]:
options: List[str] = []
for source in self.sources:
if source.is_group(config_path=group_name):
options.extend(source.list(config_path=group_name, results_filter=results_filter))
return sorted(list(set(options)))
def get_sources(self) -> List[ConfigSource]:
return self.sources
def _find_object_source(self, config_path: str, object_type: Optional[ObjectType]) -> Optional[ConfigSource]:
found_source = None
for source in self.sources:
if (object_type == ObjectType.CONFIG):
if source.is_config(config_path):
found_source = source
break
elif (object_type == ObjectType.GROUP):
if source.is_group(config_path):
found_source = source
break
else:
raise ValueError('Unexpected object_type')
return found_source
def _get_scheme(path: str) -> str:
idx = path.find('://')
if (idx == (- 1)):
return 'file'
else:
return path[0:idx]
def _split_group(self, group_with_package: str) -> Tuple[(str, Optional[str], Optional[str])]:
idx = group_with_package.find('')
if (idx == (- 1)):
group = group_with_package
package = None
else:
group = group_with_package[0:idx]
package = group_with_package[(idx + 1):]
package2 = None
if (package is not None):
idx = package.find(':')
if (idx != (- 1)):
package2 = package[(idx + 1):]
package = package[0:idx]
return (group, package, package2)
def _create_defaults_list(self, config_path: str, defaults: ListConfig) -> List[InputDefault]:
def issue_deprecated_name_warning() -> None:
url = '
deprecation_warning(message=dedent(f''' In {config_path}: Defaults List contains deprecated keyword _name_, see {url}
'''))
res: List[InputDefault] = []
for item in defaults._iter_ex(resolve=False):
default: InputDefault
if isinstance(item, DictConfig):
if (not version.base_at_least('1.2')):
old_optional = None
if (len(item) > 1):
if ('optional' in item):
old_optional = item.pop('optional')
keys = list(item.keys())
if (len(keys) > 1):
raise ValueError(f'In {config_path}: Too many keys in default item {item}')
if (len(keys) == 0):
raise ValueError(f'In {config_path}: Missing group name in {item}')
key = keys[0]
assert isinstance(key, str)
(config_group, package, _package2) = self._split_group(key)
keywords = ConfigRepository.Keywords()
self._extract_keywords_from_config_group(config_group, keywords)
if (not version.base_at_least('1.2')):
if ((not keywords.optional) and (old_optional is not None)):
keywords.optional = old_optional
node = item._get_node(key)
assert ((node is not None) and isinstance(node, Node))
config_value = node._value()
if (not version.base_at_least('1.2')):
if (old_optional is not None):
msg = dedent(f'''
In {config_path}: 'optional: true' is deprecated.
Use 'optional {key}: {config_value}' instead.
Support for the old style is removed for Hydra version_base >= 1.2''')
deprecation_warning(msg)
if ((config_value is not None) and (not isinstance(config_value, (str, list)))):
raise ValueError(f'Unsupported item value in defaults : {type(config_value).__name__}. Supported: string or list')
if isinstance(config_value, list):
options = []
for v in config_value:
vv = v._value()
if (not isinstance(vv, str)):
raise ValueError(f'Unsupported item value in defaults : {type(vv).__name__}, nested list items must be strings')
options.append(vv)
config_value = options
if (not version.base_at_least('1.2')):
if ((package is not None) and ('_name_' in package)):
issue_deprecated_name_warning()
default = GroupDefault(group=keywords.group, value=config_value, package=package, optional=keywords.optional, override=keywords.override)
elif isinstance(item, str):
(path, package, _package2) = self._split_group(item)
if (not version.base_at_least('1.2')):
if ((package is not None) and ('_name_' in package)):
issue_deprecated_name_warning()
default = ConfigDefault(path=path, package=package)
else:
raise ValueError(f'Unsupported type in defaults : {type(item).__name__}')
res.append(default)
return res
def _extract_defaults_list(self, config_path: str, cfg: Container) -> ListConfig:
    """Pop (or read) the 'defaults' list out of a loaded config.

    Returns an empty ListConfig when cfg is not a mapping or has no
    'defaults' entry; raises ValueError when the entry is not a list.
    """
    empty = OmegaConf.create([])
    if (not OmegaConf.is_dict(cfg)):
        return empty
    assert isinstance(cfg, DictConfig)
    # Temporarily lift read-only / struct flags so 'defaults' can be removed.
    with read_write(cfg):
        with open_dict(cfg):
            if (not cfg._is_typed()):
                defaults = cfg.pop('defaults', empty)
            else:
                # Structured (typed) configs can't have keys popped; flag the
                # node so the top-level defaults key is stripped downstream.
                cfg._set_flag('HYDRA_REMOVE_TOP_LEVEL_DEFAULTS', True)
                defaults = cfg.get('defaults', empty)
    if (not isinstance(defaults, ListConfig)):
        # Report 'mapping' for DictConfig to keep the message user-friendly.
        if isinstance(defaults, DictConfig):
            type_str = 'mapping'
        else:
            type_str = type(defaults).__name__
        raise ValueError(f"Invalid defaults list in '{config_path}', defaults must be a list (got {type_str})")
    return defaults
class Keywords():
    """Keyword flags parsed from a single defaults-list group entry.

    NOTE(review): likely declared as a @dataclass upstream (the annotated
    class attributes suggest a stripped decorator) — confirm.
    """
    optional: bool = False  # entry may be skipped if the config is missing
    override: bool = False  # entry overrides an earlier default
    group: str = ''  # group name with leading keywords stripped
def _extract_keywords_from_config_group(group: str, keywords: 'ConfigRepository.Keywords') -> None:
    """Split leading 'optional'/'override' keywords off a group spec.

    The last whitespace-separated token is always the group name; any
    recognized keywords before it set the corresponding flags. Scanning
    stops at the first unrecognized leading token.
    """
    *leading, name = group.split(' ')
    for token in leading:
        if token == 'optional':
            keywords.optional = True
        elif token == 'override':
            keywords.override = True
        else:
            # Unknown token: stop consuming keywords.
            break
    keywords.group = name
def notify_session_state_change(session, actor):
    """Notify every speaker of *session* that its state changed.

    Looks up speakers (not deleted, email not overridden, with an email)
    attached to the session, resolves them to User rows, and sends a
    SESSION_STATE_CHANGE notification attributed to *actor*.
    """
    speakers = Speaker.query.filter_by(deleted_at=None, is_email_overridden=False).filter((Speaker.email != None), Speaker.sessions.contains(session)).with_entities(Speaker.email).all()
    emails = [val[0] for val in speakers]
    users = User.query.filter(User._email.in_(emails)).all()
    # Bail out before building the notification content: constructing a
    # NotificationContent for nobody is wasted work (fix: guard moved up).
    if not users:
        logger.warning('No speaker to send notification for state change of session %s', session)
        return
    content = NotificationContent(type=NotificationType.SESSION_STATE_CHANGE, target=session, target_action=session.state, actors=[NotificationActor(actor=actor)])
    send_notification(content, users=users)
class FileSaveChooser(BaseChooser):
    """Gtk "save file" dialog with an optional set of filename patterns."""

    def __init__(self, title, parent, patterns=None):
        """Create the chooser.

        patterns: optional iterable of glob patterns for the file filter.
        (Fix: the original used a mutable default argument `patterns=[]`.)
        """
        super().__init__(title, parent, Gtk.FileChooserAction.SAVE, _('_Save'))
        file_filter = Gtk.FileFilter()
        for pattern in (patterns or ()):
            file_filter.add_pattern(pattern)
        self.set_filter(file_filter)

    def set_filename(self, name):
        """Preselect folder and basename from a path; no-op on empty name."""
        if name:
            self.set_current_folder(Gio.File.new_for_path(os.path.abspath(os.path.dirname(name))))
            self.set_current_name(os.path.basename(name))
def test_is_event_version_part_of_edition(manifest):
    """Only versions 2.x-3.x of the event belong to 'edition-agen'."""
    expectations = {'1.0.0': False, '2.0.0': True, '3.0.0': True, '4.0.0': False}
    for version, expected in expectations.items():
        assert manifest.is_in_edition('edition-agen', 'EiffelActivityTriggeredEvent', version) is expected
# NOTE(review): the three lines below look like truncated decorators whose
# leading names were lost in extraction (likely @pytest.mark.parametrize,
# @hypothesis.given, @hypothesis.settings) — restore before running.
.parametrize('workflow, position', [(wf_with_multioutput_error0, 0), (wf_with_multioutput_error1, 1)])
(st.integers())
(deadline=timedelta(seconds=2))
def test_workflow_with_multioutput_error(workflow, position, correct_input):
    # Each workflow must raise a TypeError naming the mis-typed output
    # position; the message is matched as a regex.
    with pytest.raises(TypeError, match="Encountered error while executing workflow '{}':\\n  Failed to convert output in position {} of value .+, expected type \\<class 'int'\\>".format(workflow.name, position)):
        workflow(a=correct_input, b=correct_input)
def extractBaihemtltranslationsWordpressCom(item):
    """Parse release info for 'Baihe mtl translations'.

    Returns None for previews/untagged items, a release message for a
    recognized tag, or False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if ('preview' in title_lower) or not (chp or vol):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def log_on_exception(f: T_WrappedCallableOrType) -> T_WrappedCallableOrType:
    """Decorator: log (and re-raise) any exception escaping *f*.

    When applied to a class, every method is wrapped recursively via
    wrap_class_methods.
    """
    import functools  # local import: keeps the decorator self-contained
    if inspect.isclass(f):
        return wrap_class_methods(f, log_on_exception)

    # The original had a bare `(f)` line here — a stripped @functools.wraps(f);
    # restored below so the wrapper keeps f's name/docstring.
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        try:
            # Fix: propagate the wrapped function's return value (the
            # original discarded it, making every wrapped call return None).
            return f(*args, **kwargs)
        except Exception as e:
            LOGGER.exception('failed due to %s', repr(e))
            raise
    return wrapper
class WSServer():
    """WebSocket server core: tracks client streams keyed by name and id.

    `process` is the per-connection handler; it services start/end stream
    requests and throttles configured "sleep/pause" streams.
    """
    def __init__(self, enums: Any, request_time_out=0.01, sleep_pause_time=0.1):
        # Constants are read defensively via getattr so a partial `enums`
        # object still works (missing attributes fall back to empty sets).
        self.enums = enums
        self.STREAM_LISTS = getattr(enums, 'STREAM_LISTS', set())
        self.supported_stream_list = getattr(self.STREAM_LISTS, 'supported_stream_list', set())
        self.sleep_pause_streams = getattr(self.STREAM_LISTS, 'sleep_pause_streams', set())
        self.streams = {}  # stream_name -> APIRequest for active streams
        self.stream_id_to_stream_name = {}  # stream_id -> stream_name
        self.request_time_out = request_time_out  # recv timeout, seconds
        self.sleep_pause_time = sleep_pause_time  # throttle sleep, seconds
        self.websocket = None  # bound per connection in process()
        self.states = None
    def isactive(self, stream_name) -> bool:
        """Return True when the named stream is currently registered."""
        return (stream_name in self.streams)
    def remove_stream(self, stream_name) -> None:
        """Deregister `stream_name` (no-op when absent)."""
        if (stream_name in self.streams):
            del self.streams[stream_name]
    async def end_stream_listener(self) -> bool:
        """Drain incoming requests; True iff an end-stream request arrived.

        Skips over start-stream requests and stops on timeout or on the
        first non-start request.
        """
        request = None
        while ((not request) or request.is_start_stream_request()):
            try:
                request = APIRequest((await asyncio.wait_for(self.websocket.recv(), timeout=self.request_time_out)), enums=self.enums)
            except asyncio.TimeoutError:
                break
        return (request.is_end_stream_request() if request else False)
    async def send_samples(self, msg) -> None:
        """Forward a raw message to the connected client."""
        (await self.websocket.send(msg))
    async def start_connection(self, request) -> None:
        """Acknowledge a start-stream request and register the stream."""
        start_stream_request_msg = get_start_stream_request(stream_id=request.stream_id, request_id=request.request_id, api_version=self.enums.WS_SERVER.DEFAULT_API_VERSION)
        (await self.websocket.send(json.dumps(start_stream_request_msg)))
        self.streams[request.stream_name] = request
        self.stream_id_to_stream_name[request.stream_id] = request.stream_name
    async def start_connection_error(self, request) -> None:
        """Send a start-stream error acknowledgement.

        NOTE(review): this still registers the stream as active, mirroring
        start_connection — confirm that is intended for the error path.
        """
        start_stream_request_msg = get_start_stream_request_error(stream_id=request.stream_id, request_id=request.request_id, api_version=self.enums.WS_SERVER.DEFAULT_API_VERSION)
        (await self.websocket.send(json.dumps(start_stream_request_msg)))
        self.streams[request.stream_name] = request
        self.stream_id_to_stream_name[request.stream_id] = request.stream_name
    async def ws_recv(self) -> Tuple[(str, bool)]:
        """Receive one message; return (msg, success).

        Timeouts and closed connections yield ('', False) rather than raise.
        """
        recv_success = False
        try:
            msg = (await asyncio.wait_for(self.websocket.recv(), timeout=self.request_time_out))
            recv_success = True
        except asyncio.TimeoutError:
            msg = ''
        except websockets.exceptions.ConnectionClosedError:
            msg = ''
        return (msg, recv_success)
    def is_start_stream_request(self, msg: str) -> bool:
        """Cheap substring test for the start-stream request marker."""
        return (self.enums.API.START_STREAM_REQUEST in msg)
    def is_end_stream_request(self, msg: str) -> bool:
        """Cheap substring test for the end-stream request marker."""
        return (self.enums.API.END_STREAM_REQUEST in msg)
    async def process(self, websocket, path) -> None:
        """Per-connection loop: handle start/end stream requests forever."""
        logger.info('Start of WebSocket Process...')
        self.websocket = websocket
        while True:
            (msg, recv_success) = (await self.ws_recv())
            if self.is_start_stream_request(msg):
                request = APIRequest(msg, enums=self.enums)
                stream_name = request.stream_name
                stream_id = request.stream_id
                if (not self.isactive(stream_name)):
                    # supported_stream_list of None means "no restriction".
                    if ((self.supported_stream_list is None) or (stream_name in self.supported_stream_list)):
                        (await self.start_connection(request))
                        logger.info(f'Connected to stream {stream_name}')
                    else:
                        (await self.start_connection_error(request))
                        logger.info(f'Stream {stream_name} is not in supported_stream_list.')
                    # NOTE(review): both branches above already registered the
                    # stream; these two lines re-register it (redundant, and
                    # they mark error streams active too) — confirm intent.
                    self.streams[stream_name] = request
                    self.stream_id_to_stream_name[stream_id] = stream_name
            elif self.is_end_stream_request(msg):
                request = APIRequest(msg, enums=self.enums)
                stream_name = self.stream_id_to_stream_name[request.stream_id]
                if self.isactive(stream_name):
                    del self.streams[stream_name]
                    del self.stream_id_to_stream_name[request.stream_id]
                    end_stream_request_msg = get_end_stream_request(stream_id=request.stream_id, request_id=request.request_id, api_version=self.enums.WS_SERVER.DEFAULT_API_VERSION)
                    (await websocket.send(json.dumps(end_stream_request_msg)))
                    logger.info(f'Stream {stream_name} has been closed.')
            # Throttle configured streams, and idle when nothing was read.
            for sleep_pause_stream in self.sleep_pause_streams:
                if ((sleep_pause_stream in msg) or (not recv_success)):
                    (await asyncio.sleep(self.sleep_pause_time))
class GenericZendeskResponseHandler(ResponseHandler):
    """Catch-all handler that deserializes arbitrary Zendesk JSON responses
    into Zenpy objects and wraps collections in result generators.
    """
    # NOTE(review): `applies_to` takes (api, response) without self — likely a
    # stripped @staticmethod decorator; confirm against the base class.
    def applies_to(api, response):
        """Handler applies when the response belongs to this API and is JSON."""
        try:
            return ((api.base_url in response.request.url) and response.json())
        except ValueError:
            # response.json() raises ValueError for non-JSON bodies.
            return False
    def deserialize(self, response_json):
        """Map top-level JSON keys to Zenpy objects.

        Returns {key: object-or-list}. A payload containing both 'ticket'
        and 'audit' additionally yields a combined 'ticket_audit' entry.
        """
        response_objects = dict()
        if all(((t in response_json) for t in ('ticket', 'audit'))):
            response_objects['ticket_audit'] = self.object_mapping.object_from_json('ticket_audit', response_json)
        # Single objects keyed by their type name.
        for zenpy_object_name in self.object_mapping.class_mapping:
            if (zenpy_object_name in response_json):
                zenpy_object = self.object_mapping.object_from_json(zenpy_object_name, response_json[zenpy_object_name])
                response_objects[zenpy_object_name] = zenpy_object
        # Lists keyed by a plural name whose singular is a known type.
        for (key, value) in response_json.items():
            if isinstance(value, list):
                zenpy_object_name = as_singular(key)
                if (zenpy_object_name in self.object_mapping.class_mapping):
                    response_objects[key] = []
                    for object_json in response_json[key]:
                        zenpy_object = self.object_mapping.object_from_json(zenpy_object_name, object_json)
                        response_objects[key].append(zenpy_object)
        return response_objects
    def _isCBP(self, response_json):
        """True when the payload uses cursor-based pagination (meta.has_more)."""
        meta = response_json.get('meta')
        return ((meta is not None) and (meta.get('has_more') is not None))
    def build(self, response):
        """Turn an HTTP response into Zenpy objects / result generators.

        Precedence: special cursor endpoints first, then the API's own
        object type (plural, then singular), then any known mapped type.
        Raises ZenpyException when nothing matches.
        """
        response_json = response.json()
        # Cursor-style endpoints get dedicated generators.
        if ((self._isCBP(response_json) is False) and get_endpoint_path(self.api, response).startswith('/ticket_audits.json')):
            return TicketCursorGenerator(self, response_json, object_type='audit')
        if get_endpoint_path(self.api, response).startswith('/incremental/tickets/cursor.json'):
            return TicketCursorGenerator(self, response_json, object_type='ticket')
        if get_endpoint_path(self.api, response).startswith('/incremental/users/cursor.json'):
            return TicketCursorGenerator(self, response_json, object_type='users')
        if get_endpoint_path(self.api, response).startswith('/services/jira/links'):
            return JiraLinkGenerator(self, response_json, response)
        zenpy_objects = self.deserialize(response_json)
        # Prefer a collection matching this API's own object type.
        plural_object_type = as_plural(self.api.object_type)
        if (plural_object_type in zenpy_objects):
            meta = response_json.get('meta')
            if (meta and (meta.get('has_more') is not None)):
                return GenericCursorResultsGenerator(self, response_json, response_objects=zenpy_objects[plural_object_type])
            else:
                return ZendeskResultGenerator(self, response_json, response_objects=zenpy_objects[plural_object_type])
        if (self.api.object_type in zenpy_objects):
            return zenpy_objects[self.api.object_type]
        # Fall back to any known plural collection...
        for zenpy_object_name in self.object_mapping.class_mapping:
            plural_zenpy_object_name = as_plural(zenpy_object_name)
            if (plural_zenpy_object_name in zenpy_objects):
                meta = response_json.get('meta')
                if (meta and (meta.get('has_more') is not None)):
                    return GenericCursorResultsGenerator(self, response_json, object_type=zenpy_object_name)
                else:
                    return ZendeskResultGenerator(self, response_json, object_type=plural_zenpy_object_name)
        # ...or any known single object.
        for zenpy_object_name in self.object_mapping.class_mapping:
            if (zenpy_object_name in zenpy_objects):
                return zenpy_objects[zenpy_object_name]
        raise ZenpyException(('Unknown Response: ' + str(response_json)))
def _test_correct_response_for_place_of_performance_county_with_geo_filters(client):
    """County geo-layer aggregation restricted by geo_layer_filters."""
    request_payload = {'scope': 'place_of_performance', 'geo_layer': 'county', 'geo_layer_filters': ['45001', '53005'], 'filters': {'time_period': [{'start_date': '2018-10-01', 'end_date': '2020-09-30'}]}}
    resp = client.post('/api/v2/search/spending_by_geography', content_type='application/json', data=json.dumps(request_payload))
    assert resp.status_code == status.HTTP_200_OK, 'Failed to return 200 Response'
    expected_response = {'scope': 'place_of_performance', 'geo_layer': 'county', 'results': [{'aggregated_amount': 550005.0, 'display_name': 'Charleston', 'per_capita': 550005.0, 'population': 1, 'shape_code': '45001'}, {'aggregated_amount': 5500.0, 'display_name': 'Test Name', 'per_capita': 55.0, 'population': 100, 'shape_code': '53005'}], 'messages': [get_time_period_message()]}
    actual = resp.json()
    # Result order is not guaranteed; normalize before comparing.
    actual['results'].sort(key=_get_shape_code_for_sort)
    assert actual == expected_response
def PrintBranchToRepos(branch_to_repos, params):
    """Print each branch with the set of repos it appears in.

    A branch present in every configured repo is summarized as
    '(all repos)'; a branch in exactly one repo gets no suffix.
    """
    for branch, repos in sorted(iteritems(branch_to_repos)):
        repo_count = len(repos)
        if repo_count == 1:
            msg = ('${START_COLOR}%s${RESET_COLOR}' % (branch,))
        elif repo_count == len(set(params.config.repos)):
            msg = ('${START_COLOR}%s${RESET_COLOR} (all repos)' % (branch,))
        else:
            msg = ('${START_COLOR}%s${RESET_COLOR} (%s)' % (branch, ', '.join(sorted(repos))))
        Print(msg)
class APISiteScansTests(APITestCase):
    """API tests for the per-site scans listing endpoint."""

    def setUp(self):
        # Seed the test database with the fixture site and its scans.
        create_site()

    def test_get_site_scans(self):
        """Listing scans returns 200 with three results, newest live first."""
        scans_url = urljoin(urlroot, 'sites/securethe.news/scans/')
        resp = self.client.get(scans_url, format='json')
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        payload = resp.data
        self.assertEqual(payload['count'], 3)
        self.assertTrue(payload['results'][0]['live'])
def remove_gold_pass_val(save_stats: dict[(str, Any)]) -> dict[(str, Any)]:
    """Zero out every gold-pass field in *save_stats* and return it.

    Also clears the gold-pass login bonus (id 5100). The dict is mutated
    in place and returned for convenience.
    """
    gold_pass = save_stats['gold_pass']
    # Fix: the original line was truncated ("... =" with no value); all
    # sibling fields are reset to 0, so the officer id is zeroed as well.
    gold_pass['officer_id']['Value'] = 0
    gold_pass['renewal_times']['Value'] = 0
    gold_pass['start_date'] = 0
    gold_pass['expiry_date'] = 0
    gold_pass['unknown_2'][0] = 0
    gold_pass['unknown_2'][1] = 0
    gold_pass['start_date_2'] = 0
    gold_pass['expiry_date_2'] = 0
    gold_pass['unknown_3'] = 0
    gold_pass['flag_2']['Value'] = 0
    gold_pass['expiry_date_3'] = 0
    gold_pass['unknown_4']['Value'] = 0
    gold_pass['unknown_5']['Value'] = 0
    gold_pass['unknown_6']['Value'] = 0
    save_stats['gold_pass'] = gold_pass
    save_stats['login_bonuses'][5100] = 0
    return save_stats
def upgrade():
    """Alembic upgrade step: create the Payments table.

    Payments.id doubles as a foreign key into Entities (joined-table
    inheritance); invoice_id links back to Invoices.
    """
    columns = [
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('invoice_id', sa.Integer(), nullable=True),
        sa.Column('amount', sa.Float(), nullable=True),
        sa.Column('unit', sa.String(length=64), nullable=True),
    ]
    constraints = [
        sa.ForeignKeyConstraint(['id'], ['Entities.id']),
        sa.ForeignKeyConstraint(['invoice_id'], ['Invoices.id']),
        sa.PrimaryKeyConstraint('id'),
    ]
    op.create_table('Payments', *columns, *constraints)
class PluginRestRoutes(Resource):
    """Dummy plugin REST resource exposing a single GET endpoint."""
    # (route, allowed HTTP methods) pairs served by this resource.
    ENDPOINTS = [('/plugins/dummy/rest', ['GET'])]
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Optional plugin configuration injected by the framework.
        self.config = kwargs.get('config', None)
    # NOTE(review): the next line looks like a stripped decorator (likely
    # @roles_accepted(*PRIVILEGES['view_analysis']) or similar) — restore
    # before relying on the access control.
    _accepted(*PRIVILEGES['view_analysis'])
    def get(self):
        """Return the static dummy payload for GET requests."""
        return success_message({'dummy': 'rest'}, self.ENDPOINTS[0][0])
def test_sel_with_default_parameters(df_test):
    """SelectByShuffling defaults: cv=3, roc_auc scoring, keeps only var_7."""
    X, y = df_test
    selector = SelectByShuffling(RandomForestClassifier(random_state=1), threshold=0.01, random_state=1)
    selector.fit(X, y)
    expected_frame = pd.DataFrame(X['var_7'].copy())
    # Default hyper-parameters survive fitting unchanged.
    assert selector.threshold == 0.01
    assert selector.cv == 3
    assert selector.scoring == 'roc_auc'
    assert np.round(selector.initial_model_performance_, 3) == 0.997
    expected_dropped = ['var_0', 'var_1', 'var_2', 'var_3', 'var_4', 'var_5', 'var_6', 'var_8', 'var_9', 'var_10', 'var_11']
    assert selector.features_to_drop_ == expected_dropped
    pd.testing.assert_frame_equal(selector.transform(X), expected_frame)
class ELU(Fixed):
    """Parameter-free ELU bijection; outputs lie in (-1, inf)."""
    codomain = constraints.greater_than((- 1.0))
    def _forward(self, x: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[(torch.Tensor, Optional[torch.Tensor])]:
        # y = elu(x): x for x >= 0, exp(x) - 1 for x < 0.
        y = F.elu(x)
        ladj = self._log_abs_det_jacobian(x, y, params)
        return (y, ladj)
    def _inverse(self, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[(torch.Tensor, Optional[torch.Tensor])]:
        # Piecewise inverse: x = y for y >= 0, log1p(y) for y < 0.
        # `eps` (module-level, not visible in this chunk) presumably guards
        # log1p near y = -1 — confirm its definition.
        x = (torch.max(y, torch.zeros_like(y)) + torch.min(torch.log1p((y + eps)), torch.zeros_like(y)))
        ladj = self._log_abs_det_jacobian(x, y, params)
        return (x, ladj)
    def _log_abs_det_jacobian(self, x: torch.Tensor, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> torch.Tensor:
        # log|d elu/dx| = x for x < 0 and 0 otherwise, i.e. -relu(-x).
        return (- F.relu((- x)))
class OptionSeriesPackedbubbleSonificationContexttracksMappingLowpassResonance(Options):
    """Highcharts option proxy: lowpass-resonance mapping for packed-bubble
    sonification context tracks.

    NOTE(review): every name below appears twice (getter then setter); the
    @property / @<name>.setter decorators appear to have been stripped by
    extraction — confirm against the code generator before relying on this.
    """
    def mapFunction(self):
        # Getter: mapping function; no configured default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store raw value (not emitted as a JS literal).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def main():
    """Connect to a KiwoomOpenApiPlus gRPC service over SSL and verify the
    connection. Expects 'server.crt' in the working directory.
    """
    import grpc
    from koapy.backend.kiwoom_open_api_plus.grpc.KiwoomOpenApiPlusServiceClient import KiwoomOpenApiPlusServiceClient
    server_host = 'localhost'
    server_port = 5943
    # Pin the channel to the server's self-signed certificate.
    with open('server.crt', 'rb') as cert_file:
        root_cert = cert_file.read()
    channel_credentials = grpc.ssl_channel_credentials(root_certificates=root_cert, private_key=None, certificate_chain=None)
    service_client = KiwoomOpenApiPlusServiceClient(host=server_host, port=server_port, credentials=channel_credentials)
    service_client.EnsureConnected()
def schema_mandatory_attributes(schema: FieldEntry) -> None:
    """Validate that a schema carries all mandatory attributes.

    Raises ValueError listing what is missing; when the schema is marked
    'reusable' it must additionally define 'expected' and 'top_level'.
    """
    present = sorted((list(schema['field_details'].keys()) + list(schema['schema_details'].keys())))
    missing = ecs_helpers.list_subtract(SCHEMA_MANDATORY_ATTRIBUTES, present)
    if missing:
        raise ValueError('Schema {} is missing the following mandatory attributes: {}.\nFound these: {}'.format(schema['field_details']['name'], ', '.join(missing), present))
    if ('reusable' not in schema['schema_details']):
        return
    reuse_present = sorted(schema['schema_details']['reusable'].keys())
    reuse_missing = ecs_helpers.list_subtract(['expected', 'top_level'], reuse_present)
    if reuse_missing:
        raise ValueError('Reusable schema {} is missing the following reuse attributes: {}.\nFound these: {}'.format(schema['field_details']['name'], ', '.join(reuse_missing), reuse_present))
def test_proper_name_argument():
    """RangeStringArgument accepts ranges/lists (with spaces) and rejects
    malformed or out-of-bound values."""
    argument = RangeStringArgument()
    valid_inputs = ('1', '1-10', '1-10,11-20', '1-10,11,12,13,14,15,16-20', '1 - 5, 2,3 ,4', '1 - 5, 2 ,3 ,4')
    for value in valid_inputs:
        assert argument.validate(value)
    for value in ('', 's5', '1-10,5-4*'):
        assert not argument.validate(value)
    # With an upper bound, values above max_value are rejected.
    bounded = RangeStringArgument(max_value=10)
    assert bounded.validate('1-5, 9')
    assert not bounded.validate('10')
class OptionSeriesXrangeEvents(Options):
    """Highcharts option proxy: event callbacks for an x-range series.

    NOTE(review): every name below appears twice (getter then setter); the
    @property / @<name>.setter decorators appear to have been stripped by
    extraction — confirm against the code generator.
    """
    def afterAnimate(self):
        # Getter: callback fired after series animation; no default.
        return self._config_get(None)
    def afterAnimate(self, value: Any):
        # Setter: store raw value (not emitted as a JS literal).
        self._config(value, js_type=False)
    def checkboxClick(self):
        return self._config_get(None)
    def checkboxClick(self, value: Any):
        self._config(value, js_type=False)
    def click(self):
        return self._config_get(None)
    def click(self, value: Any):
        self._config(value, js_type=False)
    def hide(self):
        return self._config_get(None)
    def hide(self, value: Any):
        self._config(value, js_type=False)
    def legendItemClick(self):
        return self._config_get(None)
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)
    def mouseOut(self):
        return self._config_get(None)
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)
    def mouseOver(self):
        return self._config_get(None)
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)
    def show(self):
        return self._config_get(None)
    def show(self, value: Any):
        self._config(value, js_type=False)
def Run(params):
    """List branches across repos matching the given patterns.

    A leading '-r' argument switches to remote branches; remaining args
    become '*arg*' glob patterns.
    """
    args = params.args[1:]
    remote = bool(args) and (args[0] == '-r')
    if remote:
        del args[0]
    patterns = [('*%s*' % x) for x in args]
    repos_and_local_branches = GetReposAndLocalBranches(params, patterns=patterns, remote=remote)
    branch_to_repos = ConvertRepoToBranchesToBranchToRepos(repos_and_local_branches)
    PrintBranchToRepos(branch_to_repos, params)
class TCPSocketChannelClientTLS(TCPSocketChannelClient):
    """TCP channel client that wraps the socket in TLS and authenticates the
    server by verifying a signature over the TLS session public key.
    """
    # Seconds to wait for the server's verification-signature record.
    DEFAULT_VERIFICATION_SIGNATURE_WAIT_TIMEOUT = 5.0
    def __init__(self, in_path: str, out_path: str, server_pub_key: str, logger: logging.Logger=_default_logger, loop: Optional[AbstractEventLoop]=None, verification_signature_wait_timeout: Optional[float]=None) -> None:
        """`server_pub_key` is the hex-encoded SECP256k1 public key used to
        check the session-key signature."""
        super().__init__(in_path, out_path, logger, loop)
        self.verification_signature_wait_timeout = (self.DEFAULT_VERIFICATION_SIGNATURE_WAIT_TIMEOUT if (verification_signature_wait_timeout is None) else verification_signature_wait_timeout)
        self.server_pub_key = server_pub_key
    # NOTE(review): takes `writer` without self — likely a stripped
    # @staticmethod decorator; confirm.
    def _get_session_pub_key(writer: StreamWriter) -> bytes:
        """Extract the peer certificate's public key (uncompressed point)."""
        cert_data = writer.get_extra_info('ssl_object').getpeercert(binary_form=True)
        cert = x509.Certificate.load(cert_data)
        session_pub_key = VerifyingKey.from_der(cert.public_key.dump()).to_string('uncompressed')
        return session_pub_key
    async def _open_connection(self) -> TCPSocketProtocol:
        """Open a TLS connection, then verify the server's signature over the
        session public key.

        Raises ValueError on timeout, empty read, or a bad signature.
        """
        sock = (await self._open_tls_connection())
        session_pub_key = self._get_session_pub_key(sock.writer)
        try:
            signature = (await asyncio.wait_for(sock.read(), timeout=self.verification_signature_wait_timeout))
        except asyncio.TimeoutError:
            raise ValueError(f'Failed to get peer verification record in timeout: {self.verification_signature_wait_timeout}')
        if (not signature):
            raise ValueError('Unexpected socket read data!')
        try:
            self._verify_session_key_signature(signature, session_pub_key)
        except BadSignatureError as e:
            # Best-effort close before surfacing the verification failure.
            with contextlib.suppress(Exception):
                (await sock.close())
            raise ValueError(f'Invalid TLS session key signature: {e}')
        return sock
    async def _open_tls_connection(self) -> TCPSocketProtocol:
        """Fetch the server certificate out-of-band, then connect with the
        channel pinned to exactly that certificate.

        check_hostname is disabled — presumably because trust comes from
        the signature verification rather than the cert name; confirm.
        """
        cadata = (await asyncio.get_event_loop().run_in_executor(None, (lambda : ssl.get_server_certificate((self._host, self._port)))))
        ssl_ctx = ssl.create_default_context(cadata=cadata)
        ssl_ctx.check_hostname = False
        ssl_ctx.verify_mode = ssl.CERT_REQUIRED
        (reader, writer) = (await asyncio.open_connection(self._host, self._port, ssl=ssl_ctx))
        return TCPSocketProtocol(reader, writer, logger=self.logger, loop=self._loop)
    def _verify_session_key_signature(self, signature: bytes, session_pub_key: bytes) -> None:
        """Verify `signature` over the session key with the pinned server key.

        Raises BadSignatureError when verification fails.
        """
        vk = VerifyingKey.from_string(bytes.fromhex(self.server_pub_key), SECP256k1)
        vk.verify(signature, session_pub_key, hashfunc=hashlib.sha256, sigdecode=sigdecode_der)
class StartsWith(FunctionSignature):
    """EQL `startsWith(source, substring)`: case-folded prefix test.

    Returns None (unknown) when either argument is not a string.
    """
    name = 'startsWith'
    argument_types = [TypeHint.String, TypeHint.String]
    return_value = TypeHint.Boolean

    def run(cls, source, substring):
        """Evaluate the prefix check on concrete values."""
        if not (is_string(source) and is_string(substring)):
            return None
        return fold_case(source).startswith(fold_case(substring))
def prompt_for_user_token(client_id: str, client_secret: str, redirect_uri: str, scope=None) -> RefreshingToken:
    """Interactively obtain a refreshing user token.

    Opens the Spotify authorization URL in a browser, then asks the user
    to paste back the redirect URL to complete the exchange.
    """
    credentials = RefreshingCredentials(client_id, client_secret, redirect_uri)
    user_auth = UserAuth(credentials, scope=scope)
    print('Opening browser for Spotify login...')
    webbrowser.open(user_auth.url)
    pasted_url = input('Please paste redirect URL: ')
    return user_auth.request_token(url=pasted_url.strip())
class OptionPlotoptionsSankeySonificationDefaultspeechoptionsMappingPitch(Options):
    """Highcharts option proxy: pitch mapping for sankey sonification
    default speech options.

    NOTE(review): every name below appears twice (getter then setter); the
    @property / @<name>.setter decorators appear to have been stripped by
    extraction — confirm against the code generator.
    """
    def mapFunction(self):
        # Getter: mapping function; no default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store raw value (not emitted as a JS literal).
        self._config(value, js_type=False)
    def mapTo(self):
        # Default is the literal string 'undefined' (mirrors the JS API).
        return self._config_get('undefined')
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get('undefined')
    def max(self, text: str):
        self._config(text, js_type=False)
    def min(self):
        return self._config_get('undefined')
    def min(self, text: str):
        self._config(text, js_type=False)
    def within(self):
        return self._config_get('undefined')
    def within(self, text: str):
        self._config(text, js_type=False)
def load_c_function(code, name, comm):
    """Compile C `code` against PETSc and load callable `name`.

    Include/library paths come from get_petsc_dir(); the loaded function
    takes the fixed 9-argument pointer/int signature and returns int.
    """
    petsc_dirs = get_petsc_dir()
    cppargs = [('-I%s/include' % d) for d in petsc_dirs]
    ldargs = [('-L%s/lib' % d) for d in petsc_dirs]
    ldargs += [('-Wl,-rpath,%s/lib' % d) for d in petsc_dirs]
    ldargs += ['-lpetsc', '-lm']
    argtypes = [ctypes.c_voidp, ctypes.c_int, ctypes.c_voidp, ctypes.c_voidp, ctypes.c_voidp, ctypes.c_int, ctypes.c_voidp, ctypes.c_voidp, ctypes.c_voidp]
    return load(code, 'c', name, argtypes=argtypes, restype=ctypes.c_int, cppargs=cppargs, ldargs=ldargs, comm=comm)
class Parser():
    """Minimal command-line front end: splits argv into (command, args)."""

    def print_help(self):
        """Print the colorized usage line."""
        print('{fail}usage: {blue}{prog} {green}<command>{reset} [options]'.format(prog='todo', fail=Fore.FAIL, blue=Fore.BLUE, green=Fore.GREEN, reset=Style.RESET_ALL))

    def parseopts(self, args):
        """Return (command_name, remaining_args).

        Prints help and exits when no command was given; raises
        CommandError for an unknown command.
        """
        if not args:
            self.print_help()
            sys.exit()
        cmd_name = args[0]
        cmd_args = args[1:]
        if cmd_name not in commands_dict:
            msg = '{fail}unknown command {blue}{cmd}{reset}'.format(cmd=cmd_name, fail=Fore.FAIL, blue=Fore.BLUE, reset=Style.RESET_ALL)
            raise CommandError(msg)
        return (cmd_name, cmd_args)

    def autocomplete(self):
        """Shell completion hook (not implemented)."""
        pass
class OptionPlotoptionsSunburstSonificationTracksMappingPan(Options):
    """Highcharts option proxy: pan mapping for sunburst sonification tracks.

    NOTE(review): every name below appears twice (getter then setter); the
    @property / @<name>.setter decorators appear to have been stripped by
    extraction — confirm against the code generator.
    """
    def mapFunction(self):
        # Getter: mapping function; no default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store raw value (not emitted as a JS literal).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesPictorialStatesSelectMarker(Options):
    """Highcharts option proxy: marker options for the 'select' state of a
    pictorial series.

    NOTE(review): every name below appears twice (getter then setter); the
    @property / @<name>.setter decorators appear to have been stripped by
    extraction — confirm against the code generator.
    """
    def enabled(self):
        # Getter; no configured default.
        return self._config_get(None)
    def enabled(self, flag: bool):
        # Setter: store raw value (not emitted as a JS literal).
        self._config(flag, js_type=False)
    def enabledThreshold(self):
        # Default 2 mirrors the Highcharts marker.enabledThreshold default.
        return self._config_get(2)
    def enabledThreshold(self, num: float):
        self._config(num, js_type=False)
    def fillColor(self):
        return self._config_get(None)
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def height(self):
        return self._config_get(None)
    def height(self, num: float):
        self._config(num, js_type=False)
    def lineColor(self):
        return self._config_get('#ffffff')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(0)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        return self._config_get(4)
    def radius(self, num: float):
        self._config(num, js_type=False)
    def width(self):
        return self._config_get(None)
    def width(self, num: float):
        self._config(num, js_type=False)
def test_override_custom_form_name(db):
    """Known (complex) field identifiers get their canonical display name on
    commit; custom field identifiers keep the user-provided name."""
    event = EventFactoryBasic()
    shipping_form = CustomForms(event=event, field_identifier='shippingAddress', name='Home Address', form='attendee', type='text')
    port_form = CustomForms(event=event, field_identifier='portNumber', name='Portable Number', form='attendee', type='text')
    db.session.commit()
    assert shipping_form.name == 'Shipping Address'
    assert port_form.name == 'Portable Number'
# NOTE(review): truncated decorator — likely @pytest.mark.parametrize;
# restore the leading name before running.
.parametrize('njit', [True, False])
def test_reflections(njit):
    # Run the numba-jitted kernel and its pure-Python fallback (py_func)
    # against the same stored reference data.
    if njit:
        reflections = kernel.reflections
    else:
        reflections = kernel.reflections.py_func
    dat = DATAKERNEL['refl'][()]
    for (_, val) in dat.items():
        # val[0] holds the kwargs, val[1]/val[2] the expected Rp/Rm.
        (Rp, Rm) = reflections(**val[0])
        assert_allclose(Rp, val[1])
        assert_allclose(Rm, val[2])
class MultiUrl(MultiSource):
    """Multi-source wrapper that lazily loads one URL source per entry.

    A single URL (non-list/tuple) is accepted and treated as a one-element
    list; URLs are loaded in sorted order.
    """

    def __init__(self, urls, *args, filter=None, merger=None, force=None, **kwargs):
        if not isinstance(urls, (list, tuple)):
            urls = [urls]
        assert len(urls)
        sources = []
        for url in sorted(urls):
            sources.append(load_source('url', url, filter=filter, merger=merger, force=force, lazily=True))
        super().__init__(sources, filter=filter, merger=merger)
# NOTE(review): truncated decorator — likely @pytest.mark.requires_tex (or a
# similar marker object); restore the leading name before running.
.requires_tex
def test_pdflatex(cli: CliRunner, temp_with_override: Path):
    """Build the template book and a single page to PDF via pdflatex."""
    path_output = temp_with_override.absolute()
    # Full book build.
    path_template = path_tests.parent.joinpath('jupyter_book', 'book_template')
    cmd = f'{path_template} --path-output {path_output} --builder pdflatex'
    result = cli.invoke(build, cmd.split())
    assert (result.exit_code == 0)
    path_pdf = path_output.joinpath('_build', 'latex')
    assert path_pdf.joinpath('book.pdf').exists()
    # Single-page build of markdown.md.
    path_page = path_tests.parent.joinpath('jupyter_book', 'book_template').joinpath('markdown.md')
    cmd = f'{path_page} --path-output {path_output} --builder pdflatex'
    result = cli.invoke(build, cmd.split())
    assert (result.exit_code == 0)
    path_pdf = path_output.joinpath('_build', '_page', 'markdown', 'latex')
    assert path_pdf.joinpath('book.pdf').exists()
class Command():
    """Introspected metadata for a CLI command implemented by *func*.

    `s` is a flag string: 'n' = needs network, 'w' = needs wallet,
    'p' = needs password. Positional parameters (after self) without
    defaults become `params`; those with defaults become `options`.
    """

    def __init__(self, func, s: str) -> None:
        self.name = func.__name__
        self.description = func.__doc__
        # First sentence of the docstring serves as the short help text.
        self.help = (self.description.split('.')[0] if self.description else None)
        self.requires_network = ('n' in s)
        self.requires_wallet = ('w' in s)
        self.requires_password = ('p' in s)
        # Skip the first variable (self/cls) and ignore *args/**kwargs.
        arg_names = func.__code__.co_varnames[1:func.__code__.co_argcount]
        self.defaults = func.__defaults__
        if not self.defaults:
            self.params = list(arg_names)
            self.options = []
            self.defaults = []
        else:
            split_at = len(arg_names) - len(self.defaults)
            self.params = list(arg_names[:split_at])
            self.options = list(arg_names[split_at:])

    def __repr__(self):
        return '<Command {}>'.format(self)

    def __str__(self):
        rendered_options = ['{}={!r}'.format(name, self.defaults[i]) for (i, name) in enumerate(self.options)]
        return '{}({})'.format(self.name, ', '.join((self.params + rendered_options)))
# NOTE(review): truncated decorator — likely @PETSc.Log.EventDecorator() or a
# similar profiling decorator; restore the leading name before running.
.EventDecorator()
def to_reference_coords_newton_step(ufl_coordinate_element, parameters, x0_dtype='double', dX_dtype=ScalarType):
    """Generate C code for one Newton step of physical->reference coordinate
    inversion: dX = K (x - x0), with K the Jacobian inverse of the mesh map.

    Returns the generated device code as a string; the kernel signature is
    (C, x0, X, dX) where C holds the cell coordinate dofs.
    """
    cell = ufl_coordinate_element.cell
    domain = ufl.Mesh(ufl_coordinate_element)
    K = ufl.JacobianInverse(domain)
    x = ufl.SpatialCoordinate(domain)
    # x0 is a constant ('Real') vector coefficient: the physical target point.
    x0_element = finat.ufl.VectorElement('Real', cell, 0)
    x0 = ufl.Coefficient(ufl.FunctionSpace(domain, x0_element))
    expr = ufl.dot(K, (x - x0))
    C = ufl.Coefficient(ufl.FunctionSpace(domain, ufl_coordinate_element))
    # `complex_mode` is module-level state not visible in this chunk —
    # presumably a flag from tsfc/firedrake configuration; confirm.
    expr = ufl_utils.preprocess_expression(expr, complex_mode=complex_mode)
    expr = ufl_utils.simplify_abs(expr, complex_mode)
    builder = firedrake_interface.KernelBuilderBase(ScalarType)
    builder.domain_coordinate[domain] = C
    Cexpr = builder._coefficient(C, 'C')
    x0_expr = builder._coefficient(x0, 'x0')
    loopy_args = [lp.GlobalArg('C', dtype=ScalarType, shape=(numpy.prod(Cexpr.shape, dtype=int),)), lp.GlobalArg('x0', dtype=x0_dtype, shape=(numpy.prod(x0_expr.shape, dtype=int),))]
    dim = cell.topological_dimension()
    # X is the current reference-space guess (input), dX the Newton update.
    point = gem.Variable('X', (dim,))
    loopy_args.append(lp.GlobalArg('X', dtype=ScalarType, shape=(dim,)))
    context = tsfc.fem.GemPointContext(interface=builder, ufl_cell=cell, point_indices=(), point_expr=point, scalar_type=parameters['scalar_type'])
    translator = tsfc.fem.Translator(context)
    ir = map_expr_dag(translator, expr)
    # Flatten the vector expression into scalar components.
    ir = [gem.Indexed(ir, alpha) for alpha in numpy.ndindex(ir.shape)]
    # Optionally unroll small index sums per the 'unroll_indexsum' parameter.
    max_extent = parameters['unroll_indexsum']
    if max_extent:
        def predicate(index):
            return (index.extent <= max_extent)
        ir = gem.optimise.unroll_indexsum(ir, predicate=predicate)
    ir = impero_utils.preprocess_gem(ir)
    return_variable = gem.Variable('dX', (dim,))
    loopy_args.append(lp.GlobalArg('dX', dtype=dX_dtype, shape=(dim,)))
    assignments = [(gem.Indexed(return_variable, (i,)), e) for (i, e) in enumerate(ir)]
    impero_c = impero_utils.compile_gem(assignments, ())
    (kernel, _) = tsfc.loopy.generate(impero_c, loopy_args, ScalarType, kernel_name='to_reference_coords_newton_step')
    return lp.generate_code_v2(kernel).device_code()
class Subscription(BaseModel):
    """Peewee model linking a Telegram chat to a Twitter user feed."""
    tg_chat = ForeignKeyField(TelegramChat, related_name='subscriptions')  # subscribing chat
    tw_user = ForeignKeyField(TwitterUser, related_name='subscriptions')  # followed account
    known_at = DateTimeField(default=datetime.datetime.now)  # when subscribed
    last_tweet_id = BigIntegerField(default=0)  # 0 means "nothing sent yet"
    def last_tweet(self):
        """Return the last forwarded Tweet, or None if nothing was sent yet.

        NOTE: Tweet.get raises if the row was deleted — presumably tweets
        are never purged; confirm.
        """
        if (self.last_tweet_id == 0):
            return None
        return Tweet.get((Tweet.tw_id == self.last_tweet_id))
def prepare_template_reading_from_fogbench():
    """Fixture factory: returns a callable that writes a one-sensor fogbench
    template (min == max == SENSOR_VALUE) under $FLEDGE_ROOT/data and
    returns its path."""
    def _prepare_template_reading_from_fogbench(FOGBENCH_TEMPLATE, ASSET_NAME):
        fledge_root = os.path.expandvars('${FLEDGE_ROOT}')
        fogbench_template_path = os.path.join(fledge_root, 'data/{}'.format(FOGBENCH_TEMPLATE))
        template_body = ('[{"name": "%s", "sensor_values": [{"name": "sensor", "type": "number", "min": %d, "max": %d, "precision": 0}]}]' % (ASSET_NAME, SENSOR_VALUE, SENSOR_VALUE))
        with open(fogbench_template_path, 'w') as f:
            f.write(template_body)
        return fogbench_template_path
    return _prepare_template_reading_from_fogbench
def run() -> None:
    """Entry point: run every benchmark and print the cumulated totals.

    With '--compile-contracts' on the command line, Solidity contracts are
    precompiled first (requires the external 'solc' binary).
    """
    logging.basicConfig(level=logging.INFO, format='%(message)s')
    logging.info(bold_green(HEADER))
    logging.info((construct_evm_runtime_identifier() + '\n'))
    if ('--compile-contracts' in sys.argv):
        logging.info('Precompiling contracts')
        try:
            compile_contracts(get_contracts())
        except OSError:
            logging.error(bold_red('Compiling contracts requires "solc" system dependency'))
            sys.exit(1)
    all_benchmarks = [
        MineEmptyBlocksBenchmark(),
        ImportEmptyBlocksBenchmark(),
        SimpleValueTransferBenchmark(TO_EXISTING_ADDRESS_CONFIG),
        SimpleValueTransferBenchmark(TO_NON_EXISTING_ADDRESS_CONFIG),
        ERC20DeployBenchmark(),
        ERC20TransferBenchmark(),
        ERC20ApproveBenchmark(),
        ERC20TransferFromBenchmark(),
        DOSContractDeployBenchmark(),
        DOSContractSstoreUint64Benchmark(),
        DOSContractCreateEmptyContractBenchmark(),
        DOSContractRevertSstoreUint64Benchmark(),
        DOSContractRevertCreateEmptyContractBenchmark(),
    ]
    total_stat = DefaultStat()
    for benchmark in all_benchmarks:
        total_stat = total_stat.cumulate(benchmark.run(), increment_by_counter=True)
    print_final_benchmark_total_line(total_stat)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.