code stringlengths 281 23.7M |
|---|
def test_caption_truncation(channel, bot_admin, image):
    """Overlong captions are truncated (keeping the prefix) and the full text
    is delivered as an attached .txt document instead; the same applies when
    editing an existing caption."""
    oversized = ''.join(random.choice(string.ascii_letters) for _ in range(100000))
    with patch('telegram.Bot.send_document') as mock_send_document:
        message = channel.bot_manager.send_photo(bot_admin, image, caption=oversized, prefix='Prefix')
        # Caption keeps the prefix plus the head of the body text.
        assert message.caption.startswith('Prefix\n' + oversized[:50])
        # The untruncated text goes out as a document attachment.
        mock_send_document.assert_called()
        assert mock_send_document.call_args[1]['filename'].endswith('txt')
        # Editing with another oversized body behaves the same way.
        oversized = ''.join(random.choice(string.ascii_letters) for _ in range(100000))
        edited = channel.bot_manager.edit_message_caption(caption=oversized, prefix='Prefix', chat_id=message.chat_id, message_id=message.message_id)
        assert edited.caption.startswith('Prefix\n' + oversized[:50])
class OptionSeriesBarSonificationTracksMappingVolume(Options):
    """Volume-mapping options for bar-series sonification tracks.

    Bug fix: every getter/setter pair here shared a name with no decorators,
    so the second ``def`` silently shadowed the getter and attribute access
    returned a bound setter method. Restored the intended ``@property`` /
    ``@<name>.setter`` pattern; the underlying ``_config_get``/``_config``
    calls are unchanged.
    """

    @property
    def mapFunction(self):
        # Returns the configured value, or None when unset.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def kinetic3d_40(ax, da, A, bx, db, B):
    """Machine-generated kinetic-energy integral kernel for a 3-D Gaussian pair.

    Parameters: ``ax``/``bx`` are the Gaussian exponents, ``da``/``db`` the
    contraction coefficients (presumably broadcastable arrays — the final
    ``numpy.sum`` reduces over them; TODO confirm against caller), and
    ``A``/``B`` the 3-component center coordinates. Returns a (15, 1) array,
    one row per angular-momentum component.

    NOTE(review): generated via common-subexpression elimination — do not
    edit the algebra by hand.
    """
    result = numpy.zeros((15, 1), dtype=float)
    # --- common subexpressions (x3/x33/x37: product-center offsets along
    # x/y/z; x10/x29/x30: Gaussian-overlap exponentials per axis) ---
    x0 = (2.0 * ax)
    x1 = (((2.0 * bx) + x0) ** (- 1.0))
    x2 = ((ax + bx) ** (- 1.0))
    x3 = ((x2 * ((ax * A[0]) + (bx * B[0]))) - A[0])
    x4 = (- ax)
    x5 = (x3 ** 2)
    x6 = (2.0 * (ax ** 2))
    x7 = ((- x4) - (x6 * (x1 + x5)))
    x8 = (bx * x2)
    x9 = (ax * x8)
    x10 = numpy.exp(((- x9) * ((A[0] - B[0]) ** 2)))
    x11 = (1. * numpy.sqrt(x2))
    x12 = (x10 * x11)
    x13 = (x12 * x3)
    x14 = (x13 * x7)
    x15 = (x0 * x8)
    x16 = ((x13 * x15) + x14)
    x17 = (x16 * x3)
    x18 = (x12 * x5)
    x19 = (x1 * x12)
    x20 = (x18 + x19)
    x21 = (x8 * ((x0 * x20) - x12))
    x22 = (x19 * x7)
    x23 = (4.0 * x9)
    x24 = ((x17 + x21) + x22)
    x25 = (x3 * ((2.0 * x19) + x20))
    x26 = (((x1 * ((x13 * x23) + (2.0 * x14))) + (x24 * x3)) + (x8 * ((x0 * x25) - (2.0 * x13))))
    x27 = ((3.0 * x18) + (3.0 * x19))
    x28 = ((x1 * x27) + (x25 * x3))
    x29 = numpy.exp(((- x9) * ((A[1] - B[1]) ** 2)))
    x30 = numpy.exp(((- x9) * ((A[2] - B[2]) ** 2)))
    x31 = ((3. * x2) * x30)
    x32 = (x29 * x31)
    x33 = ((x2 * ((ax * A[1]) + (bx * B[1]))) - A[1])
    x34 = (x33 ** 2)
    x35 = ((- x4) - (x6 * (x1 + x34)))
    x36 = (x28 * x32)
    x37 = ((x2 * ((ax * A[2]) + (bx * B[2]))) - A[2])
    x38 = (x37 ** 2)
    x39 = ((- x4) - (x6 * (x1 + x38)))
    x40 = (da * db)
    x41 = (0. * x40)
    x42 = (x11 * x29)
    x43 = (x33 * x42)
    x44 = (x35 * x43)
    x45 = ((x15 * x43) + x44)
    x46 = (x11 * x30)
    x47 = (x45 * x46)
    x48 = (x26 * x32)
    x49 = (x25 * x32)
    x50 = (0. * x40)
    x51 = (x37 * x46)
    x52 = (x39 * x51)
    x53 = ((x15 * x51) + x52)
    x54 = (x33 * x45)
    x55 = (x34 * x42)
    x56 = (x1 * x42)
    x57 = (x55 + x56)
    x58 = (x8 * ((x0 * x57) - x42))
    x59 = (x35 * x56)
    x60 = ((x54 + x58) + x59)
    x61 = (x20 * x46)
    x62 = (0. * x40)
    x63 = (1. * x62)
    x64 = (x37 * x53)
    x65 = (x38 * x46)
    x66 = (x1 * x46)
    x67 = (x65 + x66)
    x68 = (x8 * ((x0 * x67) - x46))
    x69 = (x39 * x66)
    x70 = ((x64 + x68) + x69)
    x71 = (x20 * x42)
    x72 = (x33 * ((2.0 * x56) + x57))
    x73 = (((x1 * ((x23 * x43) + (2.0 * x44))) + (x33 * x60)) + (x8 * ((x0 * x72) - (2.0 * x43))))
    x74 = (x10 * x31)
    x75 = (x73 * x74)
    x76 = (x3 * x74)
    x77 = (((3. * x10) * x2) * x29)
    x78 = (x3 * x77)
    x79 = (x37 * ((2.0 * x66) + x67))
    x80 = (((x1 * ((x23 * x51) + (2.0 * x52))) + (x37 * x70)) + (x8 * ((x0 * x79) - (2.0 * x51))))
    x81 = (x77 * x80)
    x82 = ((3.0 * x55) + (3.0 * x56))
    x83 = ((x1 * x82) + (x33 * x72))
    x84 = (x74 * x83)
    x85 = (x12 * x57)
    x86 = ((3.0 * x65) + (3.0 * x66))
    x87 = ((x1 * x86) + (x37 * x79))
    x88 = (x77 * x87)
    # --- one entry per Cartesian component of the l=4 shell ---
    result[(0, 0)] = numpy.sum((x41 * (((x32 * ((((3.0 * x1) * ((x17 + x21) + x22)) + (x26 * x3)) + (x8 * (((2.0 * ax) * x28) - x27)))) + (x35 * x36)) + (x36 * x39))))
    result[(1, 0)] = numpy.sum((x50 * (((x25 * x47) + ((x33 * x39) * x49)) + (x33 * x48))))
    result[(2, 0)] = numpy.sum((x50 * ((((x25 * x42) * x53) + ((x35 * x37) * x49)) + (x37 * x48))))
    result[(3, 0)] = numpy.sum((x62 * ((((x24 * x46) * x57) + ((x39 * x57) * x61)) + (x60 * x61))))
    result[(4, 0)] = numpy.sum((x63 * ((((x20 * x37) * x47) + ((x20 * x43) * x53)) + (((x24 * x32) * x33) * x37))))
    result[(5, 0)] = numpy.sum((x62 * ((((x24 * x42) * x67) + ((x35 * x67) * x71)) + (x70 * x71))))
    result[(6, 0)] = numpy.sum((x50 * ((((x16 * x46) * x72) + (x3 * x75)) + ((x39 * x72) * x76))))
    result[(7, 0)] = numpy.sum((x63 * ((((x13 * x53) * x57) + ((x16 * x51) * x57)) + ((x37 * x60) * x76))))
    result[(8, 0)] = numpy.sum((x63 * ((((x13 * x45) * x67) + ((x16 * x43) * x67)) + ((x33 * x70) * x78))))
    result[(9, 0)] = numpy.sum((x50 * ((((x16 * x42) * x79) + (x3 * x81)) + ((x35 * x78) * x79))))
    result[(10, 0)] = numpy.sum((x41 * (((x39 * x84) + (x7 * x84)) + (x74 * ((((3.0 * x1) * ((x54 + x58) + x59)) + (x33 * x73)) + (x8 * (((2.0 * ax) * x83) - x82)))))))
    result[(11, 0)] = numpy.sum((x50 * ((((x12 * x53) * x72) + (((x37 * x7) * x72) * x74)) + (x37 * x75))))
    result[(12, 0)] = numpy.sum((x62 * ((((x12 * x60) * x67) + ((x67 * x7) * x85)) + (x70 * x85))))
    result[(13, 0)] = numpy.sum((x50 * ((((x12 * x45) * x79) + (((x33 * x7) * x77) * x79)) + (x33 * x81))))
    result[(14, 0)] = numpy.sum((x41 * (((x35 * x88) + (x7 * x88)) + (x77 * ((((3.0 * x1) * ((x64 + x68) + x69)) + (x37 * x80)) + (x8 * (((2.0 * ax) * x87) - x86)))))))
    return result
def nlms(x, d, N=4, mu=0.1):
    """Normalized LMS adaptive filter.

    Args:
        x: input signal (1-D sequence).
        d: desired/reference signal (1-D sequence).
        N: number of filter taps.
        mu: adaptation step size.

    Returns:
        numpy array of a-priori errors, one per adaptation step.
    """
    num_iters = min(len(x), len(d)) - N
    taps = np.zeros(N)          # tap-delay line, newest sample at index 0
    weights = np.zeros(N)
    errors = np.zeros(num_iters)
    for n in range(num_iters):
        # Shift the delay line by one and insert the newest input sample.
        taps[1:] = taps[:(- 1)]
        taps[0] = x[n]
        err = d[n] - np.dot(taps, weights)
        # NLMS update; the 0.001 term guards against division by zero
        # while the delay line is still (near-)silent.
        weights = weights + ((mu * err) * taps) / (np.dot(taps, taps) + 0.001)
        errors[n] = err
    return errors
class TestScoreEntropy(BaseCheckValueTest):
    """Checks the entropy of predicted scores within the top-k recommendations.

    Bug fix: ``metric`` was declared as a plain method, but ``get_condition``
    and ``calculate_value_for_test`` access it as an attribute
    (``self.metric.get_result()``), which would have called ``get_result`` on
    a bound method and raised AttributeError. It is now a ``@property``.
    """

    group: ClassVar = RECSYS_GROUP.id
    name: ClassVar = 'Score Entropy (top-k)'
    k: int
    _metric: ScoreDistribution

    def __init__(self, k: int, eq: Optional[Numeric]=None, gt: Optional[Numeric]=None, gte: Optional[Numeric]=None, is_in: Optional[List[Union[(Numeric, str, bool)]]]=None, lt: Optional[Numeric]=None, lte: Optional[Numeric]=None, not_eq: Optional[Numeric]=None, not_in: Optional[List[Union[(Numeric, str, bool)]]]=None, is_critical: bool=True):
        self.k = k
        self._metric = ScoreDistribution(k)
        super().__init__(eq=eq, gt=gt, gte=gte, is_in=is_in, lt=lt, lte=lte, not_eq=not_eq, not_in=not_in, is_critical=is_critical)

    def get_condition(self) -> TestValueCondition:
        """Return the user-supplied condition if any; otherwise compare
        against the reference entropy (within 10%), falling back to
        requiring a strictly positive entropy."""
        if self.condition.has_condition():
            return self.condition
        metric_result = self.metric.get_result()
        ref_value = metric_result.reference_entropy
        if (ref_value is not None):
            return TestValueCondition(eq=approx(ref_value, relative=0.1))
        return TestValueCondition(gt=0)

    def calculate_value_for_test(self) -> Numeric:
        """Value under test: entropy of current top-k scores."""
        return self.metric.get_result().current_entropy

    def get_description(self, value: Numeric) -> str:
        return f'Score Entropy (top-{self.k}) is {value:.3}. The test threshold is {self.get_condition()}'

    @property
    def metric(self):
        # Exposed as a read-only attribute to match the access pattern above.
        return self._metric
def aggregate(conf, fedavg_models, client_models, criterion, metrics, flatten_local_models, fa_val_perf, val_data_loader):
    """Server-side aggregation via zero-shot knowledge transfer.

    For each architecture's FedAvg model, distills the recovered local
    (teacher) models into the student via ``ZeroShotKTSolver``, caches the
    resulting generator on ``conf.generators[arch]``, and returns the
    distilled student models keyed by architecture.

    Note: ``fa_val_perf`` is accepted but unused here.
    """
    # Rebuild the per-client models from their flattened parameter vectors.
    (_, local_models) = agg_utils.recover_models(conf, client_models, flatten_local_models)
    client_models = {}  # rebound: output dict, shadows the input argument
    for (arch, fedavg_model) in fedavg_models.items():
        # Every hyperparameter falls back to a hard-coded default unless
        # overridden in conf.fl_aggregate.
        kt = ZeroShotKTSolver(conf=conf, teacher_models=list(local_models.values()), student_model=fedavg_model, criterion=criterion, metrics=metrics, z_dim=100, n_generator_iter=(1 if ('n_generator_iter' not in conf.fl_aggregate) else int(conf.fl_aggregate['n_generator_iter'])), n_student_iter=(10 if ('n_student_iter' not in conf.fl_aggregate) else int(conf.fl_aggregate['n_student_iter'])), dataset=conf.fl_aggregate['data_name'], batch_size=(128 if ('batch_size' not in conf.fl_aggregate) else int(conf.fl_aggregate['batch_size'])), total_n_pseudo_batches=(1000 if ('total_n_pseudo_batches' not in conf.fl_aggregate) else int(conf.fl_aggregate['total_n_pseudo_batches'])), total_n_server_pseudo_batches=((1000 * 10) if ('total_n_server_pseudo_batches' not in conf.fl_aggregate) else int(conf.fl_aggregate['total_n_server_pseudo_batches'])), server_local_steps=(1 if ('server_local_steps' not in conf.fl_aggregate) else int(conf.fl_aggregate['server_local_steps'])), val_data_loader=val_data_loader, same_noise=(True if ('same_noise' not in conf.fl_aggregate) else conf.fl_aggregate['same_noise']), generator=(conf.generators[arch] if hasattr(conf, 'generators') else None), client_generators=None, generator_learning_rate=(0.001 if ('generator_learning_rate' not in conf.fl_aggregate) else conf.fl_aggregate['generator_learning_rate']), student_learning_rate=(0.002 if ('student_learning_rate' not in conf.fl_aggregate) else conf.fl_aggregate['student_learning_rate']), AT_beta=(0 if ('AT_beta' not in conf.fl_aggregate) else conf.fl_aggregate['AT_beta']), KL_temperature=(1 if ('temperature' not in conf.fl_aggregate) else conf.fl_aggregate['temperature']), log_fn=conf.logger.log, eval_batches_freq=(100 if ('eval_batches_freq' not in conf.fl_aggregate) else int(conf.fl_aggregate['eval_batches_freq'])), early_stopping_batches=(200 if ('early_stopping_batches' not in conf.fl_aggregate) else int(conf.fl_aggregate['early_stopping_batches'])), early_stopping_server_batches=(2000 if 
            ('early_stopping_server_batches' not in conf.fl_aggregate) else int(conf.fl_aggregate['early_stopping_server_batches'])), n_parallel_comp=(2 if ('n_parallel_comp' not in conf.fl_aggregate) else int(conf.fl_aggregate['n_parallel_comp'])), scheme_of_next_generator=('optimal_generator_based_on_teacher' if ('scheme_of_next_generator' not in conf.fl_aggregate) else conf.fl_aggregate['scheme_of_next_generator']), weighted_server_teaching=(False if ('weighted_server_teaching' not in conf.fl_aggregate) else conf.fl_aggregate['weighted_server_teaching']), ensemble_teaching=(True if (('adv_kt_scheme' in conf.fl_aggregate) and (conf.fl_aggregate['adv_kt_scheme'] == 'ensemble')) else False))
        if (not hasattr(conf, 'generators')):
            conf.generators = {}
        # Run the selected KT scheme (method looked up by name) and keep its
        # returned generator for the next aggregation round.
        conf.generators[arch] = getattr(kt, ('alternative_clients_teaching_v3_parallel' if ('adv_kt_scheme' not in conf.fl_aggregate) else conf.fl_aggregate['adv_kt_scheme']))()
        client_models[arch] = kt.server_student.cpu()
        # NOTE(review): `del local_models` inside the loop would raise
        # NameError on a second architecture — presumably fedavg_models only
        # ever holds one arch; confirm with callers.
        del local_models, kt
        torch.cuda.empty_cache()
    return client_models
('{dst_data} = vmulq_f32({dst_data}, {rhs_data});')
def neon_vmul2_4xf32(dst: ([f32][4] Neon), lhs: ([f32][4] Neon), rhs: ([f32][4] Neon)):
    # Exo instruction proc: lane-wise multiply of two 4-wide f32 NEON vectors.
    # The template string above lowers this to the vmulq_f32 intrinsic.
    # NOTE(review): the template computes {dst_data} * {rhs_data}, i.e. it
    # assumes dst already aliases/holds lhs, while the body reads lhs[i] —
    # confirm against the surrounding @instr usage.
    assert (stride(dst, 0) == 1)
    assert (stride(lhs, 0) == 1)
    assert (stride(rhs, 0) == 1)
    for i in seq(0, 4):
        dst[i] = (lhs[i] * rhs[i])
class MapProjectTests(DatabaseTestCase):
    """View tests for the ``/project/<id>/map`` endpoint."""

    def setUp(self):
        super().setUp()
        create_distro(self.session)
        create_project(self.session)
        self.client = self.flask_app.test_client()
        self.user = models.User(email='', username='user')
        social_auth = social_models.UserSocialAuth(user_id=self.user.id, user=self.user)
        self.session.add(self.user)
        self.session.add(social_auth)
        self.session.commit()

    @staticmethod
    def _csrf_token(page_bytes):
        """Extract the CSRF token value from a rendered form page."""
        return page_bytes.split(b'name="csrf_token" type="hidden" value="')[1].split(b'">')[0]

    def test_protected_view(self):
        """Anonymous requests are redirected to the login page."""
        response = self.client.get('/project/1/map', follow_redirects=False)
        self.assertEqual(response.status_code, 302)
        self.assertEqual('/login/', parse.urlparse(response.location).path)

    def test_authenticated_access(self):
        """Logged-in users can open the mapping form."""
        with login_user(self.flask_app, self.user):
            response = self.client.get('/project/1/map', follow_redirects=False)
            self.assertEqual(response.status_code, 200)

    def test_non_existing_project(self):
        """Mapping an unknown project id returns 404."""
        with login_user(self.flask_app, self.user):
            response = self.client.get('/project/idonotexist/map', follow_redirects=False)
            self.assertEqual(response.status_code, 404)

    def test_no_csrf_token(self):
        """A POST without a CSRF token re-renders the form and stores nothing."""
        with login_user(self.flask_app, self.user):
            form = {'package_name': 'geany', 'distro': 'CentOS'}
            response = self.client.post('/project/1/map', data=form, follow_redirects=False)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(0, models.Packages.query.count())

    def test_map_project(self):
        """A valid POST creates a package mapping and emits ProjectMapCreated."""
        with login_user(self.flask_app, self.user):
            with self.flask_app.test_client() as client:
                response = client.get('/project/1/map')
                self.assertEqual(response.status_code, 200)
                self.assertTrue(b'<h1>Project: geany</h1>' in response.data)
                self.assertTrue(b'<td><label for="distro">Distribution</label></td>' in response.data)
                form = {'package_name': 'geany', 'distro': 'Fedora', 'csrf_token': self._csrf_token(response.data)}
                with fml_testing.mock_sends(anitya_schema.ProjectMapCreated):
                    response = client.post('/project/1/map', data=form, follow_redirects=True)
                self.assertEqual(response.status_code, 200)
                self.assertTrue(b'<li class="list-group-item list-group-item-default">Mapping added</li>' in response.data)
                self.assertTrue(b'<h1>Project: geany</h1>' in response.data)
                self.assertEqual(1, models.Packages.query.count())

    def test_map_same_distro(self):
        """Mapping the same package on the same distro twice shows an error."""
        with login_user(self.flask_app, self.user):
            with self.flask_app.test_client() as client:
                response = client.get('/project/1/map')
                form = {'package_name': 'geany', 'distro': 'Fedora', 'csrf_token': self._csrf_token(response.data)}
                with fml_testing.mock_sends(anitya_schema.ProjectMapCreated):
                    response = client.post('/project/1/map', data=form, follow_redirects=True)
                self.assertEqual(response.status_code, 200)
                response = client.post('/project/1/map', data=form, follow_redirects=True)
                self.assertEqual(response.status_code, 200)
                self.assertTrue(b'<li class="list-group-item list-group-item-danger">Could not edit the mapping of geany on Fedora, there is already a package geany on Fedora as part of the project <a href="/project/1/">geany</a>.</li>' in response.data)
                self.assertTrue(b'<h1>Project: geany</h1>' in response.data)
class Conv1d(Module):
    """1-D convolution layer implemented by lifting to a 2-D convolution
    over an inserted singleton axis (dim=2) and squeezing it back out."""

    def __init__(self, in_channels: int, out_channels: int, kernel_size: int, stride: int=1, padding: int=0, dilation: int=1, groups: int=1, dtype: str='float16', bias: bool=False, name: str='conv1d'):
        super().__init__()
        # Weight layout: [out_channels, kernel_size, in_channels // groups].
        self.weight = Parameter(shape=[out_channels, kernel_size, (in_channels // groups)], dtype=dtype, name=f'{name}_weight')
        self.bias = Parameter(shape=[out_channels], dtype=dtype, name=f'{name}_bias') if bias else None
        # Pick the bias-fused 2-D conv op only when a bias parameter exists;
        # the extra spatial dimension gets stride/pad/dilation of 1/0/1.
        op_builder = conv2d_bias if bias else conv2d
        self.op = op_builder(stride=(stride, 1), pad=(padding, 0), dilate=(dilation, 1), group=groups)

    def forward(self, x: Tensor) -> Tensor:
        """Run the 1-D convolution on ``x`` via the underlying 2-D op."""
        expanded_input = unsqueeze(dim=2)(x)
        expanded_weight = unsqueeze(dim=2)(self.weight.tensor())
        if self.bias is not None:
            conv_out = self.op(expanded_input, expanded_weight, self.bias.tensor())
        else:
            conv_out = self.op(expanded_input, expanded_weight)
        return squeeze(dim=2)(conv_out)
def get_constructor_abi(contract_abi: "ABI") -> "Optional[ABIFunction]":
    """Return the constructor entry from a contract ABI, or None if absent.

    Fixes: the original declared a non-optional return type while returning
    None for the zero-constructor case, and carried an unreachable trailing
    ``return None`` after an exhaustive if/elif chain. Annotations are kept
    as strings so they describe the contract without being evaluated.

    Args:
        contract_abi: iterable of ABI element dicts (each with a 'type' key).

    Returns:
        The single constructor ABI dict, or None when no constructor exists.

    Raises:
        ValueError: if the ABI declares more than one constructor.
    """
    candidates = [abi for abi in contract_abi if abi['type'] == 'constructor']
    if len(candidates) > 1:
        raise ValueError('Found multiple constructors.')
    return candidates[0] if candidates else None
def extractThat1VillainessWordpressCom(item):
    """Map a that1villainess.wordpress.com feed item to a release message.

    Returns None for previews or items with neither volume nor chapter,
    the built release message when a known series tag matches, and False
    when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag, series title, translation type)
    series_tags = [
        ('ibmv', 'I Became the Master of the Villain', 'translated'),
        ('cam', 'The Count and the Maid', 'translated'),
        ('yma', 'Your Majesty is Very Annoying!', 'translated'),
        ('ysr', 'You are the Supporting Role', 'translated'),
        ('pcp', 'Please Cry Prettily', 'translated'),
        ('tpcp', 'The Predators Contract Partner', 'translated'),
        ('illyml', 'I Lost the Leash of the Yandere Male Lead', 'translated'),
        ('ba', 'Beloved Angela', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in series_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def update_mapping_from_cache(dsk: Dict[(CollectionAddress, Tuple[(Any, ...)])], resources: TaskResources, start_fn: Callable) -> None:
    """Overwrite task-graph entries with tasks built from cached results.

    For every collection with cached rows, replaces its entry in ``dsk``
    (keyed by parsed CollectionAddress) with a one-element task tuple
    produced by ``start_fn``. Mutates ``dsk`` in place.
    """
    cached_results: Dict[(str, Optional[List[Row]])] = resources.get_all_cached_objects()
    for collection_name, cached_rows in cached_results.items():
        dsk[CollectionAddress.from_string(collection_name)] = (start_fn(cached_rows),)
def _extract_flat_kerning(font, pairpos_table):
extracted_kerning = {}
for glyph_name_1 in pairpos_table.Coverage.glyphs:
class_def_1 = pairpos_table.ClassDef1.classDefs.get(glyph_name_1, 0)
for glyph_name_2 in font.getGlyphOrder():
class_def_2 = pairpos_table.ClassDef2.classDefs.get(glyph_name_2, 0)
kern_value = pairpos_table.Class1Record[class_def_1].Class2Record[class_def_2].Value1.XAdvance
extracted_kerning[(glyph_name_1, glyph_name_2)] = kern_value
return extracted_kerning |
def test_check_instances_dict():
    """_check_instances must pass a dict of (name, estimator) lists through
    unchanged — same dict object, same nested name/estimator objects."""
    instances = {'a': [('ols-a1', OLS()), ('ols-a2', OLS(offset=1))], 'b': [('ols-b1', OLS()), ('ols-b2', OLS(offset=1))]}
    checked = _check_instances(instances)
    assert checked is instances
    for key, checked_pairs in checked.items():
        original_pairs = instances[key]
        for row in range(2):
            for col in range(2):
                # Identity, not just equality: nothing may be copied.
                assert checked_pairs[row][col] is original_pairs[row][col]
def main():
    """Create a minimal SEG-Y file whose extended text headers all copy the
    primary text header. Usage: make-multiple-text.py [file]."""
    if len(sys.argv) < 2:
        sys.exit('Usage: make-multiple-text.py [file]')
    filename = sys.argv[1]

    # Smallest possible survey: one inline, one crossline, one sample.
    spec = segyio.spec()
    spec.sorting = 2
    spec.format = 1
    spec.samples = [1]
    spec.ilines = [1]
    spec.xlines = [1]
    spec.ext_headers = 4

    with segyio.create(filename, spec) as f:
        # Mirror the primary text header into every extended header slot.
        for header_index in range(1, spec.ext_headers + 1):
            f.text[header_index] = f.text[0]
        f.trace[0] = [0]
class BeamStateBackfill(Service, QueenTrackerAPI):
    """Background service that backfills missing state trie nodes.

    Walks the account trie from a recent state root (plus each account's
    storage trie and bytecode), requests missing nodes from peers, and
    writes them into the database. Also implements QueenTrackerAPI by
    delegating queen/peasant peer management to a QueeningQueue.
    """

    # Lifetime/interval counters; _num_added and _num_missed are reset each
    # reporting interval, _total_added_nodes is cumulative.
    _total_added_nodes = 0
    _num_added = 0
    _num_missed = 0
    _num_accounts_completed = 0
    _num_storage_completed = 0
    # Seconds between progress log lines.
    _report_interval = 10
    _num_requests_by_peer: typing.Counter[ETHPeer]

    def __init__(self, db: AtomicDatabaseAPI, peer_pool: ETHPeerPool) -> None:
        self.logger = get_logger('trinity.sync.beam.backfill.BeamStateBackfill')
        self._db = db
        self._peer_pool = peer_pool
        self._is_missing: Set[Hash32] = set()
        self._num_requests_by_peer = Counter()
        self._queening_queue = QueeningQueue(peer_pool)
        # One tracker for the account trie, plus one per storage trie and
        # per bytecode, keyed by (hashed-address-derived) identifiers.
        self._account_tracker = TrieNodeRequestTracker()
        self._storage_trackers: Dict[(Hash32, TrieNodeRequestTracker)] = {}
        self._bytecode_trackers: Dict[(Hash32, TrieNodeRequestTracker)] = {}
        # Root to walk from; set via set_root_hash() before backfill starts.
        self._next_trie_root_hash: Optional[Hash32] = None
        self._begin_backfill = asyncio.Event()
        # Observes external borrowing of peasant peers so backfill only
        # proceeds while those users are silent.
        self._external_peasant_usage = SilenceObserver(minimum_silence_duration=GAP_BETWEEN_TESTS)

    async def get_queen_peer(self) -> ETHPeer:
        """Delegate to the queening queue (QueenTrackerAPI)."""
        return (await self._queening_queue.get_queen_peer())

    def penalize_queen(self, peer: ETHPeer, delay: float=NON_IDEAL_RESPONSE_PENALTY) -> None:
        """Penalize the queen peer for a poor response (QueenTrackerAPI)."""
        self._queening_queue.penalize_queen(peer, delay=delay)

    def insert_peer(self, peer: ETHPeer, delay: float=0) -> None:
        """Return a peer to the queue, optionally after a delay (QueenTrackerAPI)."""
        self._queening_queue.insert_peer(peer, delay=delay)

    async def pop_fastest_peasant(self) -> ETHPeer:
        """Hand out the fastest peasant peer to an external user, while
        marking the queue as externally in use (suppresses backfill)."""
        async with self._external_peasant_usage.make_noise():
            return (await self._queening_queue.pop_fastest_peasant())

    def pop_knights(self) -> Iterable[ETHPeer]:
        return self._queening_queue.pop_knights()

    def set_desired_knight_count(self, desired_knights: int) -> None:
        self._queening_queue.set_desired_knight_count(desired_knights)

    async def run(self) -> None:
        """Service entry point: start progress reporting and the queening
        queue, then run the backfill loop until it completes or is cancelled."""
        self.manager.run_daemon_task(self._periodically_report_progress)
        queening_manager = self.manager.run_daemon_child_service(self._queening_queue)
        (await queening_manager.wait_started())
        (await self._run_backfill())
        self.manager.cancel()

    def _batch_of_missing_hashes(self) -> Tuple[(TrackedRequest, ...)]:
        # Pull up to REQUEST_SIZE pending node requests; run in an executor
        # by the caller since trie iteration is CPU-bound.
        return tuple(take(REQUEST_SIZE, self._missing_trie_hashes()))

    async def _run_backfill(self) -> None:
        """Main loop: batch up missing node hashes and request them from
        idle peasant peers, yielding to external peer users in between."""
        (await self._begin_backfill.wait())
        if (self._next_trie_root_hash is None):
            raise RuntimeError('Cannot start backfill when a recent trie root hash is unknown')
        loop = asyncio.get_event_loop()
        while self.manager.is_running:
            required_data = (await loop.run_in_executor(None, self._batch_of_missing_hashes))
            if (len(required_data) == 0):
                if self._check_complete():
                    self.logger.info('Downloaded all accounts, storage and bytecode state')
                    return
                else:
                    self.logger.debug('Backfill is waiting for more hashes to arrive')
                    (await asyncio.sleep(PAUSE_SECONDS_IF_STATE_BACKFILL_STARVED))
                    continue
            # Wait until no external caller is using peasant peers (or the
            # service is shutting down) before taking a peer for backfill.
            (await asyncio.wait((self._external_peasant_usage.until_silence(), self.manager.wait_finished()), return_when=asyncio.FIRST_COMPLETED))
            if (not self.manager.is_running):
                break
            peer = (await self._queening_queue.pop_fastest_peasant())
            # Skip peers that already have an in-flight GetNodeData request.
            while peer.eth_api.get_node_data.is_requesting:
                self.logger.debug('Want backfill nodes from %s, but it has an active request, skipping...', peer)
                self._queening_queue.insert_peer(peer, NON_IDEAL_RESPONSE_PENALTY)
                peer = (await self._queening_queue.pop_fastest_peasant())
            self.manager.run_task(self._make_request, peer, required_data)

    def _check_complete(self) -> bool:
        """True only when the account trie and every tracked storage trie
        and bytecode have been fully downloaded."""
        if self._account_tracker.is_complete:
            storage_complete = all((storage_tracker.is_complete for storage_tracker in self._storage_trackers.values()))
            if storage_complete:
                bytecode_complete = all((bytecode_tracker.is_complete for bytecode_tracker in self._bytecode_trackers.values()))
                return bytecode_complete
            else:
                return False
        else:
            return False

    def _missing_trie_hashes(self) -> Iterator[TrackedRequest]:
        """Yield requests for missing trie nodes: account-trie nodes first,
        then each found account's storage/bytecode subcomponents.

        Account leaves whose subcomponents are still pending are paused so
        they are not re-reviewed; on exit (including early GeneratorExit
        when the caller only takes a batch) they are marked for review again.
        """
        exhausted_account_leaves: Tuple[(Nibbles, ...)] = ()
        starting_root_hash = self._next_trie_root_hash
        try:
            while self.manager.is_running:
                account_iterator = self._request_tracking_trie_items(self._account_tracker, starting_root_hash)
                try:
                    next_account_info = next(account_iterator)
                except trie_exceptions.MissingTraversalNode as exc:
                    # An account-trie node is missing: request it, then retry.
                    (yield self._account_tracker.generate_request(exc.missing_node_hash, exc.nibbles_traversed))
                    continue
                except StopIteration:
                    break
                (path_to_leaf, address_hash_nibbles, encoded_account) = next_account_info
                account = rlp.decode(encoded_account, sedes=Account)
                subcomponent_hashes_iterator = self._missing_subcomponent_hashes(address_hash_nibbles, account, starting_root_hash)
                for node_request in subcomponent_hashes_iterator:
                    (yield node_request)
                account_components_complete = self._are_account_components_complete(address_hash_nibbles, account)
                if account_components_complete:
                    self._mark_account_complete(path_to_leaf, address_hash_nibbles)
                else:
                    self._account_tracker.pause_review(path_to_leaf)
                    exhausted_account_leaves += (path_to_leaf,)
        except GeneratorExit:
            for path_to_leaf in exhausted_account_leaves:
                self._account_tracker.mark_for_review(path_to_leaf)
            raise
        else:
            for path_to_leaf in exhausted_account_leaves:
                self._account_tracker.mark_for_review(path_to_leaf)
            return

    def _request_tracking_trie_items(self, request_tracker: TrieNodeRequestTracker, root_hash: Hash32) -> Iterator[Tuple[(Nibbles, Nibbles, bytes)]]:
        """Iterate the leaves of the trie at ``root_hash``, yielding
        (path_to_node, full_key_nibbles, leaf_value) triples, and raising
        MissingTraversalNode (re-pointed at the traversal start) when a
        node must first be downloaded."""
        if (self._next_trie_root_hash is None):
            return
        trie = HexaryTrie(self._db, root_hash)
        starting_index = bytes_to_nibbles(root_hash)
        while self.manager.is_running:
            try:
                path_to_node = request_tracker.next_path_to_explore(starting_index)
            except trie_exceptions.PerfectVisibility:
                # Every reachable node has been explored: done.
                return
            try:
                (cached_node, uncached_key) = request_tracker.get_cached_parent(path_to_node)
            except KeyError:
                cached_node = None
                node_getter = partial(trie.traverse, path_to_node)
            else:
                # Resume from a cached parent node to avoid re-walking.
                node_getter = partial(trie.traverse_from, cached_node, uncached_key)
            try:
                node = node_getter()
            except trie_exceptions.MissingTraversalNode as exc:
                if (path_to_node == exc.nibbles_traversed):
                    raise
                elif (cached_node is None):
                    raise RuntimeError(f'Unexpected: on a non-cached traversal to {path_to_node}, the exception only claimed to traverse {exc.nibbles_traversed} -- {exc}') from exc
                else:
                    # Re-raise with the full path so the request is tracked
                    # against the right location.
                    missing_hash = exc.missing_node_hash
                    raise trie_exceptions.MissingTraversalNode(missing_hash, path_to_node) from exc
            except trie_exceptions.TraversedPartialPath as exc:
                node = exc.simulated_node
            if node.value:
                full_key_nibbles = (path_to_node + node.suffix)
                if len(node.sub_segments):
                    raise NotImplementedError(f"The state backfiller doesn't handle keys of different lengths, where one key is a prefix of another. But found {node} in trie with {root_hash!r}")
                (yield (path_to_node, full_key_nibbles, node.value))
            else:
                request_tracker.confirm_prefix(path_to_node, node)

    def _missing_subcomponent_hashes(self, address_hash_nibbles: Nibbles, account: Account, starting_main_root: Hash32) -> Iterator[TrackedRequest]:
        """Yield missing-node requests for an account's storage trie and bytecode."""
        storage_node_iterator = self._missing_storage_hashes(address_hash_nibbles, account.storage_root, starting_main_root)
        for node_request in storage_node_iterator:
            (yield node_request)
        bytecode_node_iterator = self._missing_bytecode_hashes(address_hash_nibbles, account.code_hash, starting_main_root)
        for node_request in bytecode_node_iterator:
            (yield node_request)

    def _missing_storage_hashes(self, address_hash_nibbles: Nibbles, storage_root: Hash32, starting_main_root: Hash32) -> Iterator[TrackedRequest]:
        """Yield requests for missing storage-trie nodes of one account."""
        if (storage_root == BLANK_NODE_HASH):
            # Empty storage trie: nothing to download.
            return
        storage_tracker = self._get_storage_tracker(address_hash_nibbles)
        while self.manager.is_running:
            storage_iterator = self._request_tracking_trie_items(storage_tracker, storage_root)
            try:
                for (path_to_leaf, _hashed_key, _storage_value) in storage_iterator:
                    # Only leaf existence matters here; values are not used.
                    storage_tracker.confirm_leaf(path_to_leaf)
            except trie_exceptions.MissingTraversalNode as exc:
                (yield storage_tracker.generate_request(exc.missing_node_hash, exc.nibbles_traversed))
            else:
                return

    def _missing_bytecode_hashes(self, address_hash_nibbles: Nibbles, code_hash: Hash32, starting_main_root: Hash32) -> Iterator[TrackedRequest]:
        """Yield a request for an account's bytecode if it is not yet in the DB."""
        if (code_hash == EMPTY_SHA3):
            # No code (externally-owned account): nothing to download.
            return
        bytecode_tracker = self._get_bytecode_tracker(address_hash_nibbles)
        if bytecode_tracker.is_complete:
            return
        if (not bytecode_tracker.has_active_requests):
            if (code_hash not in self._db):
                (yield bytecode_tracker.generate_request(code_hash, prefix=()))
            else:
                bytecode_tracker.confirm_leaf(path_to_leaf=())

    def _get_storage_tracker(self, address_hash_nibbles: Nibbles) -> TrieNodeRequestTracker:
        """Get (or lazily create) the storage-trie tracker for an account."""
        if (address_hash_nibbles in self._storage_trackers):
            return self._storage_trackers[address_hash_nibbles]
        else:
            new_tracker = TrieNodeRequestTracker()
            self._storage_trackers[address_hash_nibbles] = new_tracker
            return new_tracker

    def _get_bytecode_tracker(self, address_hash_nibbles: Nibbles) -> TrieNodeRequestTracker:
        """Get (or lazily create) the bytecode tracker for an account."""
        if (address_hash_nibbles in self._bytecode_trackers):
            return self._bytecode_trackers[address_hash_nibbles]
        else:
            new_tracker = TrieNodeRequestTracker()
            self._bytecode_trackers[address_hash_nibbles] = new_tracker
            return new_tracker

    def _mark_account_complete(self, path_to_leaf: Nibbles, address_hash_nibbles: Nibbles) -> None:
        """Record an account as fully downloaded and drop its sub-trackers."""
        self._account_tracker.confirm_leaf(path_to_leaf)
        self._num_accounts_completed += 1
        if (address_hash_nibbles in self._storage_trackers):
            self._num_storage_completed += 1
            del self._storage_trackers[address_hash_nibbles]
        if (address_hash_nibbles in self._bytecode_trackers):
            del self._bytecode_trackers[address_hash_nibbles]

    def _are_account_components_complete(self, address_hash_nibbles: Nibbles, account: Account) -> bool:
        """True when both the account's storage trie and bytecode are done."""
        if (account.storage_root != BLANK_NODE_HASH):
            storage_tracker = self._get_storage_tracker(address_hash_nibbles)
        if ((account.storage_root == BLANK_NODE_HASH) or storage_tracker.is_complete):
            if (account.code_hash == EMPTY_SHA3):
                return True
            else:
                bytecode_tracker = self._get_bytecode_tracker(address_hash_nibbles)
                return bytecode_tracker.is_complete
        else:
            return False

    async def _make_request(self, peer: ETHPeer, request_data: Iterable[TrackedRequest]) -> None:
        """Request a batch of node hashes from one peer, store any returned
        nodes, and always re-queue the requests for review afterwards."""
        self._num_requests_by_peer[peer] += 1
        request_hashes = tuple(set((request.node_hash for request in request_data)))
        try:
            nodes = (await peer.eth_api.get_node_data(request_hashes))
        except asyncio.TimeoutError:
            # Slow peer: delay its return to the queue.
            self._queening_queue.insert_peer(peer, (GAP_BETWEEN_TESTS * 2))
        except PeerConnectionLost:
            pass
        except (BaseP2PError, Exception) as exc:
            self.logger.info('Unexpected err while getting background nodes from %s: %s', peer, exc)
            self.logger.debug('Problem downloading background nodes from peer...', exc_info=True)
            self._queening_queue.insert_peer(peer, (GAP_BETWEEN_TESTS * 2))
        else:
            self._queening_queue.insert_peer(peer, GAP_BETWEEN_TESTS)
            self._insert_results(request_hashes, nodes)
        finally:
            for request in request_data:
                request.tracker.mark_for_review(request.prefix)

    def _insert_results(self, requested_hashes: Tuple[(Hash32, ...)], nodes: Tuple[(Tuple[(Hash32, bytes)], ...)]) -> None:
        """Write returned nodes to the DB in one atomic batch; count misses."""
        returned_nodes = dict(nodes)
        with self._db.atomic_batch() as write_batch:
            for requested_hash in requested_hashes:
                if (requested_hash in returned_nodes):
                    self._num_added += 1
                    self._total_added_nodes += 1
                    encoded_node = returned_nodes[requested_hash]
                    write_batch[requested_hash] = encoded_node
                else:
                    self._num_missed += 1

    def set_root_hash(self, header: BlockHeaderAPI, root_hash: Hash32) -> None:
        """Supply a recent state root. The first call starts backfill; later
        calls only rotate the walked root once per EPOCH_BLOCK_LENGTH blocks."""
        if (self._next_trie_root_hash is None):
            self._next_trie_root_hash = root_hash
            self._begin_backfill.set()
        elif ((header.block_number % EPOCH_BLOCK_LENGTH) == 1):
            self._next_trie_root_hash = root_hash

    async def _periodically_report_progress(self) -> None:
        """Log download counters every _report_interval seconds, plus a
        fuller state-progress line every third interval."""
        for step in itertools.count():
            if (not self.manager.is_running):
                break
            # Reset the per-interval counters before sleeping.
            self._num_added = 0
            self._num_missed = 0
            timer = Timer()
            (await asyncio.sleep(self._report_interval))
            if (not self._begin_backfill.is_set()):
                self.logger.debug('Beam-Backfill: waiting for new state root')
                continue
            msg = ('total=%d' % self._total_added_nodes)
            msg += (' new=%d' % self._num_added)
            msg += (' miss=%d' % self._num_missed)
            self.logger.debug('Beam-Backfill: %s', msg)
            show_top_n_peers = 3
            self.logger.debug('Beam-Backfill-Peer-Usage-Top-%d: %s', show_top_n_peers, self._num_requests_by_peer.most_common(show_top_n_peers))
            if ((step % 3) == 0):
                num_storage_trackers = len(self._storage_trackers)
                if num_storage_trackers:
                    trackers = tuple(self._storage_trackers.values())
                    active_storage_completion = (sum((self._complete_trie_fraction(store_tracker) for store_tracker in trackers)) / num_storage_trackers)
                else:
                    active_storage_completion = 0
                num_requests = sum(self._num_requests_by_peer.values())
                # Keep quiet (debug-level) when nothing happened this window.
                if (num_requests == 0):
                    log = self.logger.debug
                else:
                    log = self.logger.info
                log('State Stats: nodes=%d accts=%d prog=%.2f%% stores=%d storing=%.1f%% of %d walked=%.1fppm tnps=%.0f req=%d', self._total_added_nodes, self._num_accounts_completed, (self._complete_trie_fraction(self._account_tracker) * 100), self._num_storage_completed, (active_storage_completion * 100), num_storage_trackers, (self._contiguous_accounts_complete_fraction() * 1000000.0), (self._num_added / timer.elapsed), num_requests)
                self._num_requests_by_peer.clear()

    def _complete_trie_fraction(self, tracker: TrieNodeRequestTracker) -> float:
        """Fraction of a trie's key space already explored, computed from the
        tracker's remaining unexplored prefixes (each prefix of length L
        covers (1/16)**L of the space)."""
        unknown_prefixes = tracker._trie_fog._unexplored_prefixes
        unknown_fraction = sum((((1 / 16) ** len(prefix)) for prefix in unknown_prefixes))
        return (1 - unknown_fraction)

    def _contiguous_accounts_complete_fraction(self) -> float:
        """Fraction of the account key space contiguously complete around the
        traversal's starting index (derived from the current root hash)."""
        starting_index = bytes_to_nibbles(self._next_trie_root_hash)
        unknown_prefixes = self._account_tracker._trie_fog._unexplored_prefixes
        if (len(unknown_prefixes) == 0):
            return 1
        nearest_index = unknown_prefixes.bisect(starting_index)
        # Bound the contiguous known region by the unexplored prefixes on
        # either side of the starting index (clamped to the key space edges).
        if (nearest_index == 0):
            left_prefix = ((0,) * 64)
        else:
            left_prefix = unknown_prefixes[(nearest_index - 1)]
            if key_starts_with(starting_index, left_prefix):
                # The starting index itself is inside an unexplored prefix.
                return 0
        if (len(unknown_prefixes) == nearest_index):
            right_prefix = ((15,) * 64)
        else:
            right_prefix = unknown_prefixes[nearest_index]
        known_first_nibbles = ((right_prefix[0] - left_prefix[0]) - 1)
        completed_fraction_base = ((1 / 16) * known_first_nibbles)
        right_side_completed = sum(((nibble * ((1 / 16) ** nibble_depth)) for (nibble_depth, nibble) in enumerate(right_prefix[1:], 2)))
        left_side_completed = sum((((15 - nibble) * ((1 / 16) ** nibble_depth)) for (nibble_depth, nibble) in enumerate(left_prefix[1:], 2)))
        return ((left_side_completed + completed_fraction_base) + right_side_completed)
def test_federal_account_update_agency(data_fixture):
 """Back-filling agency linkage populates TAA awarding/funding agencies and
 FederalAccount parent agencies from the fixture data."""
 # Preconditions: fixture rows start without any agency linkage.
 assert (TreasuryAppropriationAccount.objects.filter(awarding_toptier_agency__isnull=True).count() == 6)
 assert (TreasuryAppropriationAccount.objects.filter(funding_toptier_agency__isnull=True).count() == 6)
 assert (FederalAccount.objects.filter(parent_toptier_agency__isnull=True).count() == 4)
 update_federal_account_agency()
 update_treasury_appropriation_account_agencies()
 # Expected agency ids per TAA pk (only the fields asserted by this test).
 expected_taa = {
  1: {'awarding_toptier_agency_id': 1, 'funding_toptier_agency_id': 1},
  2: {'awarding_toptier_agency_id': None, 'funding_toptier_agency_id': 2},
  3: {'funding_toptier_agency_id': 3},
  4: {'funding_toptier_agency_id': 2},
  5: {'funding_toptier_agency_id': 2},
  6: {'funding_toptier_agency_id': 2},
 }
 for pk, fields in expected_taa.items():
  taa = TreasuryAppropriationAccount.objects.get(pk=pk)
  for attr, value in fields.items():
   assert getattr(taa, attr) == value
 expected_parents = {1: 1, 2: 2, 3: 3, 4: 2}
 for pk, parent_id in expected_parents.items():
  assert (FederalAccount.objects.get(pk=pk).parent_toptier_agency_id == parent_id)
def test():
 # Exercise checker: the learner's code must iterate over doc.ents, and
 # iphone_x must be bound to the span covering the text "iPhone X".
 assert ('in doc.ents' in __solution__), 'Are you iterating over the entities?'
 assert (iphone_x.text == 'iPhone X'), 'Are you sure iphone_x covers the right tokens?'
 __msg__.good("Perfect! Of course, you don't always have to do this manually. In the next exercise, you'll learn about spaCy's rule-based matcher, which can help you find certain words and phrases in text.")
class SkuTestSerializer(ExtensionsModelSerializer):
 """Serializer for the Sku test model with expandable related serializers."""
 class Meta:
  model = test_models.Sku
  fields = ('id', 'variant')
 # Expandable relations: string path, class, and fully-specified dict forms.
 expandable_fields = {
  'owners': {'serializer': 'tests.serializers.OwnerTestSerializer', 'many': True},
  'model': ModelTestSerializer,
  'manufacturer': {'serializer': ManufacturerTestSerializer, 'source': 'model.manufacturer', 'id_source': False},
 }
class DummyConnection(Connection):
 """In-memory Connection backed by an asyncio queue, for tests."""
 connection_id = PublicId.from_str('fetchai/dummy:0.1.0')
 def __init__(self, **kwargs):
  """Start in the disconnected state with no queue allocated yet."""
  super().__init__(**kwargs)
  self.state = ConnectionStates.disconnected
  self._queue = None
 async def connect(self, *args, **kwargs):
  """Allocate the internal queue and mark the connection connected."""
  self._queue = asyncio.Queue()
  self.state = ConnectionStates.connected
 async def disconnect(self, *args, **kwargs):
  """Unblock any pending receive with a None sentinel and disconnect."""
  assert (self._queue is not None)
  await self._queue.put(None)
  self.state = ConnectionStates.disconnected
 async def send(self, envelope: 'Envelope'):
  """Enqueue *envelope* without waiting."""
  assert (self._queue is not None)
  self._queue.put_nowait(envelope)
 async def receive(self, *args, **kwargs) -> Optional['Envelope']:
  """Pop the next envelope; None on sentinel, cancellation or any error."""
  try:
   assert (self._queue is not None)
   incoming = await self._queue.get()
   # The None sentinel (from disconnect) and a real envelope are both
   # returned as-is.
   return incoming
  except CancelledError:
   return None
  except Exception as e:
   print(str(e))
   return None
 def put(self, envelope: Envelope):
  """Synchronous enqueue helper for tests."""
  assert (self._queue is not None)
  self._queue.put_nowait(envelope)
def create_algorithm(repr: Union[(dict, str)], bms_name=None) -> BaseAlgorithm:
 """Build an algorithm from a spec and restore its persisted state.

 *repr* is either a dict ({'name': ..., **kwargs}) or a space-separated
 string ('name arg1 arg2 ...'). State previously stored for *bms_name*
 is restored; otherwise a fresh charging state is created.
 """
 registry = dict(soc=SocAlgorithm)
 positional, keyword = [], {}
 if isinstance(repr, dict):
  spec = dict(repr)  # copy so the caller's dict is not mutated
  name = spec.pop('name')
  keyword = spec
 else:
  tokens = repr.strip().split(' ')
  name = tokens.pop(0)
  positional = tokens
 from bmslib.store import store_algorithm_state
 state = store_algorithm_state(bms_name, algorithm_name=name)
 if state:
  initial_state = SocState(**state)
 else:
  initial_state = SocState(charging=True, last_calibration_time=time.time())
 algo = registry[name](name=name, args=SocArgs(*positional, **keyword), state=initial_state)
 if state:
  logger.info('Restored %s algo [args=%s] state %s', name, algo.args, dict_to_short_string(state))
 else:
  logger.info('Initialized %s algo [args=%s]', name, algo.args)
 return algo
def test_align_coords_interpolate():
 # Two geometries related by a 5.0 translation and a 90 deg rotation about z;
 # after LST interpolation with 10 images, aligning all coordinate sets must
 # make the first and last endpoints agree to within 1e-10.
 geoms = get_geoms(translate=5.0, euler=(0.0, 0.0, 90.0))
 interpolated = interpolate(*geoms, 10, kind='lst')
 all_coords = [geom.coords for geom in interpolated]
 aligned = align_coords(all_coords)
 np.testing.assert_allclose(aligned[0], aligned[(- 1)], atol=1e-10)
def test_update_versions_is_working_properly_case_3(create_test_data, create_pymel, create_maya_env):
 """Deep reference chain with a duplicated reference:
 version15 -> version12 (referenced twice) -> asset2_model_take1_v002 ->
 asset2_model_main_v002 (v003 published).

 Checks the walk order of version15's inputs, the reference_resolution
 computed on open (root/create/update/leave buckets), and that
 update_versions returns no updated versions at this level while the whole
 reference hierarchy stays intact.
 """
 data = create_test_data
 pm = create_pymel
 maya_env = create_maya_env
 # Build the chain bottom-up, publishing along the way.
 data['asset2_model_main_v002'].is_published = True
 data['asset2_model_main_v003'].is_published = True
 maya_env.open(data['asset2_model_take1_v002'])
 maya_env.reference(data['asset2_model_main_v002'])
 pm.saveFile()
 data['asset2_model_take1_v002'].is_published = True
 pm.newFile(force=True)
 maya_env.open(data['version12'])
 maya_env.reference(data['asset2_model_take1_v002'])
 pm.saveFile()
 data['version12'].is_published = True
 pm.newFile(force=True)
 maya_env.open(data['version15'])
 # version12 is deliberately referenced twice into version15.
 maya_env.reference(data['version12'])
 maya_env.reference(data['version12'])
 pm.saveFile()
 pm.newFile(force=True)
 # Walking inputs should visit each version once, depth-first.
 visited_versions = []
 for v in data['version15'].walk_inputs():
  visited_versions.append(v)
 expected_visited_versions = [data['version15'], data['version12'], data['asset2_model_take1_v002'], data['asset2_model_main_v002']]
 assert (expected_visited_versions == visited_versions)
 # Opening recomputes the reference resolution buckets.
 reference_resolution = maya_env.open(data['version15'])
 assert (sorted(reference_resolution['root'], key=(lambda x: x.name)) == sorted([data['version12']], key=(lambda x: x.name)))
 assert (sorted(reference_resolution['create'], key=(lambda x: x.name)) == sorted([data['asset2_model_take1_v002'], data['version12']], key=(lambda x: x.name)))
 assert (sorted(reference_resolution['update'], key=(lambda x: x.name)) == sorted([data['asset2_model_main_v002']], key=(lambda x: x.name)))
 assert (reference_resolution['leave'] == [])
 # No version should be swapped at this level.
 updated_versions = maya_env.update_versions(reference_resolution)
 assert (0 == len(updated_versions))
 assert (data['version15'] == maya_env.get_current_version())
 # Both top-level references and their sub-references are unchanged.
 refs_level1 = pm.listReferences()
 assert (data['version12'] == maya_env.get_version_from_full_path(refs_level1[0].path))
 assert (data['version12'] == maya_env.get_version_from_full_path(refs_level1[1].path))
 refs_level2 = pm.listReferences(refs_level1[0])
 assert (data['asset2_model_take1_v002'] == maya_env.get_version_from_full_path(refs_level2[0].path))
 refs_level3 = pm.listReferences(refs_level2[0])
 assert (data['asset2_model_main_v002'] == maya_env.get_version_from_full_path(refs_level3[0].path))
 refs_level2 = pm.listReferences(refs_level1[1])
 assert (data['asset2_model_take1_v002'] == maya_env.get_version_from_full_path(refs_level2[0].path))
 refs_level3 = pm.listReferences(refs_level2[0])
 assert (data['asset2_model_main_v002'] == maya_env.get_version_from_full_path(refs_level3[0].path))
def at_search_result(matches, caller, query='', quiet=False, **kwargs):
 """Post-process a search result list.

 Returns the single match when exactly one was found, otherwise None
 (messaging *caller* with an error unless *quiet*). kwargs may carry
 'nofound_string' / 'multimatch_string' message overrides.
 """
 error = ''
 if not matches:
  error = kwargs.get('nofound_string') or _("Could not find '{query}'.").format(query=query)
  matches = None
 elif len(matches) > 1:
  header = kwargs.get('multimatch_string')
  if header:
   error = ('%s\n' % header)
  else:
   error = _("More than one match for '{query}' (please narrow target):\n").format(query=query)
  for num, result in enumerate(matches, 1):
   # Collect displayable aliases, filtering out internal plural-key aliases.
   if hasattr(result.aliases, 'all'):
    alias_objs = result.aliases.all(return_objs=True)
    aliases = [alias for alias in alias_objs if (hasattr(alias, 'category') and (alias.category not in ('plural_key',)))]
   else:
    aliases = result.aliases
   display = result.get_display_name(caller) if hasattr(result, 'get_display_name') else query
   error += _MULTIMATCH_TEMPLATE.format(number=num, name=display, aliases=' [{alias}]'.format(alias=(';'.join(aliases) if aliases else '')), info=result.get_extra_info(caller))
  matches = None
 else:
  matches = matches[0]
 if error and (not quiet):
  caller.msg(error.strip())
 return matches
def deriveKeysFromUserkey(sid, pwdhash):
 """Derive DPAPI key candidates from a user's password hash and SID.

 For a 20-byte *pwdhash* only the legacy HMAC-SHA1 key applies; other
 lengths additionally yield a PBKDF2-SHA256-strengthened second key.
 Returns a list of one or two key byte strings.
 """
 encoded_sid = (sid + '\x00').encode('utf-16le')
 # Legacy key (was computed identically in both branches before - deduplicated).
 key1 = HMAC.new(pwdhash, encoded_sid, SHA1).digest()
 if (len(pwdhash) == 20):
  return [key1]
 # Strengthened key: two PBKDF2-SHA256 passes over the SID, then HMAC-SHA1.
 tmpKey = pbkdf2_hmac('sha256', pwdhash, sid.encode('utf-16le'), 10000)
 tmpKey2 = pbkdf2_hmac('sha256', tmpKey, sid.encode('utf-16le'), 1)[:16]
 key2 = HMAC.new(tmpKey2, encoded_sid, SHA1).digest()[:20]
 return [key1, key2]
def test_duplicates_are_removed(tmp_path: Path) -> None:
 # Passing the same root directory twice must not yield the same file twice.
 with run_within_dir(tmp_path):
  create_files([Path('dir/subdir/file1.py')])
  files = PythonFileFinder(exclude=(), extend_exclude=(), using_default_exclude=False).get_all_python_files_in((Path(), Path()))
  assert (sorted(files) == [Path('dir/subdir/file1.py')])
class SparkDataFrameFilesystemStoragePlugin(TypeStoragePlugin):
 """Persist Spark DataFrames as parquet under a filesystem intermediate store.

 Leftover debug prints ('kaka ...') removed - they polluted stdout on every
 intermediate-store round trip.
 """
 def compatible_with_storage_def(cls, system_storage_def):
  """Only the local-filesystem system storage is supported."""
  return (system_storage_def is fs_system_storage)
 def set_object(cls, intermediate_store, obj, _context, _runtime_type, paths):
  """Write obj[0] (the DataFrame) as parquet; return the target path."""
  target_path = os.path.join(intermediate_store.root, *paths)
  obj[0].write.parquet(intermediate_store.uri_for_paths(paths))
  return target_path
 def get_object(cls, intermediate_store, context, _runtime_type, paths):
  """Read the parquet back, mirroring set_object's [dataframe, []] shape."""
  return [context.resources.pyspark.spark_session.builder.getOrCreate().read.parquet(os.path.join(intermediate_store.root, *paths)), []]
 def required_resource_keys(cls):
  """The pyspark resource supplies the SparkSession used by get_object."""
  return frozenset({'pyspark'})
def extractSOnlinehomeUs(item):
 """Map a feed *item* to a release message by its tags.

 Returns None for previews or items without a chapter/volume, False when
 no known tag matches.
 """
 (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
 if (not (chp or vol)) or ('preview' in item['title'].lower()):
  return None
 # (tag to match, series name to report, translation type)
 tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
 for (tag, series_name, tl_type) in tagmap:
  if tag not in item['tags']:
   continue
  return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
 return False
class Plugin(plugin.PluginProto):
 """RPIEasy task plugin 217: reads up to four AC-current channels from an
 HDHK modbus sensor attached through a USB-RS485 serial converter."""
 PLUGIN_ID = 217
 PLUGIN_NAME = 'Energy (AC) - HDHK modbus AC current sensor (TESTING)'
 PLUGIN_VALUENAME1 = 'Amper'
 PLUGIN_VALUENAME2 = 'Amper'
 PLUGIN_VALUENAME3 = 'Amper'
 PLUGIN_VALUENAME4 = 'Amper'
 def __init__(self, taskindex):
  """Declare a quad-value serial sensor task; the device handle itself is
  acquired later in plugin_init()."""
  plugin.PluginProto.__init__(self, taskindex)
  self.dtype = rpieGlobals.DEVICE_TYPE_SER
  self.vtype = rpieGlobals.SENSOR_TYPE_QUAD
  self.valuecount = 4
  self.senddataoption = True
  self.recdataoption = False
  self.timeroption = True
  self.timeroptional = False
  self.formulaoption = True
  self.decimals = [2, 2, 2, 2]
  self.hdhk = None  # shared modbus device handle, set in plugin_init()
  self.readinprogress = 0  # guard flag against overlapping reads
  self.initialized = False
 def webform_load(self):
  """Render the task settings web form: serial port, modbus slave address,
  per-value channel selection and the device's channel-count model."""
  choice1 = self.taskdevicepluginconfig[0]
  options = rpiSerial.serial_portlist()
  if (len(options) > 0):
   webserver.addHtml('<tr><td>Serial Device:<td>')
   webserver.addSelector_Head('p217_addr', False)
   for o in range(len(options)):
    webserver.addSelector_Item(options[o], options[o], (str(options[o]) == str(choice1)), False)
   webserver.addSelector_Foot()
   webserver.addFormNote('Address of the USB-RS485 converter')
  else:
   webserver.addFormNote('No serial ports found')
  webserver.addFormNumericBox('Slave address', 'p217_saddr', self.taskdevicepluginconfig[1], 1, 32)
  webserver.addFormNote('Default address is 1.')
  if (self.taskname == ''):
   # Fresh (unnamed) task: preselect channels 0-3 and the 16-channel model.
   choice1 = 0
   choice2 = 1
   choice3 = 2
   choice4 = 3
   choice5 = 8
   channels = 16
  else:
   choice1 = self.taskdevicepluginconfig[2]
   choice2 = self.taskdevicepluginconfig[3]
   choice3 = self.taskdevicepluginconfig[4]
   choice4 = self.taskdevicepluginconfig[5]
   choice5 = self.taskdevicepluginconfig[6]
   channels = int(self.taskdevicepluginconfig[6])
  options = ['None']
  optionvalues = [(- 1)]
  for ch in range(0, channels):
   # Channels are shown as letters A, B, C, ... (chr(65) == 'A').
   options.append(chr((65 + ch)))
   optionvalues.append(ch)
  webserver.addFormSelector('Channel1', 'plugin_217_ch0', len(options), options, optionvalues, None, choice1)
  webserver.addFormSelector('Channel2', 'plugin_217_ch1', len(options), options, optionvalues, None, choice2)
  webserver.addFormSelector('Channel3', 'plugin_217_ch2', len(options), options, optionvalues, None, choice3)
  webserver.addFormSelector('Channel4', 'plugin_217_ch3', len(options), options, optionvalues, None, choice4)
  options = ['8ch (28h-3Fh)', '16ch (08h-17h)']
  optionvalues = [8, 16]
  webserver.addFormSelector('Number of channels on model', 'p217_mod', len(options), options, optionvalues, None, choice5)
  try:
   if ((self.hdhk != None) and self.hdhk.initialized):
    webserver.addFormNote(('HDHK product detected ' + str(self.hdhk.prod)))
  except:
   pass
  return True
 def webform_save(self, params):
  """Persist submitted form values into taskdevicepluginconfig and derive
  valuecount/vtype from the highest configured channel slot."""
  par = webserver.arg('p217_saddr', params)
  try:
   self.taskdevicepluginconfig[1] = int(par)
  except:
   self.taskdevicepluginconfig[1] = 1
  try:
   self.taskdevicepluginconfig[0] = str(webserver.arg('p217_addr', params)).strip()
   self.taskdevicepluginconfig[6] = int(webserver.arg('p217_mod', params))
   for v in range(0, 4):
    par = webserver.arg(('plugin_217_ch' + str(v)), params)
    if (par == ''):
     par = (- 1)
    else:
     par = int(par)
    if (str(self.taskdevicepluginconfig[(v + 2)]) != str(par)):
     # Channel mapping changed: reset the stored value for that slot.
     self.uservar[v] = 0
    self.taskdevicepluginconfig[(v + 2)] = par
    if ((int(par) > 0) and (self.valuecount != (v + 1))):
     self.valuecount = (v + 1)
   if (self.valuecount == 1):
    self.vtype = rpieGlobals.SENSOR_TYPE_SINGLE
   elif (self.valuecount == 2):
    self.vtype = rpieGlobals.SENSOR_TYPE_DUAL
   elif (self.valuecount == 3):
    self.vtype = rpieGlobals.SENSOR_TYPE_TRIPLE
   elif (self.valuecount == 4):
    self.vtype = rpieGlobals.SENSOR_TYPE_QUAD
  except Exception as e:
   misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('webformload' + str(e)))
  return True
 def plugin_init(self, enableplugin=None):
  """(Re)acquire the shared HDHK modbus device for the configured
  port/slave-address and mark the task initialized on success."""
  plugin.PluginProto.plugin_init(self, enableplugin)
  self.taskdevicepluginconfig[0] = str(self.taskdevicepluginconfig[0]).strip()
  self.readinprogress = 0
  self.initialized = False
  if (self.valuecount == 1):
   self.vtype = rpieGlobals.SENSOR_TYPE_SINGLE
  elif (self.valuecount == 2):
   self.vtype = rpieGlobals.SENSOR_TYPE_DUAL
  elif (self.valuecount == 3):
   self.vtype = rpieGlobals.SENSOR_TYPE_TRIPLE
  elif (self.valuecount == 4):
   self.vtype = rpieGlobals.SENSOR_TYPE_QUAD
  if (self.enabled and (self.taskdevicepluginconfig[0] != '') and (self.taskdevicepluginconfig[0] != '0')):
   self.ports = ((str(self.taskdevicepluginconfig[0]) + '/') + str(self.taskdevicepluginconfig[1]))
   try:
    self.hdhk = hdhk.request_hdhk_device(self.taskdevicepluginconfig[0], self.taskdevicepluginconfig[1], int(self.taskdevicepluginconfig[6]))
    if ((self.hdhk != None) and self.hdhk.initialized):
     misc.addLog(rpieGlobals.LOG_LEVEL_INFO, ((((('HDHK ' + self.hdhk.prod) + ' initialized at: ') + str(self.taskdevicepluginconfig[0])) + ' / ') + str(self.taskdevicepluginconfig[1])))
     self.initialized = True
     self.readinprogress = 0
    else:
     misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('HDHK init failed at address ' + str(self.taskdevicepluginconfig[1])))
   except Exception as e:
    self.hdhk = None
    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ('HDHK init error: ' + str(e)))
  else:
   self.ports = ''
 def plugin_read(self):
  """Poll every configured channel and publish the values; readinprogress
  guards against overlapping timer invocations."""
  result = False
  if (self.initialized and (self.readinprogress == 0) and self.enabled):
   self.readinprogress = 1
   for v in range(0, 4):
    vtype = int(self.taskdevicepluginconfig[(v + 2)])
    if (vtype != (- 1)):
     try:
      # Wait up to 10 x 0.05s for a concurrent transaction on the bus.
      c = 10
      while (self.hdhk.busy and (c > 0)):
       time.sleep(0.05)
       c = (c - 1)
      value = self.hdhk.read_value(vtype)
     except:
      value = None
     if (value != None):
      self.set_value((v + 1), value, False)
   self.plugin_senddata()
   self._lastdataservetime = rpieTime.millis()
   result = True
   self.readinprogress = 0
  return result
class OptionPlotoptionsColumnSonificationContexttracksPointgrouping(Options):
 """Generated option wrapper for
 plotOptions.column.sonification.contextTracks.pointGrouping.

 NOTE(review): each getter/setter pair below shares one method name, so the
 second definition shadows the first at class-build time; presumably these
 were @property/@setter pairs upstream - confirm against the generator.
 """
 def algorithm(self):
  """Grouping algorithm; config default is 'minmax'."""
  return self._config_get('minmax')
 def algorithm(self, text: str):
  """Set the grouping algorithm."""
  self._config(text, js_type=False)
 def enabled(self):
  """Whether point grouping is enabled; config default is True."""
  return self._config_get(True)
 def enabled(self, flag: bool):
  """Enable or disable point grouping."""
  self._config(flag, js_type=False)
 def groupTimespan(self):
  """Timespan of each group in milliseconds; config default is 15."""
  return self._config_get(15)
 def groupTimespan(self, num: float):
  """Set the group timespan."""
  self._config(num, js_type=False)
 def prop(self):
  """Point property to group by; config default is 'y'."""
  return self._config_get('y')
 def prop(self, text: str):
  """Set the point property to group by."""
  self._config(text, js_type=False)
class OptionSeriesStreamgraphSonificationContexttracksMappingLowpassResonance(Options):
 """Generated option wrapper for
 series.streamgraph.sonification.contextTracks.mapping.lowpass.resonance.

 NOTE(review): each getter/setter pair below shares one method name, so the
 second definition shadows the first at class-build time; presumably these
 were @property/@setter pairs upstream - confirm against the generator.
 """
 def mapFunction(self):
  """Mapping function; no config default."""
  return self._config_get(None)
 def mapFunction(self, value: Any):
  """Set the mapping function."""
  self._config(value, js_type=False)
 def mapTo(self):
  """Point property mapped to resonance; no config default."""
  return self._config_get(None)
 def mapTo(self, text: str):
  """Set the mapped point property."""
  self._config(text, js_type=False)
 def max(self):
  """Upper bound of the mapped range; no config default."""
  return self._config_get(None)
 def max(self, num: float):
  """Set the upper bound."""
  self._config(num, js_type=False)
 def min(self):
  """Lower bound of the mapped range; no config default."""
  return self._config_get(None)
 def min(self, num: float):
  """Set the lower bound."""
  self._config(num, js_type=False)
 def within(self):
  """Range the mapping operates within; no config default."""
  return self._config_get(None)
 def within(self, value: Any):
  """Set the mapping range context."""
  self._config(value, js_type=False)
class SSHhandler(object):
 """Manage per-host paramiko SSH sessions: SFTP transfer, remote command
 execution, x11-forwarded execution, with an interactive password fallback.

 Fixes applied (apparently '@' characters were lost from the original):
 - transfer(): loginfo format had 2 placeholders for 4 arguments
 - ssh_exec(): loginfo format had 1 placeholder for 3 arguments
 - ssh_x11_exec(): ssh target was built as 'userhost' instead of 'user@host'
 """
 SSH_SESSIONS = {}  # host -> paramiko.SSHClient
 SSH_AUTH = {}  # host -> username that last authenticated
 def __init__(self):
  AES.new = fixed_AES_new
  self.mutex = threading.RLock()
 def remove(self, host):
  """Drop the cached session for *host* (best effort, never raises)."""
  try:
   del SSHhandler.SSH_SESSIONS[host]
  except Exception:
   pass
 def close(self):
  """Close and forget every cached session."""
  keys = list(SSHhandler.SSH_SESSIONS.keys())
  for ssh in keys:
   s = SSHhandler.SSH_SESSIONS.pop(ssh)
   if (s._transport is not None):
    s.close()
   del s
 def transfer(self, host, local_file, remote_file, user=None, pw=None, auto_pw_request=False):
  """Copy *local_file* to *remote_file* on *host* via SFTP, creating the
  remote parent directory if possible."""
  with self.mutex:
   try:
    ssh = self._getSSH(host, (nm.settings().host_user(host) if (user is None) else user), pw, True, auto_pw_request)
    if (ssh is not None):
     sftp = ssh.open_sftp()
     try:
      sftp.mkdir(os.path.dirname(remote_file))
     except Exception:
      pass
     sftp.put(local_file, remote_file)
     # FIX: original format string had 2 placeholders for these 4 args.
     rospy.loginfo('SSH COPY %s -> %s@%s:%s', local_file, ssh._transport.get_username(), host, remote_file)
   except AuthenticationRequest as _aerr:
    raise
   except Exception as _err:
    raise
 def ssh_exec(self, host, cmd, user=None, pw=None, auto_pw_request=False, get_pty=False, close_stdin=False, close_stdout=False, close_stderr=False):
  """Execute *cmd* (a list) on *host*; returns (stdin, stdout, stderr, True)
  or raises when no session could be established."""
  with self.mutex:
   try:
    ssh = self._getSSH(host, (nm.settings().host_user(host) if (user is None) else user), pw, True, auto_pw_request)
    if (ssh is not None):
     cmd_str = utf8(' '.join(cmd))
     # FIX: original format string had 1 placeholder for these 3 args.
     rospy.loginfo('REMOTE execute on %s@%s: %s', ssh._transport.get_username(), host, cmd_str)
     (stdin, stdout, stderr) = (None, None, None)
     if get_pty:
      (stdin, stdout, stderr) = ssh.exec_command(cmd_str, get_pty=get_pty)
     else:
      (stdin, stdout, stderr) = ssh.exec_command(cmd_str)
     if close_stdin:
      stdin.close()
     if close_stdout:
      stdout.close()
     if close_stderr:
      stderr.close()
     return (stdin, stdout, stderr, True)
   except AuthenticationRequest as _aerr:
    raise
   except Exception as _err:
    raise
  raise Exception(('Cannot login %s' % host))
 def ssh_x11_exec(self, host, cmd, title=None, user=None):
  """Run *cmd* on *host* through the system ssh binary with X11 forwarding,
  optionally wrapped in a terminal window titled *title*."""
  with self.mutex:
   try:
    user = (nm.settings().host_user(host) if (user is None) else user)
    if (host in self.SSH_AUTH):
     user = self.SSH_AUTH[host]
    # FIX: target was 'userhost'; the '@' separator was missing.
    ssh_str = ' '.join(['/usr/bin/ssh', '-aqtx', '-oClearAllForwardings=yes', '-oConnectTimeout=5', '-oStrictHostKeyChecking=no', '-oVerifyHostKeyDNS=no', '-oCheckHostIP=no', ''.join([user, '@', host])])
    if (title is not None):
     cmd_str = nm.settings().terminal_cmd([ssh_str, ' '.join(cmd)], title)
    else:
     cmd_str = utf8(' '.join([ssh_str, ' '.join(cmd)]))
    rospy.loginfo('REMOTE x11 execute on %s: %s', host, cmd_str)
    return SupervisedPopen(shlex.split(cmd_str), object_id=utf8(title), description=('REMOTE x11 execute on %s: %s' % (host, cmd_str)))
   except Exception:
    raise
 def _getSSH(self, host, user, pw=None, do_connect=True, auto_pw_request=False):
  """Return an authenticated session for *host*, creating/reauthenticating
  as needed; may prompt for a password when *auto_pw_request* is set."""
  session = SSHhandler.SSH_SESSIONS.get(host, paramiko.SSHClient())
  if ((session is None) or ((not (session.get_transport() is None)) and ((not session.get_transport().is_active()) or (session._transport.get_username() != user)))):
   # Stale or user-mismatched session: discard and start over.
   t = SSHhandler.SSH_SESSIONS.pop(host)
   del t
   if (host in self.SSH_AUTH):
    del self.SSH_AUTH[host]
   session = SSHhandler.SSH_SESSIONS.get(host, paramiko.SSHClient())
  if (session._transport is None):
   session.set_missing_host_key_policy(paramiko.AutoAddPolicy())
   while (((session.get_transport() is None) or (not session.get_transport().authenticated)) and do_connect):
    try:
     session.connect(host, username=user, password=pw, timeout=3, compress=True)
     self.SSH_AUTH[host] = user
    except Exception as e:
     if (utf8(e) in ['Authentication failed.', 'No authentication methods available', 'Private key file is encrypted', 'No existing session']):
      if auto_pw_request:
       (res, user, pw) = self._requestPW(user, host)
       if (not res):
        return None
       self.SSH_AUTH[host] = user
      else:
       raise AuthenticationRequest(user, host, utf8(e))
     else:
      rospy.logwarn('ssh connection to %s failed: %s', host, utf8(e))
      raise Exception(' '.join(['ssh connection to', host, 'failed:', utf8(e)]))
    else:
     SSHhandler.SSH_SESSIONS[host] = session
   if (not (session.get_transport() is None)):
    session.get_transport().set_keepalive(10)
  return session
 def _requestPW(self, user, host):
  """Show the PasswordInput dialog; returns (accepted, user, password)."""
  from python_qt_binding.QtCore import Qt
  from python_qt_binding import loadUi
  try:
   from python_qt_binding.QtGui import QDialog
  except Exception:
   from python_qt_binding.QtWidgets import QDialog
  result = False
  pw = None
  pwInput = QDialog()
  ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ui', 'PasswordInput.ui')
  loadUi(ui_file, pwInput)
  pwInput.setWindowTitle(''.join(['Access for ', host]))
  pwInput.userLine.setText(utf8(user))
  pwInput.pwLine.setText('')
  pwInput.pwLine.setFocus(Qt.OtherFocusReason)
  if pwInput.exec_():
   result = True
   user = pwInput.userLine.text()
   pw = pwInput.pwLine.text()
  return (result, user, pw)
class ForthTest(unittest.TestCase):
 """Forth evaluator test suite (same cases as before; the heavily repeated
 exception-checking boilerplate is factored into private helpers)."""
 def _assert_error(self, program, exc_type, message):
  """Evaluating *program* must raise exactly *exc_type* carrying *message*."""
  with self.assertRaises(exc_type) as err:
   evaluate(program)
  self.assertEqual(type(err.exception), exc_type)
  self.assertEqual(str(err.exception.args[0]), message)
 def _assert_underflow(self, line):
  """Shorthand for the common stack-underflow expectation."""
  self._assert_error([line], StackUnderflowError, 'Insufficient number of items in stack')
 def test_parsing_and_numbers_numbers_just_get_pushed_onto_the_stack(self):
  self.assertEqual(evaluate(['1 2 3 4 5']), [1, 2, 3, 4, 5])
 def test_parsing_and_numbers_pushes_negative_numbers_onto_the_stack(self):
  self.assertEqual(evaluate(['-1 -2 -3 -4 -5']), [-1, -2, -3, -4, -5])
 def test_addition_can_add_two_numbers(self):
  self.assertEqual(evaluate(['1 2 +']), [3])
 def test_addition_errors_if_there_is_nothing_on_the_stack(self):
  self._assert_underflow('+')
 def test_addition_errors_if_there_is_only_one_value_on_the_stack(self):
  self._assert_underflow('1 +')
 def test_subtraction_can_subtract_two_numbers(self):
  self.assertEqual(evaluate(['3 4 -']), [-1])
 def test_subtraction_errors_if_there_is_nothing_on_the_stack(self):
  self._assert_underflow('-')
 def test_subtraction_errors_if_there_is_only_one_value_on_the_stack(self):
  self._assert_underflow('1 -')
 def test_multiplication_can_multiply_two_numbers(self):
  self.assertEqual(evaluate(['2 4 *']), [8])
 def test_multiplication_errors_if_there_is_nothing_on_the_stack(self):
  self._assert_underflow('*')
 def test_multiplication_errors_if_there_is_only_one_value_on_the_stack(self):
  self._assert_underflow('1 *')
 def test_division_can_divide_two_numbers(self):
  self.assertEqual(evaluate(['12 3 /']), [4])
 def test_division_performs_integer_division(self):
  self.assertEqual(evaluate(['8 3 /']), [2])
 def test_division_errors_if_dividing_by_zero(self):
  self._assert_error(['4 0 /'], ZeroDivisionError, 'divide by zero')
 def test_division_errors_if_there_is_nothing_on_the_stack(self):
  self._assert_underflow('/')
 def test_division_errors_if_there_is_only_one_value_on_the_stack(self):
  self._assert_underflow('1 /')
 def test_combined_arithmetic_addition_and_subtraction(self):
  self.assertEqual(evaluate(['1 2 + 4 -']), [-1])
 def test_combined_arithmetic_multiplication_and_division(self):
  self.assertEqual(evaluate(['2 4 * 3 /']), [2])
 def test_dup_copies_a_value_on_the_stack(self):
  self.assertEqual(evaluate(['1 dup']), [1, 1])
 def test_dup_copies_the_top_value_on_the_stack(self):
  self.assertEqual(evaluate(['1 2 dup']), [1, 2, 2])
 def test_dup_errors_if_there_is_nothing_on_the_stack(self):
  self._assert_underflow('dup')
 def test_drop_removes_the_top_value_on_the_stack_if_it_is_the_only_one(self):
  self.assertEqual(evaluate(['1 drop']), [])
 def test_drop_removes_the_top_value_on_the_stack_if_it_is_not_the_only_one(self):
  self.assertEqual(evaluate(['1 2 drop']), [1])
 def test_drop_errors_if_there_is_nothing_on_the_stack(self):
  self._assert_underflow('drop')
 def test_swap_swaps_the_top_two_values_on_the_stack_if_they_are_the_only_ones(self):
  self.assertEqual(evaluate(['1 2 swap']), [2, 1])
 def test_swap_swaps_the_top_two_values_on_the_stack_if_they_are_not_the_only_ones(self):
  self.assertEqual(evaluate(['1 2 3 swap']), [1, 3, 2])
 def test_swap_errors_if_there_is_nothing_on_the_stack(self):
  self._assert_underflow('swap')
 def test_swap_errors_if_there_is_only_one_value_on_the_stack(self):
  self._assert_underflow('1 swap')
 def test_over_copies_the_second_element_if_there_are_only_two(self):
  self.assertEqual(evaluate(['1 2 over']), [1, 2, 1])
 def test_over_copies_the_second_element_if_there_are_more_than_two(self):
  self.assertEqual(evaluate(['1 2 3 over']), [1, 2, 3, 2])
 def test_over_errors_if_there_is_nothing_on_the_stack(self):
  self._assert_underflow('over')
 def test_over_errors_if_there_is_only_one_value_on_the_stack(self):
  self._assert_underflow('1 over')
 def test_user_defined_words_can_consist_of_built_in_words(self):
  self.assertEqual(evaluate([': dup-twice dup dup ;', '1 dup-twice']), [1, 1, 1])
 def test_user_defined_words_execute_in_the_right_order(self):
  self.assertEqual(evaluate([': countup 1 2 3 ;', 'countup']), [1, 2, 3])
 def test_user_defined_words_can_override_other_user_defined_words(self):
  self.assertEqual(evaluate([': foo dup ;', ': foo dup dup ;', '1 foo']), [1, 1, 1])
 def test_user_defined_words_can_override_built_in_words(self):
  self.assertEqual(evaluate([': swap dup ;', '1 swap']), [1, 1])
 def test_user_defined_words_can_override_built_in_operators(self):
  self.assertEqual(evaluate([': + * ;', '3 4 +']), [12])
 def test_user_defined_words_can_use_different_words_with_the_same_name(self):
  self.assertEqual(evaluate([': foo 5 ;', ': bar foo ;', ': foo 6 ;', 'bar foo']), [5, 6])
 def test_user_defined_words_can_define_word_that_uses_word_with_the_same_name(self):
  self.assertEqual(evaluate([': foo 10 ;', ': foo foo 1 + ;', 'foo']), [11])
 def test_user_defined_words_cannot_redefine_non_negative_numbers(self):
  self._assert_error([': 1 2 ;'], ValueError, 'illegal operation')
 def test_user_defined_words_cannot_redefine_negative_numbers(self):
  self._assert_error([': -1 2 ;'], ValueError, 'illegal operation')
 def test_user_defined_words_errors_if_executing_a_non_existent_word(self):
  self._assert_error(['foo'], ValueError, 'undefined operation')
 def test_case_insensitivity_dup_is_case_insensitive(self):
  self.assertEqual(evaluate(['1 DUP Dup dup']), [1, 1, 1, 1])
 def test_case_insensitivity_drop_is_case_insensitive(self):
  self.assertEqual(evaluate(['1 2 3 4 DROP Drop drop']), [1])
 def test_case_insensitivity_swap_is_case_insensitive(self):
  self.assertEqual(evaluate(['1 2 SWAP 3 Swap 4 swap']), [2, 3, 4, 1])
 def test_case_insensitivity_over_is_case_insensitive(self):
  self.assertEqual(evaluate(['1 2 OVER Over over']), [1, 2, 1, 2, 1])
 def test_case_insensitivity_user_defined_words_are_case_insensitive(self):
  self.assertEqual(evaluate([': foo dup ;', '1 FOO Foo foo']), [1, 1, 1, 1])
 def test_case_insensitivity_definitions_are_case_insensitive(self):
  self.assertEqual(evaluate([': SWAP DUP Dup dup ;', '1 swap']), [1, 1, 1, 1])
def appender(filepath, total, results):
 """Drain *results* (a queue) into *filepath*, one stripped line per item,
 reporting progress after each write; a None item is the stop sentinel."""
 log.debug('file appender for %s started ...', filepath)
 print()
 with open(filepath, 'a+t') as fp:
  done = 0
  on_progress(done, total)
  # iter(callable, sentinel) keeps pulling until the None sentinel arrives.
  for line in iter(results.get, None):
   fp.write(line.strip() + '\n')
   done += 1
   on_progress(done, total)
class DeploymentResponseSuccessfulMock(object):
 """Canned HTTP 201 response for a successful deployment creation.

 FIX: the original literal was a syntax error ("'id': ,"); a placeholder id
 is supplied. TODO confirm the intended id against the tests using this mock.
 """
 status_code = 201
 content = {'deployment': {'id': 1, 'revision': '1.2.3', 'changelog': 'Lorem Ipsum', 'description': 'Lorem ipsum usu amet dicat nullam ea. Nec detracto lucilius democritum in.', 'user': 'username', 'timestamp': '2016-06-21T09:45:08+00:00', 'links': {'application': 12345}}, 'links': {'deployment.agent': '/v2/applications/{application_id}'}}
@pytest.mark.usefixtures('use_tmpdir')
def test_gen_data_eq_config():
 """GenDataConfig equality: same name with the same report steps (order does
 not matter) compares equal; differing steps or names compare unequal.

 FIX: the decorator line was mangled to a bare '.usefixtures(...)' - the
 '@pytest.mark' prefix is restored.
 """
 alt1 = GenDataConfig(name='ALT1', report_steps=[2, 1, 3])
 alt2 = GenDataConfig(name='ALT1', report_steps=[2, 3, 1])
 alt3 = GenDataConfig(name='ALT1', report_steps=[3])
 alt4 = GenDataConfig(name='ALT4', report_steps=[3])
 alt5 = GenDataConfig(name='ALT4', report_steps=[4])
 assert (alt1 == alt2)  # step order is irrelevant
 assert (alt1 != alt3)
 assert (alt3 != alt4)
 assert (alt4 != alt5)
def filter_cat_names(cat_names: list[tuple[(str, int, int)]]) -> list[tuple[(str, int, int)]]:
 """Return *cat_names* with only the first entry kept per cat id.

 Input order is preserved. Each entry is a (name, id, form) tuple.
 """
 seen_ids: set[int] = set()  # set membership is O(1) vs the old list scan
 deduped: list[tuple[(str, int, int)]] = []
 for (cat_name, cat_id, cat_form) in cat_names:
  if (cat_id not in seen_ids):
   seen_ids.add(cat_id)
   deduped.append((cat_name, cat_id, cat_form))
 return deduped
def _run_time(cmd_argv, time_argv, repeat, average):
 """Run *cmd_argv* under the timing harness *repeat* times (repeat > 1) and
 combine the parsed per-run results, averaging when *average* is set.

 FIX: removed the unused local 'header'.
 """
 assert (repeat > 1), repeat
 run_cmd = _get_cmd_runner(time_argv, cmd_argv)
 aggregate = None
 for _ in _utils.scriptutil.iter_with_markers(repeat, verbosity=logger):
  text = run_cmd(capture=True)
  result = _parse_output(text)
  # Truthiness (not "is None") kept on purpose: a falsy first result is
  # replaced rather than combined.
  if aggregate:
   aggregate = _combine_results(aggregate, result, average=average)
  else:
   aggregate = result
 return aggregate
def _get_hg_devstr(idtype='rev'):
 """Return a development version suffix from the local mercurial checkout.

 *idtype* selects the form: 'rev' -> 'dev-r<rev>', 'short' ->
 'dev-hg-<short>', 'hex' -> 'dev-hg-<hex>'; other values raise TypeError.
 Falls back to plain 'dev' (with a warning) when mercurial or the
 repository is unavailable. Raises ValueError on a release build
 (module-level *release* flag).
 """
 from os import path
 if release:
  raise ValueError('revsion devstring not valid for a release version')
 try:
  from mercurial import hg, ui
  from mercurial.node import hex, short
  from mercurial.error import RepoError
  try:
   # Repository root is taken as one directory above this module.
   basedir = path.join(path.split(__file__)[0], '..')
   repo = hg.repository(ui.ui(), basedir)
   if (idtype == 'rev'):
    rstr = ('r' + str(repo['tip'].rev()))
   elif (idtype == 'short'):
    rstr = ('hg-' + short(repo.lookup('tip')))
   elif (idtype == 'hex'):
    rstr = ('hg-' + hex(repo.lookup('tip')))
   else:
    raise TypeError('invalid idtype')
   return ('dev-' + rstr)
  except RepoError:
   # Source tree without a .hg directory (e.g. a tarball checkout).
   from warnings import warn
   warn(('No mercurial repository present' + _warntxt))
   return 'dev'
 except ImportError:
  from warnings import warn
  warn(('Could not find mercurial' + _warntxt))
  return 'dev'
def secrets_are_valid(secrets: SUPPORTED_STORAGE_SECRETS, storage_type: Union[(StorageType, str)]) -> bool:
    """Return True when *secrets* authenticate successfully for *storage_type*.

    *storage_type* may be a StorageType member or its name; an unknown name
    raises ValueError.
    """
    if isinstance(storage_type, StorageType):
        resolved_type = storage_type
    else:
        try:
            resolved_type = StorageType[storage_type]
        except KeyError:
            raise ValueError('storage_type argument must be a valid StorageType enum member.')
    authenticator: Any = _get_authenticator_from_config(resolved_type)
    return authenticator(secrets)
class RedisThread(threading.Thread):
    """Daemon-style worker that publishes a note value to Redis on selected clock ticks.

    ``self.clock`` holds the indices (0-23) of the 24-PPQN ticks on which to
    fire, derived from the configured ``ppqn`` and ``shift``.
    NOTE(review): ``lock``, ``clock``, ``patch`` and ``monitor`` are module
    globals not visible here — confirm their contracts at module level.
    """
    def __init__(self):
        threading.Thread.__init__(self)
        self.running = True   # cleared by stop() to terminate run()
        self.enabled = False  # toggled externally via setEnabled()
        self.ppqn = 1         # pulses per quarter note to emit
        self.shift = 0        # phase offset, in 24-PPQN ticks
        self.clock = [0]      # tick indices to fire on (recomputed by setters)
        self.key = '{}.note'.format(patch.getstring('output', 'prefix'))
    def setPpqn(self, ppqn):
        # Recompute the tick-selection table only when the value changed.
        if (ppqn != self.ppqn):
            with lock:
                self.ppqn = ppqn
                # Evenly spaced ticks over 0..23, phase-shifted, wrapped mod 24.
                self.clock = np.mod((np.arange(0, 24, (24 / self.ppqn)) + self.shift), 24).astype(int)
            monitor.info(('redis select = ' + str(self.clock)))
    def setShift(self, shift):
        # Same recomputation as setPpqn, triggered by a phase change.
        if (shift != self.shift):
            with lock:
                self.shift = shift
                self.clock = np.mod((np.arange(0, 24, (24 / self.ppqn)) + self.shift), 24).astype(int)
            monitor.info(('redis select = ' + str(self.clock)))
    def setEnabled(self, enabled):
        # Enable/disable publishing without stopping the thread.
        self.enabled = enabled
    def stop(self):
        # Disable output and let run() fall out of its loop.
        self.enabled = False
        self.running = False
    def run(self):
        while self.running:
            if self.enabled:
                monitor.debug('redis beat')
                # NOTE(review): `clock` here is the *module-level* sequence of
                # tick objects (waited on via .wait()), distinct from
                # self.clock which holds indices into it — confirm at module scope.
                for tick in [clock[indx] for indx in self.clock]:
                    tick.wait()
                    patch.setvalue(self.key, 1.0)
            else:
                # Idle politely while disabled.
                time.sleep(patch.getfloat('general', 'delay'))
def main():
    """Ansible module entry point for managing FortiOS firewall address objects.

    Builds the argument spec from the versioned schema, opens the httpapi
    connection, runs the configuration change, and exits with changed/diff
    state (or fails with an error message).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_address': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy every schema-derived option into the nested firewall_address spec;
    # the mkey (object name) becomes required.
    for attribute_name in module_spec['options']:
        fields['firewall_address']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_address']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Default logging to off when the option is absent.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_address')
        (is_error, has_changed, result, diff) = fortios_firewall(module.params, fos, module.check_mode)
    else:
        # No httpapi connection available.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        # Fixed typo in the user-facing warning: "FortOS" -> "FortiOS".
        module.warn('Ansible has detected version mismatch between FortiOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class SqliteWriteDispatcher():
    """Serializes all SQLite writes onto one dedicated writer thread.

    Callers enqueue ``(write_callback, completion_callback, size_hint)``
    entries via :meth:`put`.  The writer thread batches entries into a single
    transaction; when a batch fails, the batch size drops to 1 and the entries
    are replayed individually so the failing entry can be reported through its
    completion callback.  Completion callbacks run on a thread pool, never on
    the writer thread.
    """
    def __init__(self, db_context: 'DatabaseContext') -> None:
        self._db_context = db_context
        self._logger = logs.get_logger('sqlite-writer')
        self._writer_queue: 'queue.Queue[WriteEntryType]' = queue.Queue()
        self._writer_thread = threading.Thread(target=self._writer_thread_main, daemon=True)
        self._writer_loop_event = threading.Event()
        self._callback_thread_pool = ThreadPoolExecutor()
        self._allow_puts = True        # set False by stop() to reject new work
        self._is_alive = True
        self._exit_when_empty = False  # set True by stop(): drain, then exit
        self._writer_thread.start()
    def _writer_thread_main(self) -> None:
        """Writer thread body: drain the queue in batches, one transaction each."""
        # The connection is owned by this thread for its whole lifetime.
        self._db: sqlite3.Connection = self._db_context.acquire_connection()
        maximum_batch_size = 10
        write_entries: List[WriteEntryType] = []
        write_entry_backlog: List[WriteEntryType] = []
        while self._is_alive:
            # Signals at least one loop iteration has run (stop() waits on it).
            self._writer_loop_event.set()
            if len(write_entry_backlog):
                # Replay mode after a failed batch: one entry at a time.
                assert (maximum_batch_size == 1)
                write_entries = [write_entry_backlog.pop(0)]
            else:
                try:
                    write_entry: WriteEntryType = self._writer_queue.get(timeout=0.1)
                except queue.Empty:
                    if self._exit_when_empty:
                        return
                    continue
                # Greedily gather up to maximum_batch_size entries per transaction.
                write_entries = [write_entry]
                while ((len(write_entries) < maximum_batch_size) and (not self._writer_queue.empty())):
                    write_entries.append(self._writer_queue.get_nowait())
            time_start = time.time()
            completion_callbacks: List[CompletionEntryType] = []
            total_size_hint = 0
            try:
                # Connection context manager commits on success / rolls back on error.
                with self._db:
                    self._db.execute('begin')
                    for (write_callback, completion_callback, entry_size_hint) in write_entries:
                        write_callback(self._db)
                        if (completion_callback is not None):
                            # None = success; the exception slot is filled on failure below.
                            completion_callbacks.append((completion_callback, None))
                        total_size_hint += entry_size_hint
            except Exception as e:
                self._logger.exception('Database write failure', exc_info=e)
                if (len(write_entries) > 1):
                    # Retry the whole batch entry-by-entry to isolate the bad one.
                    self._logger.debug('Retrying with batch size of 1')
                    maximum_batch_size = 1
                    write_entry_backlog = write_entries
                    continue
                # Single-entry failure: report the exception to its callback.
                if (write_entries[0][1] is not None):
                    completion_callbacks.append((write_entries[0][1], e))
            else:
                if len(write_entries):
                    time_ms = int(((time.time() - time_start) * 1000))
                    self._logger.debug('Invoked %d write callbacks (hinted at %d bytes) in %d ms', len(write_entries), total_size_hint, time_ms)
            # Dispatch callbacks off-thread so slow callbacks can't stall writes.
            for dispatchable_callback in completion_callbacks:
                self._callback_thread_pool.submit(self._dispatch_callback, *dispatchable_callback)
    def _dispatch_callback(self, callback: CompletionCallbackType, exc_value: Optional[Exception]) -> None:
        """Run one completion callback; log (never propagate) its failures."""
        try:
            callback(exc_value)
        except Exception as e:
            traceback.print_exc()
            self._logger.exception('Exception within completion callback', exc_info=e)
    def put(self, write_entry: WriteEntryType) -> None:
        """Enqueue a write entry; raises WriteDisabledError after stop()."""
        if (not self._allow_puts):
            raise WriteDisabledError()
        self._writer_queue.put_nowait(write_entry)
    def stop(self) -> None:
        """Stop accepting writes, drain the queue, and shut everything down.

        Idempotent: a second call returns immediately.
        """
        if self._exit_when_empty:
            return
        self._allow_puts = False
        self._exit_when_empty = True
        # NOTE(review): this waits only for the writer loop to have started at
        # least once; the join below is what waits for the drain — confirm intent.
        self._writer_loop_event.wait()
        self._writer_thread.join()
        self._db_context.release_connection(self._db)
        self._callback_thread_pool.shutdown(wait=True)
        self._is_alive = False
    def is_stopped(self) -> bool:
        """True once stop() has fully completed."""
        return (not self._is_alive)
def test_delete_policy_cascades(db: Session, policy: Policy) -> None:
    """Deleting a policy also removes its rules and rule targets."""
    first_rule = policy.rules[0]
    first_target = first_rule.targets[0]
    policy.delete(db=db)
    # Every related record must be gone after the cascade.
    for model, object_id in ((Rule, first_rule.id), (RuleTarget, first_target.id), (Policy, policy.id)):
        assert model.get(db=db, object_id=object_id) is None
class OptionSeriesScatterSonificationTracksMappingPlaydelay(Options):
    """Generated option wrapper for
    ``series.scatter.sonification.tracks.mapping.playDelay``.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; without ``@property``/setter decorators only the second ``def``
    survives — presumably decorators were stripped from this generated code;
    confirm against the generator's output.
    """
    def mapFunction(self):
        # Getter: no documented default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: stored verbatim (not emitted as a JS expression).
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class Solution():
    def rob(self, root: TreeNode) -> int:
        """Return the maximum sum of node values with no parent/child both taken.

        Post-order traversal; each node yields a pair
        (best_if_this_node_robbed, best_if_this_node_skipped).
        """
        def best(node):
            if node is None:
                return (0, 0)
            left = best(node.left)
            right = best(node.right)
            # Robbing this node forbids robbing either child.
            robbed = (node.val + left[1]) + right[1]
            # Skipping it lets each subtree pick its own best independently.
            skipped = max(left) + max(right)
            return (robbed, skipped)
        return max(best(root))
def maximum_absolute_diff(normalized_density, bottom, top):
    """Locate the radius in [bottom, top] where |density - chord| is largest.

    Returns:
        (radius_at_max, max_absolute_difference)
    """
    def objective(radius):
        # Negated so a scalar *minimizer* finds the maximum gap.
        gap = normalized_density(radius) - straight_line(radius, normalized_density, bottom, top)
        return -np.abs(gap)
    optimum = minimize_scalar(objective, bounds=[bottom, top], method='bounded')
    return (optimum.x, -optimum.fun)
class OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMapping(Options):
    """Generated option wrapper for
    ``plotOptions.scatter3d.sonification.defaultInstrumentOptions.mapping``.

    Each accessor below lazily creates/returns the typed sub-configuration
    object for one mapping channel.
    """
    def frequency(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingFrequency)
    def gapBetweenNotes(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingGapbetweennotes)
    def highpass(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingHighpass)
    def lowpass(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingLowpass)
    def noteDuration(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingNoteduration)
    def pan(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingPan)
    def pitch(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingPitch)
    def playDelay(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingPlaydelay)
    def time(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingTime)
    def tremolo(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingTremolo)
    def volume(self) -> 'OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsScatter3dSonificationDefaultinstrumentoptionsMappingVolume)
class Migration(migrations.Migration):
    """Rename ``accessLevel`` to ``accessLevelOld`` on collection invitation
    and member models — presumably to free the name for a replacement field in
    a follow-up migration; TODO confirm against the next migration in the app.
    """
    dependencies = [('django_etebase', '0025_auto__1216')]
    operations = [migrations.RenameField(model_name='collectioninvitation', old_name='accessLevel', new_name='accessLevelOld'), migrations.RenameField(model_name='collectionmember', old_name='accessLevel', new_name='accessLevelOld')]
class CisSpidersItem(Item):
    """Scrapy item declaring the fields of one scraped record.

    All fields are untyped scrapy ``Field``s; their semantics are defined by
    the spiders that populate them.
    """
    idx = Field()
    spider_name = Field()  # which spider produced this record
    img = Field()
    title = Field()
    abstract = Field()
    link = Field()
    date = Field()
    area = Field()
    key_words = Field()
    contact = Field()
    video = Field()
    state = Field()
    project_holder = Field()
    partner = Field()
    economic = Field()
    coordinates = Field()
class ReportData(BaseModel):
    """Payload describing a named chart-report template for one conversation."""
    conv_uid: str
    template_name: str
    # NOTE(review): default None with a plain `str` annotation; pydantic v1
    # coerces this to an optional field — consider `Optional[str] = None`.
    template_introduce: str = None
    charts: List[ChartData]
    def prepare_dict(self):
        """Return a plain-dict form with each chart serialized via ``.dict()``."""
        return {'conv_uid': self.conv_uid, 'template_name': self.template_name, 'template_introduce': self.template_introduce, 'charts': [chart.dict() for chart in self.charts]}
class AbstractPopStatsDataContainer(AbstractPlayerInfoDataContainer, abc.ABC):
    """Base for containers that derive (key, value) budget items from per-pop stats.

    Subclasses supply the three ``_iterate_popstats`` / ``_get_key_from_popstats``
    / ``_get_value_from_popstats`` hooks.
    NOTE(review): the bare-``pass`` hooks read like abstract methods whose
    ``@abstractmethod`` decorators were stripped — confirm against the original.
    """
    def _iterate_budgetitems(self, cd: datamodel.CountryData) -> Iterable[Tuple[(str, float)]]:
        # Map each pop-stats record to one (key, value) budget item.
        for pop_stats in self._iterate_popstats(cd):
            key = self._get_key_from_popstats(pop_stats)
            val = self._get_value_from_popstats(pop_stats)
            (yield (key, val))
    def _iterate_popstats(self, cd: datamodel.CountryData) -> Iterable[PopStatsType]:
        # Hook: yield the pop-stats records for this container.
        pass
    def _get_key_from_popstats(self, ps: PopStatsType) -> str:
        # Hook: label for one record.
        pass
    def _get_value_from_popstats(self, ps: PopStatsType) -> float:
        # Hook: numeric value for one record.
        pass
    def _include(self, player_cd):
        # A player is included iff it has at least one pop-stats record.
        try:
            next(self._iterate_popstats(player_cd))
            return True
        except StopIteration:
            return False
class ConvTasNetModule(LightningModule):
    """LightningModule wrapping a ConvTasNet-style source-separation model.

    Bundles the model, loss, optimizer factory, metric callables and an
    optional LR scheduler; train/val/test steps share one ``_step`` path.
    """
    def __init__(self, model: nn.Module, loss: Callable, optim_fn: Callable[(Iterable[Parameter], Optimizer)], metrics: Mapping[(str, Callable)], lr_scheduler: Optional[_LRScheduler]=None) -> None:
        super().__init__()
        self.model: nn.Module = model
        self.loss: Callable = loss
        # Optimizer is built immediately from the model's parameters.
        self.optim: Optimizer = optim_fn(self.model.parameters())
        self.lr_scheduler: Optional[_LRScheduler] = (lr_scheduler(self.optim) if lr_scheduler else None)
        self.metrics: Mapping[(str, Callable)] = metrics
        self.train_metrics: Dict = {}
        self.val_metrics: Dict = {}
        self.test_metrics: Dict = {}
        self.save_hyperparameters()
    def setup(self, stage: Optional[str]=None) -> None:
        """Populate the per-subset metric dicts for the current stage."""
        if (stage == 'fit'):
            self.train_metrics.update(self.metrics)
            self.val_metrics.update(self.metrics)
        else:
            self.test_metrics.update(self.metrics)
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.model(x)
    def training_step(self, batch: Batch, *args: Any, **kwargs: Any) -> Dict[(str, Any)]:
        return self._step(batch, subset='train')
    def validation_step(self, batch: Batch, *args: Any, **kwargs: Any) -> Dict[(str, Any)]:
        return self._step(batch, subset='val')
    def test_step(self, batch: Batch, *args: Any, **kwargs: Any) -> Optional[Dict[(str, Any)]]:
        return self._step(batch, subset='test')
    def _step(self, batch: Batch, subset: str) -> Dict[(str, Any)]:
        """Shared train/val/test step: forward, loss, metric logging.

        NOTE(review): this returns the loss tensor, not a dict as annotated —
        confirm which the callers/annotation should say.
        """
        (mix, src, mask) = batch
        pred = self.model(mix)
        loss = self.loss(pred, src, mask)
        self.log(f'losses/{subset}_loss', loss.item(), on_step=True, on_epoch=True)
        metrics_result = self._compute_metrics(pred, src, mix, mask, subset)
        self.log_dict(metrics_result, on_epoch=True)
        return loss
    def configure_optimizers(self) -> Tuple[Any]:
        # Scheduler steps per epoch, monitoring the validation loss.
        epoch_schedulers = {'scheduler': self.lr_scheduler, 'monitor': 'losses/val_loss', 'interval': 'epoch'}
        return ([self.optim], [epoch_schedulers])
    def _compute_metrics(self, pred: torch.Tensor, label: torch.Tensor, inputs: torch.Tensor, mask: torch.Tensor, subset: str) -> Dict[(str, torch.Tensor)]:
        """Evaluate every metric registered for *subset*; keys are log paths."""
        metrics_dict = getattr(self, f'{subset}_metrics')
        metrics_result = {}
        for (name, metric) in metrics_dict.items():
            metrics_result[f'metrics/{subset}/{name}'] = metric(pred, label, inputs, mask)
        return metrics_result
class OptionPlotoptionsHistogramStatesInactive(Options):
    """Generated option wrapper for ``plotOptions.histogram.states.inactive``.

    NOTE(review): getter/setter pairs share a name; only the second ``def``
    survives without ``@property`` decorators — presumably stripped from this
    generated code.
    """
    def animation(self) -> 'OptionPlotoptionsHistogramStatesInactiveAnimation':
        return self._config_sub_data('animation', OptionPlotoptionsHistogramStatesInactiveAnimation)
    def enabled(self):
        # Getter: defaults to True.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def opacity(self):
        # Getter: defaults to 0.2.
        return self._config_get(0.2)
    def opacity(self, num: float):
        self._config(num, js_type=False)
def is_valid_inputs(output_shapes, c_shapes):
    """Check whether C (shape *c_shapes*) can be broadcast to the bmm output shape.

    Returns:
        (ok, message) — message is the empty string when ok.
    """
    if output_shapes == c_shapes:
        return (True, '')
    def _strip_unit_prefix(shapes):
        # Drop leading static dims equal to 1, keeping at least the last dim.
        if not shapes:
            return []
        count = 0
        for dim in shapes:
            if not (isinstance(dim, IntImm) and (dim.value() == 1)):
                break
            count += 1
        remaining = shapes[count:]
        return remaining if remaining else [shapes[-1]]
    failure_msg = f"C can't be broadcast to the bmm output.Output shapes: {output_shapes}, C shapes: {c_shapes}"
    bias_shapes = _strip_unit_prefix(c_shapes)
    # After stripping, C must have strictly fewer dims than the output...
    if len(bias_shapes) >= len(output_shapes):
        return (False, failure_msg)
    # ...and its trailing dims must match the output's trailing dims exactly.
    for out_dim, bias_dim in zip(reversed(output_shapes), reversed(bias_shapes)):
        if out_dim != bias_dim:
            return (False, failure_msg)
    return (True, '')
def reload_config(conf_file=None):
    """(Re)load config.ini, creating a default one on first run.

    Mutates the module-level configuration globals; also probes for an
    installed typing library and a working clipboard command.
    """
    global CACHE_PERIOD_MIN, CACHE_PERIOD_DEFAULT_MIN, CLIPBOARD_CMD, CONF, MAX_LEN, ENV, ENC, SEQUENCE
    CONF = configparser.ConfigParser()
    conf_file = (conf_file if (conf_file is not None) else CONF_FILE)
    if (not exists(conf_file)):
        # First run: create the config directory (best-effort) and write defaults.
        try:
            os.mkdir(os.path.dirname(conf_file))
        except OSError:
            pass
        with open(conf_file, 'w', encoding=ENC) as cfile:
            CONF.add_section('dmenu')
            CONF.set('dmenu', 'dmenu_command', 'dmenu')
            CONF.add_section('dmenu_passphrase')
            CONF.set('dmenu_passphrase', 'obscure', 'True')
            CONF.set('dmenu_passphrase', 'obscure_color', '#222222')
            CONF.add_section('database')
            CONF.set('database', 'database_1', '')
            CONF.set('database', 'keyfile_1', '')
            CONF.set('database', 'pw_cache_period_min', str(CACHE_PERIOD_DEFAULT_MIN))
            CONF.set('database', 'autotype_default', SEQUENCE)
            CONF.write(cfile)
    try:
        CONF.read(conf_file)
    except configparser.ParsingError as err:
        dmenu_err(f'Config file error: {err}')
        sys.exit()
    if CONF.has_option('dmenu', 'dmenu_command'):
        command = shlex.split(CONF.get('dmenu', 'dmenu_command'))
    else:
        CONF.set('dmenu', 'dmenu_command', 'dmenu')
        # NOTE(review): here `command` is a *string*, while the branch above
        # yields a *list*; the '-l' check below happens to be safe either way,
        # but `shlex.split` would be consistent — confirm intent.
        command = 'dmenu'
    if ('-l' in command):
        # Mirror dmenu's -l (number of lines) as the max list length.
        MAX_LEN = int(command[(command.index('-l') + 1)])
    if CONF.has_option('database', 'pw_cache_period_min'):
        CACHE_PERIOD_MIN = int(CONF.get('database', 'pw_cache_period_min'))
    else:
        CACHE_PERIOD_MIN = CACHE_PERIOD_DEFAULT_MIN
    if CONF.has_option('database', 'autotype_default'):
        SEQUENCE = CONF.get('database', 'autotype_default')
    if CONF.has_option('database', 'type_library'):
        # Verify the configured typing tool is actually installed.
        for typ in ['xdotool', 'ydotool', 'wtype', 'dotool']:
            if (CONF.get('database', 'type_library') == typ):
                try:
                    _ = run([typ, '--version'], check=False, stdout=DEVNULL, stderr=DEVNULL)
                except OSError:
                    dmenu_err(f'''{typ} not installed.
Please install or remove that option from config.ini''')
                    sys.exit()
    # Pick a clipboard tool appropriate for the display server.
    if os.environ.get('WAYLAND_DISPLAY'):
        clips = ['wl-copy']
    else:
        clips = ['xsel -b', 'xclip -selection clip']
    for clip in clips:
        try:
            # Probe by piping empty input; first tool that runs wins.
            _ = run(shlex.split(clip), check=False, stdout=DEVNULL, stderr=DEVNULL, input='')
            CLIPBOARD_CMD = clip
            break
        except OSError:
            continue
    if (CLIPBOARD_CMD == 'true'):
        # NOTE(review): 'true' is presumably the module-level sentinel default
        # for CLIPBOARD_CMD (not visible here) — confirm at module scope.
        dmenu_err(f"{' or '.join([shlex.split(i)[0] for i in clips])} needed for clipboard support")
def main():
    """Parse the zmq rx/tx port options; server wiring is still a TODO."""
    from argparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument('-r', '--rx_port', default=5556, help='Port number to receive zmq messages for IO on')
    parser.add_argument('-t', '--tx_port', default=5555, help='Port number to send IO messages via zmq')
    args = parser.parse_args()
    print('TODO Updated to use IOServer Class')
class Worker():
    """Background maintenance worker for the Notion mind database.

    Schedules a job every 5 minutes that runs due temporal tasks and prunes
    blank rows from every mind collection.
    """
    def __init__(self, client, notion_ai):
        self.client = client
        self.notion_ai = notion_ai
        self.task_manager = TaskManager(logging, notion_ai)
        self.logging = logging
        schedule.every(5).minutes.do(self.background_job)
        # Event used to stop the scheduler thread (set it to stop).
        self.stop_run_continuously = self.run_continuously()
    def run_continuously(self, interval=1):
        """Start a thread that runs pending scheduled jobs every *interval* seconds.

        Returns the Event that, once set, stops the thread.
        """
        cease_continuous_run = threading.Event()
        class ScheduleThread(threading.Thread):
            # NOTE(review): `run(cls)` uses `cls` where `self` is conventional;
            # it still receives the instance — rename for clarity.
            def run(cls):
                while (not cease_continuous_run.is_set()):
                    schedule.run_pending()
                    time.sleep(interval)
        continuous_thread = ScheduleThread()
        continuous_thread.start()
        return cease_continuous_run
    def myFunc(self, e):
        # Predicate: True for entries with an empty title.
        return (e.title == '')
    def _remove_blank_rows(self):
        """Query each mind collection for rows with an empty title and remove them."""
        number_of_collections = self.notion_ai.mind_structure.get_number_of_collections()
        for i in range(0, number_of_collections):
            (collection_id, id) = self.notion_ai.mind_structure.get_collection_by_index(i)
            collection = self.client.get_collection(collection_id=collection_id)
            cv = collection.parent.views[0]
            self.logging.info('Analysing Collection #{0}'.format(i))
            filter_params = {'filters': [{'filter': {'operator': 'is_empty'}, 'property': 'title'}], 'operator': 'and'}
            result = cv.build_query(filter=filter_params).execute()
            self.logging.info('Analyzing this elements #{0}'.format(result))
            self._remove_blank_blocks_from_list(result)
        self.logging.info('Background work remove blank rows finished at {0}'.format(datetime.now()))
    def _remove_blank_blocks_from_list(self, list):
        """Delete blocks that are empty across every tracked property."""
        for block in list:
            bl = self.client.get_block(block.id)
            title = bl.title
            url = bl.url
            multi_tag = self.notion_ai.property_manager.get_properties(bl, multi_tag_property=1)
            ai_tags = self.notion_ai.property_manager.get_properties(bl, ai_tags_property=1)
            mind_extension = self.notion_ai.property_manager.get_properties(bl, mind_extension_property=1)
            # Only remove when *every* field is empty, to avoid deleting real data.
            if ((len(title) == 0) and (len(url) == 0) and (len(multi_tag) == 0) and (len(ai_tags) == 0) and (len(mind_extension) == 0)):
                self.logging.info('Removing block with id: {0} as it is blank , title: {1}'.format(bl.id, bl.title))
                self.logging.info('Have a look at this if you feel something bad was deleted')
                bl.remove()
    def _do_assigned_temporal_tasks(self):
        """Run and retire tasks whose scheduled time is 1-5 minutes away."""
        now = datetime.now()
        for task in self.task_manager.tasks:
            minutes_diff = ((task.datetime_to_run - now).total_seconds() / 60.0)
            if (1 < minutes_diff < 5):
                print('Doing temporal tasks at {0}'.format(now))
                task.run_task()
                self.task_manager.remove_task(task)
    def background_job(self):
        """Periodic job: temporal tasks first, then blank-row cleanup."""
        if (len(self.task_manager.tasks) > 0):
            self._do_assigned_temporal_tasks()
        time.sleep(2)
        try:
            self._remove_blank_rows()
        except HTTPError as e:
            # Best-effort: a Notion API hiccup must not kill the scheduler.
            logging.info(str(e))
def get_benchmarks(*, _cache={}):
    """Return {suite_name: [benchmark names]} for every suite in BENCHMARKS.

    The mutable default ``_cache`` is a deliberate memoization idiom: suites
    already discovered are served from it without cloning or scanning again.
    """
    found = {}
    for suite, info in BENCHMARKS.items():
        if suite in _cache:
            # Cached: hand back a fresh copy so callers can't mutate the cache.
            found[suite] = list(_cache[suite])
            continue
        repo_url = info['url']
        repo_root = os.path.join(REPOS_DIR, os.path.basename(repo_url))
        if not os.path.exists(repo_root):
            # Clone on demand, creating the repos directory first if needed.
            if not os.path.exists(REPOS_DIR):
                os.makedirs(REPOS_DIR)
            git('clone', repo_url, repo_root, root=None)
        names = _get_benchmark_names(os.path.join(repo_root, info['reldir']))
        found[suite] = _cache[suite] = names
    return found
class OptionPlotoptionsCylinderSonificationDefaultinstrumentoptionsMappingHighpassFrequency(Options):
    """Generated option wrapper for
    ``plotOptions.cylinder.sonification.defaultInstrumentOptions.mapping.highpass.frequency``.

    NOTE(review): getter/setter pairs share a name; only the second ``def``
    survives without ``@property`` decorators — presumably stripped from this
    generated code.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_switch_with_loop1(task):
    """Restructure a switch-over-week-number CFG that contains a loop between cases.

    Builds the control-flow graph, runs PatternIndependentRestructuring, and
    asserts the resulting AST: an outer switch with fall-through cases, an
    inner endless loop containing a second switch over a synthesized state
    variable, and a trailing return condition.
    NOTE(review): the `scanf_call(var_1, , 2)` below is missing its second
    argument (a syntax error, presumably an extraction artifact) — restore it
    from the original fixture.
    """
    # Input variables as seen by the decompiler (with SSA originals attached).
    var_1 = Variable('var_1', Pointer(Integer(32, True), 32), None, False, Variable('var_28', Pointer(Integer(32, True), 32), 1, False, None))
    var_0 = Variable('var_0', Integer(32, True), None, True, Variable('var_10', Integer(32, True), 0, True, None))
    # Basic blocks: prompt + bounds check, indirect branch (switch), the 7
    # weekday printers, the invalid-input block, and the final return.
    task.graph.add_nodes_from((vertices := [BasicBlock(0, [Assignment(ListOperation([]), print_call('Enter week number(1-7): ', 1)), Assignment(var_1, UnaryOperation(OperationType.address, [var_0], Pointer(Integer(32, True), 32), None, False)), Assignment(ListOperation([]), scanf_call(var_1, , 2)), Branch(Condition(OperationType.greater_us, [var_0, Constant(7, Integer(32, True))], CustomType('bool', 1)))]), BasicBlock(2, [IndirectBranch(var_0)]), BasicBlock(3, [Assignment(ListOperation([]), print_call('Invalid input! Please enter week number between 1-7.', 14))]), BasicBlock(4, [Assignment(ListOperation([]), print_call('Monday', 4))]), BasicBlock(5, [Assignment(ListOperation([]), print_call('Tuesday', 5))]), BasicBlock(6, [Assignment(ListOperation([]), print_call('Wednesday', 7))]), BasicBlock(7, [Assignment(ListOperation([]), print_call('Thursday', 8))]), BasicBlock(8, [Assignment(ListOperation([]), print_call('Friday', 9))]), BasicBlock(9, [Assignment(ListOperation([]), print_call('Saturday', 11))]), BasicBlock(10, [Assignment(ListOperation([]), print_call('Sunday', 13))]), BasicBlock(11, [Return(ListOperation([Constant(0, Integer(32, True))]))])]))
    # Edges: the switch dispatch plus the unconditional chains that create the loop.
    task.graph.add_edges_from([FalseCase(vertices[0], vertices[1]), TrueCase(vertices[0], vertices[2]), SwitchCase(vertices[1], vertices[2], [Constant(0, Integer(32))]), SwitchCase(vertices[1], vertices[3], [Constant(1, Integer(32))]), SwitchCase(vertices[1], vertices[4], [Constant(2, Integer(32))]), SwitchCase(vertices[1], vertices[5], [Constant(3, Integer(32))]), SwitchCase(vertices[1], vertices[6], [Constant(4, Integer(32))]), SwitchCase(vertices[1], vertices[7], [Constant(5, Integer(32))]), SwitchCase(vertices[1], vertices[8], [Constant(6, Integer(32))]), SwitchCase(vertices[1], vertices[9], [Constant(7, Integer(32))]), UnconditionalEdge(vertices[2], vertices[10]), UnconditionalEdge(vertices[3], vertices[5]), UnconditionalEdge(vertices[4], vertices[5]), UnconditionalEdge(vertices[5], vertices[9]), UnconditionalEdge(vertices[6], vertices[10]), UnconditionalEdge(vertices[7], vertices[8]), UnconditionalEdge(vertices[8], vertices[10]), UnconditionalEdge(vertices[9], vertices[3])])
    PatternIndependentRestructuring().run(task)
    # Top level: code block, switch, and a trailing return condition.
    assert (isinstance((seq_node := task._ast.root), SeqNode) and (len(seq_node.children) == 3))
    assert (isinstance(seq_node.children[0], CodeNode) and (seq_node.children[0].instructions == vertices[0].instructions[:(- 1)]))
    assert isinstance((switch := seq_node.children[1]), SwitchNode)
    assert isinstance((return_cond := seq_node.children[2]), ConditionNode)
    assert ((switch.expression == var_0) and (len(switch.children) == 8))
    # Case ordering and fall-through (break_case) flags of the outer switch.
    assert (isinstance((case1 := switch.cases[0]), CaseNode) and (case1.constant == Constant(1, Integer(32))) and (case1.break_case is False))
    assert (isinstance((case2 := switch.cases[1]), CaseNode) and (case2.constant == Constant(2, Integer(32))) and (case2.break_case is False))
    assert (isinstance((case3 := switch.cases[2]), CaseNode) and (case3.constant == Constant(3, Integer(32))) and (case3.break_case is False))
    assert (isinstance((case7 := switch.cases[3]), CaseNode) and (case7.constant == Constant(7, Integer(32))) and (case7.break_case is True))
    assert (isinstance((case4 := switch.cases[4]), CaseNode) and (case4.constant == Constant(4, Integer(32))) and (case4.break_case is True))
    assert (isinstance((case5 := switch.cases[5]), CaseNode) and (case5.constant == Constant(5, Integer(32))) and (case5.break_case is False))
    assert (isinstance((case6 := switch.cases[6]), CaseNode) and (case6.constant == Constant(6, Integer(32))) and (case6.break_case is True))
    assert (isinstance((default := switch.default), CaseNode) and (default.constant == 'default') and (default.break_case is False))
    # Restructuring synthesizes a state variable to drive the inner switch.
    new_variable = case1.child.instructions[0].definitions[0]
    new_assignment = Assignment(new_variable, Constant(0, Integer.int32_t()))
    assert (isinstance(case1.child, CodeNode) and (case1.child.instructions == [new_assignment]))
    assert (isinstance((cn2 := case2.child), ConditionNode) and (cn2.false_branch is None))
    assert (task._ast.condition_map[(~ cn2.condition)] == Condition(OperationType.equal, [var_0, Constant(1, Integer(32))]))
    assert (cn2.true_branch_child.instructions == vertices[4].instructions)
    assert (isinstance((cn3 := case3.child), ConditionNode) and (cn3.false_branch is None))
    assert (task._ast.condition_map[(~ cn3.condition)] == Condition(OperationType.equal, [var_0, Constant(1, Integer(32))]))
    assert (cn3.true_branch_child.instructions == [Assignment(new_variable, Constant(1, Integer.int32_t()))])
    assert (isinstance((loop_seq := case7.child), SeqNode) and (len(loop_seq.children) == 2))
    assert (isinstance(case4.child, CodeNode) and (case4.child.instructions == vertices[6].instructions))
    assert (isinstance(case5.child, CodeNode) and (case5.child.instructions == vertices[7].instructions))
    assert (isinstance(case6.child, CodeNode) and (case6.child.instructions == vertices[8].instructions))
    assert (isinstance(default.child, CodeNode) and (default.child.instructions == vertices[2].instructions))
    # Loop entry guard: only weeks not in {1,2,3} initialize the state to 2.
    assert isinstance((last_entry := loop_seq.children[0]), ConditionNode)
    assert (last_entry.condition.is_conjunction and (len((operands := last_entry.condition.operands)) == 3))
    assert ({task._ast.condition_map[(~ cond)] for cond in operands} == {Condition(OperationType.equal, [var_0, Constant(const, Integer(32))]) for const in {1, 2, 3}})
    assert ((last_entry.false_branch is None) and isinstance(last_entry.true_branch_child, CodeNode))
    assert (last_entry.true_branch_child.instructions == [Assignment(new_variable, Constant(2, Integer.int32_t()))])
    # The loop itself: endless while containing the inner switch + saturday block.
    assert (isinstance((loop := loop_seq.children[1]), WhileLoopNode) and loop.is_endless_loop)
    assert (isinstance((loop_body := loop.body), SeqNode) and (len(loop_body.children) == 2))
    assert isinstance((switch2 := loop_body.children[0]), SwitchNode)
    assert (isinstance(loop_body.children[1], CodeNode) and (loop_body.children[1].instructions == (vertices[9].instructions + [new_assignment])))
    assert ((switch2.expression == new_variable) and (len(switch2.children) == 2))
    assert (isinstance((case2_1 := switch2.cases[0]), CaseNode) and (case2_1.constant == Constant(0, Integer(32, True))) and (case2_1.break_case is False))
    assert (isinstance((case2_2 := switch2.cases[1]), CaseNode) and (case2_2.constant == Constant(1, Integer(32, True))) and (case2_2.break_case is True))
    assert (isinstance(case2_1.child, CodeNode) and (case2_1.child.instructions == vertices[3].instructions))
    assert (isinstance(case2_2.child, CodeNode) and (case2_2.child.instructions == (vertices[5].instructions + [new_assignment])))
    # Final return fires for out-of-range input and the non-looping weekdays.
    assert (return_cond.condition.is_disjunction and (len((operands := return_cond.condition.operands)) == 5))
    assert ({task._ast.condition_map[cond] for cond in operands} == ({Condition(OperationType.equal, [var_0, Constant(const, Integer(32))]) for const in {0, 4, 5, 6}} | {Condition(OperationType.greater_us, [var_0, Constant(7, Integer(32, True))])}))
    assert ((return_cond.false_branch is None) and isinstance(return_cond.true_branch_child, CodeNode))
    assert (return_cond.true_branch_child.instructions == vertices[(- 1)].instructions)
@pytest.mark.parametrize('arg_type', [List[str], ZipFile, Optional[ZipFile], Union[(ZipFile, str, Path)], CustomGeneric, CustomGeneric[int], CustomGeneric[str], Optional[CustomGeneric]])
def test_static_deserialize_types_custom_deserialize(arg_type):
    """Round-trip an ArgparseArg through static JSON with custom converters.

    NOTE(review): the original decorator line read `.parametrize(...)` with
    the `@pytest.mark` prefix missing (a syntax error, presumably an
    extraction artifact); restored here.
    """
    split_string = get_list_converter(str)
    def convert_zipfile(value: str) -> ZipFile:
        return ZipFile(value, 'r')
    def convert_generic(value: str) -> str:
        return f'generic: {value}'
    converters = {ZipFile: convert_zipfile, List[str]: split_string, CustomGeneric: convert_generic, CustomGeneric[str]: str}
    get_converter = (lambda v: converters.get(v))
    # With converters supplied, the full type information survives the round trip.
    arg = get_arg('test', Arg('--test'), arg_type, orig_type=arg_type, get_converter=get_converter)
    arg_json = arg.to_static_json()
    new_arg = ArgparseArg.from_static_json(arg_json, converters=converters)
    assert (new_arg.type == arg.type)
    assert (new_arg.orig_type == stringify_type(arg.orig_type))
    assert (new_arg.has_converter == arg.has_converter)
    assert (new_arg.action == arg.action)
    # Without converters, the deserialized type degrades to plain str.
    arg = get_arg('test', Arg('--test'), arg_type, orig_type=arg_type, get_converter=get_converter)
    arg_json = arg.to_static_json()
    new_arg = ArgparseArg.from_static_json(arg_json)
    assert (new_arg.type is str)
    assert (new_arg.orig_type == stringify_type(arg.orig_type))
('', doc={'description': ''})
class RestFirmwareGetWithoutUid(RestResourceBase):
URL = '/rest/firmware'
_accepted(*PRIVILEGES['view_analysis'])
(responses={200: 'Success', 400: 'Unknown file object'}, params={'offset': {'description': 'offset of results (paging)', 'in': 'query', 'type': 'int'}, 'limit': {'description': 'number of results (paging)', 'in': 'query', 'type': 'int'}, 'query': {'description': 'MongoDB style query', 'in': 'query', 'type': 'dict'}, 'recursive': {'description': 'Query for parent firmware of matching objects (requires query)', 'in': 'query', 'type': 'boolean', 'default': 'false'}, 'inverted': {'description': 'Query for parent firmware that does not include the matching objects (Requires query and recursive)', 'in': 'query', 'type': 'boolean', 'default': 'false'}})
def get(self):
try:
(query, recursive, inverted, offset, limit) = self._get_parameters_from_request(request.args)
except ValueError as value_error:
request_data = {k: request.args.get(k) for k in ['query', 'limit', 'offset', 'recursive', 'inverted']}
return error_message(str(value_error), self.URL, request_data=request_data)
parameters = {'offset': offset, 'limit': limit, 'query': query, 'recursive': recursive, 'inverted': inverted}
try:
uids = self.db.frontend.rest_get_firmware_uids(**parameters)
return success_message({'uids': uids}, self.URL, parameters)
except DbInterfaceError:
return error_message('Unknown exception on request', self.URL, parameters)
def _get_parameters_from_request(request_parameters):
query = get_query(request_parameters)
recursive = get_boolean_from_request(request_parameters, 'recursive')
inverted = get_boolean_from_request(request_parameters, 'inverted')
(offset, limit) = get_paging(request.args)
if (recursive and (not query)):
raise ValueError('Recursive search is only permissible with non-empty query')
if (inverted and (not recursive)):
raise ValueError('Inverted flag can only be used with recursive')
return (query, recursive, inverted, offset, limit)
_accepted(*PRIVILEGES['submit_analysis'])
(firmware_model)
def put(self):
try:
data = self.validate_payload_data(firmware_model)
except MarshallingError as error:
logging.error(f'REST|firmware|PUT: Error in payload data: {error}')
return error_message(str(error), self.URL)
available_plugins = (set(self.intercom.get_available_analysis_plugins()) - {'unpacker'})
unavailable_plugins = (set(data['requested_analysis_systems']) - available_plugins)
if unavailable_plugins:
return error_message(f"The requested analysis plugins are not available: {', '.join(unavailable_plugins)}", self.URL, request_data={k: v for (k, v) in data.items() if (k != 'binary')})
result = self._process_data(data)
if ('error_message' in result):
logging.warning('Submission not according to API guidelines! (data could not be parsed)')
return error_message(result['error_message'], self.URL, request_data=data)
logging.debug('Upload Successful!')
return success_message(result, self.URL, request_data=data)
def _process_data(self, data):
try:
data['binary'] = standard_b64decode(data['binary'])
except binascii.Error:
return {'error_message': 'Could not parse binary (must be valid base64!)'}
firmware_object = convert_analysis_task_to_fw_obj(data)
self.intercom.add_analysis_task(firmware_object)
data.pop('binary')
return {'uid': firmware_object.uid} |
('globals')
def direct_globals(node):
    """Count the distinct names declared via `global` statements inside *node*."""
    assert isinstance(node, (Function_Definition, Script_File))

    class _GlobalCollector(AST_Visitor):
        def __init__(self):
            self.names = set()

        def visit(self, node, n_parent, relation):
            if isinstance(node, Global_Statement):
                for n_ident in node.l_names:
                    self.names.add(n_ident.t_ident.value)

    collector = _GlobalCollector()
    root = node.n_body if isinstance(node, Function_Definition) else node.n_statements
    root.visit(None, collector, 'Root')
    return len(collector.names)
class CertStreamThread(Thread):
    """Consume the Certstream firehose and queue candidate bucket hostnames.

    Domains from each certificate-update event are permuted and pushed onto
    the shared queue as ``BUCKET_HOST % permutation``.
    """

    def __init__(self, q, *args, **kwargs):
        self.q = q  # shared queue receiving candidate bucket hostnames
        # process() is the message callback; heartbeats are skipped by the
        # client (and filtered again defensively in process()).
        self.c = CertStreamClient(self.process, skip_heartbeats=True, on_open=None, on_error=None)
        super().__init__(*args, **kwargs)

    def run(self):
        global THREAD_EVENT
        # Reconnect loop: run_forever() returns on disconnect; wait up to 10s
        # and retry until the module-level THREAD_EVENT is set.
        while (not THREAD_EVENT.is_set()):
            cprint('Waiting for Certstream events - this could take a few minutes to queue up...', 'yellow', attrs=['bold'])
            self.c.run_forever()
            THREAD_EVENT.wait(10)

    def process(self, message, context):
        """Certstream callback: derive bucket-name permutations from new certs."""
        if (message['message_type'] == 'heartbeat'):
            return
        if (message['message_type'] == 'certificate_update'):
            all_domains = message['data']['leaf_cert']['all_domains']
            # Optionally ignore Let's Encrypt-issued certs (very high volume).
            if (ARGS.skip_lets_encrypt and ("Let's Encrypt" in message['data']['chain'][0]['subject']['aggregated'])):
                return
            for domain in set(all_domains):
                # Skip wildcards, punycode, and names with many dashes/dots
                # before generating permutations.
                if ((not domain.startswith('*.')) and ('cloudflaressl' not in domain) and ('xn--' not in domain) and (domain.count('-') < 4) and (domain.count('.') < 4)):
                    parts = tldextract.extract(domain)
                    for permutation in get_permutations(parts.domain, parts.subdomain):
                        self.q.put((BUCKET_HOST % permutation))
.integration
class TestGetServerResources():
    """Integration tests for _api_helpers.get_server_resources.

    NOTE(review): the bare `.integration` / `.parametrize` lines look like
    pytest mark decorators with the `@pytest.mark` prefix stripped — confirm
    against the original file.
    """

    .integration
    .parametrize('created_resources', PARAM_MODEL_LIST, indirect=['created_resources'])
    def test_get_server_resources_found_resources(self, test_config: FidesConfig, created_resources: List) -> None:
        """Resources created on the server are all returned when queried by key."""
        resource_type = created_resources[0]
        resource_keys = created_resources[1]
        result: List[FidesModel] = _api_helpers.get_server_resources(url=test_config.cli.server_url, resource_type=resource_type, existing_keys=resource_keys, headers=test_config.user.auth_header)
        print(result)
        # Order is not guaranteed; compare as sets of fides keys.
        assert (set(resource_keys) == set((resource.fides_key for resource in result)))

    .parametrize('resource_type', PARAM_MODEL_LIST)
    def test_get_server_resources_missing_resources(self, test_config: FidesConfig, resource_type: str) -> None:
        """Unknown (random UUID) keys yield an empty result list."""
        resource_keys = [str(uuid.uuid4())]
        result: List[FidesModel] = _api_helpers.get_server_resources(url=test_config.cli.server_url, resource_type=resource_type, existing_keys=resource_keys, headers=test_config.user.auth_header)
        assert (result == [])
def test_deepcopy(aggregate):
    """providers.deepcopy clones the aggregate and its children, keeping each child's cls."""
    clone = providers.deepcopy(aggregate)
    assert clone is not aggregate
    assert isinstance(clone, type(aggregate))
    for attr in ('example_a', 'example_b'):
        original_child = getattr(aggregate, attr)
        cloned_child = getattr(clone, attr)
        assert original_child is not cloned_child
        assert isinstance(original_child, type(cloned_child))
        # The provided class itself must be shared, not copied.
        assert original_child.cls is cloned_child.cls
class Action(object):
    """A synchronisation action `name!(params)` (send) or `name?(params)` (receive).

    Parameters are Value/Variable terms; two conjugated actions can be unified
    with `&` to produce a Substitution.
    """

    def __init__(self, name, send, params):
        self.name = name  # channel/action name
        self.send = send  # True => send (!), False => receive (?)
        self.params = list(params)

    __pnmltag__ = 'action'  # tag used for PNML (de)serialisation

    def __pnmldump__(self):
        """Serialise to a PNML Tree with one child per parameter."""
        result = Tree(self.__pnmltag__, None, name=self.name, send=str(self.send))
        for param in self.params:
            result.add_child(Tree.from_obj(param))
        return result

    def __pnmlload__(cls, tree):
        """Rebuild an Action from a PNML Tree (inverse of __pnmldump__).

        NOTE(review): takes `cls` but no @classmethod is visible — the
        decorator was likely stripped; confirm against the original file.
        """
        params = [Tree.to_obj(child) for child in tree.children]
        return cls(tree['name'], (tree['send'] == 'True'), params)

    def __str__(self):
        # 'name!(p1,p2)' for sends, 'name?(p1,p2)' for receives.
        if self.send:
            return ('%s!(%s)' % (self.name, ','.join([str(p) for p in self])))
        else:
            return ('%s?(%s)' % (self.name, ','.join([str(p) for p in self])))

    def __repr__(self):
        return ('%s(%s, %s, [%s])' % (self.__class__.__name__, repr(self.name), str(self.send), ', '.join([repr(p) for p in self])))

    def __len__(self):
        """Number of parameters."""
        return len(self.params)

    def __iter__(self):
        """Iterate over the parameters."""
        for action in self.params:
            (yield action)

    def __eq__(self, other):
        """Structural equality: same name, direction, and pairwise-equal params.

        NOTE(review): assumes `other` has name/send/params attributes; comparing
        against a non-Action raises AttributeError rather than returning
        NotImplemented — confirm this is intended.
        """
        if (self.name != other.name):
            return False
        elif (self.send != other.send):
            return False
        elif (len(self.params) != len(other.params)):
            return False
        for (p, q) in zip(self.params, other.params):
            if (p != q):
                return False
        return True

    def __ne__(self, other):
        return (not (self == other))

    def copy(self, subst=None):
        """Deep-copy the action; optionally apply a substitution to the copy."""
        result = self.__class__(self.name, self.send, [p.copy() for p in self.params])
        if (subst is not None):
            result.substitute(subst)
        return result

    def substitute(self, subst):
        """In place, replace each Variable parameter bound in `subst` by its image."""
        for (i, p) in enumerate(self.params):
            if (isinstance(p, Variable) and (p.name in subst)):
                self.params[i] = subst(p.name)

    def vars(self):
        """Names of the Variable parameters, as a set."""
        return set((p.name for p in self.params if isinstance(p, Variable)))

    def __and__(self, other):
        """Unify two conjugated actions (same name, opposite direction).

        Returns the Substitution making the parameter lists equal; raises
        ConstraintError when the actions cannot synchronise.
        """
        if ((self.name != other.name) or (self.send == other.send)):
            raise ConstraintError('actions not conjugated')
        elif (len(self) != len(other)):
            raise ConstraintError('arities do not match')
        result = Substitution()
        for (x, y) in zip(self.params, other.params):
            # Normalise both sides through the bindings accumulated so far.
            if (isinstance(x, Variable) and (x.name in result)):
                x = result(x.name)
            if (isinstance(y, Variable) and (y.name in result)):
                y = result(y.name)
            if (isinstance(x, Value) and isinstance(y, Value)):
                if (x.value != y.value):
                    raise ConstraintError('incompatible values')
            elif (isinstance(x, Variable) and isinstance(y, Value)):
                result += Substitution({x.name: y.copy()})
            elif (isinstance(x, Value) and isinstance(y, Variable)):
                result += Substitution({y.name: x.copy()})
            elif (isinstance(x, Variable) and isinstance(y, Variable)):
                if (x.name != y.name):
                    result += Substitution({x.name: y.copy()})
            else:
                raise ConstraintError('unexpected action parameter')
        return result
class OptionPlotoptionsVariablepieSonificationTracksMappingHighpassResonance(Options):
    """Generated option proxy for
    plotOptions.variablepie.sonification.tracks.mapping.highpassResonance.

    NOTE(review): each option appears as two defs with the same name (getter
    then setter); the @property / @<name>.setter decorators appear to have
    been stripped — as written the second def shadows the first. Confirm
    against the code generator's output.
    """

    def mapFunction(self):
        # Getter: returns None when the option is unset.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: stored as a plain value (js_type=False).
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
_util.copy_func_kwargs(BillingOptions)
def on_plan_update_published(**kwargs) -> _typing.Callable[([OnPlanUpdatePublishedCallable], OnPlanUpdatePublishedCallable)]:
    """Decorator factory: register a handler for Firebase billing plan-update alerts.

    kwargs are forwarded to BillingOptions. NOTE(review): the bare
    `_util.copy_func_kwargs(...)` / `_functools.wraps(func)` lines look like
    decorators with the `@` stripped — confirm against the original file.
    """
    options = BillingOptions(**kwargs)

    def on_plan_update_published_inner_decorator(func: OnPlanUpdatePublishedCallable):
        _functools.wraps(func)
        def on_plan_update_published_wrapped(raw: _ce.CloudEvent):
            # Deferred import avoids a circular dependency at module load time.
            from firebase_functions.private._alerts_fn import billing_event_from_ce
            func(billing_event_from_ce(raw))
        # Attach the deployment endpoint metadata the framework reads off the function.
        _util.set_func_endpoint_attr(on_plan_update_published_wrapped, options._endpoint(func_name=func.__name__, alert_type='billing.planUpdate'))
        return on_plan_update_published_wrapped
    return on_plan_update_published_inner_decorator
def get_intermediate_tasks(task1, task2):
    """Return the tasks between task1 and task2 (inclusive of task2), in order.

    Walks task2's ancestor chain backwards until task1 is met; returns [] when
    task1 is not an ancestor of task2.
    """
    chain = task2.parents + [task2]
    collected = []
    for candidate in reversed(chain):
        if candidate == task1:
            # task1 found: everything gathered so far lies between the two.
            collected.reverse()
            return collected
        collected.append(candidate)
    return []
class OptionPlotoptionsAreasplineSonificationTracksMappingTremoloSpeed(Options):
    """Generated option proxy for
    plotOptions.areaspline.sonification.tracks.mapping.tremoloSpeed.

    NOTE(review): duplicate defs per name suggest stripped @property /
    @<name>.setter decorators — as written the setter shadows the getter.
    """

    def mapFunction(self):
        # Getter: returns None when unset.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class KiwoomOpenApiPlusEagerSomeEventHandler(KiwoomOpenApiPlusEagerAllEventHandler):
    """Eager event handler limited to the slot names listed in the gRPC request."""

    def __init__(self, control, request, context):
        super().__init__(control, context)
        self._request = request

    def slots(self):
        """Return (name, slot) pairs that are both implemented and requested."""
        requested = self._request.slots
        selected = []
        for name in self.names():
            slot = getattr(self, name)
            if isimplemented(slot) and name in requested:
                selected.append((name, slot))
        return selected
()
def pathy_fixture():
    """Pytest fixture: a Pathy 'gs://test-bucket' root backed by a local temp folder.

    NOTE(review): the bare `()` above looks like a stripped @pytest.fixture
    decorator — confirm against the original file.
    """
    pytest.importorskip('pathy')
    import shutil
    import tempfile
    from pathy import Pathy, use_fs
    temp_folder = tempfile.mkdtemp(prefix='thinc-pathy')
    use_fs(temp_folder)  # route gs:// operations to the local filesystem
    root = Pathy('gs://test-bucket')
    root.mkdir(exist_ok=True)
    (yield root)
    # Teardown: disable the fs shim and remove the temp folder.
    use_fs(False)
    shutil.rmtree(temp_folder)
def is_synced(integration: str, integration_item_code: str, variant_id: Optional[str]=None, sku: Optional[str]=None) -> bool:
    """Check whether an 'Ecommerce Item' record exists for the given integration item.

    Falls back to an SKU-based lookup when no record is found and an SKU is given.
    """
    filters = {'integration': integration, 'integration_item_code': integration_item_code}
    if variant_id:
        filters['variant_id'] = variant_id
    if bool(frappe.db.exists('Ecommerce Item', filters)):
        return True
    if sku:
        return _is_sku_synced(integration, sku)
    return False
class S3FileAdmin(BaseFileAdmin):
    """File admin view whose backing storage is an Amazon S3 bucket."""

    def __init__(self, bucket_name, region, aws_access_key_id, aws_secret_access_key, *args, **kwargs):
        # Build the S3 storage backend, then delegate everything else upward.
        s3_storage = S3Storage(bucket_name, region, aws_access_key_id, aws_secret_access_key)
        super(S3FileAdmin, self).__init__(*args, storage=s3_storage, **kwargs)
def eye(n: int, m: (int | None)=None, k: int=0) -> Matrix:
    """Build an n-by-m matrix of 0.0/1.0 with ones on the k-th diagonal.

    m defaults to n; positive k shifts the diagonal right, negative k down.
    """
    if m is None:
        m = n
    # Length of the shifted diagonal that actually fits inside the matrix.
    diag_len = m if (n > m and k < 0) else m - abs(k)
    rows = []
    for r in range(n):
        col = r + k
        diag_idx = r if k >= 0 else col
        hit = int(0 <= diag_idx < diag_len)
        left = [0.0] * clamp(col, 0, m)
        right = [0.0] * clamp(m - col - hit, 0, m)
        rows.append(left + [1.0] * hit + right)
    return rows
def run(cls, job, eval=True):
    """Drive a learner through a job and check its output against ground truth.

    cls: estimator case handed to the harness helpers (est, Data, _run).
    job: one of 'fit', 'transform', 'predict'.
    eval: when False, skip assertions and return the fitted learner.
    NOTE(review): relies on module-level helpers (est, Data, _run, np) not
    visible in this chunk.
    """
    if (job == 'fit'):
        (lr, _) = est.get_learner(cls, True, False)
        lr.dtype = np.float64
    else:
        # transform/predict reuse a freshly fitted learner (recursive call).
        lr = run(cls, 'fit', False)
    data = Data(cls, True, False, True)
    (X, y) = data.get_data((25, 4), 3)
    # Ground truth differs between in-sample (fit/transform) and
    # out-of-sample (predict) jobs.
    if (job in ['fit', 'transform']):
        ((F, wf), _) = data.ground_truth(X, y, data.indexer.partitions)
    else:
        (_, (F, wf)) = data.ground_truth(X, y, data.indexer.partitions)
    args = {'fit': [X, y], 'transform': [X], 'predict': [X]}[job]
    P = _run(lr, job, *args, return_preds=True)
    if (not eval):
        return lr
    np.testing.assert_array_equal(P, F)
    # Fit/transform expose per-partition sublearners; predict a single learner.
    if (job in ['fit', 'transform']):
        lrs = lr.sublearners
    else:
        lrs = lr.learner
    w = [obj.estimator.coef_ for obj in lrs]
    np.testing.assert_array_equal(w, wf)
class ScheduleBByRecipientID(BaseDisbursementAggregate):
    """Schedule B disbursements aggregated by recipient committee ID."""

    __table_args__ = {'schema': 'disclosure'}
    __tablename__ = 'dsc_sched_b_aggregate_recipient_id_new'

    # Backed by the recipient_cmte_id column in the database.
    recipient_id = db.Column('recipient_cmte_id', db.String, primary_key=True, doc=docs.RECIPIENT_ID)
    committee = utils.related_committee('committee_id')
    recipient = utils.related('CommitteeHistory', 'recipient_id', 'committee_id', cycle_label='cycle')

    def committee_name(self):
        # NOTE(review): likely a stripped @property (callers would read this as
        # an attribute) — confirm against the original file.
        return self.committee.name

    def recipient_name(self):
        return self.recipient.name
def test_adding_load_balancer_source_ranges():
    """loadBalancerSourceRanges values end up on the rendered Service spec."""
    config = '\nservice:\n loadBalancerSourceRanges:\n - 0.0.0.0/0\n '
    spec = helm_template(config)['service'][uname]['spec']
    assert spec['loadBalancerSourceRanges'][0] == '0.0.0.0/0'
    # Multiple ranges keep their order.
    config = '\nservice:\n loadBalancerSourceRanges:\n - 192.168.0.0/24\n - 192.168.1.0/24\n '
    spec = helm_template(config)['service'][uname]['spec']
    source_ranges = spec['loadBalancerSourceRanges']
    assert source_ranges[0] == '192.168.0.0/24'
    assert source_ranges[1] == '192.168.1.0/24'
.parametrize(('input_data', 'expected_output'), [({}, False), ({'arch': {}}, False), ({'arch': {'option': {}}}, False), ({'arch': {'error': 'foo'}}, False), ({'arch': {'option': {'error': 'foo'}}}, False), ({'arch': {'option': {'stdout': 'foo', 'stderr': '', 'return_code': '0'}}}, True)])
def test_valid_execution_in_results(input_data, expected_output):
    """_valid_execution_in_results is True only for a fully-formed result dict
    (arch -> option -> stdout/stderr/return_code, no error keys)."""
    assert (qemu_exec._valid_execution_in_results(input_data) == expected_output)
def integration_test() -> int:
    """Run ptr against itself as an integration test; non-zero means failure.

    The return value combines the subprocess return code with the result of
    validating the emitted stats file.
    """
    print('Running `ptr` integration tests (aka run itself)', file=sys.stderr)
    stats_file = Path(gettempdir()) / 'ptr_ci_stats'
    cmd = ['python', 'ptr.py', '-d', '--print-cov', '--run-disabled', '--error-on-warnings', '--stats-file', str(stats_file)]
    if 'VIRTUAL_ENV' in environ:
        # Reuse the CI virtualenv rather than creating a fresh one.
        cmd += ['--venv', environ['VIRTUAL_ENV']]
    completed = run(cmd, check=True)
    return completed.returncode + check_ptr_stats_json(stats_file)
class RecordMarkerCommandAttributes(betterproto.Message):
    """Protobuf message: attributes of a RecordMarker command."""

    marker_name: str = betterproto.string_field(1)
    # Marker payloads keyed by detail name.
    details: Dict[(str, v1common.Payloads)] = betterproto.map_field(2, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE)
    header: v1common.Header = betterproto.message_field(3)
    failure: v1failure.Failure = betterproto.message_field(4)
def run():
    """Emit a Verilog top module instantiating an XADC with randomized DWE and
    DI[14] bits, and record which tile got which value via write_params."""
    print('\nmodule top();\n ')
    sites = list(gen_sites())
    assert (len(sites) == 1)
    (tile_name, int_tiles) = sites[0]
    # Randomize the two bits under test.
    DWE = random.randint(0, 1)
    DI14 = random.randint(0, 1)
    params = {}
    params[int_tiles[0]] = DWE
    params[int_tiles[1]] = DI14
    print('\n wire [15:0] di;\n wire dwe;\n wire convst;\n\n assign convst = 0;\n assign dwe = {DWE};\n\n assign di[15] = 0;\n assign di[14] = {DI14};\n\n (* KEEP, DONT_TOUCH *)\n XADC xadc (\n .DI(di),\n .DWE(dwe),\n .CONVST(convst)\n );\n '.format(DWE=DWE, DI14=DI14))
    print('endmodule')
    write_params(params)
def delete_internal_urls(sess, urls, chunk_size=1000, pbar=True):
    """Delete the given URLs from web_pages and web_pages_version in chunks,
    retrying each chunk indefinitely on transient DB errors.

    NOTE(review): the `pbar` parameter is immediately shadowed by the tqdm bar
    below, so its value is never used — confirm intent.
    """
    pbar = tqdm.tqdm(range(0, len(urls), chunk_size), position=1)
    for chunk_idx in pbar:
        chunk = urls[chunk_idx:(chunk_idx + chunk_size)]
        # Retry loop: broken out of only after a successful commit.
        while 1:
            try:
                ctbl = version_table(db.WebPages.__table__)  # NOTE(review): unused below
                pbar.write(("Example removed URL: '%s'" % (chunk[0],)))
                # DELETE ... RETURNING wrapped in a CTE so we get an affected-row count.
                affected_rows_main = sess.execute('\n\t\t\t\t\tWITH deleted AS (\n\t\t\t\t\t\tDELETE FROM\n\t\t\t\t\t\t\tweb_pages\n\t\t\t\t\t\tWHERE\n\t\t\t\t\t\t\turl IN :urls\n\t\t\t\t\t\tRETURNING\n\t\t\t\t\t\t\tid\n\t\t\t\t\t\t)\n\t\t\t\t\tSELECT\n\t\t\t\t\t\tcount(*)\n\t\t\t\t\tFROM\n\t\t\t\t\t\tdeleted;\n\t\t\t\t\t', {'urls': tuple(chunk)})
                affected_rows_ver = sess.execute('\n\t\t\t\t\tWITH deleted AS (\n\t\t\t\t\t\tDELETE FROM\n\t\t\t\t\t\t\tweb_pages_version\n\t\t\t\t\t\tWHERE\n\t\t\t\t\t\t\turl IN :urls\n\t\t\t\t\t\tRETURNING\n\t\t\t\t\t\t\tid\n\t\t\t\t\t\t)\n\t\t\t\t\tSELECT\n\t\t\t\t\t\tcount(*)\n\t\t\t\t\tFROM\n\t\t\t\t\t\tdeleted;\n\t\t\t\t\t', {'urls': tuple(chunk)})
                affected_rows_main = list(affected_rows_main)[0][0]
                affected_rows_ver = list(affected_rows_ver)[0][0]
                sess.commit()
                pbar.set_description(('Deleted %s rows (%s version table rows). %0.2f%% done.' % (affected_rows_main, affected_rows_ver, (100 * (chunk_idx / len(urls))))))
                break
            except sqlalchemy.exc.InternalError:
                pbar.write('Transaction error (sqlalchemy.exc.InternalError). Retrying.')
                sess.rollback()
            except sqlalchemy.exc.OperationalError:
                pbar.write('Transaction error (sqlalchemy.exc.OperationalError). Retrying.')
                traceback.print_exc()
                sess.rollback()
            except sqlalchemy.exc.IntegrityError:
                pbar.write('Transaction error (sqlalchemy.exc.IntegrityError). Retrying.')
                sess.rollback()
            except sqlalchemy.exc.InvalidRequestError:
                pbar.write('Transaction error (sqlalchemy.exc.InvalidRequestError). Retrying.')
                traceback.print_exc()
                sess.rollback()
def extractAquarilasScenario(item):
    """Map a feed item to a release message for 'In That Moment of Suffering'.

    Returns None for previews/unparseable titles, False for unmatched tags.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    if 'In That Moment of Suffering' in item['tags']:
        return buildReleaseMessageWithType(item, 'In That Moment of Suffering', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    return False
class HomeArrivalNotifier(hass.Hass):
    """AppDaemon app: send a notification when a tracked person arrives home.

    Watches an input_boolean; when it flips to 'on' (and the app switch is on),
    notifies via the shared 'Notifier' app.
    """

    def initialize(self):
        self.listen_state_handle_list = []  # handles so terminate() can unsubscribe
        self.app_switch = self.args['app_switch']
        self.zone_name = self.args['zone_name']
        self.input_boolean = self.args['input_boolean']
        self.notify_name = self.args['notify_name']
        self.user_name = self.args['user_name']
        self.message = self.args['message']  # format string with one {} for user_name
        self.notifier = self.get_app('Notifier')
        self.listen_state_handle_list.append(self.listen_state(self.state_change, self.input_boolean))

    def state_change(self, entity, attribute, old, new, kwargs):
        # Only act when the app is enabled and the state genuinely changed.
        if (self.get_state(self.app_switch) == 'on'):
            if ((new != '') and (new != old)):
                self.log('{} changed from {} to {}'.format(entity, old, new))
                if (new == 'on'):
                    self.log('{} arrived at {}'.format(self.notify_name, self.zone_name))
                    self.notifier.notify(self.notify_name, self.message.format(self.user_name))

    def terminate(self):
        # Unsubscribe all state listeners on app reload/stop.
        for listen_state_handle in self.listen_state_handle_list:
            self.cancel_listen_state(listen_state_handle)
class TestFnMatchEscapes(unittest.TestCase):
    """Tests for fnmatch.escape in default, forced-Windows and forced-Unix modes."""

    def check_escape(self, arg, expected, unix=None, raw_chars=True):
        """Assert escape(arg) == expected (both str and bytes forms) and that
        the escaped pattern matches the literal string under the chosen flags."""
        flags = 0
        if (unix is False):
            flags = fnmatch.FORCEWIN
        elif (unix is True):
            flags = fnmatch.FORCEUNIX
        self.assertEqual(fnmatch.escape(arg), expected)
        self.assertEqual(fnmatch.escape(os.fsencode(arg)), os.fsencode(expected))
        self.assertTrue(fnmatch.fnmatch(arg, fnmatch.escape(arg), flags=flags))

    def test_escape(self):
        """Basic metacharacter escaping, platform default."""
        check = self.check_escape
        check('abc', 'abc')
        check('[', '\\[')
        check('?', '\\?')
        check('*', '\\*')
        check('[[_/*?*/_]]', '\\[\\[_/\\*\\?\\*/_\\]\\]')
        check('/[[_/*?*/_]]/', '/\\[\\[_/\\*\\?\\*/_\\]\\]/')

    (sys.platform.startswith('win'), 'Windows specific test')
    def test_escape_windows(self):
        # NOTE(review): the bare tuple above looks like a stripped
        # @unittest.skipUnless(...) decorator — confirm against the original.
        check = self.check_escape
        check('a:\\?', 'a:\\\\\\?')
        check('b:\\*', 'b:\\\\\\*')
        check('\\\\?\\c:\\?', '?\\\\c:\\\\\\?')
        check('\\\\*\\*\\*', '*\\\\\\*\\\\\\*')
        check('//?/c:/?', '//\\?/c:/\\?')
        check('//*/*/*', '//\\*/\\*/\\*')
        check('//[^what]/name/temp', '//\\[^what\\]/name/temp')

    def test_escape_forced_windows(self):
        """Escaping with the platform forced via the unix= flag.

        NOTE(review): several checks here pass unix=True despite the
        'forced windows' name — verify against the upstream test suite.
        """
        check = self.check_escape
        check('a:\\?', 'a:\\\\\\?', unix=False)
        check('b:\\*', 'b:\\\\\\*', unix=False)
        check('\\\\?\\c:\\?', '?\\\\c:\\\\\\?', unix=True)
        check('\\\\*\\*\\*', '*\\\\\\*\\\\\\*', unix=True)
        check('//?/c:/?', '//\\?/c:/\\?', unix=True)
        check('//*/*/*', '//\\*/\\*/\\*', unix=True)
        check('//[^what]/name/temp', '//\\[^what\\]/name/temp', unix=True)

    def test_escape_forced_unix(self):
        """Escaping with Unix semantics forced."""
        check = self.check_escape
        check('a:\\?', 'a:\\\\\\?', unix=True)
        check('b:\\*', 'b:\\\\\\*', unix=True)
        check('\\\\?\\c:\\?', '?\\\\c:\\\\\\?', unix=True)
        check('\\\\*\\*\\*', '*\\\\\\*\\\\\\*', unix=True)
        check('//?/c:/?', '//\\?/c:/\\?', unix=True)
        check('//*/*/*', '//\\*/\\*/\\*', unix=True)
        check('//[^what]/name/temp', '//\\[^what\\]/name/temp', unix=True)
class RelationshipTlsSubscriptionTlsSubscription(ModelNormal):
    """Generated OpenAPI model wrapping the `tls_subscription` relationship data.

    NOTE(review): the bare `_property` / `_js_args_to_python_args` lines look
    like stripped decorators (@cached_property, @convert_js_args_to_python_args)
    from the OpenAPI generator — confirm against the generated original.
    """

    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        # Types accepted for properties not listed in attribute_map.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        # Attribute name -> (type,) mapping used for (de)serialization checks.
        lazy_import()
        return {'data': ([RelationshipMemberTlsSubscription],)}

    _property
    def discriminator():
        return None

    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Internal constructor used when deserializing API response data."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration requests it.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects positional args and read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def test_custom_analyzer_can_collect_custom_items():
    """A custom analyzer serializes to its name, and get_analysis_definition
    collects the custom tokenizer, token filter and char filter definitions."""
    trigram = analysis.tokenizer('trigram', 'nGram', min_gram=3, max_gram=3)
    my_stop = analysis.token_filter('my_stop', 'stop', stopwords=['a', 'b'])
    umlauts = analysis.char_filter('umlauts', 'pattern_replace', mappings=['u=>ue'])
    a = analysis.analyzer('my_analyzer', tokenizer=trigram, filter=['lowercase', my_stop], char_filter=['html_strip', umlauts])
    assert (a.to_dict() == 'my_analyzer')
    # FIX: the expected value is a single dict with four top-level sections;
    # the original line was syntactically broken (the dict literal was split,
    # leaving 'tokenizer': ... outside any enclosing braces).
    expected = {
        'analyzer': {'my_analyzer': {'type': 'custom', 'tokenizer': 'trigram', 'filter': ['lowercase', 'my_stop'], 'char_filter': ['html_strip', 'umlauts']}},
        'tokenizer': {'trigram': trigram.get_definition()},
        'filter': {'my_stop': my_stop.get_definition()},
        'char_filter': {'umlauts': umlauts.get_definition()},
    }
    assert (expected == a.get_analysis_definition())
class SquaredCategoricalCrossentropy(tf.keras.losses.Loss):
    """Categorical cross-entropy squared element-wise before reduction.

    residuals() exposes the plain (unsquared) cross-entropy for callers that
    need the underlying per-sample loss.
    """

    def __init__(self, from_logits=False, label_smoothing=0, reduction=tf.keras.losses.Reduction.AUTO, name='squared_categorical_crossentropy'):
        super(SquaredCategoricalCrossentropy, self).__init__(reduction=reduction, name=name)
        self.from_logits = from_logits
        self.label_smoothing = label_smoothing

    def call(self, y_true, y_pred):
        """Per-sample squared categorical cross-entropy."""
        return tf.math.square(tf.keras.losses.categorical_crossentropy(y_true, y_pred, self.from_logits, self.label_smoothing))

    def residuals(self, y_true, y_pred):
        """Per-sample (unsquared) categorical cross-entropy."""
        return tf.keras.losses.categorical_crossentropy(y_true, y_pred, self.from_logits, self.label_smoothing)

    def get_config(self):
        """Return the serialization config, merging this class's fields into the base's."""
        config = {'from_logits': self.from_logits, 'label_smoothing': self.label_smoothing}
        base_config = super(SquaredCategoricalCrossentropy, self).get_config()
        # FIX: dicts do not support `+` — `dict((base_config + config))` raised
        # TypeError. Merge the two mappings explicitly instead.
        return {**base_config, **config}
class OptionPlotoptionsAreasplineSonificationTracksMappingGapbetweennotes(Options):
    """Generated option proxy for
    plotOptions.areaspline.sonification.tracks.mapping.gapBetweenNotes.

    NOTE(review): duplicate defs per name suggest stripped @property /
    @<name>.setter decorators — as written the setter shadows the getter.
    """

    def mapFunction(self):
        # Getter: returns None when unset.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesArearangeSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Generated option proxy for
    series.arearange.sonification.defaultInstrumentOptions.mapping.tremoloSpeed.

    NOTE(review): duplicate defs per name suggest stripped @property /
    @<name>.setter decorators — as written the setter shadows the getter.
    """

    def mapFunction(self):
        # Getter: returns None when unset.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def map_ecstask_to_containerinstance(task: Dict[(str, Any)]) -> ContainerInstance:
    """Translate an ECS describe-tasks entry into a ContainerInstance.

    Only the first container of the task is inspected.
    """
    first_container = task['containers'][0]
    logging.debug(f'The ECS task response from AWS: {task}')
    interfaces = first_container['networkInterfaces']
    ip_v4 = interfaces[0].get('privateIpv4Address') if len(interfaces) > 0 else None
    last_status = first_container['lastStatus']
    if last_status == 'RUNNING':
        mapped_status = ContainerInstanceStatus.STARTED
    elif last_status == 'STOPPED':
        # A clean exit maps to COMPLETED; anything else (or no code) to FAILED.
        if first_container.get('exitCode') == 0:
            mapped_status = ContainerInstanceStatus.COMPLETED
        else:
            mapped_status = ContainerInstanceStatus.FAILED
    else:
        mapped_status = ContainerInstanceStatus.UNKNOWN
    vcpu = map_unit_to_vcpu(task['cpu']) if 'cpu' in task else None
    memory_in_gb = map_mb_to_gb(task['memory']) if 'memory' in task else None
    permission = None
    overrides = task.get('overrides')
    if overrides:
        task_role_arn = overrides.get('taskRoleArn')
        if task_role_arn:
            permission = ContainerPermissionConfig(task_role_arn)
    return ContainerInstance(instance_id=task['taskArn'], ip_address=ip_v4, status=mapped_status, cpu=vcpu, memory=memory_in_gb, exit_code=first_container.get('exitCode'), permission=permission)
(python=python_versions)
def tests(session: Session) -> None:
    """Nox session: install the package plus test deps and run the invoke `tests` task.

    NOTE(review): `(python=python_versions)` looks like a stripped
    @nox.session(...) decorator — confirm against the original file.
    """
    session.install('.')
    session.install('invoke', 'pytest', 'xdoctest', 'coverage[toml]', 'pytest-cov')
    try:
        # Per-platform/per-version coverage file so parallel runs don't collide.
        session.run('inv', 'tests', env={'COVERAGE_FILE': f'.coverage.{platform.system()}.{platform.python_version()}'})
    finally:
        # Queue the coverage session in interactive runs even if tests failed.
        if session.interactive:
            session.notify('coverage')
def parse_lanes(lane_expr):
    """Parse a lane expression like '0,2-4,7' into a sorted list of ints.

    Each comma-separated token is either 'a-b' (inclusive range) or a single
    integer; a token that fails range parsing falls back to int() directly
    (so a leading '-' still parses as a negative number).
    """
    lanes = []
    for token in lane_expr.split(','):
        try:
            dash = token.index('-')
            start = int(token[:dash])
            stop = int(token[(dash + 1):])
            lanes.extend(range(start, stop + 1))
        except ValueError:
            # No dash, or range parsing failed: treat as one plain integer.
            lanes.append(int(token))
    lanes.sort()
    return lanes
class TaskWindowToggleAction(Action):
    """Toggle-style action whose label tracks a TaskWindow's active task.

    The checked state mirrors whether the window is activated.
    """

    # Label follows the active task's name via the trait Property below.
    name = Property(Str, observe='window.active_task.name')
    style = 'toggle'
    window = Instance('envisage.ui.tasks.task_window.TaskWindow')

    def perform(self, event=None):
        """Bring the associated window to the front."""
        if self.window:
            self.window.activate()

    def _get_name(self):
        # Property getter for `name`; empty string when no task is active.
        if self.window.active_task:
            return self.window.active_task.name
        return ''

    _trait_change('window:activated')
    def _window_activated(self):
        # NOTE(review): `_trait_change(...)` lines look like stripped
        # @on_trait_change/@observe decorators — confirm against the original.
        self.checked = True

    _trait_change('window:deactivated')
    def _window_deactivated(self):
        self.checked = False
class GetInstanceArgs(object):
    """Parse a function-def AST's parameters into (name, kind) pairs and
    dispatch call values into per-kind bucket dicts.

    The annotation on each parameter selects the bucket (`arg`, `buffer`,
    `net`, `task`); unannotated parameters default to `arg`.
    """

    def __init__(self, node):
        # node: an ast.FunctionDef-like node (has .args.args / .args.kwonlyargs).
        self.argspec = []  # list of (param_name, bucket_kind)
        self.arg = {}
        self.buffer = {}
        self.net = {}
        self.task = {}
        seen = set()
        for a in (node.args.args + node.args.kwonlyargs):
            if (a.arg in seen):
                # NOTE(review): self._raise is not defined in this class —
                # presumably supplied by a mixin/subclass; confirm.
                self._raise(CompilationError, ('duplicate argument %r' % a.arg))
            seen.add(a.arg)
            if (a.annotation is None):
                self.argspec.append((a.arg, 'arg'))
            else:
                # The annotation must be a bare name matching a bucket dict.
                self.argspec.append((a.arg, a.annotation.id))

    def __call__(self, *args):
        """Distribute positional call values into the bucket dicts and return them."""
        self.arg.clear()
        self.buffer.clear()
        self.net.clear()
        self.task.clear()
        for ((name, kind), value) in zip(self.argspec, args):
            getattr(self, kind)[name] = value
        return (self.arg, self.buffer, self.net, self.task)
def extractAriandeltlWordpressCom(item):
    """Map a feed item to a release message based on its tags.

    Returns None for previews/unparseable titles, False for unmatched tags.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tag_table = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tagname, name, tl_type in tag_table:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
_routes.route('/user-details/get-user-id', methods=['GET'])
_required
def get_user_id():
    """Return the user id ('identity' claim) encoded in the request's Bearer JWT.

    NOTE(review): the bare `_routes.route(...)` / `_required` lines look like
    stripped decorators (@<blueprint>.route, @token_required) — confirm.
    """
    token = None
    if ('Authorization' in request.headers):
        # Expect an "Authorization: Bearer <token>" header.
        token = request.headers['Authorization'].split(' ')[1]
    if (not token):
        return ({'message': 'Authentication Token is missing!', 'data': None, 'error': 'Unauthorized'}, 401)
    try:
        data = jwt.decode(token, current_app.config['SECRET_KEY'], algorithms=['HS256'])
        if (not data.get('identity', False)):
            return ({'message': "Can't get user id!", 'data': None}, 404)
        return ({'user_id': data['identity']}, 200)
    except UnicodeDecodeError:
        # NOTE(review): jwt.decode typically raises jwt.InvalidTokenError
        # subclasses, which this handler does not catch — confirm intended coverage.
        return ({'message': "Can't get user id!", 'data': None}, 500)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.