code stringlengths 281 23.7M |
|---|
class WindowEventFilter(QtCore.QObject):
    """Qt event filter that mirrors widget events onto a wrapped window object.

    Installed on a window's top-level control; forwards close, activation,
    visibility, geometry, key-press and window-state events to the
    corresponding attributes of the wrapped window.
    """

    def __init__(self, window):
        QtCore.QObject.__init__(self)
        # Weak reference: the filter must not keep the window alive.
        self._window = weakref.ref(window)

    def eventFilter(self, obj, e):
        """Mirror events from the window's control onto the window.

        Returns True (event consumed) only for Close events; everything
        else is observed and passed on to Qt (returns False).
        """
        window = self._window()
        # Ignore events once the window is gone, or events from other widgets.
        if window is None or obj is not window.control:
            return False
        typ = e.type()
        if typ == QtCore.QEvent.Type.Close:
            # Do not destroy the window from inside its own event handler;
            # schedule the close on the event loop instead.
            GUI.invoke_later(window.close)
            if window.control is not None:
                # Control still alive right now: veto Qt's immediate close.
                e.ignore()
            return True
        if typ == QtCore.QEvent.Type.WindowActivate:
            window.activated = window
        elif typ == QtCore.QEvent.Type.WindowDeactivate:
            window.deactivated = window
        elif typ in {QtCore.QEvent.Type.Show, QtCore.QEvent.Type.Hide}:
            window.visible = window.control.isVisible()
        elif typ == QtCore.QEvent.Type.Resize:
            size = e.size()
            window._size = (size.width(), size.height())
        elif typ == QtCore.QEvent.Type.Move:
            pos = window.control.pos()
            window._position = (pos.x(), pos.y())
        elif typ == QtCore.QEvent.Type.KeyPress:
            kstr = e.text()
            try:
                # BUG FIX: was a bare `except:` around `ord(str(kstr))`; ord()
                # only raises TypeError here (empty or multi-char text), so
                # catch exactly that instead of swallowing everything.
                kcode = ord(kstr)
            except TypeError:
                kcode = 0
            mods = e.modifiers()
            window.key_pressed = KeyPressedEvent(
                alt_down=(mods & QtCore.Qt.KeyboardModifier.AltModifier) == QtCore.Qt.KeyboardModifier.AltModifier,
                control_down=(mods & QtCore.Qt.KeyboardModifier.ControlModifier) == QtCore.Qt.KeyboardModifier.ControlModifier,
                shift_down=(mods & QtCore.Qt.KeyboardModifier.ShiftModifier) == QtCore.Qt.KeyboardModifier.ShiftModifier,
                key_code=kcode,
                event=e,
            )
        elif typ == QtCore.QEvent.Type.WindowStateChange:
            state = obj.windowState()
            if state & QtCore.Qt.WindowState.WindowMaximized:
                window.size_state = 'maximized'
            else:
                window.size_state = 'normal'
        return False
# NOTE(review): this line originally read "('CupyOps')" — it looks like the
# residue of a stripped class decorator (thinc registers ops backends by
# name). Restore the original decorator when merging — TODO confirm.
class CupyOps(Ops):
    """thinc Ops backend executing on the GPU via CuPy.

    Most activations and reductions dispatch to hand-written CUDA kernels in
    `_custom_kernels` when dtypes are supported (float32/float64 data,
    int32 lengths/indices/which) and fall back to the generic base-class
    implementation otherwise.
    """
    name = 'cupy'
    xp = cupy
    _xp2 = cupyx

    def __init__(self, device_type: DeviceTypes='gpu', device_id: int=0, **kwargs) -> None:
        self.device_type = device_type
        self.device_id = device_id

    def to_numpy(self, data, *, byte_order=None):
        """Copy an array to host memory, optionally converting byte order."""
        if not isinstance(data, numpy.ndarray):
            data = data.get()  # device -> host copy
        if byte_order:
            dtype = data.dtype.newbyteorder(byte_order)
            data = numpy.asarray(data, dtype=dtype)
        return data

    def gather_add(self, table, indices):
        if table.dtype in ('float32', 'float64'):
            return _custom_kernels.gather_add(table, indices)
        else:
            return super().gather_add(table, indices)

    def dish(self, X, inplace=False):
        if X.dtype in ('float32', 'float64'):
            return _custom_kernels.dish(X, inplace=inplace)
        else:
            return super().dish(X, inplace=inplace)

    def backprop_dish(self, dY, X, inplace=False):
        if X.dtype == dY.dtype and X.dtype in ('float32', 'float64'):
            return _custom_kernels.backprop_dish(dY, X, inplace=inplace)
        else:
            return super().backprop_dish(dY, X, inplace=inplace)

    def gelu(self, X, inplace=False):
        if X.dtype in ('float32', 'float64'):
            return _custom_kernels.gelu(X, inplace=inplace, threshold=6.0)
        else:
            return super().gelu(X, inplace=inplace)

    def backprop_gelu(self, dY, X, inplace=False):
        if X.dtype == dY.dtype and X.dtype in ('float32', 'float64'):
            return _custom_kernels.backprop_gelu(dY, X, inplace=inplace, threshold=6.0)
        else:
            return super().backprop_gelu(dY, X, inplace=inplace)

    def gemm(self, x, y, out=None, trans1=False, trans2=False):
        """Matrix multiply on device, optionally transposing either operand."""
        if isinstance(x, numpy.ndarray) or isinstance(y, numpy.ndarray):
            raise ValueError('Encountered a numpy array when processing with cupy. Did you call model.ops.asarray on your data?')
        if trans1:
            x = x.T
        if trans2:
            y = y.T
        if out is None:
            return self.xp.dot(x, y)
        else:
            self.xp.dot(x, y, out=out)
            return out

    def asarray(self, data, dtype=None):
        """Convert host arrays or GPU tensors of other frameworks to CuPy."""
        if is_cupy_array(data):
            array = self.xp.asarray(data, dtype=dtype)
        elif is_torch_cuda_array(data):
            array = torch2xp(data)
        elif is_tensorflow_gpu_array(data):
            array = tensorflow2xp(data)
        elif is_mxnet_gpu_array(data):
            array = mxnet2xp(data)
        else:
            array = self.xp.array(data, dtype=dtype)
        # Framework converters above ignore dtype; enforce it after the fact.
        if dtype is not None:
            array = array.astype(dtype=dtype, copy=False)
        return array

    def pad(self, seqs, round_to=1):
        """Pad a list of 2d+ sequences into one contiguous batch array."""
        if not seqs:
            raise ValueError('Cannot pad empty sequence')
        if len(set(seq.ndim for seq in seqs)) != 1:
            raise ValueError('Cannot pad sequences with different ndims')
        if len(set(seq.dtype for seq in seqs)) != 1:
            raise ValueError('Cannot pad sequences with different dtypes')
        if len(set(seq.shape[1:] for seq in seqs)) != 1:
            raise ValueError('Cannot pad sequences that differ on other dimensions')
        # The custom kernel needs contiguous memory and a supported dtype.
        if not all(seq.flags['C_CONTIGUOUS'] for seq in seqs) or seqs[0].dtype not in ('float32', 'float64', 'int32', 'int64'):
            return super().pad(seqs, round_to)
        return _custom_kernels.pad(seqs, round_to)

    def maxout(self, X):
        if X.dtype in ('float32', 'float64'):
            return _custom_kernels.maxout(X)
        else:
            return super().maxout(X)

    def backprop_maxout(self, dY, which, P):
        if dY.dtype in ('float32', 'float64') and which.dtype == 'int32':
            return _custom_kernels.backprop_maxout(dY, which, P)
        else:
            return super().backprop_maxout(dY, which, P)

    def relu(self, X, inplace=False):
        if not inplace:
            return X * (X > 0)
        else:
            X *= (X > 0)
            return X

    def backprop_relu(self, dY, Y, inplace=False):
        if not inplace:
            return dY * (Y > 0)
        dY *= (Y > 0)
        return dY

    def clipped_linear(self, X, slope: float=1.0, offset: float=0.0, min_val: float=0.0, max_val: float=1.0, inplace: bool=False):
        if X.dtype in ('float32', 'float64'):
            return _custom_kernels.clipped_linear(X, inplace=inplace, slope=slope, offset=offset, min_val=min_val, max_val=max_val)
        else:
            return super().clipped_linear(X, inplace=inplace, slope=slope, offset=offset, min_val=min_val, max_val=max_val)

    def backprop_clipped_linear(self, dY, X, slope: float=1.0, offset: float=0.0, min_val: float=0.0, max_val: float=1.0, inplace: bool=False):
        if X.dtype == dY.dtype and X.dtype in ('float32', 'float64'):
            return _custom_kernels.backprop_clipped_linear(dY, X, slope=slope, offset=offset, min_val=min_val, max_val=max_val, inplace=inplace)
        else:
            return super().backprop_clipped_linear(dY=dY, X=X, slope=slope, offset=offset, min_val=min_val, max_val=max_val, inplace=inplace)

    def backprop_hard_swish(self, dY, X, inplace: bool=False):
        if X.dtype == dY.dtype and X.dtype in ('float32', 'float64'):
            return _custom_kernels.backprop_hard_swish(dY, X, inplace=inplace)
        else:
            return super().backprop_hard_swish(dY, X, inplace=inplace)

    def backprop_hard_swish_mobilenet(self, dY, X, inplace: bool=False):
        if X.dtype == dY.dtype and X.dtype in ('float32', 'float64'):
            return _custom_kernels.backprop_hard_swish_mobilenet(dY, X, inplace=inplace)
        else:
            return super().backprop_hard_swish_mobilenet(dY, X, inplace=inplace)

    def mish(self, X, threshold=20.0, inplace=False):
        if X.dtype in ('float32', 'float64'):
            return _custom_kernels.mish(X, inplace=inplace, threshold=threshold)
        else:
            return super().mish(X, threshold, inplace)

    def backprop_mish(self, dY, X, threshold=20.0, inplace=False):
        if X.dtype == dY.dtype and X.dtype in ('float32', 'float64'):
            return _custom_kernels.backprop_mish(dY, X, inplace=inplace, threshold=threshold)
        else:
            return super().backprop_mish(dY, X, threshold, inplace)

    def swish(self, X, inplace=False):
        if X.dtype in ('float32', 'float64'):
            return _custom_kernels.swish(X, inplace=inplace, threshold=17.0)
        else:
            return super().swish(X, inplace=inplace)

    def backprop_swish(self, dY, X, Y, inplace=False):
        if X.dtype == dY.dtype == Y.dtype and X.dtype in ('float32', 'float64'):
            return _custom_kernels.backprop_swish(dY, X, Y, inplace=inplace, threshold=17.0)
        else:
            return super().backprop_swish(dY, X, Y, inplace=inplace)

    def clip_gradient(self, gradient, threshold):
        """Scale `gradient` in place so its Frobenius norm is <= threshold."""
        def frobenius_norm(X):
            X_vec = X.reshape(-1)
            return cupy.cublas.nrm2(X_vec)
        # Guard against division by zero for an all-zero gradient.
        grad_norm = cupy.maximum(frobenius_norm(gradient), 1e-12)
        gradient *= cupy.minimum(threshold, grad_norm) / grad_norm
        return gradient

    def seq2col(self, seq, nW, *, lengths=None):
        if seq.dtype in ('float32', 'float64') and (lengths is None or lengths.dtype == 'int32'):
            return _custom_kernels.seq2col(seq, nW, lengths=lengths)
        else:
            return super().seq2col(seq, nW, lengths=lengths)

    def backprop_seq2col(self, dY, nW, *, lengths=None):
        if dY.dtype in ('float32', 'float64') and (lengths is None or lengths.dtype == 'int32'):
            return _custom_kernels.backprop_seq2col(dY, nW, lengths=lengths)
        else:
            return super().backprop_seq2col(dY, nW, lengths=lengths)

    def reduce_mean(self, X, lengths):
        if X.dtype in ('float32', 'float64') and lengths.dtype == 'int32':
            return _custom_kernels.reduce_mean(X, lengths=lengths)
        else:
            # BUG FIX: the fallback result was dropped (implicit None return).
            return super().reduce_mean(X, lengths)

    def backprop_reduce_mean(self, d_means, lengths):
        if d_means.dtype in ('float32', 'float64') and lengths.dtype == 'int32':
            return _custom_kernels.backprop_reduce_mean(d_means, lengths)
        else:
            # BUG FIX: the fallback result was dropped (implicit None return).
            return super().backprop_reduce_mean(d_means, lengths)

    def reduce_max(self, X, lengths):
        if X.dtype in ('float32', 'float64') and lengths.dtype == 'int32':
            return _custom_kernels.reduce_max(X, lengths)
        else:
            # BUG FIX: the fallback result was dropped (implicit None return).
            return super().reduce_max(X, lengths)

    def backprop_reduce_max(self, d_maxes, which, lengths):
        if d_maxes.dtype in ('float32', 'float64') and which.dtype == 'int32' and lengths.dtype == 'int32':
            return _custom_kernels.backprop_reduce_max(d_maxes, which, lengths)
        else:
            # BUG FIX: the fallback result was dropped (implicit None return).
            return super().backprop_reduce_max(d_maxes, which, lengths)

    def reduce_sum(self, X, lengths):
        if X.dtype in ('float32', 'float64') and lengths.dtype == 'int32':
            return _custom_kernels.reduce_sum(X, lengths)
        else:
            return super().reduce_sum(X, lengths)

    def backprop_reduce_sum(self, d_sums, lengths):
        if d_sums.dtype in ('float32', 'float64') and lengths.dtype == 'int32':
            return _custom_kernels.backprop_reduce_sum(d_sums, lengths)
        else:
            return super().backprop_reduce_sum(d_sums, lengths)

    def hash(self, ids, seed):
        return _custom_kernels.hash(ids, seed)

    def scatter_add(self, table, indices, values):
        # cupyx.scatter_add handles duplicate indices correctly (unlike +=).
        self._xp2.scatter_add(table, indices, values)

    def adam(self, weights, gradient, mom1, mom2, beta1, beta2, eps, learn_rate, mod_rate=1.0):
        """Run one fused Adam update step in place and zero the gradient."""
        _check_compatible_shape(weights, gradient)
        _check_compatible_shape(weights, mom1)
        _check_compatible_shape(weights, mom2)
        adam_kernel(gradient, learn_rate, 1 - beta1, 1 - beta2, eps, weights, mom1, mom2)
        gradient.fill(0)
        return (weights, gradient, mom1, mom2)

    def position_encode(self, N, D, period=10000, out=None):
        # Computed on CPU (NumpyOps) and then transferred to the device.
        positions = NumpyOps().position_encode(N, D, period=period, out=out)
        return self.asarray(positions)
class OptionSeriesBarSonificationContexttracksMappingHighpass(Options):
    """Highpass-filter mapping options with frequency/resonance sub-configs."""

    def frequency(self) -> 'OptionSeriesBarSonificationContexttracksMappingHighpassFrequency':
        """Sub-configuration controlling the filter cutoff frequency."""
        sub = self._config_sub_data('frequency', OptionSeriesBarSonificationContexttracksMappingHighpassFrequency)
        return sub

    def resonance(self) -> 'OptionSeriesBarSonificationContexttracksMappingHighpassResonance':
        """Sub-configuration controlling the filter resonance."""
        sub = self._config_sub_data('resonance', OptionSeriesBarSonificationContexttracksMappingHighpassResonance)
        return sub
class ParallelRolloutRunner(RolloutRunner):
    """Rollout runner distributing episodes over multiple worker processes.

    Workers pull (env_seed, agent_seed) pairs from a seeding queue and push
    per-episode results (or exception reports) back over a reporting queue.
    """

    def __init__(self, n_episodes: int, max_episode_steps: int, deterministic: bool, n_processes: int, record_trajectory: bool, record_event_logs: bool):
        super().__init__(n_episodes=n_episodes, max_episode_steps=max_episode_steps, deterministic=deterministic, record_trajectory=record_trajectory, record_event_logs=record_event_logs)
        self.n_processes = n_processes
        # Set up lazily when workers are launched.
        self.epoch_stats_aggregator = None
        self.reporting_queue = None
        self.seeding_queue = None

    # NOTE(review): the original had a bare "(RolloutRunner)" expression here —
    # almost certainly the residue of a stripped @override(RolloutRunner)
    # decorator; restore it when merging. TODO confirm.
    def run_with(self, env: ConfigType, wrappers: CollectionOfConfigType, agent: ConfigType):
        """Launch workers and monitor them; exit gracefully on Ctrl-C."""
        workers = self._launch_workers(env, wrappers, agent)
        try:
            self._monitor_rollout(workers)
        except KeyboardInterrupt:
            self._attempt_graceful_exit(workers)

    def _launch_workers(self, env: ConfigType, wrappers: CollectionOfConfigType, agent: ConfigType) -> Iterable[Process]:
        """Fill the seeding queue and spawn the worker processes."""
        env_seeds = self.maze_seeding.get_explicit_env_seeds(self.n_episodes)
        agent_seeds = self.maze_seeding.get_explicit_agent_seeds(self.n_episodes)
        assert len(env_seeds) == len(agent_seeds)
        self.seeding_queue = Queue()
        for env_seed, agent_seed in zip(env_seeds, agent_seeds):
            self.seeding_queue.put((env_seed, agent_seed))
        # Fewer explicit seeds than requested episodes shrinks the rollout.
        actual_number_of_episodes = min(len(env_seeds), self.n_episodes)
        if actual_number_of_episodes < self.n_episodes:
            BColors.print_colored(f'Only {len(env_seeds)} explicit seed(s) given, thus the number of episodes changed from: {self.n_episodes} to {actual_number_of_episodes}.', BColors.WARNING)
            self.n_episodes = actual_number_of_episodes
        workers = self._configure_and_launch_processes(parallel_worker_type=ParallelRolloutWorker, env=env, wrappers=wrappers, agent=agent)
        return workers

    def _configure_and_launch_processes(self, parallel_worker_type: type(ParallelRolloutWorker), env: ConfigType, wrappers: CollectionOfConfigType, agent: ConfigType) -> Iterable[Process]:
        """Start daemon worker processes and register stats/event-log writers."""
        self.reporting_queue = Queue()
        workers = []
        # Never launch more processes than there are episodes to run.
        for _ in range(min(self.n_processes, self.n_episodes)):
            p = Process(target=parallel_worker_type.run, args=(env, wrappers, agent, self.deterministic, self.max_episode_steps, self.record_trajectory, self.input_dir, self.reporting_queue, self.seeding_queue), daemon=True)
            p.start()
            workers.append(p)
        if self.record_event_logs:
            LogEventsWriterRegistry.register_writer(LogEventsWriterTSV(log_dir='./event_logs'))
        register_log_stats_writer(LogStatsWriterConsole())
        self.epoch_stats_aggregator = LogStatsAggregator(LogStatsLevel.EPOCH)
        self.epoch_stats_aggregator.register_consumer(get_stats_logger('rollout_stats'))
        return workers

    def _monitor_rollout(self, workers: Iterable[Process]) -> None:
        """Collect one report per episode; abort all workers on any failure."""
        for _ in tqdm(range(self.n_episodes), desc='Episodes done', unit=' episodes'):
            report = self.reporting_queue.get()
            if isinstance(report, ExceptionReport):
                for p in workers:
                    p.terminate()
                raise RuntimeError((f'''A worker encountered the following error on env_seed: {report.env_seed} with agent_seed: {report.agent_seed}:
''' + report.traceback)) from report.exception
            (episode_stats, episode_event_log) = report
            if episode_stats is not None:
                self.epoch_stats_aggregator.receive(episode_stats)
            if episode_event_log is not None:
                LogEventsWriterRegistry.record_event_logs(episode_event_log)
        for w in workers:
            w.join()
        if len(self.epoch_stats_aggregator.input) != 0:
            self.epoch_stats_aggregator.reduce()

    def _attempt_graceful_exit(self, workers: Iterable[Process]) -> None:
        """Terminate workers and report stats for the completed episodes."""
        print('\n\nShut down requested, exiting gracefully...\n')
        for w in workers:
            w.terminate()
        if len(self.epoch_stats_aggregator.input) != 0:
            print('Stats from the completed part of rollout:\n')
            self.epoch_stats_aggregator.reduce()
        print('\nRollout done (terminated prematurely).')
def test_format__symlinks(tmp_path):
    """run() must rewrite symlink targets while leaving the links themselves intact."""
    targets = [tmp_path / 'test_markdown1.md', tmp_path / 'test_markdown2.md']
    for target in targets:
        target.write_text(UNFORMATTED_MARKDOWN)
    nested_dir = tmp_path / 'subdir'
    nested_dir.mkdir()
    link_in_dir = nested_dir / 'symlink1.md'
    link_in_dir.symlink_to(targets[0])
    direct_link = tmp_path / 'symlink2.md'
    direct_link.symlink_to(targets[1])
    # One link is reached via a directory walk, the other passed explicitly.
    assert run([str(nested_dir), str(direct_link)]) == 0
    for target in targets:
        assert target.read_text() == FORMATTED_MARKDOWN
    assert link_in_dir.is_symlink()
    assert direct_link.is_symlink()
class HangmanTests(unittest.TestCase):
    """Behavioral tests for the Hangman game (nine wrong guesses allowed)."""

    def _assert_progress(self, game, guesses_left, masked):
        """Assert the game is still ongoing with the given lives and mask."""
        self.assertEqual(game.get_status(), hangman.STATUS_ONGOING)
        self.assertEqual(game.remaining_guesses, guesses_left)
        self.assertEqual(game.get_masked_word(), masked)

    def test_initially_9_failures_are_allowed(self):
        game = Hangman('foo')
        self.assertEqual(game.get_status(), hangman.STATUS_ONGOING)
        self.assertEqual(game.remaining_guesses, 9)

    def test_initially_no_letters_are_guessed(self):
        game = Hangman('foo')
        self.assertEqual(game.get_masked_word(), '___')

    def test_after_10_failures_the_game_is_over(self):
        game = Hangman('foo')
        for _ in range(10):
            game.guess('x')
        self.assertEqual(game.get_status(), hangman.STATUS_LOSE)
        # Guessing after the game has ended must raise.
        with self.assertRaises(ValueError) as err:
            game.guess('x')
        self.assertEqual(type(err.exception), ValueError)
        self.assertEqual(err.exception.args[0], 'The game has already ended.')

    def test_feeding_a_correct_letter_removes_underscores(self):
        game = Hangman('foobar')
        for letter, masked in (('b', '___b__'), ('o', '_oob__')):
            game.guess(letter)
            self._assert_progress(game, 9, masked)

    def test_feeding_a_correct_letter_twice_counts_as_a_failure(self):
        game = Hangman('foobar')
        game.guess('b')
        self._assert_progress(game, 9, '___b__')
        game.guess('b')  # repeating a correct guess still costs a life
        self._assert_progress(game, 8, '___b__')

    def test_getting_all_the_letters_right_makes_for_a_win(self):
        game = Hangman('hello')
        game.guess('b')  # one wrong guess up front
        self._assert_progress(game, 8, '_____')
        for letter, masked in (('e', '_e___'), ('l', '_ell_'), ('o', '_ello')):
            game.guess(letter)
            self._assert_progress(game, 8, masked)
        game.guess('h')
        self.assertEqual(game.get_status(), hangman.STATUS_WIN)
        self.assertEqual(game.get_masked_word(), 'hello')
        with self.assertRaises(ValueError) as err:
            game.guess('x')
        self.assertEqual(type(err.exception), ValueError)
        self.assertEqual(err.exception.args[0], 'The game has already ended.')

    def test_winning_on_last_guess_still_counts_as_a_win(self):
        game = Hangman('aaa')
        for wrong in 'bcdefghij':  # burn all nine lives
            game.guess(wrong)
        game.guess('a')
        self.assertEqual(game.remaining_guesses, 0)
        self.assertEqual(game.get_status(), hangman.STATUS_WIN)
        self.assertEqual(game.get_masked_word(), 'aaa')
class FingerHoleEdge(BaseEdge):
    """Edge drawing a row of finger-joint holes parallel to itself."""
    char = 'h'
    description = 'Edge (parallel Finger Joint Holes)'

    def __init__(self, boxes, fingerHoles=None, **kw) -> None:
        settings = None
        if isinstance(fingerHoles, Settings):
            # A Settings object was passed: build the hole generator from it.
            settings = fingerHoles
            fingerHoles = FingerHoles(boxes, settings)
        super().__init__(boxes, settings, **kw)
        # Fall back to the Boxes instance's default hole generator.
        self.fingerHoles = fingerHoles or boxes.fingerHolesAt

    def __call__(self, length, bedBolts=None, bedBoltSettings=None, **kw):
        inset = self.fingerHoles.settings.edge_width
        with self.saved_context():
            self.fingerHoles(
                0, self.burn + inset + self.settings.thickness / 2, length, 0,
                bedBolts=bedBolts, bedBoltSettings=bedBoltSettings)
            if self.settings.bottom_lip:
                lip = self.settings.bottom_lip + self.fingerHoles.settings.edge_width
                gap = self.boxes.spacing
                self.moveTo(-gap / 2, -lip - gap)
                self.rectangularWall(length - 1.05 * self.boxes.thickness, lip)
        self.edge(length, tabs=2)

    def startwidth(self) -> float:
        return self.fingerHoles.settings.edge_width + self.settings.thickness

    def margin(self) -> float:
        if self.settings.bottom_lip:
            return self.settings.bottom_lip + self.fingerHoles.settings.edge_width + self.boxes.spacing
        return 0.0
def test_private_other(slave_channel):
    """The 'other' member of a private chat mirrors the chat's own identity."""
    chat = PrivateChat(channel=slave_channel, name='__name__', alias='__alias__', uid='__id__')
    other = chat.other
    # A plain member, not the self- or system-member specializations.
    assert isinstance(other, ChatMember)
    assert not isinstance(other, SelfChatMember)
    assert not isinstance(other, SystemChatMember)
    assert other in chat.members
    for attr in ('name', 'alias', 'uid'):
        assert getattr(chat, attr) == getattr(other, attr)
def verify_statistics_map(fledge_url, skip_verify_north_interface):
    """Check the statistics endpoint shows readings were ingested (and sent)."""
    jdoc = utils.get_request(fledge_url, '/fledge/statistics')
    stats = utils.serialize_stats_map(jdoc)
    assert stats[south_asset_name.upper()] >= 1
    assert stats['READINGS'] >= 1
    if skip_verify_north_interface:
        return
    # North-side counters only apply when the north interface is exercised.
    assert stats['Readings Sent'] >= 1
    assert stats[north_task_name] >= 1
class MailType():
    """String constants naming every transactional mail template."""

    USER_REGISTER = 'user_registration'
    USER_CONFIRM = 'user_confirmation'
    USER_CHANGE_EMAIL = 'user_change_email'
    NEW_SESSION = 'new_session'
    PASSWORD_RESET = 'password_reset'
    PASSWORD_CHANGE = 'password_change'
    PASSWORD_RESET_AND_VERIFY = 'password_reset_verify'
    EVENT_ROLE = 'event_role'
    SPEAKER_INVITE = 'speaker_invite'
    GROUP_ROLE = 'group_role'
    SESSION_STATE_CHANGE = 'session_state_change'
    TICKET_PURCHASED = 'ticket_purchased'
    TICKET_PURCHASED_ATTENDEE = 'ticket_purchased_attendee'
    TICKET_PURCHASED_ORGANIZER = 'ticket_purchased_organizer'
    TICKET_CANCELLED = 'ticket_cancelled'
    EVENT_EXPORTED = 'event_exported'
    EVENT_EXPORT_FAIL = 'event_export_fail'
    EVENT_IMPORTED = 'event_imported'
    EVENT_IMPORT_FAIL = 'event_import_fail'
    MONTHLY_PAYMENT = 'monthly_payment'
    AFTER_EVENT = 'after_event'
    AFTER_EVENT_SPEAKER = 'after_event_speaker'
    MONTHLY_PAYMENT_FOLLOWUP = 'monthly_payment_follow_up'
    MONTHLY_PAYMENT_PRE_DUE = 'monthly_payment_pre_due'
    MONTHLY_PAYMENT_POST_DUE = 'monthly_payment_post_due'
    TEST_MAIL = 'test_mail'
    CONTACT_ORGANIZERS = 'contact_organizers'
    CONTACT_GROUP_ORGANIZERS = 'contact_group_organizers'
    VIDEO_MODERATOR_INVITE = 'video_moderator_invite'
    TICKET_SALES_END = 'ticket_sales_end'
    TICKET_SALES_END_TOMORROW = 'ticket_sales_end_tomorrow'
    TICKET_SALES_END_NEXT_WEEK = 'ticket_sales_end_next_week'
    ANNOUNCE_EVENT = 'announce_event'

    @staticmethod
    def entries():
        """Return the values of all mail-type constants defined on this class.

        The original was an un-decorated method (only callable via the class)
        using map/filter/lambda and `type(x) == str`; now a @staticmethod
        with a comprehension and isinstance — same call pattern, same result.
        """
        # Dunder names (including __doc__) and non-string attributes such as
        # this method itself are excluded; definition order is preserved.
        return [value for name, value in MailType.__dict__.items()
                if not name.startswith('__') and isinstance(value, str)]
class TwoPiece(Boxes):
    description = '\nSet *hi* larger than *h* to leave gap between the inner and outer shell. This can be used to make opening the box easier. Set *hi* smaller to only have a small inner ridge that will allow the content to be more visible after opening.\n\n\n'
    ui_group = 'Box'

    def __init__(self) -> None:
        Boxes.__init__(self)
        self.buildArgParser('x', 'y', 'h', 'hi', 'outside')
        self.addSettingsArgs(edges.FingerJointSettings, finger=2.0, space=2.0)
        self.argparser.add_argument('--play', action='store', type=float, default=0.15, help='play between the two parts as multiple of the wall thickness')

    def render(self):
        """Draw inner and outer shell walls plus the two bottoms."""
        x, y, h = self.x, self.y, self.h
        inner_h = self.hi or self.h
        thickness = self.thickness
        play = self.play * thickness
        if self.outside:
            # Outside dimensions given: shrink to net inner measurements.
            x -= 4 * thickness + 2 * play
            y -= 4 * thickness + 2 * play
            h -= 2 * thickness
            inner_h -= 2 * thickness
        # Widen the finger-joint edge by the play so the shells slide.
        self.edges['f'].settings.setValues(thickness, False, edge_width=self.edges['f'].settings.edge_width + play)
        for idx in range(2):
            # idx 0: inner shell; idx 1: outer shell, enlarged by wall + play.
            shift = idx * 2 * (thickness + play)
            wall_h = [inner_h, h][idx]
            with self.saved_context():
                self.rectangularWall(x + shift, wall_h, 'fFeF', move='right')
                self.rectangularWall(y + shift, wall_h, 'ffef', move='right')
                self.rectangularWall(x + shift, wall_h, 'fFeF', move='right')
                self.rectangularWall(y + shift, wall_h, 'ffef', move='right')
            self.rectangularWall(y, wall_h, 'ffef', move='up only')
        self.rectangularWall(x, y, 'hhhh', bedBolts=None, move='right')
        self.rectangularWall(x + shift, y + shift, 'FFFF', bedBolts=None, move='right')
class Autoregressive(Bijector):
    """Bijector whose inverse is recovered one permutation index at a time."""
    domain = constraints.real_vector
    codomain = constraints.real_vector

    def __init__(self, params_fn: Optional[flowtorch.Lazy]=None, *, shape: torch.Size, context_shape: Optional[torch.Size]=None, **kwargs: Any) -> None:
        # Treat all event dims of `shape` as a single independent block.
        event_ndim = len(shape)
        self.domain = constraints.independent(constraints.real, event_ndim)
        self.codomain = constraints.independent(constraints.real, event_ndim)
        if not params_fn:
            params_fn = DenseAutoregressive()
        assert params_fn is not None and issubclass(params_fn.cls, DenseAutoregressive)
        super().__init__(params_fn, shape=shape, context_shape=context_shape)

    def inverse(self, y: torch.Tensor, x: Optional[torch.Tensor]=None, context: Optional[torch.Tensor]=None) -> torch.Tensor:
        assert context is None
        assert self._params_fn is not None
        # Fast path: y was produced by this bijector, so its input is cached.
        if self._check_bijective_y(y, context):
            assert isinstance(y, BijectiveTensor)
            return y.get_parent_from_bijector(self)
        x_inv = torch.zeros_like(y)
        order = self._params_fn.permutation
        log_detJ: Optional[torch.Tensor] = None
        # Sequential inversion: each dimension conditions only on the
        # dimensions already recovered, in permutation order.
        for col in cast(torch.LongTensor, order):
            step_params = self._params_fn(x_inv.clone(), context=context)
            candidate, log_detJ = self._inverse(y, params=step_params)
            x_inv[..., col] = candidate[..., col]
        if is_record_flow_graph_enabled():
            x_inv = to_bijective_tensor(x_inv, y, context=context, bijector=self, mode='inverse', log_detJ=log_detJ)
        return x_inv

    def _log_abs_det_jacobian(self, x: torch.Tensor, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> torch.Tensor:
        raise NotImplementedError
class OrOp(Node):
    """Three-valued (Kleene) logical OR node: True / False / None=unknown."""

    def forward(self, *args, **kwargs):
        """Return True if any arg is True, False if all are False, else None.

        IDIOM FIX: the original wrapped the generator in a throwaway list
        inside any()/all(); generators short-circuit and avoid allocation.
        `== True` (not `is True`) is kept deliberately — operands may be
        values that merely compare equal to a bool. TODO confirm.
        """
        if any(a == True for a in args):
            return True
        if all(a == False for a in args):
            return False
        # Mixed/unknown operands: result undetermined.
        return None

    def follow(self, *args, **kwargs):
        """Propagate the forward result to all inputs ('*' wildcard)."""
        # NOTE(review): kwargs are accepted but not forwarded — presumably
        # intentional, matching the original; verify against callers.
        return fmap(('*', self.forward(*args)))

    def final(self, args, operands=None, result=None, **kwargs):
        """Decide whether the OR result is final ('fin') or variable ('var')."""
        if result:
            # A finalized True operand pins a True OR result permanently.
            if any((a == 'fin') and (v == True) for a, v in zip(args, operands)):
                return 'fin'
            return 'var'
        # Result False/unknown: any still-variable operand keeps it variable.
        if any(a == 'var' for a in args):
            return 'var'
        return 'fin'
def checksum_by_replay_chunk(table_name, delta_table_name, old_column_list, pk_list, id_col_name, id_limit, max_replayed, chunk_size) -> str:
    """Build a checksum SELECT over one id-bounded chunk of replayed delta rows.

    Only the newest delta row per primary key is considered (the NOT EXISTS
    subquery filters older changes), joined back against the old table.
    """
    checksum_cols = ['count(*) AS `cnt`']
    for column in old_column_list:
        qualified = '`{}`.`{}`'.format(escape(table_name), escape(column))
        checksum_cols.append('{} AS `{}`'.format(wrap_checksum_function(qualified), escape(column)))
    template = (
        'SELECT {col_list} FROM ( SELECT * FROM `{delta}` WHERE `{id}` > {id_limit} '
        'AND `{id}` <= least({id_limit} + {chunk_size}, {max_replayed}) '
        'AND NOT EXISTS ( SELECT 1 FROM `{delta}` as `t` WHERE {exist_join} '
        'AND `t`.{id} < `{delta}`.`{id}` )) as chg '
        'LEFT JOIN `{old_table}` ON {join_clause} ')
    return template.format(
        id=escape(id_col_name),
        col_list=', '.join(checksum_cols),
        delta=escape(delta_table_name),
        old_table=escape(table_name),
        id_limit=id_limit,
        max_replayed=max_replayed,
        join_clause=get_match_clause(table_name, 'chg', pk_list, separator=' AND '),
        exist_join=get_match_clause('t', delta_table_name, pk_list, separator=' AND '),
        chunk_size=chunk_size)
class Skin(RCareWorldBaseObject):
    """Accessor for a skin sensor object in the simulation environment."""

    def __init__(self, env, id: int, name: str, is_in_scene: bool=True):
        super().__init__(env=env, id=id, name=name, is_in_scene=is_in_scene)

    def getInfo(self) -> dict:
        """Return the sensor's current forces, positions and taxel ids."""
        data = self.env.instance_channel.data[self.id]
        return {
            'forces': data['forces'],
            'positions': data['positions'],
            'ids': data['ids'],
        }

    def getInfoByID(self, this_id: int) -> dict:
        """Return force/position for a single taxel id.

        Raises ValueError if `this_id` is not present in the sensor data.
        """
        info_dict = self.getInfo()
        # BUG FIX: original iterated `for i in len(...)` — iterating an int
        # raises TypeError; must be range(len(...)). The dead `return {}`
        # after the raise was removed as unreachable.
        for i in range(len(info_dict['ids'])):
            if info_dict['ids'][i] == this_id:
                return {
                    'force': info_dict['forces'][i],
                    'position': info_dict['positions'][i],
                    'id': this_id,
                }
        raise ValueError(f'id {this_id} not found')
class PhotoContainer(containers.DeclarativeContainer):
    """DI container wiring photo entities to their repository."""

    # External collaborators supplied by the application container.
    database = providers.Dependency()
    file_storage = providers.Dependency()

    # Entity factory; its .provider is injected so the repository can
    # create Photo instances on demand.
    photo = providers.Factory(entities.Photo)

    photo_repository = providers.Singleton(
        repositories.PhotoRepository,
        entity_factory=photo.provider,
        fs=file_storage,
        db=database,
    )
class OptionSeriesVennCluster(Options):
    """Marker-clustering options for venn series.

    NOTE(review): the original source defined each getter/setter pair as two
    plain methods with the same name, so the second definition silently
    shadowed the first — almost certainly @property/@<name>.setter
    decorators stripped during extraction. They are restored below;
    confirm against the upstream source when merging.
    """

    @property
    def allowOverlap(self):
        return self._config_get(True)

    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self):
        return self._config_get({'duration': 500})

    @animation.setter
    def animation(self, flag: bool):
        self._config(flag, js_type=False)

    def dataLabels(self) -> 'OptionSeriesVennClusterDatalabels':
        """Sub-configuration for cluster data labels."""
        return self._config_sub_data('dataLabels', OptionSeriesVennClusterDatalabels)

    @property
    def drillToCluster(self):
        return self._config_get(True)

    @drillToCluster.setter
    def drillToCluster(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabled(self):
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def events(self) -> 'OptionSeriesVennClusterEvents':
        """Sub-configuration for cluster events."""
        return self._config_sub_data('events', OptionSeriesVennClusterEvents)

    def layoutAlgorithm(self) -> 'OptionSeriesVennClusterLayoutalgorithm':
        """Sub-configuration for the clustering layout algorithm."""
        return self._config_sub_data('layoutAlgorithm', OptionSeriesVennClusterLayoutalgorithm)

    def marker(self) -> 'OptionSeriesVennClusterMarker':
        """Sub-configuration for the cluster marker."""
        return self._config_sub_data('marker', OptionSeriesVennClusterMarker)

    @property
    def minimumClusterSize(self):
        return self._config_get(2)

    @minimumClusterSize.setter
    def minimumClusterSize(self, num: float):
        self._config(num, js_type=False)

    def states(self) -> 'OptionSeriesVennClusterStates':
        """Sub-configuration for cluster states."""
        return self._config_sub_data('states', OptionSeriesVennClusterStates)

    def zones(self) -> 'OptionSeriesVennClusterZones':
        """Sub-configuration for cluster zones."""
        return self._config_sub_data('zones', OptionSeriesVennClusterZones)
class StatsController(ControllerBase):
def __init__(self, req, link, data, **config):
super().__init__(req, link, data, **config)
self.dpset = data['dpset']
self.waiters = data['waiters']
def get_dpids(self, req, **_kwargs):
dps = list(self.dpset.dps.keys())
body = json.dumps(dps)
return Response(content_type='application/json', body=body)
_method
def get_desc_stats(self, req, dp, ofctl, **kwargs):
return ofctl.get_desc_stats(dp, self.waiters)
_method
def get_flow_desc(self, req, dp, ofctl, **kwargs):
flow = (req.json if req.body else {})
return ofctl.get_flow_desc(dp, self.waiters, flow)
_method
def get_flow_stats(self, req, dp, ofctl, **kwargs):
flow = (req.json if req.body else {})
return ofctl.get_flow_stats(dp, self.waiters, flow)
_method
def get_aggregate_flow_stats(self, req, dp, ofctl, **kwargs):
flow = (req.json if req.body else {})
return ofctl.get_aggregate_flow_stats(dp, self.waiters, flow)
_method
def get_table_stats(self, req, dp, ofctl, **kwargs):
return ofctl.get_table_stats(dp, self.waiters)
_method
def get_table_features(self, req, dp, ofctl, **kwargs):
return ofctl.get_table_features(dp, self.waiters)
_method
def get_port_stats(self, req, dp, ofctl, port=None, **kwargs):
if (port == 'ALL'):
port = None
return ofctl.get_port_stats(dp, self.waiters, port)
_method
def get_queue_stats(self, req, dp, ofctl, port=None, queue_id=None, **kwargs):
if (port == 'ALL'):
port = None
if (queue_id == 'ALL'):
queue_id = None
return ofctl.get_queue_stats(dp, self.waiters, port, queue_id)
_method
def get_queue_config(self, req, dp, ofctl, port=None, **kwargs):
if (port == 'ALL'):
port = None
return ofctl.get_queue_config(dp, self.waiters, port)
_method
def get_queue_desc(self, req, dp, ofctl, port=None, queue=None, **_kwargs):
if (port == 'ALL'):
port = None
if (queue == 'ALL'):
queue = None
return ofctl.get_queue_desc(dp, self.waiters, port, queue)
_method
def get_meter_features(self, req, dp, ofctl, **kwargs):
return ofctl.get_meter_features(dp, self.waiters)
_method
def get_meter_config(self, req, dp, ofctl, meter_id=None, **kwargs):
if (meter_id == 'ALL'):
meter_id = None
return ofctl.get_meter_config(dp, self.waiters, meter_id)
_method
def get_meter_desc(self, req, dp, ofctl, meter_id=None, **kwargs):
if (meter_id == 'ALL'):
meter_id = None
return ofctl.get_meter_desc(dp, self.waiters, meter_id)
_method
def get_meter_stats(self, req, dp, ofctl, meter_id=None, **kwargs):
if (meter_id == 'ALL'):
meter_id = None
return ofctl.get_meter_stats(dp, self.waiters, meter_id)
_method
def get_group_features(self, req, dp, ofctl, **kwargs):
return ofctl.get_group_features(dp, self.waiters)
_method
def get_group_desc(self, req, dp, ofctl, group_id=None, **kwargs):
return ofctl.get_group_desc(dp, self.waiters)
_method
def get_group_stats(self, req, dp, ofctl, group_id=None, **kwargs):
if (group_id == 'ALL'):
group_id = None
return ofctl.get_group_stats(dp, self.waiters, group_id)
_method
def get_port_desc(self, req, dp, ofctl, port_no=None, **kwargs):
return ofctl.get_port_desc(dp, self.waiters)
_method
def get_role(self, req, dp, ofctl, **kwargs):
return ofctl.get_role(dp, self.waiters)
_method
def mod_flow_entry(self, req, dp, ofctl, flow, cmd, **kwargs):
    """Issue a flow-mod on `dp`; `cmd` names the OpenFlow flow-mod command.

    Raises:
        CommandNotFoundError: if `cmd` is not a recognised flow-mod command.
    """
    ofp = dp.ofproto
    commands = {'add': ofp.OFPFC_ADD, 'modify': ofp.OFPFC_MODIFY, 'modify_strict': ofp.OFPFC_MODIFY_STRICT, 'delete': ofp.OFPFC_DELETE, 'delete_strict': ofp.OFPFC_DELETE_STRICT}
    if cmd not in commands:
        raise CommandNotFoundError(cmd=cmd)
    ofctl.mod_flow_entry(dp, flow, commands[cmd])
_method
def delete_flow_entry(self, req, dp, ofctl, flow, **kwargs):
    """Delete flow entries on `dp` across every table (OFPTT_ALL).

    NOTE(review): the incoming `flow` body is unconditionally replaced, so the
    request payload is ignored -- looks intentional (a clear-all endpoint),
    but confirm against the route definition.
    """
    flow = {'table_id': dp.ofproto.OFPTT_ALL}
    ofctl.mod_flow_entry(dp, flow, dp.ofproto.OFPFC_DELETE)
_method
def mod_meter_entry(self, req, dp, ofctl, meter, cmd, **kwargs):
    """Issue a meter-mod on `dp`; `cmd` names the OpenFlow meter-mod command.

    Raises:
        CommandNotFoundError: if `cmd` is not a recognised meter-mod command.
    """
    ofp = dp.ofproto
    commands = {'add': ofp.OFPMC_ADD, 'modify': ofp.OFPMC_MODIFY, 'delete': ofp.OFPMC_DELETE}
    if cmd not in commands:
        raise CommandNotFoundError(cmd=cmd)
    ofctl.mod_meter_entry(dp, meter, commands[cmd])
_method
def mod_group_entry(self, req, dp, ofctl, group, cmd, **kwargs):
    """Issue a group-mod on `dp`; `cmd` names the OpenFlow group-mod command.

    Raises:
        CommandNotFoundError: if `cmd` is not a recognised group-mod command.
    """
    ofp = dp.ofproto
    commands = {'add': ofp.OFPGC_ADD, 'modify': ofp.OFPGC_MODIFY, 'delete': ofp.OFPGC_DELETE}
    if cmd not in commands:
        raise CommandNotFoundError(cmd=cmd)
    ofctl.mod_group_entry(dp, group, commands[cmd])
_method
def mod_port_behavior(self, req, dp, ofctl, port_config, cmd, **kwargs):
    """Modify a port's behaviour (port-mod); only cmd == 'modify' is accepted.

    Raises:
        PortNotFoundError: if the port is unknown to the dpset cache.
        CommandNotFoundError: if `cmd` is anything other than 'modify'.
    """
    port_no = port_config.get('port_no', None)
    # Base-0 int() accepts decimal/hex/octal string forms.
    # NOTE(review): a missing 'port_no' becomes int('None', 0) -> ValueError
    # with a confusing message; consider validating before conversion.
    port_no = int(str(port_no), 0)
    port_info = self.dpset.port_state[int(dp.id)].get(port_no)
    if port_info:
        # Fill hw_addr/advertise from the cached port state when not supplied.
        port_config.setdefault('hw_addr', port_info.hw_addr)
        port_config.setdefault('advertise', port_info.advertised)
    else:
        raise PortNotFoundError(port_no=port_no)
    if (cmd != 'modify'):
        raise CommandNotFoundError(cmd=cmd)
    ofctl.mod_port_behavior(dp, port_config)
# NOTE(review): the bare `_method` below looks like a stripped decorator -- confirm.
_method
def send_experimenter(self, req, dp, ofctl, exp, **kwargs):
    """Send an experimenter message `exp` to datapath `dp`."""
    ofctl.send_experimenter(dp, exp)
# NOTE(review): the bare `_method` below looks like a stripped decorator -- confirm.
_method
def set_role(self, req, dp, ofctl, role, **kwargs):
    """Request the given controller `role` on datapath `dp`."""
    ofctl.set_role(dp, role)
class FingerJointEdge(BaseEdge, FingerJointBase):
    """Edge drawing one side of a finger joint (the side with outset fingers)."""

    char = 'f'  # edge-type character used in edge specifications
    description = 'Finger Joint'
    positive = True  # True: fingers stick out; the counterpart edge uses notches

    def draw_finger(self, f, h, style, positive: bool=True, firsthalf: bool=True) -> None:
        """Draw one finger of width ``f`` and height ``h`` in the given style.

        positive: outset finger when True, matching notch when False.
        firsthalf: mirrors the asymmetric 'snap' profile on the second half
            of the edge so both halves hook in opposite directions.
        """
        t = self.settings.thickness
        if positive:
            if (style == 'springs'):
                # Two thin spring flaps cut into the finger.
                self.polyline(0, (- 90), (0.8 * h), (90, (0.2 * h)), (0.1 * h), 90, (0.9 * h), (- 180), (0.9 * h), 90, (f - (0.6 * h)), 90, (0.9 * h), (- 180), (0.9 * h), 90, (0.1 * h), (90, (0.2 * h)), (0.8 * h), (- 90))
            elif (style == 'barbs'):
                # Serrated flank: n small barbs sized relative to thickness.
                n = int(((h - (0.1 * t)) // (0.3 * t)))
                a = math.degrees(math.atan(0.5))
                l = (5 ** 0.5)
                poly = ([(h - ((n * 0.3) * t))] + ([(- 45), ((0.1 * (2 ** 0.5)) * t), (45 + a), ((l * 0.1) * t), (- a), 0] * n))
                self.polyline(0, (- 90), *poly, 90, f, 90, *reversed(poly), (- 90))
            elif ((style == 'snap') and (f > (1.9 * t))):
                # Snap hook; only drawn when the finger is wide enough.
                a12 = math.degrees(math.atan(0.5))
                l12 = (t / math.cos(math.radians(a12)))
                d = (4 * t)
                d2 = (d + (1 * t))
                a = math.degrees(math.atan(((0.5 * t) / (h + d2))))
                l = ((h + d2) / math.cos(math.radians(a)))
                poly = [0, 90, d, (- 180), (d + h), (- 90), (0.5 * t), (90 + a12), l12, (90 - a12), (0.5 * t), (90 - a), l, (+ a), 0, ((- 180), (0.1 * t)), (h + d2), 90, (f - (1.7 * t)), (90 - a12), l12, a12, h, (- 90), 0]
                if firsthalf:
                    # Mirror the profile so both halves snap against each other.
                    poly = list(reversed(poly))
                self.polyline(*poly)
            else:
                # Plain rectangular finger.
                self.polyline(0, (- 90), h, 90, f, 90, h, (- 90))
        else:
            # Notch (recess) matching a positive finger.
            self.polyline(0, 90, h, (- 90), f, (- 90), h, 90)

    def __call__(self, length, bedBolts=None, bedBoltSettings=None, **kw):
        """Draw the whole edge of the given ``length``: alternating spaces and fingers."""
        positive = self.positive
        t = self.settings.thickness
        (s, f) = (self.settings.space, self.settings.finger)
        thickness = self.settings.thickness
        style = self.settings.style
        play = self.settings.play
        (fingers, leftover) = self.calcFingers(length, bedBolts)
        # Too short for a regular finger: fall back to one rectangular finger
        # if there is still enough material left.
        if ((fingers == 0) and f and (leftover > (0.75 * thickness)) and (leftover > (4 * play))):
            fingers = 1
            f = leftover = (leftover / 2.0)
            bedBolts = None
            style = 'rectangular'
        if (not positive):
            # Widen notches / narrow spaces by the configured play.
            f += play
            s -= play
            leftover -= play
        self.edge((leftover / 2.0), tabs=1)
        (l1, l2) = self.fingerLength(self.settings.angle)
        h = (l1 - l2)  # effective finger height for the current joint angle
        d = (bedBoltSettings or self.bedBoltSettings)[0]
        for i in range(fingers):
            if (i != 0):
                if ((not positive) and bedBolts and bedBolts.drawBolt(i)):
                    self.hole((0.5 * s), (0.5 * self.settings.thickness), (0.5 * d))
                if (positive and bedBolts and bedBolts.drawBolt(i)):
                    self.bedBoltHole(s, bedBoltSettings)
                else:
                    self.edge(s)
            self.draw_finger(f, h, style, positive, (i < (fingers // 2)))
        self.edge((leftover / 2.0), tabs=1)

    def margin(self) -> float:
        """Return the space the edge needs outside the drawing area."""
        widths = self.fingerLength(self.settings.angle)
        if self.positive:
            if (self.settings.style == 'snap'):
                # Snap hooks reach one extra thickness past the finger tips.
                return ((widths[0] - widths[1]) + self.settings.thickness)
            return (widths[0] - widths[1])
        return 0.0

    def startwidth(self) -> float:
        """Return the edge's starting offset (depends on positive/negative side)."""
        widths = self.fingerLength(self.settings.angle)
        return widths[self.positive]
class ConcatExp(Exp):
    """Expression node concatenating two sub-expressions `e1` and `e2`."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Sub-expressions may arrive as plain dict specs; build them if so.
        self.e1 = self._coerce(kwargs['e1'])
        self.e2 = self._coerce(kwargs['e2'])

    @staticmethod
    def _coerce(exp):
        """Turn a dict spec into an expression; pass anything else through."""
        return build_exp(**exp) if isinstance(exp, dict) else exp

    def __repr__(self):
        return '(Concat {} {})'.format(repr(self.e1), repr(self.e2))

    def __str__(self):
        return '(Concat {} {})'.format(str(self.e1), str(self.e2))
class Requested():
    """Meta-path hook that records every module import into LOADED_MODULES.

    Always returns None from find_spec so the normal import machinery
    continues; this finder only observes.
    """

    def find_spec(self, name, path=None, target=None):
        LOADED_MODULES[name] = []
        if not CLIMETLAB_DEBUG_IMPORTS:
            return None
        # In debug mode, also record the call sites that triggered the import,
        # skipping this file and the import machinery itself.
        callers = LOADED_MODULES[name]
        for frame in inspect.stack():
            if frame.filename == __file__:
                continue
            if 'importlib._bootstrap' in frame.filename:
                continue
            callers.append(f'{frame.filename}:{frame.lineno}')
class LigatureMorphActionTest(unittest.TestCase):
    """Unit tests for otTables.LigatureMorphAction (de)compilation.

    NOTE(review): several deHexStr() payloads below are blank or
    whitespace-only -- the hex strings appear to have been lost in
    extraction; restore them from the upstream source before trusting
    these expectations.
    """

    def setUp(self):
        # Minimal glyph order for the fake font shared by all tests.
        self.font = FakeFont(['.notdef', 'A', 'B', 'C'])

    def testDecompileToXML(self):
        """Decompile a transition entry plus its action list and check the XML."""
        a = otTables.LigatureMorphAction()
        # Separate reader over the ligature action list the entry points into.
        actionReader = OTTableReader(deHexStr('DEADBEEF 7FFFFFFE '))
        a.decompile(OTTableReader(deHexStr('1234FAB30001')), self.font, actionReader)
        toXML = (lambda w, f: a.toXML(w, f, {'Test': 'Foo'}, 'Transition'))
        self.assertEqual(getXML(toXML, self.font), ['<Transition Test="Foo">', '  <NewState value="4660"/>', '  <Flags value="SetComponent,DontAdvance"/>', '  <ReservedFlags value="0x1AB3"/>', '  <Action GlyphIndexDelta="-2" Flags="Store"/>', '  <Action GlyphIndexDelta="3"/>', '</Transition>'])

    def testCompileActions_empty(self):
        """No states -> empty action blob and empty index."""
        act = otTables.LigatureMorphAction()
        (actions, actionIndex) = act.compileActions(self.font, [])
        self.assertEqual(actions, b'')
        self.assertEqual(actionIndex, {})

    def testCompileActions_shouldShareSubsequences(self):
        """Overlapping action lists should be deduplicated into shared slices."""
        state = otTables.AATState()
        t = state.Transitions = {i: otTables.LigatureMorphAction() for i in range(3)}
        ligs = [otTables.LigAction() for _ in range(3)]
        for (i, lig) in enumerate(ligs):
            lig.GlyphIndexDelta = i
        # Three lists sharing sub-sequences of the same LigActions.
        t[0].Actions = ligs[1:2]
        t[1].Actions = ligs[0:3]
        t[2].Actions = ligs[1:3]
        (actions, actionIndex) = t[0].compileActions(self.font, [state])
        # NOTE(review): expected hex payloads blanked by extraction (see class
        # docstring); as written the dict literal below also repeats a key.
        self.assertEqual(actions, deHexStr(' '))
        self.assertEqual(actionIndex, {deHexStr(' '): 0, deHexStr(' '): 1, deHexStr(''): 2, deHexStr(''): 3})
class RegistrationDB(Model):
    """SQLite-backed store for user registrations and their (max two) trades.

    NOTE(review): methods log through `self.context.logger`, presumably
    provided by the `Model` base class -- confirm.
    """

    def __init__(self, **kwargs: Any) -> None:
        # `custom_path` overrides the default ./registration.db location.
        custom_path = kwargs.pop('custom_path', None)
        super().__init__(**kwargs)
        this_dir = os.getcwd()
        self.db_path = (os.path.join(this_dir, 'registration.db') if (custom_path is None) else custom_path)
        if (not os.path.exists(os.path.dirname(os.path.abspath(self.db_path)))):
            raise ValueError(f'Path={self.db_path} not valid!')
        self._initialise_backend()

    def _initialise_backend(self) -> None:
        """Create the registration and trade tables if they do not exist."""
        self._execute_single_sql('CREATE TABLE IF NOT EXISTS registered_table (address TEXT, ethereum_address TEXT, ethereum_signature TEXT, fetchai_signature TEXT, developer_handle TEXT, tweet TEXT)')
        self._execute_single_sql('CREATE TABLE IF NOT EXISTS trade_table (address TEXT PRIMARY KEY, first_trade timestamp, second_trade timestamp, first_info TEXT, second_info TEXT)')

    def set_trade(self, address: str, timestamp: datetime.datetime, data: Dict[(str, str)]) -> None:
        """Record a trade: first trade inserts, second updates, more are ignored."""
        record = self.get_trade_table(address)
        if (record is None):
            command = 'INSERT INTO trade_table(address, first_trade, second_trade, first_info, second_info) values(?, ?, ?, ?, ?)'
            variables: Tuple[(str, datetime.datetime, Optional[datetime.datetime], str, Optional[str])] = (address, timestamp, None, json.dumps(data), None)
        else:
            (_, first_trade, second_trade, first_info, _) = record
            is_second = ((first_trade is not None) and (second_trade is None))
            is_more_than_two = ((first_trade is not None) and (second_trade is not None))
            # Silently drop anything beyond the second trade or odd states.
            if (is_more_than_two or (not is_second)):
                return
            command = 'INSERT or REPLACE into trade_table(address, first_trade, second_trade, first_info, second_info) values(?, ?, ?, ?, ?)'
            variables = (address, first_trade, timestamp, first_info, json.dumps(data))
        self._execute_single_sql(command, variables)

    def get_trade_table(self, address: str) -> Optional[Tuple]:
        """Return the trade row for `address`, or None when absent."""
        command = 'SELECT * FROM trade_table where address=?'
        ret = self._execute_single_sql(command, (address,))
        return (ret[0] if (len(ret) > 0) else None)

    def set_registered(self, address: str, developer_handle: str) -> None:
        """Register `address` with a developer handle (no-op when already registered)."""
        if self.is_registered(address):
            return
        command = 'INSERT OR REPLACE INTO registered_table(address, ethereum_address, ethereum_signature, fetchai_signature, developer_handle, tweet) values(?, ?, ?, ?, ?, ?)'
        variables = (address, '', '', '', developer_handle, '')
        self._execute_single_sql(command, variables)

    def is_registered(self, address: str) -> bool:
        """Return True when `address` exists in the registration table."""
        command = 'SELECT * FROM registered_table WHERE address=?'
        variables = (address,)
        result = self._execute_single_sql(command, variables)
        return (len(result) != 0)

    def is_allowed_to_trade(self, address: str, minimum_hours_between_txs: int) -> bool:
        """Return True when `address` may trade now (cooldown and 2-trade cap)."""
        record = self.get_trade_table(address)
        if (record is None):
            return True
        first_trade: Optional[str] = record[1]
        second_trade: Optional[str] = record[2]
        first_trade_present: bool = (first_trade is not None)
        second_trade_present: bool = (second_trade is not None)
        if ((not first_trade_present) and (not second_trade_present)):
            return True
        if ((first_trade is not None) and (not second_trade_present)):
            now = datetime.datetime.now()
            # NOTE(review): assumes the stored timestamp always has a
            # microsecond component ('%f'); strptime raises otherwise -- confirm.
            first_trade_dt = datetime.datetime.strptime(first_trade, '%Y-%m-%d %H:%M:%S.%f')
            is_allowed_to_trade_ = ((now - first_trade_dt) > datetime.timedelta(hours=minimum_hours_between_txs))
            if (not is_allowed_to_trade_):
                self.context.logger.info(f'Invalid attempt for counterparty={address}, not enough time since last trade!')
            return is_allowed_to_trade_
        self.context.logger.info(f'Invalid attempt for counterparty={address}, already completed 2 trades!')
        return False

    def has_completed_two_trades(self, address: str) -> bool:
        """Return True when both trade slots for `address` are filled."""
        record = self.get_trade_table(address)
        if (record is None):
            return False
        first_trade: Optional[str] = record[1]
        second_trade: Optional[str] = record[2]
        first_trade_present: bool = (first_trade is not None)
        second_trade_present: bool = (second_trade is not None)
        return (first_trade_present and second_trade_present)

    def completed_two_trades(self) -> List[Tuple[(str, str, str)]]:
        """Return (address, ethereum_address, developer_handle) for all users with two trades."""
        command = 'SELECT * FROM registered_table'
        variables = ()
        result = self._execute_single_sql(command, variables)
        completed: List[Tuple[(str, str, str)]] = []
        for row in result:
            address = row[0]
            ethereum_address = row[1]
            developer_handle = row[4]
            if self.has_completed_two_trades(address):
                completed.append((address, ethereum_address, developer_handle))
        return completed

    def _execute_single_sql(self, command: str, variables: Tuple[(Any, ...)]=(), print_exceptions: bool=True) -> List[Tuple[(str, ...)]]:
        """Run one parameterised statement; returns fetched rows ([] on error).

        Opens a fresh connection per call and always closes it; errors are
        logged (unless `print_exceptions` is False) and swallowed.
        """
        conn = None
        ret: List[Tuple[(str, ...)]] = []
        try:
            conn = sqlite3.connect(self.db_path, timeout=300)
            c = conn.cursor()
            c.execute(command, variables)
            ret = c.fetchall()
            conn.commit()
        except Exception as e:
            if print_exceptions:
                self.context.logger.warning(f'Exception in database: {e}')
        finally:
            if (conn is not None):
                conn.close()
        return ret
class MemoizedClass(type):
    """Metaclass that interns instances: constructing a class with the same
    arguments twice returns the very same object.

    Each class created through this metaclass gets its own `_cache` dict
    (unless it declares one explicitly), so subclasses never share entries.
    """

    def __new__(metacls, name: str, bases: Tuple[type, ...], attrs: Dict[str, Any]) -> type:
        # Inject a per-class cache; checking attrs (not inheritance) ensures
        # subclasses do not silently reuse a parent's cache dict.
        if ('_cache' not in attrs):
            attrs['_cache'] = {}
        return super(MemoizedClass, metacls).__new__(metacls, name, bases, attrs)

    def __call__(cls, *args, **kwargs):
        # Fix: the original signature was (cls, *args), so any keyword call
        # raised TypeError.  Keyword arguments are now part of the cache key.
        # Positional-only calls keep the original bare-args key, so existing
        # caches/behavior are unchanged.
        key = (args, tuple(sorted(kwargs.items()))) if kwargs else args
        if key not in cls._cache:
            cls._cache[key] = super(MemoizedClass, cls).__call__(*args, **kwargs)
        return cls._cache[key]
class Migration(migrations.Migration):
    """Drop the AwardSearch view and the per-type award-search matviews.

    Removes only the Django model state for these unmanaged views/matviews.
    """

    dependencies = [('search', '0008_awardsearch_table')]

    operations = [migrations.DeleteModel(name='AwardSearchView'), migrations.DeleteModel(name='ContractAwardSearchMatview'), migrations.DeleteModel(name='DirectPaymentAwardSearchMatview'), migrations.DeleteModel(name='GrantAwardSearchMatview'), migrations.DeleteModel(name='IDVAwardSearchMatview'), migrations.DeleteModel(name='LoanAwardSearchMatview'), migrations.DeleteModel(name='OtherAwardSearchMatview'), migrations.DeleteModel(name='Pre2008AwardSearchMatview')]
def prove_blind_and_swap(A1: POINT, B1: POINT, A2: POINT, B2: POINT, factor: int, swap=False):
    """Blind the two input pairs by `factor` (optionally swapping them) and
    produce a one-of-two proof that the outputs are a blinding of the inputs
    in some order, without revealing which.
    """
    # Blind the pairs, in swapped order when requested.
    sources = (A2, B2, A1, B1) if swap else (A1, B1, A2, B2)
    (C1, D1, C2, D2) = (curve.multiply(P, factor) for P in sources)
    # Fiat-Shamir challenge binds inputs and outputs together.
    msg = b''.join((serialize_point(x) for x in (A1, B1, A2, B2, C1, C2, D1, D2)))
    r = hash_to_int((msg + b'\x01'))
    powers = (1, r, (r ** 2), (r ** 3))
    BASE = lincomb((A1, B1, A2, B2), powers)
    if swap:
        # Prove knowledge for the "no swap" statement's complement.
        PUB_NOSWAP = lincomb((C1, D1, C2, D2), powers)
        proof = sign_secondof2(msg, PUB_NOSWAP, factor, BASE)
    else:
        PUB_WITHSWAP = lincomb((C2, D2, C1, D1), powers)
        proof = sign_firstof2(msg, factor, PUB_WITHSWAP, BASE)
    return (C1, D1, C2, D2, proof)
class OptionSeriesHeatmapSonificationTracksMappingLowpassResonance(Options):
    """Option wrapper for Highcharts sonification lowpass-resonance mapping.

    NOTE(review): every name below is defined twice (getter then setter);
    since the second `def` overwrites the first at class-creation time, only
    the setters survive as written.  This strongly suggests stripped
    @property / @<name>.setter decorators in the original source -- confirm
    before relying on the getter behaviour.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def generate_dynamo_db_datasets(aws_config: Optional[AWSConfig]) -> Dataset:
    """Build a Dataset describing every DynamoDB table visible to the client."""
    dynamodb = get_aws_client(service='dynamodb', aws_config=aws_config)
    tables = get_dynamo_tables(dynamodb)
    described = describe_dynamo_tables(dynamodb, tables)
    return create_dynamodb_dataset(described)
def convert_inputs_to_MNI_space(reg_settings, hcp_templates, temp_dir, use_T2=None):
    """Register the T1w image to MNI space and warp the label/mask volumes.

    When `use_T2` is truthy, the T2w image is warped as well.
    """
    logger.info(section_header('Registering T1wImage to MNI template using FSL FNIRT'))
    run_T1_FNIRT_registration(reg_settings, temp_dir)
    logger.info(section_header('Applying MNI transform to label files'))
    # Atlas label volumes are warped with label import enabled.
    label_images = ('wmparc', 'aparc.a2009s+aseg', 'aparc+aseg')
    for label_image in label_images:
        apply_nonlinear_warp_to_nifti_rois(label_image, reg_settings, hcp_templates)
    # The brain mask (and optionally T2w) are plain volumes, no label import.
    apply_nonlinear_warp_to_nifti_rois('brainmask_fs', reg_settings, hcp_templates, import_labels=False)
    if use_T2:
        apply_nonlinear_warp_to_nifti_rois('T2w', reg_settings, hcp_templates, import_labels=False)
# NOTE(review): the bare `_bpdu_type` below looks like a stripped class
# decorator (BPDU-type registration hook) -- confirm against the original.
_bpdu_type
class RstBPDUs(ConfigurationBPDUs):
    """Rapid Spanning Tree BPDU.

    Extends the Configuration BPDU with the RST version id/type and a
    trailing Version 1 Length octet (always 0).
    """

    VERSION_ID = PROTOCOLVERSION_ID_RSTBPDU
    BPDU_TYPE = TYPE_RSTBPDU
    _PACK_STR = '!B'  # the single Version 1 Length octet
    PACK_LEN = struct.calcsize(_PACK_STR)

    def __init__(self, flags=0, root_priority=DEFAULT_BRIDGE_PRIORITY, root_system_id_extension=0, root_mac_address='00:00:00:00:00:00', root_path_cost=0, bridge_priority=DEFAULT_BRIDGE_PRIORITY, bridge_system_id_extension=0, bridge_mac_address='00:00:00:00:00:00', port_priority=DEFAULT_PORT_PRIORITY, port_number=0, message_age=0, max_age=DEFAULT_MAX_AGE, hello_time=DEFAULT_HELLO_TIME, forward_delay=DEFAULT_FORWARD_DELAY):
        self._version_1_length = VERSION_1_LENGTH
        super(RstBPDUs, self).__init__(flags, root_priority, root_system_id_extension, root_mac_address, root_path_cost, bridge_priority, bridge_system_id_extension, bridge_mac_address, port_priority, port_number, message_age, max_age, hello_time, forward_delay)

    def check_parameters(self):
        """Assert all priority/timer fields sit on their protocol step grid."""
        assert ((self.root_priority % self._BRIDGE_PRIORITY_STEP) == 0)
        assert ((self.bridge_priority % self._BRIDGE_PRIORITY_STEP) == 0)
        assert ((self.port_priority % self._PORT_PRIORITY_STEP) == 0)
        assert ((self.message_age % self._TIMER_STEP) == 0)
        assert ((self.max_age % self._TIMER_STEP) == 0)
        assert ((self.hello_time % self._TIMER_STEP) == 0)
        assert ((self.forward_delay % self._TIMER_STEP) == 0)

    def parser(cls, buf):
        """Parse the RST-specific tail after the Configuration BPDU fields.

        NOTE(review): takes `cls` -- an @classmethod decorator appears to
        have been stripped during extraction; confirm.
        """
        (get_cls, next_type, buf) = super(RstBPDUs, cls).parser(buf)
        (version_1_length,) = struct.unpack_from(RstBPDUs._PACK_STR, buf)
        assert (version_1_length == VERSION_1_LENGTH)
        return (get_cls, next_type, buf[RstBPDUs.PACK_LEN:])

    def serialize(self, payload, prev):
        """Serialize the base Configuration BPDU plus the Version 1 Length octet."""
        base = super(RstBPDUs, self).serialize(payload, prev)
        sub = struct.pack(RstBPDUs._PACK_STR, self._version_1_length)
        return (base + sub)
class Robertson1968(CCT):
    """Correlated colour temperature via Robertson (1968) isotemperature lines.

    A table of (mired, u, v, slope) rows is precomputed along the Planckian
    locus; CCT lookups interpolate between the two bracketing rows.
    """

    NAME = 'robertson-1968'
    CHROMATICITY = 'uv-1960'  # chromaticity space used by the table

    def __init__(self, cmfs: dict[(int, tuple[(float, float, float)])]=cmfs.CIE_1931_2DEG, white: VectorLike=cat.WHITES['2deg']['D65'], mired: VectorLike=MIRED_EXTENDED, sigfig: int=5, planck_step: int=1) -> None:
        self.white = white
        self.table = self.generate_table(cmfs, white, mired, sigfig, planck_step)

    def generate_table(self, cmfs: dict[(int, tuple[(float, float, float)])], white: VectorLike, mired: VectorLike, sigfig: int, planck_step: int) -> list[tuple[(float, float, float, float)]]:
        """Build the isotherm table: one (mired, u, v, slope) row per entry."""
        xyzw = util.xy_to_xyz(white)
        table = []
        to_uv = (util.xy_to_uv_1960 if (self.CHROMATICITY == 'uv-1960') else util.xy_to_uv)
        for t in mired:
            # Sample the locus slightly either side of t to estimate the slope.
            uv1 = to_uv(planck.temp_to_xy_planckian_locus((1000000.0 / (t - 0.01)), cmfs, xyzw, step=planck_step))
            uv2 = to_uv(planck.temp_to_xy_planckian_locus((1000000.0 / (t + 0.01)), cmfs, xyzw, step=planck_step))
            if (t == 0):
                # Mired 0 would divide by zero; take the midpoint of the samples.
                factor = 0.5
                uv = [alg.lerp(uv1[0], uv2[0], factor), alg.lerp(uv1[1], uv2[1], factor)]
            else:
                uv = to_uv(planck.temp_to_xy_planckian_locus((1000000.0 / t), cmfs, xyzw, step=planck_step))
                d1 = math.sqrt((((uv[1] - uv1[1]) ** 2) + ((uv[0] - uv1[0]) ** 2)))
                d2 = math.sqrt((((uv2[1] - uv[1]) ** 2) + ((uv2[0] - uv[0]) ** 2)))
                factor = (d1 / (d1 + d2))
            # Isotherm slope: negative reciprocal of the locus slope, blended.
            m1 = (- (((uv[1] - uv1[1]) / (uv[0] - uv1[0])) ** (- 1)))
            m2 = (- (((uv2[1] - uv[1]) / (uv2[0] - uv[0])) ** (- 1)))
            m = alg.lerp(m1, m2, factor)
            if sigfig:
                # Round table entries to the requested significant figures.
                template = '{{:.{}g}}'.format(sigfig)
                table.append((float(t), float(template.format(uv[0])), float(template.format(uv[1])), float(template.format(m))))
            else:
                table.append((t, uv[0], uv[1], m))
        return table

    def to_cct(self, color: Color, **kwargs: Any) -> Vector:
        """Return [kelvin, Duv] for `color` (math.inf kelvin at mired 0)."""
        (u, v) = color.split_chromaticity(self.CHROMATICITY)[:(- 1)]
        end = (len(self.table) - 1)
        slope_invert = False
        previous_di = temp = duv = 0.0
        for (index, current) in enumerate(self.table):
            # Signed distance of (u, v) from this row's isotherm line; the
            # sign convention flips when the slope changes sign.
            if (current[3] < 0):
                di = ((v - current[2]) - (current[3] * (u - current[1])))
            else:
                slope_invert = True
                di = ((current[2] - v) - (current[3] * (current[1] - u)))
            if ((index > 0) and ((di <= 0.0) or (index == end))):
                # Bracketing rows found: interpolate mired by relative distance.
                previous = self.table[(index - 1)]
                current_denom = math.sqrt((1.0 + (current[3] ** 2)))
                di /= current_denom
                previous_denom = math.sqrt((1.0 + (previous[3] ** 2)))
                dip = (previous_di / previous_denom)
                factor = (dip / (dip - di))
                mired = alg.lerp(previous[0], current[0], factor)
                temp = ((1000000.0 / mired) if (mired > 0) else math.inf)
                # Blend the two isotherm unit vectors and project Duv onto it.
                dup = (1 / previous_denom)
                dvp = (previous[3] / previous_denom)
                du = (1 / current_denom)
                dv = (current[3] / current_denom)
                du = alg.lerp(dup, du, factor)
                dv = alg.lerp(dvp, dv, factor)
                denom = math.sqrt(((du ** 2) + (dv ** 2)))
                du /= denom
                dv /= denom
                duv = ((du * (u - alg.lerp(previous[1], current[1], factor))) + (dv * (v - alg.lerp(previous[2], current[2], factor))))
                break
            previous_di = di
        return [temp, ((- duv) if (duv and (not slope_invert)) else duv)]

    def from_cct(self, color: type[Color], space: str, kelvin: float, duv: float, scale: bool, scale_space: (str | None), **kwargs: Any) -> Color:
        """Return a color in `space` for the given kelvin temperature and Duv."""
        r = (1000000.0 / kelvin)  # work in mired like the table
        u = v = 0.0
        end = (len(self.table) - 2)
        for (index, current) in enumerate(self.table):
            future = self.table[(index + 1)]
            if ((r < future[0]) or (index == end)):
                # Interpolate (u, v) between the two bracketing rows.
                f = ((future[0] - r) / (future[0] - current[0]))
                u = alg.lerp(future[1], current[1], f)
                v = alg.lerp(future[2], current[2], f)
                if duv:
                    # Offset along the blended isotherm direction by Duv.
                    slope_invert = (current[3] >= 0)
                    u1 = 1.0
                    v1 = current[3]
                    length = math.sqrt((1.0 + (v1 ** 2)))
                    u1 /= length
                    v1 /= length
                    u2 = 1.0
                    v2 = future[3]
                    length = math.sqrt((1.0 + (v2 ** 2)))
                    u2 /= length
                    v2 /= length
                    du = alg.lerp(u2, u1, f)
                    dv = alg.lerp(v2, v1, f)
                    denom = math.sqrt(((du ** 2) + (dv ** 2)))
                    du /= denom
                    dv /= denom
                    u += (du * ((- duv) if (not slope_invert) else duv))
                    v += (dv * ((- duv) if (not slope_invert) else duv))
                break
        return color.chromaticity(space, [u, v, 1], self.CHROMATICITY, scale=scale, scale_space=scale_space)
class ImportManager():
online: bool = True
self_contained: bool = False
_static_path: Optional[str] = None
set_exports: bool = False
def __init__(self, page=None):
    """Bind the import manager to a report `page` (optional) and build registries."""
    # __pkgs presumably backs the `self.pkgs` accessor used elsewhere -- confirm.
    # NOTE(review): local `ovr_version` is assigned here but never used.
    (self.page, ovr_version, self.__pkgs) = (page, {}, None)
    self.force_position = {}
    self.reload()
def packages_from_json(self, dependency_file: str, ext_packages: Dict[(str, dict)]):
    """Restrict and re-version the global import registries from a package.json.

    Reads the `dependencies` section of `dependency_file`, pins the declared
    versions onto the matching JS/CSS package definitions (dropping a leading
    '^'), and REPLACES the module-level JS_IMPORTS / CSS_IMPORTS with only
    the packages named in the file (plus their direct requirements).
    """
    global JS_IMPORTS
    global CSS_IMPORTS
    (temp_js, temp_css) = ({}, {})
    for (k, v) in ext_packages.items():
        self.addPackage(k, v)
    with open(dependency_file) as fp:
        package_json = json.load(fp)
    for (dependency, version) in package_json['dependencies'].items():
        if (dependency in JS_IMPORTS):
            if (version and ('version' in JS_IMPORTS[dependency])):
                # '^1.2.3' -> '1.2.3'; pin both package and per-module versions.
                if version.startswith('^'):
                    JS_IMPORTS[dependency]['version'] = version[1:]
                    for module in JS_IMPORTS[dependency]['modules']:
                        module['version'] = version[1:]
                else:
                    JS_IMPORTS[dependency]['version'] = version
                    for module in JS_IMPORTS[dependency]['modules']:
                        module['version'] = version
            temp_js[dependency] = JS_IMPORTS[dependency]
            # Keep direct requirements so dependency resolution still works.
            for req in JS_IMPORTS[dependency].get('req', []):
                temp_js[req['alias']] = JS_IMPORTS[req['alias']]
        if (dependency in CSS_IMPORTS):
            if version:
                if version.startswith('^'):
                    CSS_IMPORTS[dependency]['version'] = version[1:]
                    for module in CSS_IMPORTS[dependency]['modules']:
                        module['version'] = version[1:]
                else:
                    CSS_IMPORTS[dependency]['version'] = version
                    for module in CSS_IMPORTS[dependency]['modules']:
                        module['version'] = version
            temp_css[dependency] = CSS_IMPORTS[dependency]
            for req in CSS_IMPORTS[dependency].get('req', []):
                temp_css[req['alias']] = CSS_IMPORTS[req['alias']]
    JS_IMPORTS = temp_js
    CSS_IMPORTS = temp_css
    self.reload()
def reload(self):
    """Rebuild the JS/CSS import registries from the page's package state.

    Collects version overrides declared on the page (page._props['packages'])
    and applies them while re-indexing JS_IMPORTS / CSS_IMPORTS.
    """
    ovr_version = {}
    if ((self.page is not None) and (self.page.ext_packages is not None)):
        extend_imports(self.page.ext_packages)
    if (self.page is not None):
        self.page._with_google_imports = False
        ovr_version.update(self.page._props.get('packages', {}))
    (self.jsImports, self.cssImports, self.moduleConfigs, self.reqVersion) = ({}, {}, {}, {})
    # BUG FIX: ovr_version was computed but never forwarded, so page-level
    # version overrides were silently ignored; pass it through.
    self.__add_imports([('js', self.jsImports, JS_IMPORTS), ('css', self.cssImports, CSS_IMPORTS)], ovr_version)
def __add_imports(self, modules: list, ovr_version: Optional[dict]=None):
    """Index package definitions into the import registries.

    `modules` is a list of (folder, import_dict, import_type) tuples:
      - folder None means "external packages": results are routed directly to
        self.jsImports / self.cssImports by file extension;
      - otherwise entries are written into the supplied `import_dict`.
    `ovr_version` maps alias -> version override applied per module.
    """
    for (folder, import_dict, import_type) in modules:
        if ((folder is None) and (import_type is None)):
            continue
        if (folder is None):
            # External packages: split each package's files into JS and CSS.
            for (alias, definition) in import_type.items():
                main_css = collections.OrderedDict()
                (main_js, main_js_types) = (collections.OrderedDict(), collections.OrderedDict())
                for (i, mod) in enumerate(definition['modules']):
                    if ((ovr_version is not None) and (alias in ovr_version)):
                        mod['version'] = ovr_version[alias]
                    else:
                        mod['version'] = definition['version']
                    script_path = script_cdnjs_path(alias, mod)
                    # Route by extension: .js files carry a script type too.
                    if script_path.endswith('.js'):
                        main = main_js
                        main_js_types[script_path] = mod.get('type', 'text/javascript')
                    else:
                        main = main_css
                    if ('url' in definition):
                        main[('%s%s' % (definition['url'], mod['script']))] = mod['version']
                    else:
                        main[script_path] = script_version(alias, mod)
                # NOTE(review): rebinding `modules` here shadows the function
                # parameter; harmless for the outer loop (its iterator is
                # already captured) but confusing -- consider renaming.
                modules = collections.OrderedDict()
                self.getModules(modules, alias, folder, import_type)
                if ('config' in definition):
                    self.moduleConfigs[alias] = definition['config']
                if main_css:
                    self.cssImports[alias] = {'main': main_css, 'dep': list(modules.keys()), 'versions': list(main_css.values())}
                if main_js:
                    self.jsImports[alias] = {'main': main_js, 'dep': list(modules.keys()), 'versions': list(main_js.values()), 'type': main_js_types}
        else:
            # Known registry (JS_IMPORTS or CSS_IMPORTS): fill import_dict.
            for (alias, definition) in import_type.items():
                (main, main_types) = (collections.OrderedDict(), collections.OrderedDict())
                for (i, mod) in enumerate(definition['modules']):
                    if ((ovr_version is not None) and (alias in ovr_version)):
                        mod['version'] = ovr_version[alias]
                    script_path = script_cdnjs_path(alias, mod)
                    mod_type = ('stylesheet' if script_path.endswith('.css') else 'text/javascript')
                    if ('url' in definition):
                        main[('%s%s' % (definition['url'], mod['script']))] = mod['version']
                        main_types[('%s%s' % (definition['url'], mod['script']))] = mod_type
                    else:
                        main[script_path] = script_version(alias, mod)
                        main_types[script_path] = mod_type
                # NOTE(review): `modules` rebinding shadows the parameter here too.
                modules = collections.OrderedDict()
                self.getModules(modules, alias, folder, import_type)
                if ('config' in definition):
                    self.moduleConfigs[alias] = definition['config']
                (main_keys, versions) = ([], [])
                for (k, v) in main.items():
                    main_keys.append(k)
                    versions.append(v)
                import_dict[alias] = {'main': main, 'dep': list(modules.keys()), 'versions': versions, 'type': main_types}
def static_url(self) -> str:
    """Base URL for locally served (offline) package files.

    NOTE(review): looks like a stripped @property decorator -- confirm.
    """
    return self._static_path
# NOTE(review): `_url.setter` looks like a mangled `@static_url.setter`
# decorator from the original source -- confirm.
_url.setter
def static_url(self, path: str):
    """Point the manager at local static files; switches `online` off."""
    if (path is not None):
        self.online = False
    self._static_path = path
def add(self, alias: str):
    """Register `alias` on the page's JS and/or CSS import sets (if known)."""
    targets = ((JS_IMPORTS, self.page.jsImports), (CSS_IMPORTS, self.page.cssImport))
    for registry, page_imports in targets:
        if alias in registry:
            page_imports.add(alias)
def extend(self, aliases: List[str]):
    """Register several package aliases at once."""
    for alias in aliases:
        self.add(alias)
def requirements(self) -> set:
    """Return the resolved set of all JS and CSS aliases the page requires."""
    js_aliases = self.cleanImports(self.page.jsImports, JS_IMPORTS)
    css_aliases = self.cleanImports(self.page.cssImport, CSS_IMPORTS)
    return set(js_aliases) | set(css_aliases)
def getModules(self, modules: dict, alias: Union[(str, dict)], folder: Optional[str]=None, module_details: Optional[dict]=None):
    """Collect the script paths for `alias` and its requirements into `modules`.

    `modules` is used as an ordered set (path -> True) and is both mutated
    and returned.  `alias` may be a dict with an 'alias' key.
    """
    if isinstance(alias, dict):
        alias = alias['alias']
    if ((module_details is None) or (alias not in module_details)):
        module_details = dict(JS_IMPORTS)
    import_ref = JS_IMPORTS
    if ((self.page.ext_packages is not None) and (alias in self.page.ext_packages)):
        import_ref = self.page.ext_packages
    for mod in module_details[alias]['modules']:
        if ('version' not in mod):
            # Fall back to the package-level version (JS first, then CSS).
            mod['version'] = (import_ref[alias]['version'] if (alias in import_ref) else CSS_IMPORTS[alias]['version'])
        script = ''.join([(mod['path'] % mod), mod['script']])
        if ('url' in module_details[alias]):
            modules[('%s/%s' % (module_details[alias]['url'], script))] = True
        else:
            modules[('%s\\%s' % (STATIC_PATH.replace('\\', '/'), script))] = True
    # Recurse into the package's declared requirements.
    for req in module_details.get(alias, {}).get('req', []):
        self.getModules(modules, req, folder, module_details)
    return modules
def getReq(self, mod: str, modules: List[dict], import_hierarchy: Optional[dict]=None, use_require_js: bool=False):
    """Append `mod` and (recursively) its requirements to `modules`.

    A dict `mod` may pin a 'version'; the lowest requested version wins
    (stored in self.reqVersion) and the cached main/dep paths for that alias
    are rewritten to carry it.  With `use_require_js`, the 'req_js'
    requirement list is preferred over 'req' when present.
    """
    import_hierarchy = (import_hierarchy or JS_IMPORTS)
    if isinstance(mod, dict):
        if ('version' in mod):
            if ((self.page is not None) and self.page.verbose):
                logging.warning(('Setting %(alias)s to version %(version)s' % mod))
            # Keep the minimum of all requested versions for this alias.
            if ((self.reqVersion.get(mod['alias']) is None) or (mod['version'] < self.reqVersion[mod['alias']])):
                self.reqVersion[mod['alias']] = mod['version']
            # Rewrite cached JS/CSS paths to embed the chosen version.
            (new_main_for_alias, new_main_for_alias_css) = (collections.OrderedDict(), collections.OrderedDict())
            for path in self.jsImports[mod['alias']]['main']:
                for v in self.jsImports[mod['alias']]['versions']:
                    new_main_for_alias[path.replace(v, self.reqVersion[mod['alias']])] = self.reqVersion[mod['alias']]
            if (mod['alias'] in self.cssImports):
                for path in self.cssImports[mod['alias']]['main']:
                    for v in self.cssImports[mod['alias']]['versions']:
                        new_main_for_alias_css[path.replace(v, self.reqVersion[mod['alias']])] = self.reqVersion[mod['alias']]
                self.cssImports[mod['alias']]['main'] = new_main_for_alias_css
            self.jsImports[mod['alias']]['main'] = new_main_for_alias
            for (i, path) in enumerate(self.jsImports[mod['alias']]['dep']):
                for v in self.jsImports[mod['alias']]['versions']:
                    path = path.replace(v, self.reqVersion[mod['alias']])
                self.jsImports[mod['alias']]['dep'][i] = path
        mod = mod['alias']
    modules.append(mod)
    req_key = 'req'
    if use_require_js:
        if ('req_js' in import_hierarchy.get(mod, {})):
            req_key = 'req_js'
    for req in import_hierarchy.get(mod, {}).get(req_key, []):
        self.getReq(req, modules, import_hierarchy, use_require_js=use_require_js)
def cleanImports(self, imports: List[str], import_hierarchy: Optional[dict]=None, use_require_js: bool=False):
    """Resolve `imports` into a deduplicated, dependency-ordered alias list.

    Order: polyfills first, then external packages, then 'local_*' packages.
    Raises ValueError for packages whose PACKAGE_STATUS disallows them.
    """
    (import_resolved, polyfills) = ([], [])
    for mod in imports:
        self.getReq(mod, import_resolved, (import_hierarchy or JS_IMPORTS), use_require_js=use_require_js)
    for a in set(import_resolved):
        if (a in PACKAGE_STATUS):
            if (not PACKAGE_STATUS[a].get('allowed', True)):
                raise ValueError(('Package %s not allowed' % a))
            if ((self.page is not None) and ('info' in PACKAGE_STATUS[a])):
                logging.info(('%s: %s' % (a, PACKAGE_STATUS[a]['info'])))
        # Drop duplicates, keeping the LAST occurrence; popping from the end
        # keeps the remaining indices valid.
        occurrences = [j for (j, x) in enumerate(import_resolved) if (x == a)]
        if (len(occurrences) > 1):
            for j in occurrences[::(- 1)][1:]:
                import_resolved.pop(j)
        if JS_IMPORTS.get(a, {}).get('polyfill'):
            import_resolved.remove(a)
            polyfills.append(a)
    # Reverse so dependencies precede dependents, splitting local packages out.
    (local_pkgs, ext_pkgs) = ([], [])
    for pkg in import_resolved[::(- 1)]:
        if pkg.startswith('local_'):
            local_pkgs.append(pkg)
        else:
            ext_pkgs.append(pkg)
    return ((polyfills + ext_pkgs) + local_pkgs)
def cssResolve(self, css_aliases: List[str], local_css: Optional[dict]=None, excluded: List[str]=None):
    """Render the <link> tags for the requested CSS aliases.

    Resolves dependencies, honours node_modules overrides, and inlines files
    as base64 data URIs for absolute local paths or when `self_contained`.
    Returns the tags joined with newlines.
    """
    css = []
    self.__add_imports([(None, None, self.page.ext_packages)])
    css_aliases = [c for c in self.cleanImports(css_aliases, JS_IMPORTS) if ((c in self.cssImports) or (c in _SERVICES))]
    for css_alias in css_aliases:
        if ((excluded is not None) and (css_alias in excluded)):
            continue
        if (not self.online):
            self.pkgs.get(css_alias).set_local(static_url=self.static_url)
        if (css_alias in _SERVICES):
            # Service entries carry ready-to-use stylesheet URLs.
            for service in _SERVICES[css_alias].get('css', []):
                css.append(('<link rel="stylesheet" href="%s">' % service))
            continue
        for urlModule in list(self.cssImports[css_alias]['main']):
            if (self.page._node_modules is not None):
                # Prefer a locally installed node_modules copy when present.
                node_sub_path = CSS_IMPORTS.get(css_alias, {}).get('register', {}).get('npm_path')
                if (node_sub_path is not None):
                    css_file = os.path.split(urlModule)[1]
                    npm_alias = CSS_IMPORTS[css_alias]['register'].get('npm', css_alias)
                    package_path = os.path.join(self.page._node_modules[0], 'node_modules', npm_alias, node_sub_path, css_file)
                    if os.path.exists(package_path):
                        urlModule = os.path.join(self.page._node_modules[1], npm_alias, node_sub_path, css_file).replace('\\', '/')
            if os.path.isabs(urlModule):
                # Local absolute path: embed the file as a data URI.
                with open(urlModule, 'rb') as fp:
                    base64_bytes = base64.b64encode(fp.read())
                    base64_message = base64_bytes.decode('ascii')
                    urlModule = ('data:text/css;base64,%s' % base64_message)
            elif self.self_contained:
                # Self-contained page: fetch the remote CSS and embed it;
                # failures are logged to stdout and the original URL is kept.
                try:
                    headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3', 'Accept-Encoding': 'none', 'Accept-Language': 'en-US,en;q=0.8', 'Connection': 'keep-alive'}
                    request = Request(urlModule, None, headers)
                    with urlopen(request) as response:
                        base64_bytes = base64.b64encode(response.read())
                        base64_message = base64_bytes.decode('ascii')
                        urlModule = ('data:text/css;base64,%s' % base64_message)
                except Exception as err:
                    print(urlModule)
                    print(traceback.format_exc())
            css.append(('<link rel="stylesheet" href="%s" type="text/css">' % urlModule))
    if (local_css is not None):
        for css_file in local_css:
            css.append(('<link rel="stylesheet" href="%s" type="text/css">' % css_file))
    return '\n'.join(css)
def cssURLs(self, css_str: str):
    """Extract the href values from stylesheet <link> tags in `css_str`."""
    link_pattern = '<link rel="stylesheet" href="(.*?)" type="text/css">'
    return re.findall(link_pattern, css_str)
def jsResolve(self, js_aliases: List[str], local_js: Optional[dict]=None, excluded: Optional[List[str]]=None):
    """Turn JS package aliases into the <script> tags for the page header.

    Resolves alias dependencies, optionally rewrites URLs to local
    node_modules copies, and (for local files or self-contained pages)
    inlines scripts as base64 data URIs.

    :param js_aliases: Requested JS package aliases.
    :param local_js: Optional extra local script paths appended verbatim.
    :param excluded: Optional aliases to leave out of the output.
    :return: The <script> tags joined with newlines.
    """
    js = []
    if self.set_exports:
        # Some UMD bundles expect a global `exports` object to attach to.
        js.append('<script>var exports = {};</script>')
    js_aliases = self.cleanImports(js_aliases, JS_IMPORTS)
    for js_alias in js_aliases:
        if ((excluded is not None) and (js_alias in excluded)):
            continue
        if (not self.online):
            # Offline mode: point the package at the local static URL.
            self.pkgs.get(js_alias).set_local(static_url=self.static_url)
        extra_configs = (('?%s' % self.moduleConfigs[js_alias]) if (js_alias in self.moduleConfigs) else '')
        for url_module in list(self.jsImports.get(js_alias, {}).get('main', [])):
            if (self.page._node_modules is not None):
                # Prefer the copy shipped in the local node_modules when present.
                node_sub_path = JS_IMPORTS.get(js_alias, {}).get('register', {}).get('npm_path')
                if (node_sub_path is not None):
                    js_file = os.path.split(url_module)[1]
                    npm_alias = JS_IMPORTS[js_alias]['register'].get('npm', js_alias)
                    package_path = os.path.join(self.page._node_modules[0], 'node_modules', npm_alias, node_sub_path, js_file)
                    if os.path.exists(package_path):
                        url_module = os.path.join(self.page._node_modules[1], npm_alias, node_sub_path, js_file).replace('\\', '/')
            mod_type = self.jsImports[js_alias]['type'].get(url_module, 'text/javascript')
            if (os.path.isabs(url_module) and (not url_module.startswith('/static'))):
                # Local absolute path: embed the file content as a base64 data URI.
                (file_name, file_extension) = os.path.splitext(url_module)
                if (not file_extension.endswith('.js')):
                    continue
                with open(url_module, 'rb') as fp:
                    if (mod_type == 'text/javascript'):
                        tmp_file = []
                        for line in fp.readlines():
                            # Strip ES-module `export ` prefixes so the file runs as a classic script.
                            for (m_expr, m_rep) in {b'^export ': b''}.items():
                                line = re.sub(m_expr, m_rep, line)
                            tmp_file.append(line)
                        js_content = b''.join(tmp_file)
                    else:
                        js_content = fp.read()
                base64_bytes = base64.b64encode(js_content)
                base64_message = base64_bytes.decode('ascii')
                url_module = ('data:text/js;base64,%s' % base64_message)
            elif self.self_contained:
                # Self-contained page: download the remote script and embed it.
                try:
                    headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11', 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3', 'Accept-Encoding': 'none', 'Accept-Language': 'en-US,en;q=0.8', 'Connection': 'keep-alive'}
                    request = Request(url_module, None, headers)
                    with urlopen(request) as response:
                        base64_bytes = base64.b64encode(response.read())
                        base64_message = base64_bytes.decode('ascii')
                        url_module = ('data:text/js;base64,%s' % base64_message)
                except Exception as err:
                    # Best effort: keep the remote URL when the download fails.
                    logging.error(url_module)
                    logging.error(traceback.format_exc())
            if self.pkgs.get(js_alias).defer:
                js.append(('<script language="javascript" type="%s" src="%s%s" defer></script>' % (mod_type, url_module, extra_configs)))
            elif self.pkgs.get(js_alias).asynchrone:
                js.append(('<script language="javascript" type="%s" src="%s%s" async></script>' % (mod_type, url_module, extra_configs)))
            else:
                js.append(('<script language="javascript" type="%s" src="%s%s"></script>' % (mod_type, url_module, extra_configs)))
    if ((local_js is not None) and (len(local_js) > 0)):
        for local_js_file in local_js:
            js.append(('<script language="javascript" type="text/javascript" src="%s"></script>' % local_js_file))
    return '\n'.join(js)
def jsURLs(self, expr: str):
    """Extract every script src URL from previously generated <script> tags."""
    script_pattern = re.compile('<script language="javascript" type="text/javascript" src="(.*?)"></script>')
    return script_pattern.findall(expr)
def getFiles(self, css_alias: List[str], js_alias: List[str]):
    """Describe the module files required by the given CSS and JS aliases.

    :param css_alias: CSS package aliases (dependencies are resolved).
    :param js_alias: JS package aliases (dependencies are resolved).
    :return: Dict with 'css' and 'js' lists of module descriptors.
    """
    def _catalogue(repo):
        # One descriptor per module definition, keyed by alias.
        return {
            alias: [
                {'version': module.get('version', ''), 'alias': alias, 'file': module,
                 'website': details.get('website', ''), 'status': details.get('status', '')}
                for module in details['modules']]
            for (alias, details) in repo.items()}

    mod_css = _catalogue(CSS_IMPORTS)
    mod_js = _catalogue(JS_IMPORTS)
    files = {'css': [], 'js': []}
    for alias in self.cleanImports(css_alias, CSS_IMPORTS):
        files['css'].extend(mod_css[alias])
    for alias in self.cleanImports(js_alias, JS_IMPORTS):
        files['js'].extend(mod_js[alias])
    return files
def cssGetAll(self):
    """Resolve the CSS link tags for every registered CSS package."""
    return self.cssResolve(set(CSS_IMPORTS))
def jsGetAll(self):
    """Resolve the <script> tags for every registered JS package."""
    return self.jsResolve(set(JS_IMPORTS))
def getFullPackage(self, alias: str, version: Optional[str]=None, static_path: Optional[str]=None, reload: bool=False):
    """Download and extract a package's zip distribution into the static folder.

    :param alias: Package alias in JS_IMPORTS (must define a 'package' entry).
    :param version: Optional version override (defaults to the registered one).
    :param static_path: Optional target root; defaults to the framework's static dir.
    :param reload: Force a re-download even when the destination already exists.
    :return: Self, to allow chaining.
    """
    import zipfile
    import shutil
    import io
    import os
    if (not hasattr(self.page, 'py')):
        # Fall back to the standalone REST helper when the page has no py facade.
        from epyk.core.py.PyRest import PyRest
        webscrapper = PyRest().webscrapping
    else:
        webscrapper = self.page.py.requests.webscrapping
    if ('package' in JS_IMPORTS[alias]):
        if ('version' not in JS_IMPORTS[alias]['modules'][0]):
            JS_IMPORTS[alias]['modules'][0]['version'] = JS_IMPORTS[alias]['version']
        version_dict = {'version': (JS_IMPORTS[alias]['modules'][0]['version'] if (version is None) else version)}
        package_path = (JS_IMPORTS[alias]['package']['zip'] % version_dict)
        if (static_path is None):
            static_path = os.path.join(os.path.dirname(__file__), '..', '..', 'static', JS_IMPORTS[alias]['package']['folder'])
        else:
            static_path = os.path.join(static_path, 'static')
        if (not os.path.exists(static_path)):
            os.makedirs(static_path)
        dst_path = os.path.join(static_path, JS_IMPORTS[alias]['package'].get('folder', ''), (JS_IMPORTS[alias]['package'].get('path', '%(version)s') % version_dict))
        # Decide whether the archive must be (re)fetched.
        v_reload_path = True
        if os.path.exists(dst_path):
            if (not reload):
                v_reload_path = False
            else:
                shutil.rmtree(dst_path)
        if v_reload_path:
            logging.warning((' > Downloading package %s' % package_path))
            r = webscrapper(package_path)
            z = zipfile.ZipFile(io.BytesIO(r))
            z.extractall(static_path)
            if (JS_IMPORTS[alias]['package']['root'] is not None):
                # Relocate the archive's root folder to the expected destination.
                root = (JS_IMPORTS[alias]['package']['root'] % version_dict)
                shutil.copytree(os.path.join(static_path, root), dst_path)
                shutil.rmtree(os.path.join(static_path, root))
            logging.warning((' < Package %s. Done ! ' % alias))
        else:
            logging.warning((' < Package %s already loaded ' % alias))
    return self
def setVersion(self, alias: str, version: str, js: Optional[dict]=None, css: Optional[dict]=None, verbose: bool=None) -> bool:
    """Pin a package alias to a specific version, optionally redefining its modules.

    :param alias: Package alias in JS_IMPORTS / CSS_IMPORTS.
    :param version: Target version string.
    :param js: Optional JS override definition; an empty dict removes the JS entry.
    :param css: Optional CSS override definition; an empty dict removes the CSS entry.
    :param verbose: Print the version move when True.
    :return: True when the version actually changed, False otherwise.
    """
    global CSS_IMPORTS, JS_IMPORTS
    self.reqVersion[alias] = version
    current_version = JS_IMPORTS.get(alias, CSS_IMPORTS.get(alias, {})).get('version')
    if (version == current_version):
        return False
    if verbose:
        print(('Moving %s from %s to %s' % (alias, current_version, version)))
    # Propagate the new version to the global registries (mutated in place).
    if (alias in CSS_IMPORTS):
        CSS_IMPORTS[alias]['version'] = version
        for module in CSS_IMPORTS[alias].get('modules', []):
            module['version'] = version
    if (alias in JS_IMPORTS):
        JS_IMPORTS[alias]['version'] = version
        for module in JS_IMPORTS[alias].get('modules', []):
            module['version'] = version
    if (js is not None):
        if (not js):
            # Empty dict: drop the JS definition entirely.
            if (alias in JS_IMPORTS):
                del self.jsImports[alias]
                del JS_IMPORTS[alias]
        else:
            self.jsImports[alias] = {'main': collections.OrderedDict(), 'dep': [], 'versions': version}
            for (k, v) in js.items():
                JS_IMPORTS[alias][k] = v
            for module in js['modules']:
                # Expand the version placeholder in the module path (in place).
                module['path'] = (module['path'] % {'version': version})
                self.jsImports[alias]['main'][('%(cdnjs)s/%(path)s%(script)s' % module)] = version
    if (css is not None):
        if (not css):
            # Empty dict: drop the CSS definition entirely.
            if (alias in CSS_IMPORTS):
                del self.cssImports[alias]
                del CSS_IMPORTS[alias]
        else:
            for (k, v) in css.items():
                CSS_IMPORTS.setdefault(alias, {})[k] = v
            for module in css['modules']:
                module['path'] = (module['path'] % {'version': version})
                self.cssImports.setdefault(alias, {}).setdefault('main', {})[('%(cdnjs)s/%(path)s%(script)s' % module)] = version
    return True
def addPackage(self, alias: str, config: dict):
    """Register a bespoke package (CSS and/or JS modules) under a new alias.

    Modules are split by file type: '.css' scripts feed the CSS registry,
    '.js' (or 'js'/'api' prefixed) scripts feed the JS registry.

    :param alias: New package alias.
    :param config: Package definition with 'modules' and optional 'version',
        'req' (dependencies) and 'register' entries.
    :return: Self, to allow chaining.
    """
    global CSS_IMPORTS
    global JS_IMPORTS
    mod_entry = {'css': {}, 'js': {}}
    for mod in config['modules']:
        if mod['script'].endswith('.css'):
            if ('cdnjs' not in mod):
                mod['cdnjs'] = CDNJS_REPO
            mod_entry['css'].setdefault('modules', []).append(mod)
            if ('req' in config):
                # Keep only dependencies known to the CSS registry.
                for req in config['req']:
                    if (req['alias'] in CSS_IMPORTS):
                        mod_entry['css'].setdefault('req', []).append(req)
        elif (mod['script'].endswith('.js') or mod['script'].startswith('js') or mod['script'].startswith('api')):
            if ('cdnjs' not in mod):
                mod['cdnjs'] = CDNJS_REPO
            mod_entry['js'].setdefault('modules', []).append(mod)
            if ('register' in config):
                mod_entry['js']['register'] = config['register']
            if ('req' in config):
                # Keep only dependencies known to the JS registry.
                for req in config['req']:
                    if (req['alias'] in JS_IMPORTS):
                        mod_entry['js'].setdefault('req', []).append(req)
    if (len(mod_entry['css']) > 0):
        CSS_IMPORTS.setdefault(alias, {}).update(mod_entry['css'])
        self.cssImports[alias] = {'main': collections.OrderedDict(), 'versions': [], 'dep': []}
        # Bug fix: the CSS registry was previously filled from the JS module
        # list (mod_entry['js']); iterate the collected CSS modules instead.
        # Also use config.get('version', '') for consistency with the JS branch.
        for pkg in mod_entry['css'].get('modules', []):
            self.cssImports[alias]['main'][script_cdnjs_path(alias, pkg)] = pkg.get('version', config.get('version', ''))
    if (len(mod_entry['js']) > 0):
        JS_IMPORTS.setdefault(alias, {}).update(mod_entry['js'])
        JS_IMPORTS[alias]['version'] = config.get('version', '')
        self.jsImports[alias] = {'main': collections.OrderedDict(), 'versions': [], 'dep': [], 'type': {}}
        for pkg in mod_entry['js']['modules']:
            self.jsImports[alias]['main'][script_cdnjs_path(alias, pkg)] = pkg.get('version', config.get('version', ''))
            if ('type' in pkg):
                self.jsImports[alias]['type'][script_cdnjs_path(alias, pkg)] = pkg['type']
    return self
def to_requireJs(self, data: dict, excluded_packages: Optional[list]=None):
    """Wrap page JavaScript fragments into nested RequireJS `require` calls.

    Builds the RequireJS `paths` mapping for every registered package and
    nests the `require([...], function(...) { ... })` calls so that packages
    with dependencies load after the packages they depend on.

    :param data: Dict with at least 'jsFrgs', the page's JavaScript fragments.
    :param excluded_packages: Optional aliases to skip.
    :return: Dict with 'jsFrgs' (wrapped fragments) and 'paths' (alias -> URL).
    """
    (deps_level, alias_to_name, alias_to_var, name_to_alias, results) = ({}, {}, {}, {}, {'jsFrgs': data['jsFrgs'], 'paths': {}})
    # Collect explicit version pins declared by dependent packages.
    m_versions = {}
    for m in self.page.jsImports:
        import_ref = JS_IMPORTS
        if ((self.page.ext_packages is not None) and (m in self.page.ext_packages)):
            import_ref = self.page.ext_packages
        req_alias = ('req_js' if ('req_js' in import_ref[m]) else 'req')
        for req in import_ref[m].get(req_alias, []):
            if ('version' in req):
                m_versions[req['alias']] = req['version']
    # Resolve each package's RequireJS path and its dependency depth.
    for m in self.cleanImports(self.page.jsImports, JS_IMPORTS, use_require_js=True):
        if (m.startswith('local_') or ((excluded_packages is not None) and (m in excluded_packages))):
            continue
        if (not self.online):
            self.pkgs.get(m).set_local(static_url=self.static_url)
        import_ref = JS_IMPORTS
        if ((self.page.ext_packages is not None) and (m in self.page.ext_packages)):
            import_ref = self.page.ext_packages
        if ('register' in import_ref[m]):
            alias = import_ref[m]['register'].get('alias', m)
            first_module = import_ref[m]['modules'][0]
            if ('version' not in first_module):
                first_module['version'] = import_ref[m]['version']
            if (m in m_versions):
                # A dependent package pinned a specific version.
                first_module['version'] = m_versions[m]
            if (not self.online):
                self.pkgs.get(m).set_local(static_url=self.static_url)
                # RequireJS paths must not carry the trailing '.js'.
                results['paths'][("'%s'" % alias)] = list(self.jsImports[m]['main'].keys())[0][:(- 3)]
            else:
                results['paths'][("'%s'" % alias)] = ('%s/%s%s' % (first_module['cdnjs'], (first_module['path'] % first_module), import_ref[m]['register'].get('module', first_module['script'][:(- 3)])))
            alias_to_name[m] = alias
            alias_to_var[m] = import_ref[m]['register'].get('variable', alias)
            name_to_alias[alias] = m
            req_alias = ('req_js' if ('req_js' in import_ref[m]) else 'req')
            if (req_alias in import_ref[m]):
                # Depth is one more than the deepest dependency seen so far.
                req_levels = [deps_level.get(req_def['alias'], (- 1)) for req_def in import_ref[m][req_alias]]
                deps_level[m] = (max(req_levels) + 1)
            else:
                deps_level[m] = 0
    # Walk from the deepest level outwards, nesting one require() per level.
    (level, group) = (None, [])
    for (k, v) in sorted(deps_level.items(), key=(lambda item: item[1]))[::(- 1)]:
        if (level is None):
            level = v
        if (level != v):
            for (g, var) in group:
                if ('init_fnc' in JS_IMPORTS[name_to_alias[g]]['register']):
                    results['jsFrgs'] = ('%s; %s' % (JS_IMPORTS[name_to_alias[g]]['register']['init_fnc'], results['jsFrgs']))
            results['jsFrgs'] = ("require(['%s'], function (%s) { %s })" % ("', '".join([g for (g, _) in group]), ', '.join([g for (_, g) in group]), results['jsFrgs']))
            (level, group) = (v, [(alias_to_name[k], alias_to_var[k])])
        else:
            group.append((alias_to_name[k], alias_to_var[k]))
    # Wrap the final (outermost) group, honouring optional init functions.
    if group:
        for (g, var) in group:
            import_ref = JS_IMPORTS
            if ((self.page.ext_packages is not None) and (name_to_alias[g] in self.page.ext_packages)):
                import_ref = self.page.ext_packages
            if ('init_fnc' in import_ref[name_to_alias[g]]['register']):
                results['jsFrgs'] = ('%s; %s' % (import_ref[name_to_alias[g]]['register']['init_fnc'], results['jsFrgs']))
        results['jsFrgs'] = ("require(['%s'], function (%s) { %s })" % ("', '".join([g for (g, _) in group]), ', '.join([g for (_, g) in group]), results['jsFrgs']))
    return results
def show(self, all: bool=False):
    """Return the resolved script URLs per package alias.

    NOTE: the parameter name `all` shadows the builtin of the same name; kept
    as-is for interface compatibility.

    :param all: When True, list every known package; otherwise only those
        required by the current page.
    :return: Dict alias -> list of {'script': url, 'version': version}.
    """
    packages = {}
    if (not all):
        for (imp, repo) in [(self.page.cssImport, CSS_IMPORTS), (self.page.jsImports, JS_IMPORTS)]:
            pkg = self.cleanImports(imp, repo)
            for c in pkg:
                for s in repo[c].get('modules', []):
                    if ('version' not in s):
                        s['version'] = (JS_IMPORTS[c]['version'] if (c in JS_IMPORTS) else CSS_IMPORTS[c]['version'])
                    # NOTE(review): this mutates the shared module definition in
                    # place ('path' is overwritten with its expanded form), so a
                    # later call sees already-expanded paths — confirm intended.
                    s['path'] = (s['path'] % s)
                    packages.setdefault(c, []).append({'script': ('%(cdnjs)s/%(path)s/%(script)s' % s), 'version': s['version']})
    else:
        for mod in [CSS_IMPORTS, JS_IMPORTS]:
            for (c, pkg) in mod.items():
                for s in pkg.get('modules', []):
                    if ('version' not in s):
                        s['version'] = (JS_IMPORTS[c]['version'] if (c in JS_IMPORTS) else CSS_IMPORTS[c]['version'])
                    # Same in-place mutation caveat as above.
                    s['path'] = (s['path'] % s)
                    packages.setdefault(c, []).append({'script': ('%(cdnjs)s/%(path)s/%(script)s' % s), 'version': s['version']})
    return packages
def google_products(self, products: List[str], api_key: Optional[str]=None, site_key: str='6LeIxAcTAAAAAJcZVRqyHh71UMIEGNQ_MXjiZKhI'):
    """Register Google product packages (keys of GOOGLE_EXTENSIONS) on the page.

    :param products: Google product aliases to enable.
    :param api_key: Optional Google API key substituted into the script URLs.
    :param site_key: reCAPTCHA site key (the default is Google's public test key).
    """
    global JS_IMPORTS
    for p in products:
        for m in GOOGLE_EXTENSIONS[p].get('modules', []):
            # Inject the credentials into the script URL template (in place).
            m['script'] = (m['script'] % {'api_key': api_key, 'site_key': site_key})
        self.addPackage(('google-%s' % p), GOOGLE_EXTENSIONS[p])
        JS_IMPORTS[('google-%s' % p)] = GOOGLE_EXTENSIONS[p]
        if ('launcher' in GOOGLE_EXTENSIONS[p]):
            # Some products need a bootstrap snippet run on page load.
            self.page.properties.js.add_builders(GOOGLE_EXTENSIONS[p]['launcher'])
    self.page._with_google_imports = True
def locals(self, aliases: List[str], end_points: Optional[str]=None):
    """Route the given package aliases to a local endpoint instead of the CDN.

    Clears each module's path and points its cdnjs root at the local static
    URL (or the supplied end point). Development helper only.

    :param aliases: Package aliases to reroute.
    :param end_points: Optional endpoint overriding the default static URL.
    """
    global JS_IMPORTS
    global CSS_IMPORTS
    logging.warning(('Routing packages %s locally this should not be put on a server !' % aliases))
    target = (end_points or self.static_url)
    for repo in (JS_IMPORTS, CSS_IMPORTS):
        for alias in aliases:
            if alias in repo:
                for m in repo[alias]['modules']:
                    m.update({'path': '', 'cdnjs': target})
def pkgs(self) -> ImportPackages:
    """Lazily build and cache the ImportPackages accessor for this manager.

    NOTE(review): other methods access this as `self.pkgs.get(...)` (no call),
    so this is presumably decorated as a @property upstream — confirm.
    """
    # `__pkgs` is name-mangled to the enclosing class; created on first access.
    if (self.__pkgs is None):
        self.__pkgs = ImportPackages(self.jsImports, self.cssImports, page=self.page)
    return self.__pkgs
def website(self, alias: str):
    """Return the documented website for a JS package alias ('' when unknown)."""
    pkg_def = JS_IMPORTS.get(alias)
    return pkg_def.get('website', '') if pkg_def is not None else ''
def append_to(self, alias: str, js_modules: List[dict]=None, css_modules: List[dict]=None, version: str=None):
    """Append extra script / stylesheet modules to an already registered package.

    :param alias: Existing package alias.
    :param js_modules: Optional extra JS module definitions.
    :param css_modules: Optional extra CSS module definitions.
    :param version: Optional version; defaults to the package's registered one.
    """
    if (js_modules is not None):
        version = (version or JS_IMPORTS[alias]['version'])
        # Inherit the package's CDN root when the module does not define one.
        cdnjs = None
        if JS_IMPORTS[alias]['modules']:
            cdnjs = JS_IMPORTS[alias]['modules'][0].get('cdnjs')
        for js_module in js_modules:
            if (('cdnjs' not in js_module) and (cdnjs is not None)):
                js_module['cdnjs'] = cdnjs
            self.jsImports[alias]['main'][script_cdnjs_path(alias, js_module)] = version
            self.jsImports[alias]['type'][script_cdnjs_path(alias, js_module)] = 'text/javascript'
    if (css_modules is not None):
        version = (version or CSS_IMPORTS[alias].get('version') or JS_IMPORTS[alias]['version'])
        cdnjs = None
        if CSS_IMPORTS[alias]['modules']:
            cdnjs = CSS_IMPORTS[alias]['modules'][0].get('cdnjs')
        for css_module in css_modules:
            if (('cdnjs' not in css_module) and (cdnjs is not None)):
                css_module['cdnjs'] = cdnjs
            # Bug fix: stylesheet modules were being appended to the JS registry
            # (and tagged with an invalid <script> type of 'stylesheet'), so
            # jsResolve() would have emitted them as scripts. Register them with
            # the CSS imports so cssResolve() emits them as <link> tags.
            self.cssImports[alias]['main'][script_cdnjs_path(alias, css_module)] = version
class build_ext(_build_ext, object):
    """Cython-aware build_ext configuring fplll/gmp flags and generating config.pxi."""

    cythonize_dir = 'build'
    fplll = None   # compile/link settings shared by fplll-backed extensions
    other = None   # settings for extensions that only need gmp
    def_varnames = ['HAVE_QD', 'HAVE_LONG_DOUBLE', 'HAVE_NUMPY']
    config_pxi_path = os.path.join('.', 'src', 'fpylll', 'config.pxi')

    def finalize_options(self):
        """Fill in compiler settings and append the optional numpy extension."""
        super(build_ext, self).finalize_options()
        def_vars = self._generate_config_pxi()
        include_dirs = [os.path.join(sys.prefix, 'include')]
        library_dirs = [os.path.join(sys.exec_prefix, 'lib')]
        cxxflags = list(filter(None, os.environ.get('CXXFLAGS', '').split()))
        if (self.fplll is None):
            self.fplll = {'include_dirs': include_dirs, 'library_dirs': library_dirs, 'language': 'c++', 'libraries': ['gmp', 'mpfr', 'fplll'], 'extra_compile_args': (['-std=c++11'] + cxxflags), 'extra_link_args': ['-std=c++11']}
        if def_vars['HAVE_QD']:
            self.fplll['libraries'].append('qd')
        if (self.other is None):
            self.other = {'include_dirs': include_dirs, 'library_dirs': library_dirs, 'libraries': ['gmp']}
        if ('READTHEDOCS' in os.environ):
            # Read the Docs ships binaries built with the pre-C++11 ABI.
            self.fplll['extra_compile_args'].append('-D_GLIBCXX_USE_CXX11_ABI=0')
        if def_vars['HAVE_NUMPY']:
            import numpy
            numpy_args = copy(self.fplll)
            # Bug fix: `copy` is shallow, so appending to the shared
            # include_dirs list would also add numpy's headers to every other
            # fplll extension; rebuild the list instead of mutating it.
            numpy_args['include_dirs'] = numpy_args['include_dirs'] + [numpy.get_include()]
            self.extensions.append(Extension('fpylll.numpy', ['src/fpylll/numpy.pyx'], **numpy_args))
        # Propagate the chosen settings onto each declared extension.
        for ext in self.extensions:
            if ext.fplll:
                for (key, value) in self.fplll.items():
                    setattr(ext, key, value)
            elif ext.other:
                for (key, value) in self.other.items():
                    setattr(ext, key, value)

    def run(self):
        """Cythonize the sources before delegating to the regular build."""
        import Cython.Build
        self.extensions = Cython.Build.cythonize(self.extensions, include_path=['src'], build_dir=self.cythonize_dir, compiler_directives={'binding': True, 'embedsignature': True, 'language_level': 2})
        super(build_ext, self).run()

    def _generate_config_pxi(self):
        """Write src/fpylll/config.pxi (only when changed) and return the DEF variables."""
        def_vars = {}
        config_pxi = []
        for defvar in self.def_varnames:
            # Environment variables override auto-detection.
            value = os.environ.get(defvar)
            if (value is not None):
                value = (value.lower() in ['1', 'true', 'yes'])
            else:
                value = getattr(self, ('_get_' + defvar.lower()))()
            config_pxi.append('DEF {0}={1}'.format(defvar, value))
            def_vars[defvar] = value
        config_pxi = ('\n'.join(config_pxi) + '\n')
        try:
            # Bug fix: close the file handle (the original leaked it via
            # open(...).read()).
            with open(self.config_pxi_path, 'r') as fr:
                cur_config_pxi = fr.read()
        except IOError:
            cur_config_pxi = ''
        # Rewrite only on change to avoid needlessly invalidating cython output.
        if (cur_config_pxi != config_pxi):
            with open(self.config_pxi_path, 'w') as fw:
                fw.write(config_pxi)
        return def_vars

    def _get_have_qd(self):
        """Detect via pkg-config whether fplll links against libqd."""
        if ('CONDA_PREFIX' in os.environ):
            os.environ['PKG_CONFIG_PATH'] = ':'.join([os.path.join(os.environ['CONDA_PREFIX'], 'lib', 'pkgconfig'), os.environ.get('PKG_CONFIG_PATH', '')])
        if ('VIRTUAL_ENV' in os.environ):
            os.environ['PKG_CONFIG_PATH'] = ':'.join([os.path.join(os.environ['VIRTUAL_ENV'], 'lib', 'pkgconfig'), os.environ.get('PKG_CONFIG_PATH', '')])
        try:
            libs = subprocess.check_output(['pkg-config', 'fplll', '--libs'])
            if (b'-lqd' in libs):
                return True
        except (subprocess.CalledProcessError, FileNotFoundError):
            # pkg-config missing or fplll not registered: assume no qd.
            pass
        return False

    def _get_have_numpy(self):
        """Return True when numpy is importable."""
        try:
            import numpy
            return True
        except ImportError:
            pass
        return False

    def _get_have_long_double(self):
        """Long double support is unavailable on Cygwin."""
        return (not sys.platform.startswith('cygwin'))
class RedirectingResourceWithHeaders():
    """Falcon test resource: every verb raises a redirect carrying a custom header."""

    _EXTRA_HEADERS = {'foo': 'bar'}

    def on_get(self, req, resp):
        # 301
        raise falcon.HTTPMovedPermanently('/moved/perm', headers=dict(self._EXTRA_HEADERS))

    def on_post(self, req, resp):
        # 302
        raise falcon.HTTPFound('/found', headers=dict(self._EXTRA_HEADERS))

    def on_put(self, req, resp):
        # 303
        raise falcon.HTTPSeeOther('/see/other', headers=dict(self._EXTRA_HEADERS))

    def on_delete(self, req, resp):
        # 307
        raise falcon.HTTPTemporaryRedirect('/tmp/redirect', headers=dict(self._EXTRA_HEADERS))

    def on_head(self, req, resp):
        # 308
        raise falcon.HTTPPermanentRedirect('/perm/redirect', headers=dict(self._EXTRA_HEADERS))
def random_internal_ip() -> ipaddress.IPv4Address:
    """Return a random host address from one of the RFC 1918 private ranges.

    The network and broadcast addresses of the chosen block are excluded.
    """
    private_blocks = [
        ipaddress.IPv4Network('10.0.0.0/8'),
        ipaddress.IPv4Network('172.16.0.0/12'),
        ipaddress.IPv4Network('192.168.0.0/16'),
    ]
    block = random.choice(private_blocks)
    low = int(block.network_address) + 1
    high = int(block.broadcast_address) - 1
    return ipaddress.IPv4Address(random.randrange(low, high))
def validate_email(email, required=False):
    """Validate an email address string.

    :param email: Value to validate; may be None when not required.
    :param required: When True, None is rejected like any other invalid value.
    :return: The validated email, or None when email is None and not required.
    :raises ValueError: If the value is not a non-empty string of the form
        local@domain with non-empty parts on both sides.
    """
    if ((email is None) and (not required)):
        return None
    if ((not isinstance(email, str)) or (not email)):
        raise ValueError('Invalid email: "{0}". Email must be a non-empty string.'.format(email))
    # Bug fix: the original called email.split('') (empty separator), which
    # raises ValueError for every input; split on the '@' sign instead.
    parts = email.split('@')
    if ((len(parts) != 2) or (not parts[0]) or (not parts[1])):
        raise ValueError('Malformed email address string: "{0}".'.format(email))
    return email
class PornembyDragonRainMonitor():
    """Container for the two Pornemby "dragon rain" chat monitors.

    NOTE(review): several string literals below appear to have lost their
    non-ASCII characters upstream (log messages reduced to '.', keyword
    reduced to '') — confirm against the original source.
    """

    class PornembyDragonRainClickMonitor(Monitor):
        """Clicks the red-packet button on matching bot messages."""
        name = 'Pornemby '
        chat_user = ['PronembyTGBot2_bot', 'PronembyTGBot3_bot', 'PornembyBot']
        chat_name = 'Pornemby'
        # NOTE(review): a None keyword presumably matches any message — confirm
        # against the Monitor base class.
        chat_keyword = [None]
        additional_auth = ['pornemby_pack']
        allow_edit = True
        debug_no_log = True

        async def on_trigger(self, message: Message, key, reply):
            # Skip entirely while the account is in alert mode.
            if pornemby_alert.get(self.client.me.id, False):
                self.log.info(f'.')
                return
            if message.reply_markup:
                if isinstance(message.reply_markup, InlineKeyboardMarkup):
                    buttons = flatten(message.reply_markup.inline_keyboard)
                    for b in buttons:
                        # NOTE(review): '' is a substring of every string, so this
                        # currently matches all buttons — the original keyword
                        # character was likely stripped; confirm.
                        if ('' in b.text):
                            # Randomly skip based on the configured probability.
                            if (random.random() > self.config.get('possibility', 1.0)):
                                self.log.info(f'.')
                                return
                            try:
                                (await message.click(b.text))
                            except TimeoutError:
                                self.log.info(' Pornemby , , .')
                            except RPCError:
                                self.log.info(' Pornemby , .')
                            else:
                                self.log.info(' Pornemby , , .')
                            return

    class PornembyDragonRainStatusMonitor(Monitor):
        """Logs when the current account is mentioned in a rain status message."""
        name = 'Pornemby '
        chat_user = ['PronembyTGBot2_bot', 'PronembyTGBot3_bot', 'PornembyBot']
        chat_name = 'Pornemby'
        # Regex capturing (name, count) from the status line.
        chat_keyword = '\\s+(.*):(\\d+)'
        allow_edit = True

        async def on_trigger(self, message: Message, key, reply):
            for me in message.entities:
                if (me.type == MessageEntityType.TEXT_MENTION):
                    # Only react to mentions of this client's own account.
                    if (me.user.id == self.client.me.id):
                        self.log.info(f': {key[1]} .')
class OptionSeriesHeatmapSonificationTracksMappingGapbetweennotes(Options):
    """Highcharts heatmap sonification `gapBetweenNotes` mapping options.

    NOTE(review): each getter/setter pair below shares a name with no
    @property / @x.setter decorators visible, so as written the later `def`
    silently shadows the earlier one. These look like stripped decorators
    (the framework's usual generated-options pattern) — confirm upstream.
    """

    def mapFunction(self):
        # Getter: current mapping function (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the parameter is mapped to (default None).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range (default None).
        return self._config_get(None)

    def max(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range (default None).
        return self._config_get(None)

    def min(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def within(self):
        # Getter: unit the min/max are defined within (default None).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
def make_linear_interpolator(field, grid=None, fill_value=None):
    """Build a linear interpolator for `field`, dispatching on the grid layout.

    :param field: The field to interpolate (its own grid is used by default).
    :param grid: Optional grid overriding `field.grid`.
    :param fill_value: Value returned outside the grid's domain.
    :return: The interpolator produced by the layout-specific factory.
    """
    if grid is None:
        grid = field.grid
    factory = (make_linear_interpolator_unstructured
               if grid.is_unstructured
               else make_linear_interpolator_separated)
    return factory(field, grid, fill_value)
_filter('repo_url')  # NOTE(review): looks like a stripped decorator (@_filter('repo_url')) — confirm
def repo_url(url):
    """Template filter turning a copr:// pseudo-URL into a frontend detail URL.

    For `copr://owner/project` URLs, links to the copr detail page; other URLs
    pass through. Owners starting with '@' are treated as groups.

    :param url: The repository URL to render.
    :return: A frontend-safe URL.
    """
    parsed = urlparse(url)
    if (parsed.scheme == 'copr'):
        owner = parsed.netloc
        prj = parsed.path.split('/')[1]
        # Bug fix: the original compared owner[0] == '' which is always False
        # (a one-character slice is never empty), so group owners were routed
        # as plain usernames. Copr group owners are prefixed with '@'.
        if (owner[0] == '@'):
            url = url_for('coprs_ns.copr_detail', group_name=owner[1:], coprname=prj)
        else:
            url = url_for('coprs_ns.copr_detail', username=owner, coprname=prj)
    return helpers.fix_protocol_for_frontend(url)
class TCPMappingTLSOriginationContextTest(AmbassadorTest):
    """Checks that a TCPMapping originating TLS through a TLSContext presents
    the context's SNI (`my-funny-name`) to the upstream service."""

    extra_ports = [6789]
    # Upstream echo service the TCPMapping forwards to.
    target: ServiceType

    def init(self) -> None:
        self.target = HTTP()

    def manifests(self) -> str:
        # Client-cert Secret + TLSContext (with SNI) + TCPMapping on port 6789.
        return (f'''
---
apiVersion: v1
kind: Secret
metadata:
  name: {self.path.k8s}-clientcert
type: kubernetes.io/tls
data:
  tls.crt: {TLSCerts['presto.example.com'].k8s_crt}
  tls.key: {TLSCerts['presto.example.com'].k8s_key}
---
apiVersion: getambassador.io/v2
kind: TLSContext
metadata:
  name: {self.path.k8s}-tlsclient
spec:
  ambassador_id: [ {self.ambassador_id} ]
  secret: {self.path.k8s}-clientcert
  sni: my-funny-name
---
apiVersion: getambassador.io/v2
kind: TCPMapping
metadata:
  name: {self.path.k8s}
spec:
  ambassador_id: [ {self.ambassador_id} ]
  port: 6789
  service: {self.target.path.fqdn}:443
  tls: {self.path.k8s}-tlsclient
''' + super().manifests())

    def queries(self):
        # Single probe through the TCP port.
        (yield Query(self.url('', port=6789)))

    def check(self):
        # The echo backend reports whether TLS was used and which SNI it saw.
        assert (self.results[0].json['backend'] == self.target.path.k8s)
        assert (self.results[0].json['request']['tls']['enabled'] == True)
        assert (self.results[0].json['request']['tls']['server-name'] == 'my-funny-name')
class SpeakersCall(SoftDeletionModel):
    """Call-for-speakers configuration, attached one-to-one to an Event."""
    __tablename__ = 'speakers_calls'
    id = db.Column(db.Integer, primary_key=True)
    announcement = db.Column(db.Text, nullable=True)
    # Submission window; soft_ends_at allows a grace period before the hard end.
    starts_at = db.Column(db.DateTime(timezone=True), nullable=False)
    soft_ends_at = db.Column(db.DateTime(timezone=True), nullable=True)
    ends_at = db.Column(db.DateTime(timezone=True), nullable=False)
    # NOTE(review): opaque token, presumably for private call links — confirm.
    hash = db.Column(db.String, nullable=True)
    privacy = db.Column(db.String, nullable=False, default='public')
    # Deleting the event cascades to its speakers call.
    event_id = db.Column(db.Integer, db.ForeignKey('events.id', ondelete='CASCADE'))
    event = db.relationship('Event', backref=backref('speakers_call', uselist=False))

    def __repr__(self):
        return ('<speakers_call %r>' % self.announcement)
class OperatorSpecSchema(BaseSpecSchema):
    """Marshmallow schema for an operator spec with optional property preprocessors."""
    operator_class = ma.fields.String(required=True)
    operator_class_module = ma.fields.String(required=True)
    property_preprocessors = ma.fields.List(ma.fields.Nested(PropertyPreprocessorSchema))

    _schema  # NOTE(review): looks like a stripped decorator (e.g. @validates_schema) — confirm
    def valid_preprocessor_property_names(self, data):
        """Validate that each preprocessor targets existing, unshared properties.

        :raises ma.ValidationError: When a preprocessor references a property
            missing from the JSON schema, or when one property is assigned more
            than one preprocessor.
        """
        preprocessors = data.get('property_preprocessors', [])
        if (not preprocessors):
            return
        properties = frozenset(data.get('parameters_jsonschema', {}).get('properties', []))
        # Every targeted property must exist in the parameters schema.
        for preprocessor in preprocessors:
            missing = [property_name for property_name in preprocessor['apply_to_properties'] if (property_name not in properties)]
            if missing:
                raise ma.ValidationError('Properties specified by preprocessor `{}` are not present in the schema: `{}`'.format(preprocessor['type'], '`, `'.join(missing)))
        # A property may be claimed by at most one preprocessor.
        assigned_preprocessors = {property_name: [preprocessor['type'] for preprocessor in preprocessors if (property_name in preprocessor['apply_to_properties'])] for property_name in properties}
        dupes = [key for (key, value) in six.iteritems(assigned_preprocessors) if (len(value) > 1)]
        if dupes:
            raise ma.ValidationError('One or more properties were assigned multiple preprocessors. This is not permitted. Found: {}'.format({key: value for (key, value) in six.iteritems(assigned_preprocessors) if (len(value) > 1)}), ['property_preprocessors'])
class VkontakteCallbackRouter(VkontaktePayloadRouter):
    """Routes VK `message_event` callback updates by their payload."""

    def extract_keys(self, context):
        """Return the hashable routing keys for a VK message_event update.

        Also attaches `payload` and a `send_message_event_answer` coroutine to
        the context as side effects.

        :param context: Incoming update context.
        :return: Iterable of hashable keys ('()' when the update is not a VK
            message_event).
        """
        if isinstance(context.update, Message):
            return ()
        if (context.backend.get_identity() != 'vk'):
            return ()
        if (context.update['type'] != 'message_event'):
            return ()
        payload = context.update['object']['payload']
        context.payload = payload

        # Convenience helper so handlers can answer the callback event
        # without re-extracting the event/user/peer identifiers.
        async def _send_message_event_answer(event_data, **kwargs):
            return (await context.request('messages.sendMessageEventAnswer', {'event_id': context.update['object']['event_id'], 'user_id': context.update['object']['user_id'], 'peer_id': context.update['object']['peer_id'], 'event_data': json.dumps(event_data), **kwargs}))
        context.send_message_event_answer = _send_message_event_answer
        if isinstance(payload, dict):
            # One key per registered key shape, built from the payload subset.
            return (self._to_hashable(pick(payload, possible_key_shape)) for possible_key_shape in self._possible_key_shapes)
        else:
            return (self._to_hashable(payload),)
def fillbox():
    """Move any queued links into the textbox and hide the extra button.

    Consumes the module-level `remaininglinks` queue; returns the three
    Gradio component updates (textbox value, status message, button state).
    """
    global remaininglinks
    if not remaininglinks:
        return ['', '', gr.Button.update(visible=False)]
    pending_text = list_to_text(remaininglinks)
    remaininglinks = []
    return [pending_text, 'Links updated!\nClick Download All! to download the rest of the links', gr.Button.update(visible=False)]
class OptionSeriesOrganizationSonificationDefaultspeechoptionsMappingPitch(Options):
    """Highcharts organization-series sonification speech `pitch` mapping options.

    NOTE(review): each getter/setter pair below shares a name with no
    @property / @x.setter decorators visible, so as written the later `def`
    silently shadows the earlier one. These look like stripped decorators
    (the framework's usual generated-options pattern) — confirm upstream.
    """

    def mapFunction(self):
        # Getter: current mapping function (default None).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property mapped to ('undefined' by default).
        return self._config_get('undefined')

    def mapTo(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound ('undefined' by default).
        return self._config_get('undefined')

    def max(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def min(self):
        # Getter: lower bound ('undefined' by default).
        return self._config_get('undefined')

    def min(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def within(self):
        # Getter: unit min/max are defined within ('undefined' by default).
        return self._config_get('undefined')

    def within(self, text: str):
        # Setter.
        self._config(text, js_type=False)
def lambda_handler(event, context):
    """AWS Lambda entry point: upsert a sentence for the authenticated user.

    :param event: API Gateway proxy event (Cognito authorizer + JSON body).
    :param context: Lambda context (unused).
    :return: API Gateway proxy response (200 on success, 502 on failure).
    """
    print(event)
    cognito_id = event['requestContext']['authorizer']['claims']['sub']
    body = json.loads(event['body'])
    date = str(datetime.datetime.now().isoformat())
    # Bug fix: the original tested `'sentence_id' == ''` — two literals, always
    # False — so a missing/blank id was never generated. Inspect the body.
    if not body.get('sentence_id'):
        body['sentence_id'] = generate_sentence_id()
    try:
        response = update_sentence(cognito_id, body, date)
    except Exception as e:
        print(f'Error: Failed to update sentence - {body}')
        print(e)
        return api_response.response(502, f'Failed to save sentence.')
    return api_response.response(200, f'Successfully saved sentence.')
class OptionPlotoptionsBoxplotSonificationDefaultspeechoptionsPointgrouping(Options):
    """Highcharts boxplot sonification speech point-grouping options.

    NOTE(review): each getter/setter pair below shares a name with no
    @property / @x.setter decorators visible, so as written the later `def`
    silently shadows the earlier one. These look like stripped decorators
    (the framework's usual generated-options pattern) — confirm upstream.
    """

    def algorithm(self):
        # Getter: grouping algorithm (default 'last').
        return self._config_get('last')

    def algorithm(self, text: str):
        # Setter.
        self._config(text, js_type=False)

    def enabled(self):
        # Getter: whether grouping is enabled (default True).
        return self._config_get(True)

    def enabled(self, flag: bool):
        # Setter.
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Getter: grouping window in milliseconds (default 15).
        return self._config_get(15)

    def groupTimespan(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def prop(self):
        # Getter: point property the grouping considers (default 'y').
        return self._config_get('y')

    def prop(self, text: str):
        # Setter.
        self._config(text, js_type=False)
def get_nonzero_filter():
    """Build a Django Q filter keeping rows with any nonzero outlay amount or a
    nonzero transaction obligated amount (positive or negative)."""
    outlay_terms = (
        Q(gross_outlay_amount_FYB_to_period_end__gt=0)
        | Q(gross_outlay_amount_FYB_to_period_end__lt=0)
        | Q(USSGL487200_downward_adj_prior_year_prepaid_undeliv_order_oblig__gt=0)
        | Q(USSGL487200_downward_adj_prior_year_prepaid_undeliv_order_oblig__lt=0)
        | Q(USSGL497200_downward_adj_of_prior_year_paid_deliv_orders_oblig__gt=0)
        | Q(USSGL497200_downward_adj_of_prior_year_paid_deliv_orders_oblig__lt=0)
    )
    nonzero_outlay = Q(outlay_terms)
    toa_terms = Q(transaction_obligated_amount__gt=0) | Q(transaction_obligated_amount__lt=0)
    nonzero_toa = Q(toa_terms)
    return (nonzero_outlay | nonzero_toa)
def construct_result(file_object):
    """Derive a CPU-architecture entry from the file-type analysis metadata.

    :param file_object: Analysed file whose 'file_type' result is inspected.
    :return: The (possibly extended) cpu_architecture result dict.
    """
    full_type = file_object.processed_analysis['file_type']['result']['full']
    result = file_object.processed_analysis.get('cpu_architecture', {})
    architecture = _search_for_arch_keys(full_type, _architectures, delimiter='')
    if not architecture:
        # No architecture hint in the file-type string: leave the result untouched.
        return result
    bitness = _search_for_arch_keys(full_type, _bitness)
    endianness = _search_for_arch_keys(full_type, _endianness)
    # '(M)' flags that the detection comes from meta data, not code analysis.
    result[f'{architecture}{bitness}{endianness} (M)'] = 'Detection based on meta data'
    return result
class CallbackPacket(cstruct.Instance):
    """cstruct-backed wrapper around a parsed beacon callback packet."""
    counter: int
    size: int
    callback: BeaconCallback
    data: bytes

    def __init__(self, *args, **kwargs):
        # Parse via the c2struct definition and adopt its fields.
        instance = c2struct.CallbackPacket(*args, **kwargs)
        super().__init__(instance._type, instance._values, instance._sizes)

    def __eq__(self, other):
        # Bug fix: return NotImplemented for foreign types instead of raising
        # AttributeError on `other._values` (Python data-model contract).
        if not isinstance(other, CallbackPacket):
            return NotImplemented
        return (self._values == other._values)

    def __hash__(self):
        # Hash the ordered field (name, value) pairs.
        return hash(tuple(self._values.items()))
def extractMeixiangsiHomeBlog(item):
    """Parse a feed item from the Meixiangsi blog into a release message.

    :param item: Feed item dict with 'title' and 'tags'.
    :return: A release message for a recognised tag, None for previews or
        items with no chapter/volume, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tag_table = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, tl_type in tag_table:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class PreviewTagPair(sublime_plugin.EventListener):
    """Sublime Text listener wiring editor events into emmet's tag-pair preview."""

    def on_query_context(self, view: sublime.View, key: str, *args):
        """Expose 'emmet_tag_preview' as a key-binding context."""
        if (key == 'emmet_tag_preview'):
            return tag_pair.has_preview(view)
        # Not our context: let other listeners answer.
        return None

    _pair.allow_preview  # NOTE(review): looks like a stripped decorator (e.g. @tag_pair.allow_preview) — confirm
    def on_selection_modified_async(self, view: sublime.View):
        """Forward caret moves so the preview can follow the selection."""
        tag_pair.handle_selection_change(view)
def change_ld(binary, ld, output):
    """Patch an ELF's PT_INTERP to point at an alternate glibc loader.

    :param binary: Path to the ELF to patch.
    :param ld: glibc version suffix for /lib64/ld-glibc-<ld>.
    :param output: Path for the patched copy (made rwx for the owner).
    :return: None (logs failure when arguments are missing or the interp
        string does not fit).
    """
    if ((not binary) or (not ld) or (not output)):
        log.failure("Try 'python change_ld.py -h' for more information.")
        return None
    binary = ELF(binary)
    for segment in binary.segments:
        if (segment.header['p_type'] == 'PT_INTERP'):
            size = segment.header['p_memsz']
            addr = segment.header['p_paddr']
            data = segment.data()
            # The new interpreter path (NUL-padded) must fit the old segment.
            if (size <= len(ld)):
                log.failure('Failed to change PT_INTERP')
                return None
            # NOTE(review): a str is written here; pwntools on Python 3
            # generally expects bytes for ELF.write — confirm this runs on the
            # intended Python version.
            binary.write(addr, '/lib64/ld-glibc-{}'.format(ld).ljust(size, '\x00'))
    if os.access(output, os.F_OK):
        os.remove(output)
    binary.save(output)
    # 448 == 0o700: owner rwx only.
    os.chmod(output, 448)
    success('PT_INTERP has changed. Saved temp file {}'.format(output))
class NoteUpperWidget(urwid.WidgetWrap):
    """Urwid widget rendering the upper part of a piano key for one note."""

    def __init__(self, note, onclick=None, first_corner=False, last_corner=False, **kw):
        """Store the note and corner flags, then build the clipped text widget."""
        self.note = note
        self.onclick = onclick
        self.first_corner = first_corner
        self.last_corner = last_corner
        markup = self._build_text()
        self.text = urwid.Text(markup, wrap='clip')
        super().__init__(self.text)

    def update(self, highlight=False):
        """Re-render the key, optionally in its highlighted state."""
        self.text.set_text(self._build_text(highlight=highlight))

    def _build_text(self, highlight=False):
        """Delegate the actual markup generation to the shared renderer."""
        return render_upper_part_key(
            self.note, self.first_corner, self.last_corner, highlight=highlight)
def cli(args=None):
    """Entry point: route the command line to balance, layout, or the LMTP server.

    NOTE(review): with no explicit *args*, arguments come from sys.argv[2:],
    which skips two leading entries — confirm this matches how the script
    is invoked.
    """
    argv = args if args else sys.argv[2:]
    if argv and argv[0] == 'balance':
        # 'balance' consumes its own subcommand token.
        balance_main(argv[1:])
    elif '--layout' in argv:
        layout_main(argv)
    else:
        lmtp_serve_main(argparser(argv))
class MESH_OT_maze_mesh(bpy.types.Operator):
    """Blender operator that carves a maze into the selected part of a mesh.

    NOTE(review): `poll` takes `cls` but carries no @classmethod decorator —
    this looks like a decorator stripped during extraction; confirm against
    the original add-on source.
    """
    bl_idname = 'mesh.maze_mesh'
    bl_label = 'Maze mesh selection'
    bl_description = 'Generate a maze on selected part of mesh'
    bl_options = {'REGISTER', 'UNDO'}

    def poll(cls, context):
        # Operator is only available while editing a mesh object.
        obj = context.edit_object
        return ((obj is not None) and (obj.type == 'MESH'))

    # Cached maze geometry so redoing the operator with tweaked parameters
    # can skip regenerating the maze pattern (see execute()).
    link_centers = []
    vert_centers = []
    # (identifier, label, description, number) tuples for the enum props.
    offset_modes = (('OFFSET', 'Offset', 'Width is offset of new edges from original', 1), ('WIDTH', 'Width', 'Width is width of new face', 2), ('DEPTH', 'Depth', 'Width is distance from original edge to bevel face', 3), ('PERCENT', 'Percent', 'Width is percent of adjacent edge length', 4))
    wall_types = (('0', 'Thick', 'Boundary wall extends to edge of selection', 0), ('1', 'Thin', 'Boundary wall is similar thickness to internal walls', 1), ('2', 'None', 'Boundary wall is not extruded', 2))
    offset_type: bpy.props.EnumProperty(name='Bevel Amount Type', description='What distance Width measures', items=offset_modes)
    offset: bpy.props.FloatProperty(name='Width', description='path width', default=0.1, soft_min=0, soft_max=1.0, precision=3)
    use_clamp_overlap: bpy.props.BoolProperty(name='Clamp Overlap', description='Do not allow bevel edges to overlap', default=False)
    use_loop_slide: bpy.props.BoolProperty(name='Loop Slide', description='Prefer slide along edge to even widths', default=True)
    use_even_offset: bpy.props.BoolProperty(name='Offset Even', description='Scale the offset to give more even thickness', default=False)
    use_relative_offset: bpy.props.BoolProperty(name='Offset Relative', description='Scale the offset by surrounding geometry', default=False)
    thickness: bpy.props.FloatProperty(name='Thickness', description='wall width at top', default=0.0, soft_min=(- 1.0), soft_max=1.0, precision=4)
    depth: bpy.props.FloatProperty(name='Extrude', description='wall height', default=0.1, soft_min=(- 1.0), soft_max=1.0, precision=4)
    use_outset: bpy.props.BoolProperty(name='Outset', description='Outset rather than inset', default=False)

    def update_maze(self, context):
        # Property-update callback: flags the maze pattern for regeneration.
        self.update = True

    rseed: bpy.props.IntProperty(name='Random seed', description='redo maze pattern', default=0, min=0, max=100, update=update_maze)
    braid: bpy.props.FloatProperty(name='Braiding', description='fraction of dead ends to make into loops', default=0, min=0.0, max=1.0, precision=2, update=update_maze)
    boundary_type: bpy.props.EnumProperty(name='Boundary Wall Type', description='type of wall on boundary of maze', items=wall_types, default='1')
    options: bpy.props.BoolProperty(name='Advanced Options', description='More options', default=False)
    update: bpy.props.BoolProperty(name='update maze', description='update maze', default=True)

    def draw(self, context):
        # Custom redo-panel layout grouped into maze / path / wall boxes.
        # NOTE(review): 'Paramters' typos below are runtime UI strings and
        # are deliberately left untouched here.
        layout = self.layout
        box_maze = layout.box()
        box_maze.label(text='Maze Parameters')
        box_maze.prop(self, 'rseed')
        box_maze.prop(self, 'braid')
        box_maze.prop(self, 'boundary_type')
        box_maze.prop(self, 'options')
        box_path = layout.box()
        box_path.label(text='Path Paramters')
        box_path.prop(self, 'offset_type', text='')
        box_path.prop(self, 'offset')
        if self.options:
            # Advanced bevel tuning, hidden by default.
            box_path.prop(self, 'use_clamp_overlap')
            box_path.prop(self, 'use_loop_slide')
        box_wall = layout.box()
        box_wall.label(text='Wall Paramters')
        box_wall.prop(self, 'use_relative_offset')
        box_wall.prop(self, 'depth')
        if self.options:
            box_wall.prop(self, 'use_even_offset')
            box_wall.prop(self, 'thickness')
            box_wall.prop(self, 'use_outset')

    def get_maze_params(self):
        """Bundle the operator properties into the dict consumed by mesh_maze."""
        maze_params = {}
        maze_params['maze_update'] = self.update
        maze_params['rseed'] = self.rseed
        maze_params['link_centers'] = self.link_centers
        maze_params['vert_centers'] = self.vert_centers
        maze_params['offset'] = self.offset
        maze_params['offset_type'] = self.offset_type
        maze_params['use_loop_slide'] = self.use_loop_slide
        maze_params['use_clamp_overlap'] = self.use_clamp_overlap
        maze_params['boundary_type'] = int(self.boundary_type)
        maze_params['depth'] = self.depth
        maze_params['thickness'] = self.thickness
        maze_params['use_even_offset'] = self.use_even_offset
        maze_params['use_outset'] = self.use_outset
        maze_params['use_relative_offset'] = self.use_relative_offset
        maze_params['braid'] = self.braid
        return maze_params

    def execute(self, context):
        """Generate the maze on the current edit-mesh selection."""
        obj = context.object
        if (obj.data.count_selected_items()[0] == 0):
            self.report({'WARNING'}, 'No suitable selection found. Operation cancelled')
            return {'CANCELLED'}
        bm = bmesh.from_edit_mesh(obj.data)
        if (len(self.vert_centers) == 0):
            # First run (nothing cached): force a full maze regeneration.
            self.update = True
        maze_params = self.get_maze_params()
        bpy.ops.mesh.select_mode(type='EDGE')
        (bm, self.link_centers, self.vert_centers) = mesh_maze.generate_maze(bm, maze_params)
        self.update = False
        bmesh.update_edit_mesh(obj.data, destructive=True)
        return {'FINISHED'}
class Array(Expression):
    """Expression AST node representing an array aggregate."""

    def __init__(self, ast_node, expressions, name=None, type_string=None):
        """Store the element expressions plus an optional name/type string."""
        super().__init__(ast_node)
        self.name: str = name
        self.type_string = type_string
        self.expressions = expressions

    def __str__(self):
        return 'Array {}'.format(self.name)
class AnalysisKnobs():
    """Shared typer CLI options for fuzzability analysis commands.

    NOTE(review): every default is wrapped in a one-element tuple by the
    trailing comma after `typer.Option(...)` — typer normally receives the
    OptionInfo object itself, so these look like accidental 1-tuples;
    confirm against the original CLI definition.
    """
    # Where (and in which format) to export the fuzzability report.
    export: t.Optional[Path] = (typer.Option(None, '-e', '--export', help='Export the fuzzability report to a path based on the file extension.Fuzzable supports exporting to `json`, `csv`, or `md`.'),)
    list_ignored: bool = (typer.Option(False, help='If set, will also additionally output and/or export ignored symbols.'),)
    include_sym: t.Optional[str] = (typer.Option(None, help='Comma-seperated list of symbols to absolutely be considered for analysis.'),)
    include_nontop: bool = (typer.Option(False, help="If set, won't filter out only on top-level function definitions."),)
    skip_sym: t.Optional[str] = (typer.Option(None, help='Comma-seperated list of symbols to skip during analysis.'),)
    skip_stripped: bool = (typer.Option(False, help='If set, ignore symbols that are stripped in binary analysis.Will be ignored if fuzzability analysis is done on source code.'),)
    # NOTE(review): default is True but the help text reads "If set, include
    # ..." — the name says *ignore* while the help says *include*; verify
    # which behavior is intended.
    ignore_metrics: bool = (typer.Option(True, help="If set, include individual metrics' scores for each function target analyzed."),)
    score_weights: t.Optional[str] = (typer.Option(None, '-w', '--score-weights', help='Comma-seperated list of reconfigured weights for multi-criteria decision analysis when determining fuzzability.'),)
class TwoFerTest(unittest.TestCase):
    """Cases for the Exercism two-fer exercise."""

    def test_no_name_given(self):
        # Default recipient is "you".
        result = two_fer()
        self.assertEqual(result, 'One for you, one for me.')

    def test_a_name_given(self):
        result = two_fer('Alice')
        self.assertEqual(result, 'One for Alice, one for me.')

    def test_another_name_given(self):
        result = two_fer('Bob')
        self.assertEqual(result, 'One for Bob, one for me.')
class Message():
    """Data-access layer for voicemail messages backed by SQLite.

    Keeps the module-level `unplayed_count` global in sync with the database
    and pulses the configured MESSAGE_EVENT whenever the count is refreshed.
    """

    def __init__(self, db, config):
        """Create the Message table if needed and prime the unplayed count.

        :param db: sqlite3 connection (may be falsy in test contexts).
        :param config: dict providing DEBUG, MESSAGE_EVENT and
            VOICE_MAIL_MESSAGE_FOLDER keys.
        """
        if config['DEBUG']:
            print('Initializing Message')
        self.db = db
        self.config = config
        self.message_event = self.config['MESSAGE_EVENT']
        if self.db:
            sql = '\n CREATE TABLE IF NOT EXISTS Message (\n MessageID INTEGER PRIMARY KEY AUTOINCREMENT,\n CallLogID INTEGER,\n Played BOOLEAN DEFAULT 0 NOT NULL CHECK (Played IN (0,1)),\n Filename TEXT,\n DateTime TEXT,\n FOREIGN KEY(CallLogID) REFERENCES CallLog(CallLogID));'
            curs = self.db.cursor()
            curs.executescript(sql)
            curs.close()
            self._update_unplayed_count()
        if config['DEBUG']:
            print('Message initialized')

    def add(self, call_no, filepath):
        """Insert a new (unplayed) message row and return its MessageID."""
        sql = '\n INSERT INTO Message(\n CallLogID,\n Filename,\n DateTime)\n VALUES(?,?,?)\n '
        # Timestamp truncated to second precision (first 19 chars).
        arguments = [call_no, filepath, datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')[:19]]
        self.db.execute(sql, arguments)
        self.db.commit()
        # Fetch the row id of the insert we just committed.
        curs = self.db.cursor()
        curs.execute('SELECT last_insert_rowid()')
        msg_no = curs.fetchone()[0]
        curs.close()
        self._update_unplayed_count()
        return msg_no

    def delete(self, msg_no):
        """Delete the message's audio file and, on success, its DB row.

        :return: True when the file (if any) was removed and the row deleted,
            False when the file could not be removed.
        """
        sql = 'SELECT Filename FROM Message WHERE MessageID=:msg_no'
        arguments = {'msg_no': msg_no}
        curs = self.db.execute(sql, arguments)
        results = curs.fetchone()
        curs.close()
        success = True
        # BUG FIX: fetchone() returns None (not an empty sequence) when the
        # id is unknown; the original called len(None) and crashed.
        if results:
            basename = os.path.basename(results[0])
            filepath = os.path.join(self.config['VOICE_MAIL_MESSAGE_FOLDER'], basename)
            print('Deleting message: {}'.format(filepath))
            try:
                os.remove(filepath)
            except Exception as error:
                pprint(error)
                print('{} cannot be removed'.format(filepath))
                success = False
        if success:
            sql = 'DELETE FROM Message WHERE MessageID=:msg_no'
            arguments = {'msg_no': msg_no}
            self.db.execute(sql, arguments)
            self.db.commit()
            if self.config['DEBUG']:
                print('Message entry removed')
                pprint(arguments)
            self._update_unplayed_count()
        return success

    def update_played(self, msg_no, played=1):
        """Mark a message as played (or unplayed); return True on success."""
        try:
            sql = 'UPDATE Message SET Played=:played WHERE MessageID=:msg_no'
            arguments = {'msg_no': msg_no, 'played': played}
            self.db.execute(sql, arguments)
            self.db.commit()
        except Exception as e:
            print('** Error updating message played status:')
            pprint(e)
            return False
        self._update_unplayed_count()
        return True

    def get_unplayed_count(self):
        """Return the cached module-level unplayed message count."""
        global unplayed_count
        return unplayed_count

    def _update_unplayed_count(self):
        """Refresh the global unplayed count and pulse the message event."""
        sql = 'SELECT COUNT(*) FROM Message WHERE Played = 0'
        curs = self.db.execute(sql)
        global unplayed_count
        unplayed_count = curs.fetchone()[0]
        if self.config['DEBUG']:
            print('Unplayed message count is {}'.format(unplayed_count))
        # Pulse (set then clear) so waiters wake up on every change.
        self.message_event.set()
        self.message_event.clear()
class TestDispatcher(UnitTestWithNamespace):
    """Tests for event routing in the scheduler Dispatcher.

    Workers are selected by hashing the workflow/execution, so the expected
    worker index (workers[1] / workers[2]) below reflects the fixture ids
    created in setUp.
    """

    def setUp(self) -> None:
        # Build one workflow with a task triggered by 'event_1', a workflow
        # trigger on 'event_2', a snapshot, and a RUNNING manual execution.
        super().setUp()
        with Workflow(name='workflow', namespace=self.namespace_name) as workflow:
            op = Operator(name='op')
            op.action_on_condition(action=TaskAction.START, condition=Condition(expect_event_keys=['event_1']))
        self.workflow_meta = self.metadata_manager.add_workflow(namespace=self.namespace_name, name=workflow.name, content='', workflow_object=cloudpickle.dumps(workflow))
        self.metadata_manager.flush()
        self.workflow_trigger = self.metadata_manager.add_workflow_trigger(workflow_id=self.workflow_meta.id, rule=cloudpickle.dumps(WorkflowRule(condition=Condition(expect_event_keys=['event_2']))))
        self.metadata_manager.flush()
        self.snapshot_meta = self.metadata_manager.add_workflow_snapshot(workflow_id=self.workflow_meta.id, workflow_object=self.workflow_meta.workflow_object, uri='url', signature='')
        self.metadata_manager.flush()
        self.workflow_execution_meta = self.metadata_manager.add_workflow_execution(workflow_id=self.workflow_meta.id, run_type=ExecutionType.MANUAL, snapshot_id=self.snapshot_meta.id)
        self.metadata_manager.flush()
        self.metadata_manager.update_workflow_execution(workflow_execution_id=self.workflow_execution_meta.id, status=WorkflowStatus.RUNNING)
        self.metadata_manager.flush()

    def test_dispatch(self):
        """Scheduling and plain events land on the expected worker queues."""
        worker_num = 3
        workers = []
        for i in range(worker_num):
            workers.append(Worker())
        dispatcher = Dispatcher(workers=workers)
        event: Event = StartWorkflowExecutionEvent(workflow_id=self.workflow_meta.id, snapshot_id=self.snapshot_meta.id)
        event.offset = 1
        dispatcher.dispatch(event)
        self.assertEqual(1, workers[1].input_queue.qsize())
        event = StartTaskExecutionEvent(workflow_execution_id=2, task_name='op')
        event.offset = 1
        dispatcher.dispatch(event)
        self.assertEqual(1, workers[2].input_queue.qsize())
        event = Event(key='event_1', value='')
        event.namespace = self.namespace_name
        event.offset = 1
        dispatcher.dispatch(event)
        self.assertEqual(2, workers[1].input_queue.qsize())
        event = Event(key='event_2', value='')
        event.namespace = self.namespace_name
        event.offset = 1
        dispatcher.dispatch(event)
        self.assertEqual(3, workers[1].input_queue.qsize())

    def test__get_max_committed_offset(self):
        """Max committed offset tracks both workflow and execution offsets."""
        self.assertEqual((- 1), Dispatcher._get_max_committed_offset())
        self.metadata_manager.set_workflow_event_offset(self.workflow_meta.id, 10)
        self.assertEqual(10, Dispatcher._get_max_committed_offset())
        self.metadata_manager.set_workflow_execution_event_offset(self.workflow_execution_meta.id, 11)
        self.assertEqual(11, Dispatcher._get_max_committed_offset())

    def test_scheduling_event_in_recovery_mode(self):
        """Scheduling events at or below the committed offset are skipped."""
        worker_num = 3
        workers = []
        for i in range(worker_num):
            workers.append(Worker())
        event1: Event = StartWorkflowExecutionEvent(self.workflow_meta.id, self.snapshot_meta.id)
        event1.offset = 1
        event2: Event = StartWorkflowExecutionEvent(self.workflow_meta.id, self.snapshot_meta.id)
        event2.offset = 2
        event3: Event = StopWorkflowExecutionEvent(self.workflow_execution_meta.id)
        event3.offset = 1
        event4: Event = StopWorkflowExecutionEvent(self.workflow_execution_meta.id)
        event4.offset = 3
        self.metadata_manager.set_workflow_execution_event_offset(self.workflow_execution_meta.id, 2)
        self.metadata_manager.set_workflow_event_offset(self.workflow_meta.id, 1)
        self.metadata_manager.commit()
        dispatcher = Dispatcher(workers=workers)
        dispatcher.dispatch(event1)
        self.assertEqual(0, workers[1].input_queue.qsize())
        dispatcher.dispatch(event2)
        self.assertEqual(1, workers[1].input_queue.qsize())
        dispatcher.dispatch(event3)
        self.assertEqual(1, workers[1].input_queue.qsize())
        dispatcher.dispatch(event4)
        self.assertEqual(2, workers[1].input_queue.qsize())

    def test_non_scheduling_event_in_recovery_mode(self):
        """Plain events are also filtered by their committed offsets."""
        worker_num = 3
        workers = []
        for i in range(worker_num):
            workers.append(Worker())
        event1: Event = Event(key='event_1', value=None)
        event1.namespace = self.namespace_name
        event1.offset = 1
        event2: Event = Event(key='event_1', value=None)
        event2.namespace = self.namespace_name
        event2.offset = 3
        event3: Event = Event(key='event_2', value=None)
        event3.namespace = self.namespace_name
        event3.offset = 1
        event4: Event = Event(key='event_2', value=None)
        event4.namespace = self.namespace_name
        event4.offset = 2
        self.metadata_manager.set_workflow_execution_event_offset(self.workflow_execution_meta.id, 2)
        self.metadata_manager.set_workflow_event_offset(self.workflow_meta.id, 1)
        self.metadata_manager.commit()
        dispatcher = Dispatcher(workers=workers)
        dispatcher.dispatch(event1)
        self.assertEqual(0, workers[1].input_queue.qsize())
        dispatcher.dispatch(event2)
        self.assertEqual(1, workers[1].input_queue.qsize())
        dispatcher.dispatch(event3)
        self.assertEqual(1, workers[1].input_queue.qsize())
        dispatcher.dispatch(event4)
        self.assertEqual(2, workers[1].input_queue.qsize())
class conv2d_bias_add_relu(conv2d_bias_add_activation):
    """Fused conv2d + bias-add with a hard-wired ReLU activation."""

    def __init__(self, stride, pad, dilate=1, group=1) -> None:
        # The activation is fixed to 'relu'; everything else passes through.
        super().__init__('relu', stride, pad, dilate=dilate, group=group)

    def _get_op_attributes(self):
        """Return the inherited attributes minus the implicit activation."""
        attrs = super()._get_op_attributes()
        attrs.pop('activation')
        return attrs
class Hardfork():
    """Metadata wrapper around one `ethereum.<fork>` package.

    NOTE(review): several methods here are clearly consumed as properties or
    classmethods elsewhere in this class (`self.criteria`, `self.short_name`,
    `self.path`, `self.name` are referenced without calls; `discover`/`load`/
    `load_from_json` take `cls`) — the @classmethod/@property decorators look
    stripped during extraction; confirm against the original source.
    """
    # The imported `ethereum.<fork>` package this instance describes.
    mod: ModuleType

    def discover(cls: Type[H]) -> List[H]:
        """Find all fork packages under `ethereum`, sorted by activation criteria."""
        path = getattr(ethereum, '__path__', None)
        if (path is None):
            raise ValueError('module `ethereum` has no path information')
        modules = pkgutil.iter_modules(path, (ethereum.__name__ + '.'))
        # Only packages (directories) can be forks.
        modules = (module for module in modules if module.ispkg)
        forks: List[H] = []
        for pkg in modules:
            mod = importlib.import_module(pkg.name)
            # A fork package is recognised by its FORK_CRITERIA attribute.
            if hasattr(mod, 'FORK_CRITERIA'):
                forks.append(cls(mod))
        forks.sort(key=(lambda fork: fork.criteria))
        return forks

    def load(cls: Type[H], config_dict: Dict[(ForkCriteria, str)]) -> List[H]:
        """Build forks from an explicit {criteria: package-name} mapping."""
        config = sorted(config_dict.items(), key=(lambda x: x[0]))
        forks = []
        for (criteria, name) in config:
            mod = importlib.import_module(('ethereum.' + name))
            # Override the package's own criteria with the configured one.
            mod.FORK_CRITERIA = criteria
            forks.append(cls(mod))
        return forks

    def load_from_json(cls: Type[H], json: Any) -> List[H]:
        """Build forks from a geth-style chain config JSON document."""
        c = json['config']
        config = {ByBlockNumber(0): 'frontier', ByBlockNumber(c['homesteadBlock']): 'homestead', ByBlockNumber(c['eip150Block']): 'tangerine_whistle', ByBlockNumber(c['eip155Block']): 'spurious_dragon', ByBlockNumber(c['byzantiumBlock']): 'byzantium', ByBlockNumber(c['constantinopleBlock']): 'constantinople', ByBlockNumber(c['istanbulBlock']): 'istanbul', ByBlockNumber(c['berlinBlock']): 'berlin', ByBlockNumber(c['londonBlock']): 'london', ByBlockNumber(c['mergeForkBlock']): 'paris', ByTimestamp(c['shanghaiTime']): 'shanghai'}
        # The DAO fork is special-cased on mainnet and not supported here.
        if ('daoForkBlock' in c):
            raise Exception('Hardfork.load_from_json() does not support Mainnet')
        return cls.load(config)

    def __init__(self, mod: ModuleType) -> None:
        self.mod = mod

    def consensus(self) -> ConsensusType:
        """Classify the fork by whether its fork module validates PoW."""
        if hasattr(self.module('fork'), 'validate_proof_of_work'):
            return ConsensusType.PROOF_OF_WORK
        else:
            return ConsensusType.PROOF_OF_STAKE

    def criteria(self) -> ForkCriteria:
        """Activation criteria declared by the fork package."""
        criteria = self.mod.FORK_CRITERIA
        assert isinstance(criteria, ForkCriteria)
        return criteria

    def block(self) -> int:
        """Activation block number; raises AttributeError for timestamp forks."""
        if isinstance(self.criteria, ByBlockNumber):
            return self.criteria.block_number
        else:
            raise AttributeError

    def timestamp(self) -> int:
        """Activation timestamp; raises AttributeError for block-number forks."""
        if isinstance(self.criteria, ByTimestamp):
            return self.criteria.timestamp
        else:
            raise AttributeError

    def has_activated(self, block_number: int, timestamp: int) -> bool:
        """True when this fork is active at the given block/timestamp."""
        return self.criteria.check(block_number, timestamp)

    def path(self) -> Optional[str]:
        """Filesystem path of the fork package, if it has one."""
        return getattr(self.mod, '__path__', None)

    def short_name(self) -> str:
        """Last component of the module name, e.g. 'london'."""
        return self.mod.__name__.split('.')[(- 1)]

    def name(self) -> str:
        """Fully qualified module name, e.g. 'ethereum.london'."""
        return self.mod.__name__

    def title_case_name(self) -> str:
        """Human-readable name, e.g. 'Tangerine Whistle'."""
        return self.short_name.replace('_', ' ').title()

    def __repr__(self) -> str:
        return (((((self.__class__.__name__ + '(') + f'name={self.name!r}, ') + f'criteria={self.criteria}, ') + '...') + ')')

    def import_module(self) -> ModuleType:
        """Return the already-imported fork package."""
        return self.mod

    def module(self, name: str) -> Any:
        """Import and return a submodule of the fork package."""
        return importlib.import_module(((self.mod.__name__ + '.') + name))

    def optimized_module(self, name: str) -> Any:
        """Import the matching submodule from `ethereum_optimized`."""
        assert self.mod.__name__.startswith('ethereum.')
        module = ((('ethereum_optimized' + self.mod.__name__[8:]) + '.') + name)
        return importlib.import_module(module)

    def iter_modules(self) -> Iterator[ModuleInfo]:
        """Iterate over the fork package's direct submodules."""
        if (self.path is None):
            raise ValueError(f'cannot walk {self.name}, path is None')
        return pkgutil.iter_modules(self.path, (self.name + '.'))

    def walk_packages(self) -> Iterator[ModuleInfo]:
        """Recursively walk the fork package's submodules."""
        if (self.path is None):
            raise ValueError(f'cannot walk {self.name}, path is None')
        return pkgutil.walk_packages(self.path, (self.name + '.'))
class CrawlerConfig(crawler.CrawlerConfig):
    """Crawler configuration bundling storage, progress reporting and API access."""

    def __init__(self, storage, progresser, api_client, variables=None):
        super(CrawlerConfig, self).__init__()
        self.storage = storage
        self.progresser = progresser
        # Fall back to a fresh dict so callers never share a mutable default.
        self.variables = variables if variables else {}
        self.client = api_client
def get_dependencies(primary_type, types):
    """Return all struct types transitively referenced by *primary_type*.

    :param primary_type: name of the root struct (a key of *types*).
    :param types: mapping of struct name -> list of field dicts with 'type'.
    :return: tuple of dependency names, excluding *primary_type* itself.
    """
    deps = set()
    pending = [primary_type]
    while pending:
        struct_name = pending.pop()
        deps.add(struct_name)
        for field in types[struct_name]:
            field_type = field['type']
            # An array field depends on its element type.
            if is_array_type(field_type):
                field_type = field_type[:field_type.index('[')]
            # Only known struct types contribute dependencies; skip ones
            # already collected.  (The original ended this chain with an
            # unreachable `raise` after exhaustive in/not-in branches.)
            if field_type in types and field_type not in deps:
                pending.append(field_type)
    deps.remove(primary_type)
    return tuple(deps)
class WhoosheeStamp(object):
    """Persist and compare a version stamp for the whooshee search index.

    NOTE(review): current/store/read/is_valid take `cls` but carry no
    @classmethod decorator — these look stripped during extraction; confirm
    against the original source.
    """
    # The stamp file lives inside the configured whooshee directory.
    PATH = os.path.join(app.config['WHOOSHEE_DIR'], 'whooshee-version')

    def current(cls):
        """Return the installed NAME-VERSION strings of the whooshee RPMs."""
        packages = ['python3-flask-whooshee', 'python3-whoosh']
        cmd = (['rpm', '-q', '--qf', '%{NAME}-%{VERSION}\n'] + packages)
        process = Popen(cmd, stdout=PIPE, stderr=PIPE)
        (out, err) = process.communicate()
        return out.decode('utf-8').rstrip()

    def store(cls):
        """Write the current version stamp to PATH."""
        with open(cls.PATH, 'w') as f:
            f.write(cls.current())

    def read(cls):
        """Return the stored stamp, or None when the file is missing."""
        try:
            with open(cls.PATH, 'r') as f:
                return f.read().rstrip()
        except OSError:
            return None

    def is_valid(cls):
        """True when the stored stamp matches the installed versions."""
        return (cls.read() == cls.current())
class OptionPlotoptionsSolidgaugeSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    """Wrapper for the Highcharts solidgauge sonification `playDelay` mapping.

    NOTE(review): each option appears as a getter/setter pair defined under
    the same name; without the (apparently stripped) @property/@<name>.setter
    decorators the second definition shadows the first — confirm against the
    original generated source.
    """
    def mapFunction(self):
        # Getter: configured mapping function (default None).
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter: store a raw (non-JS) value.
        self._config(value, js_type=False)
    def mapTo(self):
        # Getter: data property the play delay maps to.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Getter: what the mapping range is computed relative to.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class FullstoryTestClient():
    """Minimal Fullstory HTTP API client used by connector tests."""

    # Filled per-instance from the connection secrets in __init__.
    headers: object = {}
    base_url: str = ''

    def __init__(self, connection_config_fullstory: ConnectionConfig):
        fullstory_secrets = connection_config_fullstory.secrets
        self.headers = {'Authorization': f"Basic {fullstory_secrets['api_key']}"}
        # BUG FIX: the original line was a truncated, unterminated f-string
        # (`self.base_url = f"`) — a syntax error.  Restored to the public
        # Fullstory API base.  NOTE(review): the original URL was lost in
        # extraction; confirm the host (it may come from the connection
        # config instead).
        self.base_url = "https://api.fullstory.com"

    def get_user(self, user_id: str) -> requests.Response:
        """GET an individual user record."""
        user_response: requests.Response = requests.get(
            url=f'{self.base_url}/users/v1/individual/{user_id}',
            headers=self.headers,
        )
        return user_response

    def update_user(self, user_id: str, user_data) -> requests.Response:
        """POST custom vars for an individual user."""
        user_response: requests.Response = requests.post(
            url=f'{self.base_url}/users/v1/individual/{user_id}/customvars',
            json=user_data,
            headers=self.headers,
        )
        return user_response
class Link():
    """Async helper that talks to the embykeeper auth bot over Telegram.

    Commands are sent to the bot and replies are TOML documents matched back
    to the command via a temporary message handler.

    NOTE(review): `instance` is referenced as `self.instance` (no call) in
    the command f-strings below — it was most likely a @property stripped
    during extraction; confirm against the original source.
    """
    # Username of the auth bot all commands are sent to.
    bot = 'embykeeper_auth_bot'

    def __init__(self, client: Client):
        self.client = client
        self.log = logger.bind(scheme='telelink', username=client.me.name)

    def instance(self):
        """Deterministic per-machine UUID derived from the MAC address."""
        rd = random.Random()
        rd.seed(uuid.getnode())
        return uuid.UUID(int=rd.getrandbits(128))

    async def delete_messages(self, messages: List[Message]):
        """Revoke-delete the given messages concurrently (best effort)."""
        async def delete(m: Message):
            try:
                (await m.delete(revoke=True))
                text = (m.text or m.caption or '')
                text = truncate_str(text.replace('\n', ''), 30)
                self.log.debug(f'[gray50] API : {text}[/]')
            except asyncio.CancelledError:
                pass
        return (await asyncio.gather(*[delete(m) for m in messages]))

    async def post(self, cmd, photo=None, condition: Callable=None, timeout: int=20, retries=3, name: str=None) -> Tuple[(Optional[str], Optional[str])]:
        """Send *cmd* to the bot and wait for a matching TOML reply.

        Retries up to *retries* times on timeout.
        NOTE(review): returns the results dict on 'ok', False on 'error' or
        unknown status, and None when all retries time out — callers must
        distinguish None from False.
        """
        for r in range(retries):
            self.log.debug(f'[gray50] {timeout} : {self.bot}[/]')
            # Mute the bot chat for the duration of the exchange.
            (await self.client.mute_chat(self.bot, ((time.time() + timeout) + 5)))
            future = asyncio.Future()
            handler = MessageHandler(async_partial(self._handler, cmd=cmd, future=future, condition=condition), ((filters.text & filters.bot) & filters.user(self.bot)))
            (await self.client.add_handler(handler, group=1))
            try:
                messages = []
                messages.append((await self.client.send_message(self.bot, f'/start quiet')))
                (await asyncio.sleep(0.5))
                if photo:
                    messages.append((await self.client.send_photo(self.bot, photo, cmd)))
                else:
                    messages.append((await self.client.send_message(self.bot, cmd)))
                self.log.debug(f'[gray50]-> {cmd}[/]')
                results = (await asyncio.wait_for(future, timeout=timeout))
            except asyncio.CancelledError:
                # Clean up our outgoing messages quickly, then re-raise.
                try:
                    (await asyncio.wait_for(self.delete_messages(messages), 1.0))
                except asyncio.TimeoutError:
                    pass
                finally:
                    raise
            except asyncio.TimeoutError:
                (await self.delete_messages(messages))
                if ((r + 1) < retries):
                    self.log.info(f'{name} ({(r + 1)}/{retries}), 3 .')
                    (await asyncio.sleep(3))
                    continue
                else:
                    self.log.warning(f'{name} ({(r + 1)}/{retries}).')
                    return None
            else:
                (await self.delete_messages(messages))
                (status, errmsg) = [results.get(p, None) for p in ('status', 'errmsg')]
                if (status == 'error'):
                    self.log.warning(f'{name}: {errmsg}.')
                    return False
                elif (status == 'ok'):
                    return results
                else:
                    self.log.warning(f'{name}.')
                    return False
            finally:
                # Always detach the temporary reply handler.
                (await self.client.remove_handler(handler, group=1))

    async def _handler(self, client: Client, message: Message, cmd: str, future: asyncio.Future, condition: Union[(bool, Callable[(..., Coroutine)], Callable)]=None):
        """Message handler: parse a TOML reply and resolve *future* on match."""
        try:
            toml = tomli.loads(message.text)
        except tomli.TOMLDecodeError:
            # NOTE(review): this coroutine is created but never awaited, so
            # the deletion never actually runs — likely a missing `await`;
            # confirm against the original source.
            self.delete_messages([message])
        else:
            try:
                if (toml.get('command', None) == cmd):
                    # NOTE(review): when `condition` is a truthy non-callable
                    # (e.g. True), none of these branches bind `cond`, which
                    # would raise NameError below — verify intended contract.
                    if (condition is None):
                        cond = True
                    elif asyncio.iscoroutinefunction(condition):
                        cond = (await condition(toml))
                    elif callable(condition):
                        cond = condition(toml)
                    if cond:
                        future.set_result(toml)
                        (await asyncio.sleep(0.5))
                        (await self.delete_messages([message]))
                        return
            except asyncio.CancelledError as e:
                try:
                    (await asyncio.wait_for(self.delete_messages([message]), 1))
                except asyncio.TimeoutError:
                    pass
                finally:
                    future.set_exception(e)
            finally:
                # Let other handlers see the message as well.
                message.continue_propagation()

    async def delete_history(self):
        """Wipe the entire chat history with the auth bot."""
        self.log.debug('.')
        (await self.client.invoke(DeleteHistory(max_id=0, peer=(await self.client.resolve_peer(self.bot)))))

    async def auth(self, service: str):
        """Authorize this instance for *service*; True on success."""
        results = (await self.post(f'/auth {service} {self.instance}', name=f' {service.upper()} '))
        return bool(results)

    async def captcha(self):
        """Request captcha-solving credentials: (token, proxy, useragent)."""
        results = (await self.post(f'/captcha {self.instance}', timeout=240, name=''))
        if results:
            return [results.get(p, None) for p in ('token', 'proxy', 'useragent')]
        else:
            return (None, None, None)

    async def answer(self, question: str):
        """Ask the bot to answer *question*; returns (answer, by)."""
        results = (await self.post(f'/answer {self.instance} {question}', timeout=20, name=''))
        if results:
            return (results.get('answer', None), results.get('by', None))
        else:
            return (None, None)

    async def gpt(self, prompt: str):
        """Forward *prompt* to the bot's GPT backend; returns (answer, by)."""
        results = (await self.post(f'/gpt {self.instance} {prompt}', timeout=20, name=''))
        if results:
            return (results.get('answer', None), results.get('by', None))
        else:
            return (None, None)

    async def visual(self, photo, options: List[str], question=None):
        """Ask for a visual-recognition answer among *options*; returns (answer, by)."""
        cmd = f"/visual {self.instance} {'/'.join(options)}"
        if question:
            cmd += f' {question}'
        results = (await self.post(cmd, photo=photo, timeout=20, name=''))
        if results:
            return (results.get('answer', None), results.get('by', None))
        else:
            return (None, None)

    async def send_log(self, message):
        """Forward a log line to the bot; True on success."""
        results = (await self.post(f'/log {self.instance} {message}', name=' Telegram'))
        return bool(results)
class ZMQDeserializer(lg.Node):
    """LabGraph node converting raw ZMQ payloads into RandomMessage samples.

    NOTE(review): the bare `(INPUT)` / `(OUTPUT)` expressions below look like
    residue of stripped @lg.subscriber(INPUT) / @lg.publisher(OUTPUT)
    decorators on `deserialize` — confirm against the original graph code.
    """
    INPUT = lg.Topic(ZMQMessage)
    OUTPUT = lg.Topic(RandomMessage)
    (INPUT)
    (OUTPUT)
    async def deserialize(self, message: ZMQMessage) -> lg.AsyncPublisher:
        # Stamp with the receive time and reinterpret the payload buffer.
        (yield (self.OUTPUT, RandomMessage(timestamp=time.time(), data=np.frombuffer(message.data))))
# NOTE(review): the two leading `.xfail(...)` / `.skipif(...)` lines are
# syntactically invalid residue of stripped @pytest.mark decorators —
# restore the `@pytest.mark.` prefixes when reconciling with the original.
.xfail(raises=ImageComparisonFailure, reason='Matplotlib plots for reasons a different image size.')
.skipif((HIGH_MEMORY > memory), reason='Travis has too less memory to run it.')
def test_hicPlotMatrix_cool_log1p():
    """Render the Li et al. cool matrix with --log1p and compare to the reference PNG."""
    outfile = NamedTemporaryFile(suffix='.png', prefix='hicexplorer_test_cool', delete=False)
    args = '--matrix {0}/Li_et_al_2015.cool --log1p --outFileName {1} '.format(ROOT, outfile.name).split()
    compute(hicexplorer.hicPlotMatrix.main, args, 5)
    res = compare_images(((ROOT + 'hicPlotMatrix') + '/Li_cool_log1p.png'), outfile.name, tol=tolerance)
    # compare_images returns None on success, a diagnostic string otherwise.
    assert (res is None), res
    if REMOVE_OUTPUT:
        os.remove(outfile.name)
class ListMeta(Meta):
    """Metaclass implementing the `List[...]` subscription syntax."""

    def __getitem__(self, type_elem):
        """Create a parametrised List subclass for *type_elem*.

        A string argument is first resolved to a type via str2type.
        """
        elem = str2type(type_elem) if isinstance(type_elem, str) else type_elem
        return type('ListBis', (List,), {'type_elem': elem})

    def get_template_parameters(self):
        """Forward to the element type when it exposes template parameters."""
        if not hasattr(self.type_elem, 'get_template_parameters'):
            return tuple()
        return self.type_elem.get_template_parameters()

    def __repr__(self):
        # An unparametrised List has no element type yet.
        if not hasattr(self, 'type_elem'):
            return super().__repr__()
        elem = self.type_elem
        # Plain classes print as their bare name; Meta instances (and any
        # other value) fall back to repr().
        if isinstance(elem, type) and not isinstance(elem, Meta):
            inner = elem.__name__
        else:
            inner = repr(elem)
        return f'List[{inner}]'

    def format_as_backend_type(self, backend_type_formatter, **kwargs):
        """Delegate code generation for this list type to the formatter."""
        return backend_type_formatter.make_list_code(self.type_elem, **kwargs)
class TestCreateIndexTemplateRunner():
    """Tests for the Rally create-index-template runner.

    NOTE(review): the bare `('elasticsearch.Elasticsearch')` and `.asyncio`
    lines below are residue of stripped @mock.patch('elasticsearch.Elasticsearch')
    and @pytest.mark.asyncio decorators — restore them when reconciling with
    the original.
    """
    ('elasticsearch.Elasticsearch')
    .asyncio
    async def test_create_index_templates(self, es):
        # Two templates created -> weight 2 ops, request params forwarded.
        es.indices.put_template = mock.AsyncMock()
        r = runner.CreateIndexTemplate()
        params = {'templates': [('templateA', {'settings': {}}), ('templateB', {'settings': {}})], 'request-params': {'timeout': 50, 'create': 'true'}}
        result = (await r(es, params))
        assert (result == {'weight': 2, 'unit': 'ops', 'success': True})
        es.indices.put_template.assert_has_awaits([mock.call(name='templateA', body={'settings': {}}, params=params['request-params']), mock.call(name='templateB', body={'settings': {}}, params=params['request-params'])])
    ('elasticsearch.Elasticsearch')
    .asyncio
    async def test_param_templates_mandatory(self, es):
        # Missing the mandatory 'templates' parameter raises DataError and
        # must not touch the client.
        es.indices.put_template = mock.AsyncMock()
        r = runner.CreateIndexTemplate()
        params = {}
        with pytest.raises(exceptions.DataError, match="Parameter source for operation 'create-index-template' did not provide the mandatory parameter 'templates'. Add it to your parameter source and try again."):
            (await r(es, params))
        assert (es.indices.put_template.await_count == 0)
class Information(ErsiliaBase):
    """Collects stored information artifacts for a fetched model.

    Reads the pack mode, service class, API list/schema, size, metadata and
    card files from the model's repository (bento) and destination folders.
    """

    def __init__(self, model_id, config_json=None):
        ErsiliaBase.__init__(self, config_json=config_json, credentials_json=None)
        self.model_id = model_id
        self.repository_folder = os.path.join(self._get_bento_location(model_id=self.model_id))
        self.dest_folder = os.path.join(self._model_path(model_id=model_id))

    @staticmethod
    def _read_text(path):
        """Return the stripped text content of *path*, or None if absent."""
        if not os.path.exists(path):
            return None
        with open(path, 'r') as f:
            return f.read().rstrip()

    @staticmethod
    def _read_json(path):
        """Return the parsed JSON content of *path*, or None if absent."""
        if not os.path.exists(path):
            return None
        with open(path, 'r') as f:
            return json.load(f)

    def _get_pack_mode(self):
        return self._read_text(os.path.join(self.dest_folder, PACKMODE_FILE))

    def _get_service_class(self):
        return self._read_text(os.path.join(self.repository_folder, SERVICE_CLASS_FILE))

    def _get_api_schema(self):
        return self._read_json(os.path.join(self.dest_folder, API_SCHEMA_FILE))

    def _get_size(self):
        return self._read_json(os.path.join(self.dest_folder, MODEL_SIZE_FILE))

    def _get_metadata(self):
        return self._read_json(os.path.join(self.dest_folder, METADATA_JSON_FILE))

    def _get_card(self):
        return self._read_json(os.path.join(self.dest_folder, CARD_FILE))

    def _get_apis_list(self):
        # BUG FIX: the existence check used dest_folder/CARD_FILE while the
        # open() read repository_folder/APIS_LIST_FILE — check the same file
        # that is actually read.
        apis_list_file = os.path.join(self.repository_folder, APIS_LIST_FILE)
        if not os.path.exists(apis_list_file):
            return None
        with open(apis_list_file, 'r') as f:
            return [x.rstrip() for x in f.readlines()]

    def get(self):
        """Assemble all information fields into one dictionary."""
        data = {
            'pack_mode': self._get_pack_mode(),
            'service_class': self._get_service_class(),
            'apis_list': self._get_apis_list(),
            'api_schema': self._get_api_schema(),
            'size': self._get_size(),
            'metadata': self._get_metadata(),
            'card': self._get_card(),
        }
        return data
# NOTE(review): the `_parameters()` call and the two bare keyword tuples
# below are residue of a stripped @pytest.mark.parametrize-style decorator
# enumerating (name, period, fail_value, pass_value) cases — confirm
# against the original test module.
_parameters()
(name='day', period='day', fail_value=4, pass_value=1)
(name='hour', period='hour', fail_value=(4 * 24), pass_value=24)
def test_anomalyless_table_volume_anomalies_periods_params(test_id: str, dbt_project: DbtProject, period: str, fail_value: int, pass_value: int):
    """Volume anomaly test passes inside the detection period and fails beyond it."""
    # Data ends 4 days before today so the detection window controls the result.
    utc_today = (datetime.utcnow().date() - timedelta(days=4))
    data = [{TIMESTAMP_COLUMN: cur_date.strftime(DATE_FORMAT)} for cur_date in generate_dates(base_date=utc_today)]
    data += [{TIMESTAMP_COLUMN: utc_today.strftime(DATE_FORMAT)}]
    test_args = {**DBT_TEST_ARGS, 'training_period': {'period': 'day', 'count': 30}, 'detection_period': {'period': period, 'count': pass_value}}
    test_result = dbt_project.test(test_id, DBT_TEST_NAME, test_args, data=data)
    assert (test_result['status'] == 'pass')
    # Widening the detection window past the data gap must flag an anomaly.
    test_args = {**test_args, 'detection_period': {'period': period, 'count': fail_value}}
    test_result = dbt_project.test(test_id, DBT_TEST_NAME, test_args, data=data)
    assert (test_result['status'] == 'fail')
def start_task_execution(workflow_execution_id: int, task_name: str) -> str:
    """Ask the scheduler to start one task execution.

    :param workflow_execution_id: id of the enclosing workflow execution.
    :param task_name: name of the task to start.
    :return: the scheduler client's identifier for the new task execution.
    :raises AIFlowException: propagated unchanged from the scheduler client.
    """
    client = get_scheduler_client()
    try:
        return client.start_task_execution(workflow_execution_id, task_name)
    except AIFlowException as e:
        logger.exception('Failed to start execution for task %s with exception %s',
                         f'{workflow_execution_id}.{task_name}', str(e))
        # Bare raise preserves the original traceback (idiomatic re-raise).
        raise
def fill_context(ctx, database, table, location, delay, latitude, longitude, geojson, spatialite, raw, **kwargs):
    """Stash all CLI options onto the click context object for sub-commands."""
    options = dict(
        database=database,
        table=table,
        location=location,
        delay=delay,
        latitude=latitude,
        longitude=longitude,
        geojson=geojson,
        spatialite=spatialite,
        raw=raw,
        kwargs=kwargs,
    )
    ctx.obj.update(options)
def test_encoding_unknown_performative():
    """Encoding a message whose performative fails equality checks raises ValueError."""
    msg = OefSearchMessage(
        performative=OefSearchMessage.Performative.REGISTER_SERVICE,
        service_description=Description({'foo1': 1, 'bar1': 2}),
    )
    # Force every performative comparison to miss so the encoder's dispatch
    # cannot recognise the performative.
    patched_eq = mock.patch.object(OefSearchMessage.Performative, '__eq__', return_value=False)
    with pytest.raises(ValueError, match='Performative not valid:'), patched_eq:
        OefSearchMessage.serializer.encode(msg)
class OptionPlotoptionsTilemapSonificationTracksMappingPlaydelay(Options):
    """Configuration for ``plotOptions.tilemap.sonification.tracks.mapping.playDelay``.

    Each option is a property backed by the shared ``Options`` config store.
    The original block defined each getter and its setter under the same bare
    name with no ``@property``/``.setter`` decorators, so every getter was
    immediately shadowed by its setter and unreachable; the decorators are
    restored here so attribute reads hit ``_config_get`` and writes hit
    ``_config``.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class FuseSplitCatTestCase(unittest.TestCase):
    """Graph-transformation tests: fusing `split` + `concatenate` into direct copies.

    Each test builds a small AIT graph, compiles it, checks whether the
    `split` op survived in the optimized graph, and verifies numerics against
    eager PyTorch.
    """
    def _test_fuse_split_cat_rearrange(self, M, N, split, remove_split=True):
        """Split on dim 0, concatenate the chunks in reverse order.

        ``remove_split`` states whether the fusion pass is expected to
        eliminate the split op.  Note the PyTorch reference hard-codes two
        chunks (``split_pt[1], split_pt[0]``), so callers pass 2-way splits.
        """
        dtype = 'float16'
        M = IntImm(M)
        N = IntImm(N)
        input_1 = Tensor(shape=[M, N], name='input_1', is_input=True)
        split_2 = ops.split()(input_1, split, 0)
        # Reversed chunk order exercises the rearrange path of the pass.
        concatenate_3 = ops.concatenate()(split_2[::(- 1)], 0)
        concatenate_3._attrs['name'] = 'output_0'
        concatenate_3._attrs['is_output'] = True
        model = compile_model(concatenate_3, detect_target(), './tmp', self._testMethodName)
        self.assertEqual(graph_has_op(model.debug_sorted_graph, 'split'), (not remove_split))
        input_1 = get_random_torch_tensor((M.value(), N.value()), dtype=dtype)
        split_pt = torch.split(input_1, split, 0)
        y_pt = torch.cat([split_pt[1], split_pt[0]], 0)
        y_ait = torch.empty_like(y_pt)
        model.run_with_tensors({'input_1': input_1}, [y_ait])
        # Pure data movement: output must be bit-exact.
        torch.testing.assert_close(y_ait, y_pt, atol=0, rtol=0)
    def test_fuse_split_cat_even(self):
        """Even 2-way split is fused away."""
        self._test_fuse_split_cat_rearrange(512, 512, split=[256, 256], remove_split=True)
    def test_fuse_split_cat_odd(self):
        """Uneven 2-way split is also fused away."""
        self._test_fuse_split_cat_rearrange(512, 512, split=[139, 373], remove_split=True)
    def test_fuse_split_cat_reuse(self):
        """A split output consumed twice by the concat is still fusible."""
        dtype = 'float16'
        M = IntImm(512)
        N = IntImm(512)
        input_1 = Tensor(shape=[M, N], name='input_1', is_input=True)
        split_2 = ops.split()(input_1, int((M.value() / 2)), 0)
        # split_2[1] appears twice in the concat inputs.
        concatenate_3 = ops.concatenate()([split_2[1], split_2[0], split_2[1]], 0)
        concatenate_3._attrs['name'] = 'output_0'
        concatenate_3._attrs['is_output'] = True
        model = compile_model(concatenate_3, detect_target(), './tmp', self._testMethodName)
        self.assertFalse(graph_has_op(model.debug_sorted_graph, 'split'))
        input_1 = get_random_torch_tensor((M.value(), N.value()), dtype=dtype)
        split_pt = torch.split(input_1, int((M.value() / 2)), 0)
        y_pt = torch.cat([split_pt[1], split_pt[0], split_pt[1]], 0)
        y_ait = torch.empty_like(y_pt)
        model.run_with_tensors({'input_1': input_1}, [y_ait])
        torch.testing.assert_close(y_ait, y_pt, atol=0, rtol=0)
    def test_fuse_split_cat_dim1(self):
        """Split/concat along dim 1 (same dim) is fused away."""
        dtype = 'float16'
        M = IntImm(512)
        N = IntImm(512)
        input_1 = Tensor(shape=[M, N], name='input_1', is_input=True)
        split_2 = ops.split()(input_1, int((N.value() / 2)), 1)
        concatenate_3 = ops.concatenate()(split_2[::(- 1)], 1)
        concatenate_3._attrs['name'] = 'output_0'
        concatenate_3._attrs['is_output'] = True
        model = compile_model(concatenate_3, detect_target(), './tmp', self._testMethodName)
        self.assertFalse(graph_has_op(model.debug_sorted_graph, 'split'))
        input_1 = get_random_torch_tensor((M.value(), N.value()), dtype=dtype)
        split_pt = torch.split(input_1, int((N.value() / 2)), 1)
        y_pt = torch.cat(split_pt[::(- 1)], 1)
        y_ait = torch.empty_like(y_pt)
        model.run_with_tensors({'input_1': input_1}, [y_ait])
        torch.testing.assert_close(y_ait, y_pt, atol=0, rtol=0)
    def test_fuse_split_cat_different_dims(self):
        """Split on dim 0 but concat on dim 1: fusion must NOT happen."""
        dtype = 'float16'
        M = IntImm(512)
        N = IntImm(512)
        input_1 = Tensor(shape=[M, N], name='input_1', is_input=True)
        split_2 = ops.split()(input_1, int((M.value() / 2)), 0)
        concatenate_3 = ops.concatenate()(split_2[::(- 1)], 1)
        concatenate_3._attrs['name'] = 'output_0'
        concatenate_3._attrs['is_output'] = True
        model = compile_model(concatenate_3, detect_target(), './tmp', self._testMethodName)
        # split must survive since the dims differ.
        self.assertTrue(graph_has_op(model.debug_sorted_graph, 'split'))
        input_1 = get_random_torch_tensor((M.value(), N.value()), dtype=dtype)
        split_pt = torch.split(input_1, int((M.value() / 2)), 0)
        y_pt = torch.cat(split_pt[::(- 1)], 1)
        y_ait = torch.empty_like(y_pt)
        model.run_with_tensors({'input_1': input_1}, [y_ait])
        torch.testing.assert_close(y_ait, y_pt, atol=0, rtol=0)
    def test_fuse_split_cat_bmm(self):
        """Split feeding both bmm ops and a concat: split still eliminated."""
        dtype = 'float16'
        B = 1
        M = 128
        N = 512
        K = 512
        split_size_or_sections = 256
        split_dim = 2
        T_A = Tensor(shape=[B, M, (K * 2)], dtype=dtype, name='input0', is_input=True)
        T_B = Tensor(shape=[B, N, K], dtype=dtype, name='input1', is_input=True)
        Xs = ops.split()(T_A, split_size_or_sections, split_dim)
        Ys = ops.split()(T_B, split_size_or_sections, split_dim)
        assert ((len(Xs) // 2) == len(Ys))
        n = 2
        Cs = []
        for i in range(n):
            X = Xs[i]
            Y = Ys[i]
            C = ops.bmm_rcr()(X, Y)
            Cs.append(C)
        # The remaining split chunks are reused (twice each) by a second concat.
        extra_concat = ops.concatenate()([Xs[3], Xs[2], Xs[3], Xs[2]], dim=split_dim)
        bmm_cat = ops.concatenate()(Cs, dim=split_dim)
        Y = ops.elementwise(FuncEnum.ADD)(extra_concat, bmm_cat)
        Y._attrs['name'] = 'output'
        Y._attrs['is_output'] = True
        a = get_random_torch_tensor([B, M, (K * 2)], dtype)
        b = get_random_torch_tensor([B, N, K], dtype)
        xs = a.split(split_size_or_sections, split_dim)
        ys = b.split(split_size_or_sections, split_dim)
        cs = []
        for i in range(n):
            x = xs[i]
            y = ys[i]
            # bmm_rcr is row-major x column-major: reference transposes y.
            c = torch.bmm(x, y.permute(0, 2, 1))
            cs.append(c)
        extra_concat_pt = torch.cat([xs[3], xs[2], xs[3], xs[2]], dim=split_dim)
        bmm_cat_pt = torch.cat(cs, dim=split_dim)
        y_pt = torch.add(extra_concat_pt, bmm_cat_pt)
        target = detect_target()
        model = compile_model(Y, target, './tmp', self._testMethodName)
        self.assertFalse(graph_has_op(model.debug_sorted_graph, 'split'))
        # Expected final graph size after all fusions.
        self.assertEqual(len(model.debug_sorted_graph), 5)
        y = torch.empty_like(y_pt)
        model.run_with_tensors({'input0': a, 'input1': b}, [y])
        # bmm introduces fp16 rounding, so use loose tolerances here.
        self.assertTrue(torch.allclose(y, y_pt, atol=0.01, rtol=0.01))
    def test_fuse_split_cat_interleaved(self):
        """Two sliced views each split, chunks interleaved in the concat."""
        dtype = 'float16'
        M = IntImm(20)
        N = IntImm(60)
        X0 = Tensor(shape=[M, N], name='x0', is_input=True)
        slice_start_indices_0 = [0, 0]
        slice_end_indices_0 = [None, 20]
        dynamic_slice_0 = ops.dynamic_slice()(X0, slice_start_indices_0, slice_end_indices_0)
        slice_start_indices_1 = [0, 20]
        slice_end_indices_1 = [None, 40]
        dynamic_slice_1 = ops.dynamic_slice()(X0, slice_start_indices_1, slice_end_indices_1)
        (split_0_0, split_0_1) = ops.split()(dynamic_slice_0, [10, 10], 1)
        (split_1_0, split_1_1) = ops.split()(dynamic_slice_1, [10, 10], 1)
        # Interleaved order across the two splits.
        Y = ops.concatenate()([split_0_0, split_1_0, split_0_1, split_1_1], 1)
        Y._attrs['name'] = 'y'
        Y._attrs['is_output'] = True
        model = compile_model(Y, detect_target(), './tmp', 'test_fuse_split_cat_interleaved')
        self.assertFalse(graph_has_op(model.debug_sorted_graph, 'split'))
        x0_pt = get_random_torch_tensor((M.value(), N.value()), dtype=dtype)
        slice_indices_0 = [slice(i, j) for (i, j) in zip(slice_start_indices_0, slice_end_indices_0)]
        dynamic_slice_0_pt = x0_pt[slice_indices_0]
        slice_indices_1 = [slice(i, j) for (i, j) in zip(slice_start_indices_1, slice_end_indices_1)]
        dynamic_slice_1_pt = x0_pt[slice_indices_1]
        (split_0_0_pt, split_0_1_pt) = torch.split(dynamic_slice_0_pt, [10, 10], 1)
        (split_1_0_pt, split_1_1_pt) = torch.split(dynamic_slice_1_pt, [10, 10], 1)
        y_pt = torch.cat([split_0_0_pt, split_1_0_pt, split_0_1_pt, split_1_1_pt], 1)
        y = torch.empty_like(y_pt)
        model.run_with_tensors({'x0': x0_pt}, [y])
        torch.testing.assert_close(y, y_pt, atol=0, rtol=0)
# NOTE(review): the line below looks like the argument list of a stripped
# ``@pytest.fixture`` decorator — confirm against the original source.
(scope='function')
def syncthing_manager(is_syncing, request, manager_nospawn, monkeypatch):
    """Yield a qtile manager running a bar that hosts only a Syncthing widget.

    HTTP calls made by the widget are monkeypatched to the ``is_syncing``
    stub; extra widget kwargs can be injected via indirect ``request.param``.
    """
    # Replace real Syncthing REST calls with the test double.
    monkeypatch.setattr('qtile_extras.widget.syncthing.requests.get', is_syncing)
    # Base api_key can be overridden/extended by per-test parametrization.
    widget = qtile_extras.widget.syncthing.Syncthing(**{**{'api_key': 'apikey'}, **getattr(request, 'param', dict())})
    class SyncthingConfig(libqtile.confreader.Config):
        # Minimal single-group, single-screen config for the widget under test.
        auto_fullscreen = True
        keys = []
        mouse = []
        groups = [libqtile.config.Group('a')]
        layouts = [libqtile.layout.Max()]
        floating_layout = libqtile.resources.default_config.floating_layout
        screens = [libqtile.config.Screen(top=libqtile.bar.Bar([widget], BAR_SIZE))]
    manager_nospawn.start(SyncthingConfig)
    (yield manager_nospawn)
class OptionSeriesVennSonificationDefaultinstrumentoptionsMappingHighpassResonance(Options):
    """Configuration for ``series.venn.sonification.defaultInstrumentOptions.mapping.highpass.resonance``.

    Each option is a property backed by the shared ``Options`` config store.
    The original block defined each getter and its setter under the same bare
    name with no ``@property``/``.setter`` decorators, so every getter was
    immediately shadowed by its setter and unreachable; the decorators are
    restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def load_trinity_config_from_parser_args(parser: argparse.ArgumentParser, args: argparse.Namespace, app_identifier: str, sub_configs: SubConfigs) -> TrinityConfig:
    """Construct a TrinityConfig from parsed CLI arguments.

    On an ambiguous filesystem layout, aborts via ``parser.error`` with the
    standard explanatory message.
    """
    try:
        config = TrinityConfig.from_parser_args(args, app_identifier, sub_configs)
    except AmbigiousFileSystem:
        parser.error(TRINITY_AMBIGIOUS_FILESYSTEM_INFO)
    else:
        return config
class IOSAppMetadata(_AppMetadata):
    """Detailed metadata of an iOS app, extending the common app metadata."""

    def __init__(self, bundle_id, name, app_id, display_name, project_id):
        """Store the validated bundle id alongside the common metadata fields."""
        super(IOSAppMetadata, self).__init__(name, app_id, display_name, project_id)
        self._bundle_id = _check_is_nonempty_string(bundle_id, 'bundle_id')

    @property
    def bundle_id(self):
        """The iOS bundle identifier of this app.

        Restored ``@property``: without it ``self.bundle_id`` in ``__eq__``
        and ``__hash__`` evaluated to the bound method object instead of the
        string, making equality and hashing incorrect.
        """
        return self._bundle_id

    def __eq__(self, other):
        return (super(IOSAppMetadata, self).__eq__(other) and (self.bundle_id == other.bundle_id))

    def __ne__(self, other):
        return (not self.__eq__(other))

    def __hash__(self):
        return hash((self._name, self.app_id, self.display_name, self.project_id, self.bundle_id))
def allow_deprecated_init(func: Callable):
    """Wrap a Well ``__init__`` to keep deprecated construction paths working.

    With a DeprecationWarning, supports both ``Well()`` with no arguments and
    ``Well('file', ...)`` direct-from-file construction, translating the
    latter into the keyword arguments produced by the format reader before
    delegating to ``func``.
    """
    # NOTE(review): the bare ``(func)`` below looks like a stripped
    # ``@functools.wraps(func)`` decorator line — confirm against the original.
    (func)
    def wrapper(self, *args, **kwargs):
        if ((not args) and (not kwargs)):
            warnings.warn("Initializing empty well is deprecated, please provide non-defaulted values, or use mywell = xtgeo.well_from_file('filename')", DeprecationWarning)
            # Minimal empty well: three zeroed positionals, empty name,
            # and an empty X/Y/Z dataframe.
            return func(self, *([0.0] * 3), '', pd.DataFrame({_AttrName.XNAME.value: [], _AttrName.YNAME.value: [], _AttrName.ZNAME.value: []}))
        if (('wfile' in kwargs) or ((len(args) >= 1) and isinstance(args[0], (str, Path, xtgeo._XTGeoFile)))):
            warnings.warn("Initializing directly from file name is deprecated and will be removed in xtgeo version 4.0. Use: mywell = xtgeo.well_from_file('filename') instead", DeprecationWarning)
            # Peel wfile and fformat off the positionals, falling back to kwargs.
            if (len(args) >= 1):
                wfile = args[0]
                args = args[1:]
            else:
                wfile = kwargs.pop('wfile', None)
            if (len(args) >= 1):
                fformat = args[0]
                args = args[1:]
            else:
                fformat = kwargs.pop('fformat', None)
            mfile = xtgeo._XTGeoFile(wfile)
            if ((fformat is None) or (fformat == 'guess')):
                fformat = mfile.detect_fformat()
            else:
                fformat = mfile.generic_format_by_proposal(fformat)
            # The format reader yields the kwargs the real __init__ expects.
            kwargs = _data_reader_factory(fformat)(mfile, *args, **kwargs)
            kwargs['filesrc'] = mfile.file
            return func(self, **kwargs)
        return func(self, *args, **kwargs)
    return wrapper
def show(func=None, stop=False):
    """Start a GUI event loop, or decorate ``func`` to run inside one.

    Called as ``show()``: if no UI event loop is running, create a GUI and
    block in its event loop (optionally installing a StopShow when ``stop``).
    Used as a decorator ``@show``: the wrapped callable runs directly when a
    UI is already live, otherwise it is scheduled on a fresh event loop.
    """
    global _gui, _stop_show
    if (func is None):
        if (not is_ui_running()):
            g = GUI()
            # Keep a module-level reference so the GUI outlives this frame.
            _gui = g
            if stop:
                _stop_show = StopShow()
            g.start_event_loop()
        return
    def wrapper(*args, **kw):
        global _gui, _stop_show
        tk = ETSConfig.toolkit
        if is_ui_running():
            return func(*args, **kw)
        else:
            g = GUI()
            if (tk == 'wx'):
                # wx needs at least one window for its event loop: open a tiny
                # one and schedule it to close immediately.
                a = ApplicationWindow(size=(1, 1))
                GUI.invoke_later((lambda : a.close()))
                a.open()
            # Run the wrapped callable once the event loop is up.
            GUI.invoke_later(func, *args, **kw)
            _gui = g
            if stop:
                _stop_show = StopShow()
            g.start_event_loop()
    return wrapper
class OptionSeriesWindbarbDataDatalabels(Options):
    """Configuration for ``series.windbarb.data.dataLabels``.

    Each option is a property backed by the shared ``Options`` config store;
    sub-configuration objects (``animation``, ``filter``, ``textPath``) are
    read-only accessors built via ``_config_sub_data``.  The original block
    defined each getter and its setter under the same bare name with no
    ``@property``/``.setter`` decorators, so every getter was immediately
    shadowed by its setter and unreachable; the decorators are restored here.
    """

    @property
    def align(self):
        return self._config_get('center')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def allowOverlap(self):
        return self._config_get(False)

    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self) -> 'OptionSeriesWindbarbDataDatalabelsAnimation':
        return self._config_sub_data('animation', OptionSeriesWindbarbDataDatalabelsAnimation)

    @property
    def backgroundColor(self):
        return self._config_get(None)

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderColor(self):
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        return self._config_get(0)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def crop(self):
        return self._config_get(True)

    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def defer(self):
        return self._config_get(True)

    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabled(self):
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def filter(self) -> 'OptionSeriesWindbarbDataDatalabelsFilter':
        return self._config_sub_data('filter', OptionSeriesWindbarbDataDatalabelsFilter)

    @property
    def format(self):
        return self._config_get('point.value')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        return self._config_get(None)

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def inside(self):
        return self._config_get(None)

    @inside.setter
    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def overflow(self):
        return self._config_get('justify')

    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)

    @property
    def padding(self):
        return self._config_get(5)

    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)

    @property
    def position(self):
        return self._config_get('center')

    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shape(self):
        return self._config_get('square')

    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)

    @property
    def style(self):
        return self._config_get(None)

    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)

    @property
    def textPath(self) -> 'OptionSeriesWindbarbDataDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionSeriesWindbarbDataDatalabelsTextpath)

    @property
    def useHTML(self):
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        return self._config_get('bottom')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(0)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(6)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def test_custom_field_definition_duplicate_name_rejected_update(db):
    """Updating a definition to a name that normalises to an existing one must fail."""
    base = {'description': 'test', 'field_type': 'string', 'resource_type': 'system', 'field_definition': 'string'}
    first = CustomFieldDefinition.create(db=db, data={'name': 'test1', **base})
    second = CustomFieldDefinition.create(db=db, data={'name': 'Test 1', **base})
    assert len(CustomFieldDefinition.all(db)) == 2
    assert first.id != second.id
    # 'Test1' collides with the existing 'test1' after normalisation.
    with pytest.raises(KeyOrNameAlreadyExists):
        second = second.update(db=db, data={'name': 'Test1'})
    # The failed update must not have changed the stored name.
    db.refresh(second)
    assert second.name == 'Test 1'
def run_demo(viz, env, args):
    """Run every demo section in order against the given visdom server."""
    global input
    assert viz.check_connection(timeout_seconds=3), 'No connection could be formed quickly'
    # Demo callables, grouped by section, executed in the original order.
    demo_sections = (
        text_basic, text_update, text_callbacks, text_close,
        image_basic, image_callback, image_save_jpeg, image_history, image_grid,
        plot_line_basic, plot_line_multiple, plot_line_webgl,
        plot_line_update_webgl, plot_line_update, plot_line_opts,
        plot_line_opts_update, plot_line_stackedarea, plot_line_maxsize,
        plot_line_doubleyaxis, plot_line_pytorch, plot_line_stem,
        plot_scatter_basic, plot_scatter_update_opts, plot_scatter_append,
        plot_scatter_3d, plot_scatter_custom_marker, plot_scatter_custom_colors,
        plot_scatter_add_trace, plot_scatter_text_labels_1d,
        plot_scatter_text_labels_2d,
        plot_bar_basic, plot_bar_stacked, plot_bar_nonstacked,
        plot_bar_histogram, plot_bar_piechart,
        plot_surface_basic, plot_surface_basic_withnames, plot_surface_append,
        plot_surface_append_withnames, plot_surface_remove,
        plot_surface_remove_withnames, plot_surface_replace,
        plot_surface_replace_withnames, plot_surface_contour, plot_surface_3d,
        plot_special_boxplot, plot_special_quiver, plot_special_mesh,
        plot_special_graph,
        misc_plot_matplot, misc_plot_latex, misc_plot_latex_update,
        misc_video_tensor, misc_video_download, misc_audio_basic,
        misc_audio_download, misc_arbitrary_visdom, misc_getset_state,
    )
    for demo in demo_sections:
        demo(viz, env, args)
class CLICommand(Command):
    """List every route of every route table in the account as text lines."""

    def execute(self, session, acct):
        """Return one formatted line per route across all route tables."""
        lines = []
        network_client = NetworkManagementClient(session, acct)
        for table in network_client.route_tables.list_all():
            bgp = 'N' if table.disable_bgp_route_propagation else 'Y'
            assocs = 0 if table.subnets is None else len(table.subnets)
            for route in table.routes:
                next_hop = route.next_hop_ip_address or ''
                lines.append(f'{acct}/{table.name}: bgp={bgp} assocs={assocs} route={route.address_prefix} next_hop={route.next_hop_type} {next_hop}\n')
        return ''.join(lines)
def test():
    """Exercise checker: the submitted solution must compare the two docs' similarity.

    Bug fix: the original first assert was
    ``assert ('doc1.similarity(doc2)' or (... in __solution__))`` — the
    left operand is a non-empty string literal, so the assertion was always
    true regardless of the solution.  Both variants must be membership tests.
    """
    assert (('doc1.similarity(doc2)' in __solution__) or ('doc2.similarity(doc1)' in __solution__)), 'Compares-tu la similarite entre les deux docs ?'
    assert (0 <= float(similarity) <= 1), "La valeur de similarite doit etre un nombre flottant. L'as-tu calcule correctement ?"
    __msg__.good('Bien joue !')
def list_mapped_windows(workspace: (int | None)=None) -> list[Window]:
    """Return Window wrappers for all mapped windows, optionally limited to one workspace."""
    if workspace is None:
        candidates = SWAY.get_tree().leaves()
    else:
        candidates = _get_workspace_object(workspace)
    return [Window(con) for con in candidates if _is_mapped_window(con)]
class DBLogUpdateHandler(logging.Handler):
    """Logging handler that batches records and pushes them to a DB benchmark row.

    A background thread flushes the accumulated messages roughly every
    ``interval`` seconds via ``db.updateLogBenchmarks``.  Updating stops after
    ``retries`` consecutive failures or once the joined log exceeds
    ``LOG_LIMIT`` bytes (the log is then trimmed for a final write).
    """
    def __init__(self, db: DBDriver, id: int, interval: float=DEFAULT_INTERVAL, retries=3):
        logging.Handler.__init__(self)
        self.db = db
        # Benchmark row id that the log lines are attached to.
        self.id = id
        # Minimum number of seconds between successive DB updates.
        self.interval = interval
        self.retries = retries
        self.retries_left = self.retries
        # Timestamp of the last update attempt (0 = never).
        self.lastreq = 0
        # Pending formatted messages; collapsed to one element after a flush.
        self.log = []
        self.running = True
        self.dbLoggingThread = threading.Thread(target=self.startLogging)
        self.dbLoggingThread.start()
    def emit(self, record):
        """Queue a formatted record for the next background flush."""
        msg = self.format(record)
        self.log.append(msg)
    def startLogging(self):
        """Background loop: periodically write the joined log to the DB.

        NOTE(review): ``self.log`` is read and reassigned here without a lock
        while ``emit`` may append from other threads — messages appended
        between the join and the reassignment could be lost; confirm whether
        that race is acceptable for this use.
        """
        while self.running:
            if (self.log and (time.time() >= (self.lastreq + self.interval))):
                try:
                    output = '\n'.join(self.log)
                    if (sys.getsizeof(output) > LOG_LIMIT):
                        # Oversized log: stop the loop and store a trimmed copy.
                        self.running = False
                        output = trimLog(output)
                    else:
                        # Collapse the queue so the full history is resent whole.
                        self.log = [output]
                    status = self.db.updateLogBenchmarks(self.id, output)
                    if (status != 'success'):
                        getLogger().error('Error updating logs.')
                        self.retries_left -= 1
                        if (self.retries_left == 0):
                            self.running = False
                            getLogger().critical('Max failed attempts reached for log updates. Stopping log update requests.')
                    else:
                        # A success resets the failure budget.
                        self.retries_left = self.retries
                    self.lastreq = time.time()
                except Exception:
                    getLogger().exception('Error occurred in realtime logging loop.')
                    self.running = False
            time.sleep(1)
    def close(self):
        """Stop the background loop and close the handler."""
        self.running = False
        super().close()
class OptionSeriesSplineSonificationDefaultinstrumentoptionsMappingVolume(Options):
    """Configuration for ``series.spline.sonification.defaultInstrumentOptions.mapping.volume``.

    Each option is a property backed by the shared ``Options`` config store.
    The original block defined each getter and its setter under the same bare
    name with no ``@property``/``.setter`` decorators, so every getter was
    immediately shadowed by its setter and unreachable; the decorators are
    restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class UnionPIDDataPreparerService(abc.ABC):
    """Interface for preparing input data for a UnionPID (private-id) run.

    NOTE(review): the methods have empty bodies yet carry no
    ``@abc.abstractmethod`` decorators — these look stripped from the
    original source; confirm before relying on instantiation behaviour.
    """
    def prepare(self, input_path: str, output_path: str, log_path: Optional[pathlib.Path]=None, log_level: int=logging.INFO, storage_svc: Optional[StorageService]=None) -> None:
        # Prepare the data locally, writing results to output_path.
        pass
    def prepare_on_container(self, input_path: str, output_path: str, onedocker_svc: OneDockerService, binary_version: str, tmp_directory: str='/tmp/', wait_for_container: bool=True) -> ContainerInstance:
        # Prepare the data by launching a OneDocker container (sync variant).
        pass
    async def prepare_on_container_async(self, input_path: str, output_path: str, onedocker_svc: OneDockerService, binary_version: str, tmp_directory: str='/tmp/', wait_for_container: bool=True) -> ContainerInstance:
        # Async variant of prepare_on_container.
        pass
def fy(raw_date):
    """Return the US federal fiscal year (starting in October) for a date.

    Accepts None (returned as-is), a parseable date string, or any object
    with ``year`` and ``month`` attributes; anything else raises TypeError.
    """
    if (raw_date is None):
        return None
    if isinstance(raw_date, str):
        raw_date = parser.parse(raw_date)
    try:
        year = raw_date.year
        month = raw_date.month
    except AttributeError:
        raise TypeError('{} needs year and month attributes'.format(raw_date))
    # October or later belongs to the next fiscal year.
    return (year + 1) if (month > 9) else year
# NOTE(review): the line below looks like the argument list of a stripped
# ``@unittest.skipIf`` decorator — confirm against the original source.
((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
class ConvBiasAddReluTestCase(unittest.TestCase):
    """End-to-end test of the fused conv2d + bias + residual-add + relu op."""
    def _test_conv_bias_add_relu(self, batch=4, copy_op=False, test_name='conv2d_bias_add_relu', dtype='float16'):
        """Compile the fused op and compare against eager conv2d + add + relu.

        The AIT graph uses NHWC layout while the PyTorch reference is NCHW,
        hence the permutes around the comparison.  ``copy_op`` re-creates the
        op from its own attributes to exercise attribute round-tripping.
        """
        target = detect_target()
        (CO, HH, WW, CI) = (256, 28, 28, 128)
        X = Tensor(shape=[IntImm(batch), HH, WW, CI], dtype=dtype, name='input_0', is_input=True)
        W = Tensor(shape=[CO, 3, 3, CI], dtype=dtype, name='input_1', is_input=True)
        B = Tensor(shape=[CO], dtype=dtype, name='input_2', is_input=True)
        R = Tensor(shape=[IntImm(batch), HH, WW, CO], dtype=dtype, name='input_3', is_input=True)
        OP = ops.conv2d_bias_add_relu(stride=1, pad=1, dilate=1)
        if copy_op:
            OP = ops.conv2d_bias_add_relu(**OP._get_op_attributes())
        Y = OP(X, W, B, R)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, target, './tmp', test_name)
        X_pt = get_random_torch_tensor([batch, CI, HH, WW], dtype=dtype)
        W_pt = get_random_torch_tensor([CO, CI, 3, 3], dtype=dtype)
        B_pt = get_random_torch_tensor([1, CO, 1, 1], dtype=dtype)
        R_pt = get_random_torch_tensor([batch, CO, HH, WW], dtype=dtype)
        # Reference conv is computed in fp32 then cast back to avoid
        # accumulating low-precision error in the baseline itself.
        Y_pt = torch.nn.functional.conv2d(X_pt.float(), W_pt.float(), padding=1).to(dtype=X_pt.dtype)
        Y_pt = ((Y_pt + B_pt) + R_pt)
        Y_pt = torch.nn.functional.relu(Y_pt)
        # NCHW -> NHWC for the AIT inputs.
        x = X_pt.permute((0, 2, 3, 1)).contiguous()
        w = W_pt.permute((0, 2, 3, 1)).contiguous()
        r = R_pt.permute((0, 2, 3, 1)).contiguous()
        inputs = {'input_0': x, 'input_1': w, 'input_2': B_pt.squeeze(), 'input_3': r}
        y = torch.empty_like(Y_pt).permute((0, 2, 3, 1)).contiguous()
        module.run_with_tensors(inputs, [y])
        y_transpose = y.permute(0, 3, 1, 2)
        # Tolerances vary per backend/dtype precision.
        if (target.name() == 'cuda'):
            if (dtype == 'float32'):
                torch.testing.assert_close(Y_pt, y_transpose, atol=0.05, rtol=0.01)
            elif (dtype == 'float16'):
                torch.testing.assert_close(Y_pt, y_transpose, atol=0.01, rtol=0.01)
            elif (dtype == 'bfloat16'):
                torch.testing.assert_close(Y_pt, y_transpose, atol=0.5, rtol=0.5)
        else:
            torch.testing.assert_close(Y_pt, y_transpose, atol=0.125, rtol=0.1)
    # NOTE(review): the line below looks like the argument list of a stripped
    # parameterized-test decorator — confirm against the original source.
    (**filter_test_cases_by_params({TestEnv.CUDA_LESS_THAN_SM80: ['float16'], TestEnv.CUDA_SM80: ['bfloat16', 'float32']}))
    def test_conv2d_bias_add_relu(self, dtype):
        """Run the fused-op check with and without op attribute copying."""
        self._test_conv_bias_add_relu(test_name=f'conv2d_bias_add_relu_{dtype}', dtype=dtype)
        self._test_conv_bias_add_relu(copy_op=True, test_name=f'conv2d_bias_add_relu_{dtype}_copy_op', dtype=dtype)
def test_threshold(df_na):
    """DropMissingData keeps rows according to ``threshold``, independently of ``missing_only``."""
    expected_index = {
        1: [0, 1, 4, 6, 7],
        0.01: [0, 1, 2, 3, 4, 5, 6, 7],
        0.5: [0, 1, 2, 4, 5, 6, 7],
    }
    # Default missing_only behaviour.
    for threshold, index in expected_index.items():
        transformed = DropMissingData(threshold=threshold).fit_transform(df_na)
        assert list(transformed.index) == index
    # Explicit missing_only=False must yield the same row selection.
    for threshold, index in expected_index.items():
        transformed = DropMissingData(threshold=threshold, missing_only=False).fit_transform(df_na)
        assert list(transformed.index) == index
# NOTE(review): the five lines below look like stripped click decorators
# (command registration plus option declarations) — confirm against the
# original source.
_flyte_cli.command('update-launch-plan', cls=_FlyteSubCommand)
_state_choice
_host_option
_insecure_option
_optional_urn_option
def update_launch_plan(state, host, insecure, urn=None):
    """Update the state of one launch plan, or of many URNs piped on stdin.

    With no ``--urn``, reads newline-separated URNs from a pipe on stdin;
    an interactive (non-pipe) stdin is a usage error.
    """
    _welcome_message()
    client = _get_client(host, insecure)
    if (urn is None):
        try:
            # Only accept stdin when it is a pipe (S_ISFIFO) so we never hang
            # waiting on an interactive terminal.
            if _stat.S_ISFIFO(_os.fstat(0).st_mode):
                for line in _sys.stdin.readlines():
                    _update_one_launch_plan(client, urn=line.rstrip(), state=state)
            else:
                raise _click.UsageError('Missing option "-u" / "--urn" or missing pipe inputs')
        except KeyboardInterrupt:
            # Allow clean Ctrl-C mid-stream; flush whatever was written.
            _sys.stdout.flush()
    else:
        _update_one_launch_plan(client, urn=urn, state=state)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.