code stringlengths 281 23.7M |
|---|
def exposed_astor_roundtrip_parser_functions():
	"""Round-trip every RssFeedEntry parser function through its AST.

	For each row, regenerate source from the stored AST with astor. If the
	regenerated source differs from the stored ``row.func`` source, verify
	the new source still compiles and only then overwrite the stored source.
	A single commit is issued at the end of the session.
	"""
	with db.session_context() as sess:
		for row in sess.query(db.RssFeedEntry).all():
			# (fix) dropped `func = row.get_func()` — the result was never
			# used; the comparison below reads row.func directly.
			_ast = row._get_ast()
			src = astor.to_source(_ast, indent_with='\t', pretty_source=better_pretty_source)
			if (src.strip() != row.func.strip()):
				# Broad catch is deliberate: str_to_function is a compile
				# smoke-test and any failure means "do not overwrite".
				try:
					rfdb.str_to_function(src, 'testing_compile')
					print('Compiled OK')
					row.func = src
				except Exception:
					print('Compilation failed?')
		sess.commit()
def test_list_saml_provider_configs(sample_tenant, saml_provider):
    """Listing SAML provider configs for the tenant must include the
    provider created by the fixture."""
    client = tenant_mgt.auth_for_tenant(sample_tenant.tenant_id)
    page = client.list_saml_provider_configs()
    target_id = saml_provider.provider_id
    result = next(
        (cfg for cfg in page.iterate_all() if cfg.provider_id == target_id),
        None,
    )
    assert result is not None
class OptionSeriesAreasplineDataDragdropGuideboxDefault(Options):
    """Generated Highcharts option wrapper for
    series.areaspline.data.dragDrop.guideBox.default.

    NOTE(review): every option appears as a getter/setter pair with the SAME
    name and no decorators; at class creation the second ``def`` shadows the
    first, leaving the getters as dead code. This looks like stripped
    ``@property`` / ``@<name>.setter`` decorators — confirm against the code
    generator before relying on attribute-style access.
    """
    def className(self):
        # Default: 'highcharts-drag-box-default'
        return self._config_get('highcharts-drag-box-default')
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        # Default fill color of the guide box.
        return self._config_get('rgba(0, 0, 0, 0.1)')
    def color(self, text: str):
        self._config(text, js_type=False)
    def cursor(self):
        return self._config_get('move')
    def cursor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        return self._config_get('#888')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(1)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(900)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def pp_adv_phych_pdu(pdu: bytes, ch: int) -> list:
    """Pretty-print a BLE advertising physical-channel PDU and collect addresses.

    Parameters
    ----------
    pdu : raw PDU — a 2-byte header followed by the payload.
    ch : advertising channel index, printed as a prefix.

    Returns
    -------
    list of ``{'BD_ADDR': bytes, 'type': 'public'|'random'}`` dicts for every
    device address found in the PDU (empty for unknown/extended PDUs).
    """
    header = pdu[:2]
    payload = pdu[2:]
    pdu_type = (header[0] & PDU_TYPE_MSK) >> PDU_TYPE_POS
    rfu = (header[0] & RFU_MSK) >> RFU_POS  # decoded but unused (reserved bits)
    ch_sel = (header[0] & CH_SEL_MSK) >> CH_SEL_POS  # decoded but unused
    tx_add = (header[0] & TX_ADD_MSK) >> TX_ADD_POS
    rx_add = (header[0] & RX_ADD_MSK) >> RX_ADD_POS

    def addr_kind(bit):
        # TxAdd/RxAdd bit semantics: 0 = public device address, 1 = random.
        return 'public' if bit == 0 else 'random'

    def addr_str(addr, fmt='%02X'):
        # Colon-separated hex rendering of an address.
        return ':'.join(fmt % b for b in addr)

    addrs = []
    print('[{}] '.format(ch), end='')
    if pdu_type == ADV_IND:
        adv_a = payload[:6][::-1]  # addresses are little-endian on air
        addrs = [{'BD_ADDR': adv_a, 'type': addr_kind(tx_add)}]
        print('[{}]'.format(blue('ADV_IND')))
        print('{} AdvA: {}'.format(addr_kind(tx_add), addr_str(adv_a)))
    elif pdu_type == ADV_DIRECT_IND:
        adv_a = payload[:6][::-1]
        target_a = payload[6:][::-1]
        addrs = [{'BD_ADDR': adv_a, 'type': addr_kind(tx_add)}, {'BD_ADDR': target_a, 'type': addr_kind(rx_add)}]
        print('[{}]'.format(blue('ADV_DIRECT_IND')))
        print('{} AdvA: {}'.format(addr_kind(tx_add), addr_str(adv_a)))
        print('{} TargetA: {}'.format(addr_kind(rx_add), addr_str(target_a)))
    elif pdu_type == ADV_NONCONN_IND:
        adv_a = payload[:6][::-1]
        addrs = [{'BD_ADDR': adv_a, 'type': addr_kind(tx_add)}]
        print('[{}]'.format(red('ADV_NONCONN_IND')))
        print('{} AdvA: {}'.format(addr_kind(tx_add), addr_str(adv_a)))
    elif pdu_type == ADV_SCAN_IND:
        adv_a = payload[:6][::-1]
        addrs = [{'BD_ADDR': adv_a, 'type': addr_kind(tx_add)}]
        print('[{}]'.format(blue('ADV_SCAN_IND')))
        print('{} AdvA: {}'.format(addr_kind(tx_add), addr_str(adv_a)))
    elif pdu_type == ADV_EXT_IND:
        # Extended advertising is not parsed yet; dump the raw payload.
        print('[{}]'.format(yellow('ADV_EXT_IND')))
        print('raw: {}'.format(payload))
    elif pdu_type == SCAN_REQ:
        scan_a = payload[:6][::-1]
        adv_a = payload[6:][::-1]
        addrs = [{'BD_ADDR': scan_a, 'type': addr_kind(tx_add)}, {'BD_ADDR': adv_a, 'type': addr_kind(rx_add)}]
        print('[{}]'.format(blue('SCAN_REQ')))
        print('{} ScanA: {}'.format(addr_kind(tx_add), addr_str(scan_a)))
        print('{} AdvA: {}'.format(addr_kind(rx_add), addr_str(adv_a)))
    elif pdu_type == SCAN_RSP:
        adv_a = payload[:6][::-1]
        addrs = [{'BD_ADDR': adv_a, 'type': addr_kind(tx_add)}]
        print('[{}]'.format(blue('SCAN_RSP')))
        print('{} AdvA: {}'.format(addr_kind(tx_add), addr_str(adv_a)))
    elif pdu_type == CONNECT_IND:
        # NOTE(review): unlike the other branches these addresses are NOT
        # byte-reversed before printing/returning — confirm this is intended.
        init_a = payload[:6]
        adv_a = payload[6:12]
        print('init_a:', addr_str(init_a, fmt='%02x'))
        print('adv_a:', addr_str(adv_a, fmt='%02x'))
        addrs = [{'BD_ADDR': init_a, 'type': addr_kind(tx_add)}, {'BD_ADDR': adv_a, 'type': addr_kind(rx_add)}]
        print('[{}]'.format(green('CONNECT_IND')))
        print('{} InitA: {}'.format(addr_kind(tx_add), addr_str(init_a)))
        print('{} AdvA: {}'.format(addr_kind(rx_add), addr_str(adv_a)))
    else:
        # BUG FIX: the `% pdu_type` format was inside the string literal, so
        # the literal (quote and all) was logged verbatim. Use lazy %-args.
        logger.warning('Unknown PDU type 0x%02x', pdu_type)
    return addrs
def test_sac_trainer_splitting_rollouts():
    """SAC smoke test with rollouts split into transitions, run once with
    evaluation repeats and once without."""
    for repeats in (1, 0):
        trainer = train_function(
            n_epochs=2,
            epoch_length=2,
            deterministic_eval=False,
            eval_repeats=repeats,
            distributed_env_cls=SequentialVectorEnv,
            split_rollouts_into_transitions=True,
        )
        assert isinstance(trainer, SAC)
def get_exterior_code(codes_dependance: dict, pathfile: str, previous_file_name=None, classes: str=None, relative: bool=None, jitted_dicts: dict=None):
    """Resolve the external imports needed by the given function sources.

    Two passes over ``codes_dependance`` (func name -> dependency source):
    pass 1 adapts ``transonic`` imports via ``adapt_code_dependance``; pass 2
    copies the code of every other imported local module into
    ``code_ext[classes]`` under an ``__ext__<func>__<module>`` key, rewrites
    the import statements, and recurses into the copied code.

    NOTE(review): ``code_ext`` is read and written but never defined here —
    presumably a module-level registry; confirm.

    Returns (codes_dependance, code_ext, jitted_dicts, special).
    """
    special = []
    treated = []
    # Pass 1: handle `transonic` imports so they are not treated as ordinary
    # external modules in pass 2 (their names end up in `treated`).
    for (func, dep) in codes_dependance.items():
        if (not dep):
            continue
        module_ext = extast.parse(dep)
        for node in module_ext.body:
            if (not isinstance(node, (ast.ImportFrom, ast.Import))):
                continue
            (file_name, file_path) = find_path(node, pathfile)
            if (file_name == 'transonic'):
                (codes_dependance[func], jitted_dicts, spe, treat) = adapt_code_dependance(func, codes_dependance[func], jitted_dicts)
                special = (special + spe)
                treated = (treated + treat)
    # Pass 2: inline the code of any other imported local module.
    for (func, dep) in codes_dependance.items():
        if (not dep):
            continue
        module_ext = extast.parse(dep)
        for node in module_ext.body:
            if (not isinstance(node, (ast.ImportFrom, ast.Import))):
                continue
            (file_name, file_path) = find_path(node, pathfile)
            if (not (file_name and (file_name not in treated))):
                continue
            # Per-function key so two functions importing the same module
            # do not clash.
            new_file_name = f'__ext__{func}__{file_name}'
            try:
                with open(str(file_path), 'r') as file:
                    content = file.read()
            except:
                # NOTE(review): bare except hides the real error (permission,
                # encoding, ...); `except OSError` would be more precise.
                raise NotImplementedError((file_name + ' can not be found'))
            mod = extast.parse(content)
            code_ext[classes][new_file_name] = str(filter_external_code(mod, node.names))
            codes_dependance[func] = change_import_name(codes_dependance[func], node, func, relative)
            # Recurse to resolve imports inside the code just copied.
            if code_ext[classes][new_file_name]:
                get_exterior_code({func: code_ext[classes][new_file_name]}, pathfile, new_file_name, classes)
            if previous_file_name:
                code_ext[classes][previous_file_name] = change_import_name(code_ext[classes][previous_file_name], node, func, relative)
    return (codes_dependance, code_ext, jitted_dicts, special)
class TabularEditor(BasicEditorFactory):
    """Editor factory for tabular editors.

    The trait definitions below configure the toolkit-specific editor class
    that ``_get_klass`` resolves lazily.
    """
    # Toolkit-specific editor class (resolved lazily via toolkit_object).
    klass = Property()
    # Column/row title display.
    show_titles = Bool(True)
    show_row_titles = Bool(False)
    # Names of context traits used to trigger updates/refreshes.
    update = Str()
    refresh = Str()
    auto_update = Bool(False)
    # Names of context traits mirroring selection/activation/click events.
    selected = Str()
    selected_row = Str()
    selectable = Bool(True)
    activated = Str()
    activated_row = Str()
    clicked = Str()
    dclicked = Str()
    right_clicked = Str()
    right_dclicked = Str()
    column_clicked = Str()
    column_right_clicked = Str()
    # Programmatic scrolling targets.
    scroll_to_row = Str()
    scroll_to_column = Str()
    # Deprecated alias for scroll_to_position_hint (see property methods).
    scroll_to_row_hint = Property(Str, observe='scroll_to_position_hint')
    scroll_to_position_hint = Enum('visible', 'center', 'top', 'bottom')
    # Editing behaviour.
    editable = Bool(True)
    editable_labels = Bool(False)
    multi_select = Bool(False)
    # Appearance / sizing.
    horizontal_lines = Bool(True)
    vertical_lines = Bool(True)
    auto_resize = Bool(False)
    auto_resize_rows = Bool(False)
    stretch_last_section = Bool(True)
    # Adapter mapping model objects onto rows/columns.
    adapter = Instance('traitsui.tabular_adapter.TabularAdapter', ())
    # User operations permitted on the table.
    operations = List(Enum('delete', 'insert', 'append', 'edit', 'move'), ['delete', 'insert', 'append', 'edit', 'move'])
    drag_move = Bool(True)
    images = List(Image)
    def _get_klass(self):
        """Return the toolkit-specific TabularEditor implementation class."""
        return toolkit_object('tabular_editor:TabularEditor')
    def _get_scroll_to_row_hint(self):
        """Deprecated getter: proxies scroll_to_position_hint."""
        warnings.warn('Use of scroll_to_row_hint trait is deprecated. Use scroll_to_position_hint instead.', DeprecationWarning)
        return self.scroll_to_position_hint
    def _set_scroll_to_row_hint(self, hint):
        """Deprecated setter: proxies scroll_to_position_hint."""
        warnings.warn('Use of scroll_to_row_hint trait is deprecated. Use scroll_to_position_hint instead.', DeprecationWarning)
        self.scroll_to_position_hint = hint
class TestGraphApiTraceLoggingService(TestCase):
    """Unit tests for GraphApiTraceLoggingService, with the logger, the
    ``requests`` module, and the message queue replaced by autospec mocks."""
    def setUp(self) -> None:
        self.logger = mock.create_autospec(logging.Logger)
        self.mock_requests = mock.create_autospec(requests)
        self.mock_msg_queue = mock.create_autospec(SimpleQueue)
        # Keep the real exceptions module so `except requests.exceptions...`
        # clauses inside the service still match the raised types.
        self.mock_requests.exceptions = requests.exceptions
        self.svc = GraphApiTraceLoggingService(access_token=TEST_ACCESS_TOKEN, endpoint_url=TEST_ENDPOINT_URL)
        self.svc.logger = self.logger
        self.svc.msg_queue = self.mock_msg_queue
    def test_write_checkpoint_simple(self) -> None:
        """A checkpoint write enqueues one message and logs once at debug."""
        self.svc.write_checkpoint(run_id='run123', instance_id='instance456', checkpoint_name='foo', status=CheckpointStatus.STARTED)
        self.mock_msg_queue.put.assert_called_once()
        self.logger.debug.assert_called_once()
    def test_post_request_timeout(self) -> None:
        """A requests Timeout is swallowed and logged once at info level."""
        self.mock_requests.post.side_effect = requests.exceptions.Timeout()
        with mock.patch('fbpcs.common.service.graphapi_trace_logging_service.requests', self.mock_requests):
            self.svc._post_request(params={})
        self.logger.info.assert_called_once()
        self.assertIn('Timeout', self.logger.info.call_args_list[0][0][0])
    def test_post_request_other_exception(self) -> None:
        """Any other exception is also swallowed; its message is logged."""
        self.mock_requests.post.side_effect = Exception('Foobar')
        with mock.patch('fbpcs.common.service.graphapi_trace_logging_service.requests', self.mock_requests):
            self.svc._post_request(params={})
        self.logger.info.assert_called_once()
        self.assertIn('Foobar', self.logger.info.call_args_list[0][0][0])
    def test_write_checkpoint_custom_data(self) -> None:
        """Custom checkpoint_data is accepted; still one enqueue + one log."""
        data = {'bar': 'baz', 'quux': 'quuz'}
        self.svc.write_checkpoint(run_id='run123', instance_id='instance456', checkpoint_name='foo', status=CheckpointStatus.STARTED, checkpoint_data=data)
        self.mock_msg_queue.put.assert_called_once()
        self.logger.debug.assert_called_once()
    def test_flush_msg_queue(self) -> None:
        """Flushing groups queued messages by instance_id, joining fields
        with the \\001 separator, and posts once per instance."""
        msg_lists = [{'instance_id': 'instance456', 'component': 'component1', 'checkpoint_name': 'foo1', 'checkpoint_data': 'data1'}, {'instance_id': 'instance456', 'component': 'component2', 'checkpoint_name': 'foo2', 'checkpoint_data': 'data2'}, {'instance_id': 'instance789', 'component': 'component3', 'checkpoint_name': 'foo3', 'checkpoint_data': 'data3'}]
        self.mock_msg_queue.get.side_effect = msg_lists
        with mock.patch('fbpcs.common.service.graphapi_trace_logging_service.requests', self.mock_requests):
            self.svc._flush_msg_queue(msg_queue=self.mock_msg_queue, flush_size=len(msg_lists))
        # Two distinct instance_ids -> two POSTs and two info logs.
        self.assertEqual(2, self.mock_requests.post.call_count)
        self.assertEqual(2, self.logger.info.call_count)
        self.mock_requests.post.assert_has_calls([call('localhost', params={'instance_id': 'instance456', 'component': 'component1\\001component2', 'checkpoint_name': 'foo1\\001foo2', 'checkpoint_data': 'data1\\001data2'}, timeout=3.05), call('localhost', params={'instance_id': 'instance789', 'component': 'component3', 'checkpoint_name': 'foo3', 'checkpoint_data': 'data3'}, timeout=3.05)], any_order=True)
class SidewaysShooter():
    """Overall class managing assets and behavior for the horizontally
    scrolling shooter game."""

    def __init__(self):
        """Initialize pygame, settings, stats and the sprite groups."""
        pygame.init()
        self.clock = pygame.time.Clock()
        self.settings = Settings()
        self.screen = pygame.display.set_mode((self.settings.screen_width, self.settings.screen_height))
        pygame.display.set_caption('Sideways Shooter')
        self.stats = GameStats(self)
        self.ship = Ship(self)
        self.bullets = pygame.sprite.Group()
        self.aliens = pygame.sprite.Group()
        # Starts active; set False when the player runs out of ships.
        self.game_active = True

    def run_game(self):
        """Main loop: process input, advance the world while active, redraw."""
        while True:
            self._check_events()
            if self.game_active:
                self._create_alien()
                self.ship.update()
                self._update_bullets()
                self._update_aliens()
            self._update_screen()
            self.clock.tick(60)

    def _check_events(self):
        """Dispatch pygame events to the keydown/keyup handlers."""
        for event in pygame.event.get():
            if (event.type == pygame.QUIT):
                sys.exit()
            elif (event.type == pygame.KEYDOWN):
                self._check_keydown_events(event)
            elif (event.type == pygame.KEYUP):
                self._check_keyup_events(event)

    def _check_keydown_events(self, event):
        """Respond to key presses (movement, fire, quit)."""
        if (event.key == pygame.K_UP):
            self.ship.moving_up = True
        elif (event.key == pygame.K_DOWN):
            self.ship.moving_down = True
        elif (event.key == pygame.K_SPACE):
            self._fire_bullet()
        elif (event.key == pygame.K_q):
            sys.exit()

    def _check_keyup_events(self, event):
        """Respond to key releases (stop vertical movement)."""
        if (event.key == pygame.K_UP):
            self.ship.moving_up = False
        elif (event.key == pygame.K_DOWN):
            self.ship.moving_down = False

    def _fire_bullet(self):
        """Create a new bullet if still under the per-screen limit."""
        if (len(self.bullets) < self.settings.bullets_allowed):
            new_bullet = Bullet(self)
            self.bullets.add(new_bullet)

    def _update_bullets(self):
        """Move bullets, drop those past the right edge, resolve collisions."""
        self.bullets.update()
        for bullet in self.bullets.copy():
            if (bullet.rect.left >= self.screen.get_rect().right):
                self.bullets.remove(bullet)
        self._check_bullet_alien_collisions()

    def _check_bullet_alien_collisions(self):
        """Remove any bullets and aliens that have collided."""
        # (fix) called for its side effect only — True/True deletes both
        # colliding sprites; the returned dict was bound to an unused local.
        pygame.sprite.groupcollide(self.bullets, self.aliens, True, True)

    def _create_alien(self):
        """Randomly spawn a new alien at the configured frequency."""
        if (random() < self.settings.alien_frequency):
            alien = Alien(self)
            self.aliens.add(alien)

    def _update_aliens(self):
        """Move aliens; check for ship hits and left-edge breaches."""
        self.aliens.update()
        if pygame.sprite.spritecollideany(self.ship, self.aliens):
            self._ship_hit()
        self._check_aliens_left_edge()

    def _check_aliens_left_edge(self):
        """Treat an alien reaching the left screen edge like a ship hit."""
        for alien in self.aliens.sprites():
            if (alien.rect.left < 0):
                self._ship_hit()
                break

    def _ship_hit(self):
        """Lose one ship and reset the board, or end the game when none remain."""
        if (self.stats.ships_left > 0):
            self.stats.ships_left -= 1
            self.aliens.empty()
            self.bullets.empty()
            self.ship.center_ship()
        else:
            self.game_active = False

    def _update_screen(self):
        """Redraw all game elements and flip to the newest frame."""
        self.screen.fill(self.settings.bg_color)
        self.ship.blitme()
        for bullet in self.bullets.sprites():
            bullet.draw_bullet()
        self.aliens.draw(self.screen)
        pygame.display.flip()
def convert_inputs(model: Model, X_heads: Tuple[(Floats2d, Ints1d)], is_train: bool=False):
    """Convert an (X, heads) pair to torch tensors for the wrapped model.

    Returns the ArgsKwargs to feed torch plus a backward converter that maps
    the torch feature gradient back to xp, passing heads through unchanged.
    """
    features, heads = X_heads
    features_t = xp2torch(features, requires_grad=is_train)
    heads_t = xp2torch(heads)

    def convert_from_torch_backward(d_inputs: ArgsKwargs) -> Tuple[(Floats2d, Ints1d)]:
        # Only the feature gradient flows back.
        d_features = cast(Floats2d, torch2xp(d_inputs.args[0]))
        return (d_features, heads)

    packed = ArgsKwargs(args=(features_t, heads_t), kwargs={})
    return (packed, convert_from_torch_backward)
def mask_is_allowed(m, i):
    """Return whether index ``i`` is permitted by mask ``m``.

    Three mask forms are supported:
    - list of ints: ``i`` is allowed iff it appears in the list;
    - scalar: ``i`` is allowed iff it equals ``int(m)``;
    - array-like: ``i`` is allowed iff ``m[i]`` is (close to) zero.
    """
    if isinstance(m, list):
        # Sparse mask: an explicit whitelist of indices.
        # (fix) isinstance instead of `type(x) is` — same accepted values,
        # plus int subclasses; generator instead of a throwaway list.
        assert all(isinstance(x, (int, np.int64)) for x in m), 'Sparse mask must be a list of int or np.int64'
        return (i in m)
    if isinstance(m, int) or np.isscalar(m):
        # Single allowed index; non-int scalars are truncated via int().
        return (i == int(m))
    # Dense mask: a zero entry marks an allowed index.
    return np.isclose(m[i], 0, atol=1e-08)
def test_forwarding_methods(solc_tester):
    """Calls routed through the explicit overload selector foo['uint,uint']
    must behave exactly like the same calls made on foo directly."""
    foo = solc_tester.foo
    overload = foo['uint,uint']
    assert foo(12, 13) == overload(12, 13)
    assert foo.call(12, 13) == overload.call(12, 13)
    assert foo.transact(12, 13).return_value == overload.transact(12, 13).return_value
    assert foo.encode_input(12, 13) == overload.encode_input(12, 13)
class OFPQueueStats(ofproto_parser.namedtuple('OFPQueueStats', ('port_no', 'queue_id', 'tx_bytes', 'tx_packets', 'tx_errors'))):
    """One OpenFlow queue-statistics reply entry."""
    # NOTE(review): first parameter is named ``cls`` but there is no
    # @classmethod decorator — likely stripped during extraction; confirm.
    def parser(cls, buf, offset):
        """Unpack one queue-stats entry from ``buf`` at ``offset``."""
        queue = struct.unpack_from(ofproto.OFP_QUEUE_STATS_PACK_STR, buf, offset)
        stats = cls(*queue)
        # Attach the fixed entry size so callers can advance the offset.
        stats.length = ofproto.OFP_QUEUE_STATS_SIZE
        return stats
def make_stock_reconciliation(list_for_entry, date, cost_center):
    """Build and insert a Stock Reconciliation document for the given
    entries; only items whose values actually changed are kept, and the
    document is not inserted when nothing changed."""
    doc = frappe.new_doc('Stock Reconciliation')
    doc.purpose = 'Stock Reconciliation'
    doc.set_posting_time = 1
    doc.posting_date = date
    doc.posting_time = '00:00:00'
    doc.cost_center = cost_center
    doc.set('items', [])
    doc.difference_amount = 0.0
    add_items_to_reconcile(doc, list_for_entry)
    changed_items = [item for item in doc.items if changed(item, doc)]
    if changed_items:
        doc.items = changed_items
        doc.insert()
def get_dihedral_inds(coords3d, bond_inds, bend_inds, max_deg, logger=None):
    """Generate proper (and, as a fallback, improper) dihedral index tuples
    from the given bonds and bends, skipping (close to) linear arrangements.

    Returns (proper_dihedral_inds, improper_dihedral_inds).
    """
    max_rad = np.deg2rad(max_deg)
    # Adjacency map: atom index -> list of bonded atom indices.
    bond_dict = dict()
    for (from_, to_) in bond_inds:
        bond_dict.setdefault(from_, list()).append(to_)
        bond_dict.setdefault(to_, list()).append(from_)
    proper_dihedral_inds = list()
    improper_candidates = list()
    improper_dihedral_inds = list()
    def log_dihed_skip(inds):
        log(logger, f'Skipping generation of dihedral {inds} as some of the the atoms are (close too) linear.')
    def set_dihedral_index(dihedral_ind, proper=True):
        # Register a dihedral unless it is a duplicate (either orientation)
        # or geometrically invalid.
        dihed = tuple(dihedral_ind)
        check_in = (proper_dihedral_inds if proper else improper_dihedral_inds)
        if ((dihed in check_in) or (dihed[::(- 1)] in check_in)):
            return
        if (not dihedral_valid(coords3d, dihedral_ind, deg_thresh=max_deg)):
            log_dihed_skip(dihedral_ind)
            return
        if proper:
            proper_dihedral_inds.append(dihed)
        else:
            improper_dihedral_inds.append(dihed)
    # Combine every bond with every bend that shares exactly one atom.
    for (bond, bend) in it.product(bond_inds, bend_inds):
        central = bend[1]
        bend_set = set(bend)
        bond_set = set(bond)
        intersect = (bend_set & bond_set)
        if (len(intersect) != 1):
            continue
        if (central not in bond_set):
            terminal = tuple((bond_set - intersect))[0]
            intersecting_atom = tuple(intersect)[0]
            bend_terminal = tuple(((bend_set - {central}) - intersect))[0]
            bend_rad = Bend._calculate(coords3d, bend)
            if (bend_rad >= max_rad):
                # Bend is (close to) linear: extend by one more bond on
                # either terminal instead of using the bend directly.
                bend_terminal_bonds = (set(bond_dict[bend_terminal]) - bend_set)
                bond_terminal_bonds = (set(bond_dict[terminal]) - bond_set)
                set_dihedrals = ([(terminal, intersecting_atom, bend_terminal, betb) for betb in bend_terminal_bonds] + [(bend_terminal, intersecting_atom, terminal, botb) for botb in bond_terminal_bonds])
                if (not any([dihedral_valid(coords3d, inds, deg_thresh=max_deg) for inds in set_dihedrals])):
                    # Still invalid: look two bonds out.
                    # NOTE(review): both loops below ASSIGN set_dihedrals,
                    # so earlier candidates are overwritten and only the
                    # last iteration's list survives — possibly should
                    # accumulate (`+=`); confirm against upstream.
                    set_dihedrals = []
                    for betb in bend_terminal_bonds:
                        bend_terminal_bonds_v2 = ((set(bond_dict[betb]) - bend_set) - bond_set)
                        set_dihedrals = [(terminal, intersecting_atom, betb, betb_v2) for betb_v2 in bend_terminal_bonds_v2]
                    for botb in bond_terminal_bonds:
                        bond_terminal_bonds_v2 = ((set(bond_dict[botb]) - bend_set) - bond_set)
                        set_dihedrals = [(bend_terminal, intersecting_atom, botb, botb_v2) for botb_v2 in bond_terminal_bonds_v2]
            elif (intersecting_atom == bend[0]):
                set_dihedrals = [([terminal] + list(bend))]
            else:
                set_dihedrals = [(list(bend) + [terminal])]
            # Register all candidates (duplicates/invalid filtered inside).
            [set_dihedral_index(dihed) for dihed in set_dihedrals]
        else:
            # Bond attaches at the central bend atom: improper candidate.
            fourth_atom = list((bond_set - intersect))
            dihedral_ind = (list(bend) + fourth_atom)
            if dihedral_valid(coords3d, dihedral_ind, deg_thresh=max_deg):
                improper_candidates.append(dihedral_ind)
            else:
                log_dihed_skip(dihedral_ind)
    # Fall back to impropers only when no proper dihedral could be defined.
    if ((len(coords3d) >= 4) and (len(proper_dihedral_inds) == 0)):
        log(logger, 'Could not define any proper dihedrals! Generating improper dihedrals!')
        for improp in improper_candidates:
            set_dihedral_index(improp, proper=False)
        log(logger, 'Permutational symmetry not considerd in generation of improper dihedrals.')
    return (proper_dihedral_inds, improper_dihedral_inds)
class OptionSeriesFunnel3dSonificationTracksMappingLowpassFrequency(Options):
    """Generated Highcharts option wrapper for
    series.funnel3d.sonification.tracks.mapping.lowpass.frequency.

    NOTE(review): getter/setter pairs share a name with no decorators, so the
    second ``def`` shadows the first — likely stripped @property/@setter
    decorators from the generator; confirm before relying on attribute access.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
_op([AllocCursorA, ListA(OptionalA(NewExprA('buf_cursor'))), BoolA])
# NOTE(review): the line above looks like a stripped scheduling-op decorator
# (missing the '@' and decorator name); as written it is a bare call whose
# result is discarded. Confirm against the original source.
def bound_alloc(proc, buf_cursor, new_bounds, unsafe_disable_checks=False):
    """Rewrite the allocation at ``buf_cursor`` to use ``new_bounds``.

    Raises ValueError when the bound count does not match the buffer's
    dimensionality; runs effect checks unless explicitly disabled.
    """
    stmt = buf_cursor._impl
    if (len(stmt._node.type.hi) != len(new_bounds)):
        raise ValueError(f'buffer has {len(stmt._node.type.hi)} dimensions, but only {len(new_bounds)} bounds were supplied')
    new_proc_c = scheduling.DoBoundAlloc(proc, stmt, new_bounds).result()
    if (not unsafe_disable_checks):
        # Safety check; skipping it is explicitly marked unsafe by the flag.
        CheckEffects(new_proc_c._node)
    return new_proc_c
class Module01(Digraph.Node):
    """Input-module stub: participates in the dependency digraph but never
    rewrites the requirement set."""
    depends_on = []

    def __init__(self, config):
        # `config` is accepted for interface compatibility but unused.
        Digraph.Node.__init__(self, 'Module01')

    def get_type_set(self):
        """This module only contributes requirement-dependency input."""
        return {InputModuleTypes.reqdeps}

    def set_modules(self, mods):
        pass

    def rewrite(self, reqset):
        """Never modifies the requirement set."""
        return False
def _value_export_txt(run_path: Path, export_base_name: str, values: Mapping[(str, Mapping[(str, float)])]) -> None:
    """Write nested ``{key: {param: value}}`` mappings to
    ``<run_path>/<export_base_name>.txt`` as ``key:param value`` lines.

    Any existing file of the same name is backed up first — even when there
    is nothing to write — so a stale export never lingers under the name.
    """
    path = (run_path / f'{export_base_name}.txt')
    _backup_if_existing(path)
    # (fix) idiomatic emptiness test (was: len(values) == 0).
    if not values:
        return
    with path.open('w') as f:
        for (key, param_map) in values.items():
            for (param, value) in param_map.items():
                # %g formatting keeps the output compact for floats.
                print(f'{key}:{param} {value:g}', file=f)
class Child(HasTraits):
    """Sample HasTraits class used to exercise the recorder/cloning tests.

    ``property`` and ``toy`` are marked record=True so assignments to them
    are captured by the recorder.
    """
    name = Str('child')
    age = Float(10.0)
    # NOTE: the trait name intentionally shadows the `property` builtin.
    property = Instance(tvtk.Property, (), record=True)
    toy = Instance(Toy, record=True)
    friends = List(Str)
    def grow(self, x):
        """Increase age by x; also calls f(1) so that call gets recorded."""
        self.age += x
        self.f(1)
    def f(self, args):
        """Identity helper; returns its argument unchanged."""
        return args
    def not_recordable(self):
        pass
class OptionSeriesTimelineDataAccessibility(Options):
    """Generated Highcharts option wrapper for
    series.timeline.data.accessibility.

    NOTE(review): getter/setter pairs share a name with no decorators, so the
    second ``def`` shadows the first — likely stripped @property/@setter
    decorators from the generator; confirm before relying on attribute access.
    """
    def description(self):
        return self._config_get(None)
    def description(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        return self._config_get(None)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class ExpiredFilter(BooleanListFilter):
    """Admin list filter on the annotated ``expired`` flag."""
    title = _('Expired')
    parameter_name = 'expired'

    def queryset(self, request, queryset):
        # 'yes' -> expired only, 'no' -> unexpired only; any other value
        # (including unset) falls through to None, i.e. no extra filtering.
        choice = self.value()
        if choice in ('yes', 'no'):
            return queryset.expired().filter(expired=(choice == 'yes'))
class OptionSeriesSolidgaugeDatalabels(Options):
    """Generated Highcharts option wrapper for series.solidgauge.dataLabels.

    Each simple option appears as a getter/setter pair; sub-option groups
    (animation, filter, textPath) return typed sub-config wrappers.

    NOTE(review): the getter/setter pairs share a name with no decorators,
    so the second ``def`` shadows the first, leaving the getters dead code —
    likely stripped @property/@setter decorators from the generator; confirm
    before relying on attribute-style access.
    """
    def align(self):
        return self._config_get('center')
    def align(self, text: str):
        self._config(text, js_type=False)
    def allowOverlap(self):
        return self._config_get(False)
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self) -> 'OptionSeriesSolidgaugeDatalabelsAnimation':
        # Sub-configuration group.
        return self._config_sub_data('animation', OptionSeriesSolidgaugeDatalabelsAnimation)
    def backgroundColor(self):
        return self._config_get(None)
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)
    def borderColor(self):
        return self._config_get('#cccccc')
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderRadius(self):
        return self._config_get(3)
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    def borderWidth(self):
        return self._config_get(1)
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def crop(self):
        return self._config_get(False)
    def crop(self, flag: bool):
        self._config(flag, js_type=False)
    def defer(self):
        return self._config_get(False)
    def defer(self, flag: bool):
        self._config(flag, js_type=False)
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def filter(self) -> 'OptionSeriesSolidgaugeDatalabelsFilter':
        # Sub-configuration group.
        return self._config_sub_data('filter', OptionSeriesSolidgaugeDatalabelsFilter)
    def format(self):
        return self._config_get('point.value')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get(None)
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def inside(self):
        return self._config_get(None)
    def inside(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def overflow(self):
        return self._config_get('justify')
    def overflow(self, text: str):
        self._config(text, js_type=False)
    def padding(self):
        return self._config_get(5)
    def padding(self, num: float):
        self._config(num, js_type=False)
    def position(self):
        return self._config_get('center')
    def position(self, text: str):
        self._config(text, js_type=False)
    def rotation(self):
        return self._config_get(0)
    def rotation(self, num: float):
        self._config(num, js_type=False)
    def shadow(self):
        return self._config_get(False)
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    def shape(self):
        return self._config_get('square')
    def shape(self, text: str):
        self._config(text, js_type=False)
    def style(self):
        return self._config_get(None)
    def style(self, value: Any):
        self._config(value, js_type=False)
    def textPath(self) -> 'OptionSeriesSolidgaugeDatalabelsTextpath':
        # Sub-configuration group.
        return self._config_sub_data('textPath', OptionSeriesSolidgaugeDatalabelsTextpath)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
    def verticalAlign(self):
        return self._config_get('top')
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)
    def x(self):
        return self._config_get(0)
    def x(self, num: float):
        self._config(num, js_type=False)
    def y(self):
        return self._config_get(0)
    def y(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(2)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
.usefixtures('use_tmpdir')
# NOTE(review): the line above looks like a stripped decorator (probably
# `@pytest.mark.usefixtures('use_tmpdir')`); as written it is invalid.
def test_gui_iter_num(monkeypatch, qtbot):
    """GUI test: typing an iteration number into the ensemble experiment
    panel must propagate to the simulation arguments at run time."""
    config_file = Path('config.ert')
    config_file.write_text('NUM_REALIZATIONS 1\n', encoding='utf-8')
    args_mock = Mock()
    args_mock.config = str(config_file)
    def _assert_iter_in_args(panel):
        # Patched in place of runSimulation: assert instead of running.
        assert (panel.getSimulationArguments().iter_num == 10)
    # NOTE(review): args_mock is rebuilt here and its .config immediately
    # overridden via PropertyMock, making the assignments above (and the
    # 'poly.ert' value) appear redundant — confirm against upstream.
    args_mock = Mock()
    args_mock.config = 'poly.ert'
    type(args_mock).config = PropertyMock(return_value='config.ert')
    monkeypatch.setattr(ert.gui.simulation.simulation_panel.SimulationPanel, 'runSimulation', _assert_iter_in_args)
    gui = _setup_main_window(EnKFMain(ErtConfig.from_file(str(config_file))), args_mock, GUILogHandler())
    qtbot.addWidget(gui)
    sim_mode = get_child(gui, QWidget, name='Simulation_mode')
    qtbot.keyClick(sim_mode, Qt.Key_Down)
    sim_panel = get_child(gui, QWidget, name='Simulation_panel')
    ensemble_panel = get_child(gui, QWidget, name='Ensemble_experiment_panel')
    # Replace the default iteration value with '10'.
    qtbot.keyClick(ensemble_panel._iter_field, Qt.Key_Backspace)
    qtbot.keyClicks(ensemble_panel._iter_field, '10')
    qtbot.keyClick(ensemble_panel._iter_field, Qt.Key_Enter)
    start_simulation = get_child(gui, QWidget, name='start_simulation')
    qtbot.mouseClick(start_simulation, Qt.LeftButton)
    assert (sim_panel.getSimulationArguments().iter_num == 10)
(context_settings=dict(ignore_unknown_options=True))
('arguments', nargs=(- 1), type=click.UNPROCESSED)
('--coverage/--no-coverage', default=False)
# NOTE(review): the three lines above look like stripped click decorators
# (`@click.command(...)`, `@click.argument(...)`, `@click.option(...)`);
# as written they are not valid statements. Confirm against the original.
def test(coverage, arguments):
    """Run the test suite under pytest, optionally with coverage, passing
    any extra CLI arguments straight through to pytest."""
    os.environ['COPRS_ENVIRON_UNITTEST'] = '1'
    # Provide the default test config unless one is already set and non-empty.
    if (not (('COPR_CONFIG' in os.environ) and os.environ['COPR_CONFIG'])):
        os.environ['COPR_CONFIG'] = '/etc/copr/copr_unit_test.conf'
    # Make the current directory importable by the spawned test run.
    if ('PYTHONPATH' in os.environ):
        os.environ['PYTHONPATH'] = (os.environ['PYTHONPATH'] + ':.')
    else:
        os.environ['PYTHONPATH'] = '.'
    additional_args = list(arguments)
    if coverage:
        additional_args.extend(['--cov-report', 'term-missing', '--cov', 'coprs'])
    # Returns pytest's exit status.
    return subprocess.call((['/usr/bin/python3', '-m', 'pytest'] + additional_args))
def check_libusb():
    """Sanity-check the USB stack: pyusb must import and libusb must provide
    a backend. Logs a targeted hint and re-raises on each failure mode."""
    logger.info('making sure libusb is installed')
    try:
        import usb.core
    except ImportError:
        # pyusb itself is missing.
        logger.error('pyusb is not installed. Install script usually fails here on first attempt. Running it again should allow installation to complete.')
        raise
    try:
        # Device enumeration forces backend (libusb) resolution.
        usb.core.find()
    except usb.core.NoBackendError:
        logger.error('libusb is probably not installed. Refer to the instructions in README.md to install it.')
        raise
    except Exception as ex:
        # Unknown failure: log details, still propagate.
        logger.error('something is not right with either pyusb or libusb. Details: %s', ex)
        raise
class CloneTestCase(unittest.TestCase):
    """Tests for HasTraits.clone_traits: copy semantics of Any vs Instance
    traits, shallow vs deep copies, and circular-reference handling."""
    def test_any(self):
        """Deep-cloning copies the object held in an Any trait."""
        b = ClassWithAny()
        f = Foo()
        f.s = 'the f'
        b.x = f
        bc = b.clone_traits(traits='all', copy='deep')
        self.assertNotEqual(id(bc.x), id(f), 'Foo x not cloned')
    def test_instance(self):
        """Deep-cloning copies the object held in an Instance trait."""
        b = ClassWithInstance()
        f = Foo()
        f.s = 'the f'
        b.x = f
        bc = b.clone_traits(traits='all', copy='deep')
        self.assertNotEqual(id(bc.x), id(f), 'Foo x not cloned')
    def test_class_attribute_missing(self):
        """A class-level attribute stays shared until shadowed per-instance."""
        s = 'class defined name'
        c = ClassWithClassAttribute()
        self.assertEqual(s, c.name)
        c2 = ClassWithClassAttribute()
        self.assertEqual(s, c.name)
        self.assertEqual(s, c2.name)
        s2 = 'name class attribute changed via clone'
        c2.name = s2
        self.assertEqual(s2, c2.name)
        # The other instance is unaffected by the shadowing assignment.
        self.assertEqual(s, c.name)
    def test_Any_circular_references(self):
        """Default (shallow) clone of Any traits keeps the original refs."""
        bar = BarAny()
        baz = BazAny()
        bar.other = baz
        baz.other = bar
        bar_copy = bar.clone_traits()
        self.assertIsNot(bar_copy, bar)
        self.assertIs(bar_copy.other, baz)
        self.assertIs(bar_copy.other.other, bar)
    def test_Any_circular_references_deep(self):
        """Deep clone of an Any cycle recreates the cycle in the copy."""
        bar = BarAny()
        baz = BazAny()
        bar.other = baz
        baz.other = bar
        bar_copy = bar.clone_traits(copy='deep')
        self.assertIsNot(bar_copy, bar)
        self.assertIsNot(bar_copy.other, baz)
        self.assertIsNot(bar_copy.other.other, bar)
        # The cycle points back into the copy, not the original.
        self.assertIs(bar_copy.other.other, bar_copy)
    def test_Instance_circular_references(self):
        """Default clone of Instance traits: copy_default='ref' traits keep
        the original object; others are copied, with sharing preserved."""
        ref = Foo(s='ref')
        bar_unique = Foo(s='bar.foo')
        shared = Foo(s='shared')
        baz_unique = Foo(s='baz.unique')
        baz = BazInstance()
        baz.unique = baz_unique
        baz.shared = shared
        baz.ref = ref
        bar = BarInstance()
        bar.unique = bar_unique
        bar.shared = shared
        bar.ref = ref
        bar.other = baz
        baz.other = bar
        baz_copy = baz.clone_traits()
        self.assertIsNot(baz_copy, baz)
        self.assertIsNot(baz_copy.other, bar)
        self.assertIsNot(baz_copy.unique, baz.unique)
        self.assertIsNot(baz_copy.shared, baz.shared)
        # 'ref' traits are not copied.
        self.assertIs(baz_copy.ref, ref)
        bar_copy = baz_copy.other
        self.assertIsNot(bar_copy.unique, bar.unique)
        self.assertIs(bar_copy.ref, ref)
        # The inner cycle still points at the ORIGINAL baz here.
        self.assertIsNot(bar_copy.other, baz_copy)
        self.assertIs(bar_copy.other, baz)
        # Shared object copied once and shared between the copies.
        self.assertIsNot(bar_copy.shared, baz.shared)
        self.assertIs(bar_copy.shared, baz_copy.shared)
    def test_Instance_circular_references_deep(self):
        """Deep clone: same as above, but the copy graph is rebuilt while
        'ref' traits still point at the original object."""
        ref = Foo(s='ref')
        bar_unique = Foo(s='bar.foo')
        shared = Foo(s='shared')
        baz_unique = Foo(s='baz.unique')
        baz = BazInstance()
        baz.unique = baz_unique
        baz.shared = shared
        baz.ref = ref
        bar = BarInstance()
        bar.unique = bar_unique
        bar.shared = shared
        bar.ref = ref
        bar.other = baz
        baz.other = bar
        baz_copy = baz.clone_traits(copy='deep')
        self.assertIsNot(baz_copy, baz)
        self.assertIsNot(baz_copy.other, bar)
        self.assertIsNot(baz_copy.unique, baz.unique)
        self.assertIsNot(baz_copy.shared, baz.shared)
        bar_copy = baz_copy.other
        self.assertIsNot(bar_copy.unique, bar.unique)
        self.assertIs(baz_copy.ref, bar_copy.ref)
        self.assertIs(bar_copy.ref, ref)
        # Even deep copy leaves the inner cycle pointing at the original baz.
        self.assertIsNot(bar_copy.other, baz_copy)
        self.assertIs(bar_copy.other, baz)
        self.assertIsNot(bar_copy.shared, baz.shared)
        self.assertIs(bar_copy.shared, baz_copy.shared)
def extractArchiveofmemoriesWordpressCom(item):
    """Parse a release item from archiveofmemories.wordpress.com.

    Returns None for previews or items without a chapter/volume, a release
    message for recognised tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    # (tag-in-feed, series name, translation type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def boto_service_definition_files():
    """Return the paths of every gzipped ``service-*.json.gz`` definition
    file shipped with the installed botocore package."""
    botocore_data_dir = resource_filename(Requirement.parse('botocore'), 'botocore/data')
    matches = []
    for dirname, _, files_in_dir in os.walk(botocore_data_dir):
        for candidate in files_in_dir:
            if fnmatch.fnmatch(candidate, 'service-*.json.gz'):
                matches.append(os.path.join(dirname, candidate))
    return matches
class RowReferenceMixin():
    """Mixin that lazily resolves a foreign-key reference into the full
    referenced record, exposing that record's columns via attribute and
    item access. The reference value itself is returned for the primary key
    without any fetch."""

    def _allocate_(self):
        """Fetch the referenced record on first access; an unresolvable
        reference is a broken foreign key and is treated as fatal."""
        if (not self._refrecord):
            self._refrecord = self._refmeta.fetch(self)
        if (not self._refrecord):
            raise RuntimeError(('Using a recursive select but encountered a broken ' + ('reference: %s %r' % (self._table, self))))

    def __getattr__(self, key: str) -> Any:
        if (key == self._refmeta.pk):
            # The primary key is the reference value itself; no fetch needed.
            return self._refmeta.caster(self)
        if (key in self._refmeta.table):
            self._allocate_()
            if self._refrecord:
                return self._refrecord.get(key, None)
        return None

    def get(self, key: str, default: Any=None) -> Any:
        """dict-style access with a fallback.

        BUG FIX: previously called ``self.__getattr__(key, default)``, but
        ``__getattr__`` only takes the key — every call raised TypeError.
        Missing (None-valued) keys now fall back to ``default``; note this
        cannot distinguish a stored None from an absent key.
        """
        value = self.__getattr__(key)
        return default if value is None else value

    def __setattr__(self, key: str, value: Any):
        if key.startswith('_'):
            # Private attributes bypass the record and live on the instance.
            self._refmeta.caster.__setattr__(self, key, value)
            return
        self._allocate_()
        self._refrecord[key] = value

    def __getitem__(self, key):
        if (key == self._refmeta.pk):
            return self._refmeta.caster(self)
        self._allocate_()
        return self._refrecord.get(key, None)

    def __setitem__(self, key, value):
        self._allocate_()
        self._refrecord[key] = value

    def __pure__(self):
        """Return the plain (cast) reference value."""
        return self._refmeta.caster(self)

    def __repr__(self) -> str:
        return repr(self._refmeta.caster(self))
# NOTE(review): the bare string expression on the next line is almost
# certainly the remnant of a stripped registration decorator for gen_ops
# (e.g. @registry.reg('cuda.gen_cutlass_ops')) -- confirm against upstream.
('cuda.gen_cutlass_ops')
def gen_ops(arch, cuda_version, allow_cutlass_sm90, force_cutlass_sm90):
    """Populate and return the CUTLASS operation manifest for *arch*.

    Args:
        arch: target SM architecture as a string (e.g. '80', '90').
        cuda_version: optional CUDA version override applied to the args.
        allow_cutlass_sm90: if True (and not forced), generate SM90 kernels
            in addition to the SM80 set.
        force_cutlass_sm90: if True, generate ONLY SM90 kernels.

    Returns:
        The populated ``manifest.operations`` mapping.

    Raises:
        NotImplementedError: if cutlass_lib has no generator for *arch*.
    """
    import cutlass_lib
    args = Args(arch)
    if (cuda_version is not None):
        args.cuda_version = cuda_version
    manifest = cutlass_lib.manifest.Manifest(args)
    if (arch == '90'):
        # force -> SM90 only; allow -> SM90 plus the SM80 fallback set;
        # otherwise SM80 only.
        if force_cutlass_sm90:
            cutlass_lib.generator.GenerateSM90(manifest, args.cuda_version)
        elif allow_cutlass_sm90:
            cutlass_lib.generator.GenerateSM90(manifest, args.cuda_version)
            cutlass_lib.generator.GenerateSM80(manifest, args.cuda_version)
            cutlass_lib.extra_operation.GenerateSM80(manifest, args)
        else:
            cutlass_lib.generator.GenerateSM80(manifest, args.cuda_version)
            cutlass_lib.extra_operation.GenerateSM80(manifest, args)
    else:
        # Resolve the generator for other architectures dynamically.
        try:
            func = getattr(cutlass_lib.generator, ('GenerateSM' + arch))
            func(manifest, args.cuda_version)
        except AttributeError as e:
            raise NotImplementedError((('Arch ' + arch) + ' is not supported by current cutlass lib.')) from e
        # Extra (non-upstream) ops are best-effort: warn instead of failing.
        try:
            func = getattr(cutlass_lib.extra_operation, ('GenerateSM' + arch))
            func(manifest, args)
        except AttributeError:
            _LOGGER.warning((('Arch ' + arch) + ' is not supported by extra ops.'))
    return manifest.operations
def parse_qualname(value):
    """Split a 'location:qualname' string.

    Returns a (qualname, module, filename) triple; each missing or invalid
    component is None. Non-string or empty input yields (None, None, None).
    """
    failure = (None, None, None)
    if not value or not isinstance(value, str):
        return failure
    location, sep, qualname = value.rpartition(':')
    if not is_valid_qualname(qualname):
        return failure
    if not sep:
        # No ':' separator -- the whole string was the qualname.
        return (qualname, None, None)
    if _looks_like_module_name(location):
        return (qualname, location, None)
    # Otherwise treat the prefix as a filename.
    return (qualname, None, location)
class meter_config_stats_reply(stats_reply):
    """OpenFlow v5 (1.4) OFPMP_METER_CONFIG multipart/stats reply.

    NOTE(review): this reads like loxigen-generated, Python-2-era code --
    pack() appends a 4-character text string of NUL padding and joins with
    a str, which would not combine with struct.pack's bytes on Python 3,
    and unpack() takes only ``reader`` (no self) but carries no
    @staticmethod decorator. Confirm against the generator's output.
    """
    # Fixed wire-format identifiers for this message.
    version = 5
    type = 19
    stats_type = 10

    def __init__(self, xid=None, flags=None, entries=None):
        # Default: unset xid, no flags, empty entry list.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize the message; the length field (index 2) is back-patched
        after all parts are packed."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        # 4 bytes of padding after the flags field.
        packed.append(('\x00' * 4))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a meter_config_stats_reply from *reader*; asserts the
        version/type/stats_type bytes match this class's constants."""
        obj = meter_config_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 10)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.meter_config.unpack)
        return obj

    def __eq__(self, other):
        # Equality compares type plus all wire-visible fields.
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via the pretty-printer *q*."""
        q.text('meter_config_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
def write_signature_clusters_vcf(working_dir, clusters, version):
    """Write signature clusters to <working_dir>/signatures/all.vcf.

    Args:
        working_dir: output directory; a 'signatures' subdirectory is created.
        clusters: 6-tuple of cluster lists (deletion, insertion, inversion,
            tandem duplication, insertion-from, translocation).
        version: tool version string embedded in the '##source' header.
    """
    (deletion_signature_clusters, insertion_signature_clusters, inversion_signature_clusters,
     tandem_duplication_signature_clusters, insertion_from_signature_clusters,
     translocation_signature_clusters) = clusters
    # NOTE(review): insertion_from and translocation clusters are unpacked
    # but never written to the VCF -- confirm whether that is intentional.
    signatures_dir = working_dir + '/signatures'
    # exist_ok avoids the check-then-create race of os.path.exists + mkdir.
    os.makedirs(signatures_dir, exist_ok=True)
    header_lines = [
        '##fileformat=VCFv4.3',
        '##source=SVIMV{0}'.format(version),
        '##ALT=<ID=DEL,Description="Deletion">',
        '##ALT=<ID=INV,Description="Inversion">',
        '##ALT=<ID=DUP,Description="Duplication">',
        '##ALT=<ID=DUP:TANDEM,Description="Tandem Duplication">',
        '##ALT=<ID=INS,Description="Insertion">',
        '##INFO=<ID=END,Number=1,Type=Integer,Description="End position of the variant described in this record">',
        '##INFO=<ID=SVTYPE,Number=1,Type=String,Description="Type of structural variant">',
        '##INFO=<ID=SVLEN,Number=.,Type=Integer,Description="Difference in length between REF and ALT alleles">',
        '#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO',
    ]
    vcf_entries = []
    for cluster_group in (deletion_signature_clusters, insertion_signature_clusters,
                          inversion_signature_clusters, tandem_duplication_signature_clusters):
        for cluster in cluster_group:
            vcf_entries.append((cluster.get_source(), cluster.get_vcf_entry()))
    # 'with' guarantees the file is closed even if a cluster method raises.
    with open(signatures_dir + '/all.vcf', 'w') as vcf_output:
        for line in header_lines:
            print(line, file=vcf_output)
        # Sort by source position only; entry text is never compared.
        for (source, entry) in sorted(vcf_entries, key=(lambda pair: pair[0])):
            print(entry, file=vcf_output)
def MatrixMultiply(left, right):
    """Multiply two 4x4 matrices stored in raylib's flat m0..m15 field layout.

    result.m(4*row + col) = sum_k left.m(4*row + k) * right.m(col + 4*k),
    accumulated left-to-right exactly like the original unrolled form.
    """
    product = rl.ffi.new('struct Matrix *')
    for row in range(4):
        for col in range(4):
            # Start from the k=0 term so the float addition order matches
            # the fully unrolled (((a + b) + c) + d) expression.
            acc = getattr(left, 'm%d' % (4 * row)) * getattr(right, 'm%d' % col)
            for k in range(1, 4):
                acc = acc + getattr(left, 'm%d' % (4 * row + k)) * getattr(right, 'm%d' % (col + 4 * k))
            setattr(product, 'm%d' % (4 * row + col), acc)
    return product
def add_in_place(n: Node):
    """Append node *n* to the module-level category bucket matching its kind.

    Unrecognized kinds fall through to a console hint.
    """
    if (n.kind == Kind.NO_INPUT):
        data_sources.nodes.append(n)
    elif (n.kind == Kind.DATA_MANIPULATION):
        data_manipulation.nodes.append(n)
    elif (n.kind == Kind.DATA_SAMPLING):
        data_sampling.nodes.append(n)
    elif (n.kind == Kind.DATA_TRANSFORMATION):
        data_transformation.nodes.append(n)
    elif (n.kind == Kind.INCREMENTAL_LEARNING):
        # NOTE(review): within this branch n.kind equals
        # Kind.INCREMENTAL_LEARNING, so comparing n.kind against SubKind
        # values can only match if the enum values overlap -- this probably
        # should read a sub-kind attribute (e.g. n.sub_kind). TODO confirm
        # against Node's definition.
        if (n.kind == SubKind.CLASSIFICATION):
            classification.nodes.append(n)
        elif (n.kind == SubKind.CLUSTERING):
            clustering.nodes.append(n)
        elif (n.kind == SubKind.REGRESSION):
            regression.nodes.append(n)
    elif (n.kind == Kind.MACHINE_LEARNING):
        # NOTE(review): same concern as above -- n.kind vs SubKind comparison.
        if (n.kind == SubKind.TOP):
            top.nodes.append(n)
        elif (n.kind == SubKind.CLASSIC):
            classic.nodes.append(n)
    elif (n.kind == Kind.MODEL_EVALUATION):
        model_evaluation.nodes.append(n)
    else:
        print('create your own!')
def map_erpnext_variant_to_shopify_variant(shopify_product: Product, erpnext_item, variant_attributes):
    """Resolve the Shopify variant id for an ERPNext item variant.

    Looks up an existing 'Ecommerce Item' mapping first; otherwise matches
    the Shopify variant by its option1/2/3 attributes and (outside tests)
    persists a new mapping document. Shows a message when no match is found.
    """
    variant_product_id = frappe.db.get_value(
        'Ecommerce Item',
        {'erpnext_item_code': erpnext_item.name, 'integration': MODULE_NAME},
        'integration_item_code')
    if not variant_product_id:
        for shopify_variant in shopify_product.variants:
            attrs_match = (
                shopify_variant.option1 == variant_attributes.get('option1')
                and shopify_variant.option2 == variant_attributes.get('option2')
                and shopify_variant.option3 == variant_attributes.get('option3')
            )
            if not attrs_match:
                continue
            variant_product_id = str(shopify_variant.id)
            if not frappe.flags.in_test:
                frappe.get_doc({
                    'doctype': 'Ecommerce Item',
                    'erpnext_item_code': erpnext_item.name,
                    'integration': MODULE_NAME,
                    'integration_item_code': str(shopify_product.id),
                    'variant_id': variant_product_id,
                    'sku': str(shopify_variant.sku),
                    'variant_of': erpnext_item.variant_of,
                }).insert()
            break
    if not variant_product_id:
        msgprint(_("Shopify: Couldn't sync item variant."))
    return variant_product_id
class Window():
    """Curses pager over multiple pages of text lines with a status bar.

    BUGFIX: the ``cur_page`` accessor pair had lost its decorators (a bare
    ``_page.setter`` expression remained, which would raise NameError, and
    ``self.cur_page.items`` would fail on the plain int 0). Restored the
    ``@property`` / ``@cur_page.setter`` pair.
    """

    # Scroll direction increments applied to self.position.
    UP = (- 1)
    DOWN = 1

    def __init__(self, stdscr, pages, titles):
        """stdscr: curses screen; pages: list of line lists; titles: per-page titles."""
        self.top = 0
        self.position = self.top
        self.pages = pages
        self.page_titles = titles
        self.cur_page = 0
        self.window = stdscr
        # Reserve the last line for the status bar.
        self.height = (curses.LINES - 1)
        self.width = (curses.COLS - 1)
        self.bottom = len(self.cur_page.items)
        curses.init_pair(1, curses.COLOR_BLACK, curses.COLOR_WHITE)

    @property
    def cur_page(self):
        """Current page as a namedtuple with fields (pos, items, title)."""
        return self._cur_page

    @cur_page.setter
    def cur_page(self, pos):
        page = namedtuple('Page', 'pos items title')
        self._cur_page = page(pos=pos, items=self.pages[pos], title=self.page_titles[pos])
        return self._cur_page

    def status_bar_render(self):
        """Draw the highlighted status bar on the reserved bottom line."""
        statusbarstr = f"{self.cur_page.title} | Navigate with arrows or wasd | Press 'q' to exit"
        self.window.attron(curses.color_pair(1))
        self.window.addstr(self.height, 0, statusbarstr)
        # Pad the remainder of the line so the highlight spans full width.
        self.window.addstr(self.height, len(statusbarstr), (' ' * (self.width - len(statusbarstr))))
        self.window.attroff(curses.color_pair(1))

    def run(self):
        """Render once, then enter the input loop until 'q'."""
        self.display()
        self.user_input()

    def display(self):
        """Redraw the visible slice of the current page plus the status bar."""
        self.window.erase()
        for (idx, item) in enumerate(self.cur_page.items[self.position:((self.position + self.height) - 1)]):
            self.window.addstr(idx, 0, item)
        self.status_bar_render()
        self.window.refresh()

    def scroll_up(self, user_select):
        """Handle up/home/page-up keys, clamped at the top."""
        if ((user_select in [curses.KEY_UP, ord('w'), ord('k')]) and (self.position != self.top)):
            self.position += self.UP
        elif ((user_select == ord('H')) and (self.position != self.top)):
            self.position = self.top
        elif ((user_select == curses.KEY_PPAGE) and (self.position != self.top)):
            if ((self.position - self.height) > self.top):
                self.position = (self.position - self.height)
            else:
                self.position = self.top

    def scroll_down(self, user_select):
        """Handle down/end/page-down keys, clamped at the bottom."""
        if ((user_select in [curses.KEY_DOWN, ord('s'), ord('j')]) and ((self.position + self.height) <= self.bottom)):
            self.position += self.DOWN
        elif ((user_select == ord('G')) and ((self.position + self.height) != self.bottom)):
            self.position = ((self.bottom - self.height) + 1)
        elif (user_select == curses.KEY_NPAGE):
            if ((self.position + self.height) < (self.bottom - self.height)):
                self.position = (self.position + self.height)
            else:
                self.position = ((self.bottom - self.height) + 1)

    def scroll_right(self, user_select):
        """Switch to the next page (no wrap) and reset the bottom bound."""
        if (len(self.pages) != (self.cur_page.pos + 1)):
            self.cur_page = (self.cur_page.pos + 1)
            self.bottom = len(self.cur_page.items)

    def scroll_left(self, user_select):
        """Switch to the previous page (no wrap) and reset the bottom bound."""
        if (self.cur_page.pos != 0):
            self.cur_page = (self.cur_page.pos - 1)
            self.bottom = len(self.cur_page.items)

    def user_input(self):
        """Main key loop: dispatch scroll keys and redraw until 'q'."""
        user_select = self.window.getch()
        while (user_select != ord('q')):
            self.status_bar_render()
            # Vertical scrolling only matters when the page overflows.
            if (self.bottom > self.height):
                if (user_select in UP_KEYS):
                    self.scroll_up(user_select)
                elif (user_select in DOWN_KEYS):
                    self.scroll_down(user_select)
            if (user_select in RIGHT_KEYS):
                self.scroll_right(user_select)
            elif (user_select in LEFT_KEYS):
                self.scroll_left(user_select)
            self.display()
            user_select = self.window.getch()
class OptionSeriesBubbleSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Highcharts bubble-series sonification pitch-mapping options.

    BUGFIX: the duplicated getter/setter defs carried no decorators, so each
    setter def simply shadowed its getter and plain attribute assignment
    bypassed ``_config`` entirely. Restored the @property/@<name>.setter
    pairs this accessor pattern requires.
    """

    @property
    def mapFunction(self):
        # Mapping function for pitch; None means the library default.
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        # Data property mapped to pitch (default 'y').
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        # Highest note of the mapping range (default 'c6').
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        # Lowest note of the mapping range (default 'c2').
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        # Musical scale constraint; None means the library default.
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        # Range the mapping operates within (default 'yAxis').
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class FaucetTaggedVLANPCPTest(FaucetTaggedTest):
    """Faucet integration test: an ACL matching vlan_pcp=1 rewrites the
    priority to vlan_pcp=2 before forwarding.

    NOTE(review): the YAML indentation inside these config strings looks
    collapsed (single spaces per level) -- confirm against the original
    faucet test configuration before relying on them.
    """
    CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "tagged"\nacls:\n 1:\n - rule:\n vlan_vid: 100\n vlan_pcp: 1\n actions:\n output:\n set_fields:\n - vlan_pcp: 2\n allow: 1\n - rule:\n actions:\n allow: 1\n'
    CONFIG = '\n interfaces:\n %(port_1)d:\n tagged_vlans: [100]\n acl_in: 1\n %(port_2)d:\n tagged_vlans: [100]\n %(port_3)d:\n tagged_vlans: [100]\n %(port_4)d:\n tagged_vlans: [100]\n'
    def test_tagged(self):
        """Send PCP-1 tagged pings from port 1; confirm the ACL flow matched
        and the receiver sees frames rewritten to PCP 2."""
        (first_host, second_host) = self.hosts_name_ordered()[:2]
        # Map every skb priority to VLAN egress priority 1 on the sender.
        first_host.run_ip_batch([('link set %s type vlan egress %u:1' % (first_host.defaultIntf(), i)) for i in range(0, 8)])
        self.one_ipv4_ping(first_host, second_host.IP())
        # The ACL flow matching vid 100 / pcp 1 must have non-zero packets.
        self.wait_nonzero_packet_count_flow({'vlan_vid': 100, 'vlan_pcp': 1}, table_id=self._PORT_ACL_TABLE)
        tcpdump_filter = ('ether dst %s' % second_host.MAC())
        tcpdump_txt = self.tcpdump_helper(second_host, tcpdump_filter, [(lambda : first_host.cmd(('ping -c3 %s' % second_host.IP())))], root_intf=True, packets=1)
        # tcpdump on the receiver must show the rewritten priority (p 2).
        self.assertTrue(re.search('vlan 100, p 2,', tcpdump_txt))
def load_csv(fstr, resample=None):
    """Load a pressure CSV into a datetime-indexed pandas Series.

    Args:
        fstr: path (or file-like) readable by ``pd.read_csv``; must contain
            'timestmp' (timestamps using '/' as date separator) and 'p'
            (pressure values) columns.
        resample: optional pandas offset alias (e.g. '1min'); when given,
            the series is resampled and bin-averaged.

    Returns:
        pd.Series of pressure values indexed by parsed timestamps.
    """
    df = pd.read_csv(fstr)
    # 'timestmp' uses '/' as the date separator; normalize to '-' so
    # to_datetime parses it. Vectorized str.replace supersedes the previous
    # per-element re.sub helper (a literal, not a regex, replacement).
    timestamps = pd.to_datetime(df['timestmp'].str.replace('/', '-', regex=False))
    ts = pd.Series(df['p'].values, index=timestamps)
    if (resample is not None):
        ts = ts.resample(resample).mean()
    return ts
class OptionPlotoptionsErrorbarSonificationContexttracksMapping(Options):
    """Highcharts errorbar sonification context-track mapping options.

    BUGFIX: the duplicated ``text`` getter/setter defs carried no decorators
    (the setter shadowed the getter), indicating stripped @property
    decorators; restored properties on all accessors to match the usual
    Options accessor pattern -- confirm against the generator's output.
    """

    @property
    def frequency(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingFrequency':
        return self._config_sub_data('frequency', OptionPlotoptionsErrorbarSonificationContexttracksMappingFrequency)

    @property
    def gapBetweenNotes(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsErrorbarSonificationContexttracksMappingGapbetweennotes)

    @property
    def highpass(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsErrorbarSonificationContexttracksMappingHighpass)

    @property
    def lowpass(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsErrorbarSonificationContexttracksMappingLowpass)

    @property
    def noteDuration(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsErrorbarSonificationContexttracksMappingNoteduration)

    @property
    def pan(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsErrorbarSonificationContexttracksMappingPan)

    @property
    def pitch(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsErrorbarSonificationContexttracksMappingPitch)

    @property
    def playDelay(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsErrorbarSonificationContexttracksMappingPlaydelay)

    @property
    def rate(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsErrorbarSonificationContexttracksMappingRate)

    @property
    def text(self):
        # Free-form text mapping; None means the library default.
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsErrorbarSonificationContexttracksMappingTime)

    @property
    def tremolo(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsErrorbarSonificationContexttracksMappingTremolo)

    @property
    def volume(self) -> 'OptionPlotoptionsErrorbarSonificationContexttracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsErrorbarSonificationContexttracksMappingVolume)
def decode_snooz_v2(decompressed, last_timestamp_ms):
    """Decode a version-2 btsnooz payload and emit btsnoop records to stdout.

    NOTE(review): two numeric literals were missing from the extracted source
    (`last_timestamp_ms + )` and `first_timestamp_ms & )`); restored the
    btsnoop epoch delta and the 32-bit mask per AOSP's btsnooz.py -- confirm.
    """
    # Btsnoop timestamps count microseconds from year 0 AD; shift the Unix-
    # epoch-relative last timestamp into that domain.
    first_timestamp_ms = (last_timestamp_ms + 0x00dcddb30f2f8000)
    offset = 0
    # First pass: walk every record to rewind delta times back to the
    # timestamp of the oldest packet in the buffer.
    while (offset < len(decompressed)):
        (length, packet_length, delta_time_ms, snooz_type) = struct.unpack_from('=HHIb', decompressed, offset)
        offset += ((9 + length) - 1)
        first_timestamp_ms -= delta_time_ms
    offset = 0
    # Second pass: re-walk the records, emitting btsnoop record headers
    # (big-endian) with absolute timestamps, followed by the payload.
    while (offset < len(decompressed)):
        (length, packet_length, delta_time_ms, snooz_type) = struct.unpack_from('=HHIb', decompressed, offset)
        first_timestamp_ms += delta_time_ms
        offset += 9
        sys.stdout.buffer.write(struct.pack('>II', packet_length, length))
        sys.stdout.buffer.write(struct.pack('>II', type_to_direction(snooz_type), 0))
        # 64-bit timestamp split into two 32-bit words.
        sys.stdout.buffer.write(struct.pack('>II', (first_timestamp_ms >> 32), (first_timestamp_ms & 0xFFFFFFFF)))
        sys.stdout.buffer.write(type_to_hci(snooz_type))
        sys.stdout.buffer.write(decompressed[offset:((offset + length) - 1)])
        offset += (length - 1)
@frappe.whitelist()
@frappe.validate_and_sanitize_search_inputs
def get_healthcare_service_units(doctype, txt, searchfield, start, page_len, filters):
    """Link-field search query for non-group Healthcare Service Units.

    NOTE(review): the two garbled lines '()' and '_and_sanitize_search_inputs'
    preceding this function were restored as '@frappe.whitelist()' and
    '@frappe.validate_and_sanitize_search_inputs' -- confirm upstream.

    Only units allowing appointments are returned, except that an inpatient's
    current service unit is always included when 'inpatient_record' is set.
    """
    table = frappe.qb.DocType('Healthcare Service Unit')
    # NOTE(review): txt is interpolated into the LIKE pattern via str.format;
    # the sanitize decorator above is relied on to keep this injection-safe.
    query = frappe.qb.from_(table).where((table.is_group == 0)).where((table.company == filters.get('company'))).where(table.name.like('%{0}%'.format(txt))).select('name').get_sql()
    if (filters and filters.get('inpatient_record')):
        from healthcare.healthcare.doctype.inpatient_medication_entry.inpatient_medication_entry import get_current_healthcare_service_unit
        service_unit = get_current_healthcare_service_unit(filters.get('inpatient_record'))
        if service_unit:
            # The patient's current unit is allowed even if it normally does
            # not accept appointments.
            query += ' and (allow_appointments = 1 or name = {service_unit})'.format(service_unit=frappe.db.escape(service_unit))
        else:
            query += ' and allow_appointments = 1'
    else:
        query += ' and allow_appointments = 1'
    return frappe.db.sql(query, filters)
class SettingForm(forms.ModelForm):
    """Admin form for Setting: enlarges text widgets and rejects setting
    names that would collide with django.conf.settings attributes."""

    class Meta:
        model = Setting
        fields = '__all__'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Give the free-text fields roomier textarea widgets when present.
        widget_rows = {'description': 3, 'value_text': 5}
        for field_name, rows in widget_rows.items():
            if field_name in self.fields:
                self.fields[field_name].widget = forms.Textarea(attrs={'rows': rows, 'cols': 51})

    def clean_name(self):
        """Validate (and optionally uppercase) the setting name."""
        name = self.cleaned_data.get('name', '')
        if settings.EXTRA_SETTINGS_ENFORCE_UPPERCASE_SETTINGS:
            name = enforce_uppercase_setting(name)
        if hasattr(settings, name):
            raise forms.ValidationError(f'Invalid setting name, settings.{name} already defined in django.conf.settings.')
        return name
class OptionSeriesArcdiagramNodesDatalabelsFilter(Options):
    """Data-label filter options (operator/property) for arc-diagram nodes.

    BUGFIX: the duplicated getter/setter defs carried no decorators, so the
    setters shadowed the getters and attribute assignment bypassed
    ``_config``; restored the @property/@<name>.setter pairs.
    """

    @property
    def operator(self):
        # Comparison operator for the filter; None means no default.
        return self._config_get(None)

    @operator.setter
    def operator(self, value: Any):
        self._config(value, js_type=False)

    # Note: 'property' below intentionally mirrors the Highcharts option name;
    # the builtin 'property' is still resolvable when its decorator runs,
    # and the subsequent '@property.setter' refers to the just-bound
    # class-level property object.
    @property
    def property(self):
        return self._config_get(None)

    @property.setter
    def property(self, text: str):
        self._config(text, js_type=False)
class ChrootsValidator(object):
    """WTForms validator: every whitespace-separated chroot in the field
    must be among the currently active mock chroots."""

    def __call__(self, form, field):
        if not field.data:
            # Empty field means nothing to validate.
            return
        requested = set(field.data.split())
        available = set(MockChrootsLogic.active_names())
        unavailable = requested - available
        if unavailable:
            raise wtforms.ValidationError(
                'Such chroot is not available: {}'.format(', '.join(unavailable)))
class UnsupportedMorxLookupTest(unittest.TestCase):
    """Decompiling a 'morx' table with an unknown lookup type must fail
    with a descriptive AssertionError."""

    def __init__(self, methodName):
        unittest.TestCase.__init__(self, methodName)
        # Python 2 compatibility: assertRaisesRegex used to be spelled
        # assertRaisesRegexp.
        if (not hasattr(self, 'assertRaisesRegex')):
            self.assertRaisesRegex = self.assertRaisesRegexp

    def test_unsupportedLookupType(self):
        # Replace bytes 67-68 of the fixture with the single bogus
        # lookup-type byte 66.
        data = bytesjoin([MORX_NONCONTEXTUAL_DATA[:67], bytechr(66), MORX_NONCONTEXTUAL_DATA[69:]])
        with self.assertRaisesRegex(AssertionError, "unsupported 'morx' lookup type 66"):
            morx = newTable('morx')
            morx.decompile(data, FakeFont(['.notdef']))
def ensure_charge_in_range(sample: Callable[([Union[(ArrayLike[float], SpatialDataArray)], Union[(ArrayLike[float], SpatialDataArray)]], Union[(ArrayLike[float], ArrayLike[Complex], SpatialDataArray)])]) -> Callable[([Union[(ArrayLike[float], SpatialDataArray)], Union[(ArrayLike[float], SpatialDataArray)]], Union[(ArrayLike[float], ArrayLike[Complex], SpatialDataArray)])]:
    """Decorator for ChargePerturbation.sample: reject complex inputs and
    warn when densities fall outside the declared electron/hole ranges.

    NOTE(review): the original extraction showed a bare '(sample)' before
    the inner function -- almost certainly a stripped '@wraps(sample)'
    decorator; restored. Also restored the missing space in the two warning
    messages ("sample()'is outside" -> "sample()' is outside").
    """
    from functools import wraps

    @wraps(sample)
    def _sample(self, electron_density: Union[(ArrayLike[float], SpatialDataArray)], hole_density: Union[(ArrayLike[float], SpatialDataArray)]) -> Union[(ArrayLike[float], ArrayLike[Complex], SpatialDataArray)]:
        # Complex densities are never physical -- hard error.
        if np.iscomplexobj(electron_density):
            raise ValueError("Cannot pass complex 'electron_density' to 'sample()'")
        if np.iscomplexobj(hole_density):
            raise ValueError("Cannot pass complex 'hole_density' to 'sample()'")
        # Out-of-range values are allowed but trigger a warning.
        (e_min, e_max) = self.electron_range
        electron_numpy = np.array(electron_density)
        if (np.any((electron_numpy < e_min)) or np.any((electron_numpy > e_max))):
            log.warning(f"Electron density values passed to 'ChargePerturbation.sample()' is outside of 'ChargePerturbation.electron_range' = {self.electron_range}")
        (h_min, h_max) = self.hole_range
        hole_numpy = np.array(hole_density)
        if (np.any((hole_numpy < h_min)) or np.any((hole_numpy > h_max))):
            log.warning(f"Hole density values passed to 'ChargePerturbation.sample()' is outside of 'ChargePerturbation.hole_range' = {self.hole_range}")
        return sample(self, electron_density, hole_density)
    return _sample
class ChatHistoryManager():
    """Manage the current conversation plus stored history and assemble the
    model message list for the next LLM call."""

    def __init__(self, chat_ctx: ChatContext, prompt_template: PromptTemplate, history_storage: BaseChatHistoryMemory, chat_retention_rounds: Optional[int]=0) -> None:
        """Load prior messages from *history_storage* and start a fresh
        OnceConversation for the current chat scene."""
        self._chat_ctx = chat_ctx
        # How many past rounds to keep when building the prompt (0 = default).
        self.chat_retention_rounds = chat_retention_rounds
        self.current_message: OnceConversation = OnceConversation(chat_ctx.chat_scene.value())
        self.prompt_template = prompt_template
        self.history_storage: BaseChatHistoryMemory = history_storage
        self.history_message: List[OnceConversation] = history_storage.messages()
        self.current_message.model_name = chat_ctx.model_name
        if chat_ctx.select_param:
            # Record the scene parameter (type taken from the scene's first
            # declared param type, when any are declared).
            if (len(chat_ctx.chat_scene.param_types()) > 0):
                self.current_message.param_type = chat_ctx.chat_scene.param_types()[0]
            self.current_message.param_value = chat_ctx.select_param

    def _new_chat(self, input_values: Dict) -> List[ModelMessage]:
        """Start a new chat round: record the user input (and the rendered
        system prompt, if a template is set), then return the full message
        list for the model."""
        self.current_message.chat_order = (len(self.history_message) + 1)
        self.current_message.add_user_message(self._chat_ctx.current_user_input, check_duplicate_type=True)
        self.current_message.start_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        self.current_message.tokens = 0
        if self.prompt_template.template:
            current_prompt = self.prompt_template.format(**input_values)
            self.current_message.add_system_message(current_prompt)
        return self._generate_llm_messages()

    def _generate_llm_messages(self) -> List[ModelMessage]:
        """Assemble messages in order: template definition, system messages,
        examples, retained history, then the current user message."""
        # Imported lazily to avoid a circular import with the scene module.
        from dbgpt.app.scene.base_chat import _load_system_message, _load_example_messages, _load_history_messages, _load_user_message
        messages = []
        if self.prompt_template.template_define:
            messages.append(ModelMessage(role=ModelMessageRoleType.SYSTEM, content=self.prompt_template.template_define))
        messages += _load_system_message(self.current_message, self.prompt_template, str_message=False)
        messages += _load_example_messages(self.prompt_template, str_message=False)
        messages += _load_history_messages(self.prompt_template, self.history_message, self.chat_retention_rounds, str_message=False)
        messages += _load_user_message(self.current_message, self.prompt_template, str_message=False)
        return messages
class TestNull(util.ColorAsserts, unittest.TestCase):
    """HSI 'none'/NaN channel handling: cases where hue becomes undefined."""

    def test_null_input(self):
        """A literal NaN hue is preserved as NaN."""
        c = Color('hsi', [NaN, 0.5, 1], 1)
        self.assertTrue(c.is_nan('hue'))

    def test_none_input(self):
        """The CSS 'none' keyword parses to a NaN hue."""
        c = Color('color(--hsi none 0% 75% / 1)')
        self.assertTrue(c.is_nan('hue'))

    def test_null_normalization_min_sat(self):
        """Zero saturation makes hue meaningless; normalize() yields NaN."""
        c = Color('color(--hsi 270 0% 75% / 1)').normalize()
        self.assertTrue(c.is_nan('hue'))

    def test_null_normalization_min_intensity(self):
        """Zero intensity makes hue meaningless; normalize() yields NaN."""
        c = Color('color(--hsi 270 20% 0% / 1)').normalize()
        self.assertTrue(c.is_nan('hue'))

    def test_corner_case_null(self):
        """An achromatic sRGB corner case converts to HSL with NaN hue."""
        c = Color('color(srgb -2 0 2)').convert('hsl')
        self.assertTrue(c.is_nan('hue'))
def test_bucket_sort_agg():
    """bucket_sort serializes correctly, standalone and nested inside a
    date_histogram aggregation."""
    sort_spec = [{'total_sales': {'order': 'desc'}}]
    standalone = aggs.BucketSort(sort=sort_spec, size=3)
    assert standalone.to_dict() == {'bucket_sort': {'sort': sort_spec, 'size': 3}}
    histogram = aggs.DateHistogram(field='date', interval='month')
    histogram.bucket('total_sales', 'sum', field='price')
    histogram.bucket('sales_bucket_sort', 'bucket_sort', sort=sort_spec, size=3)
    expected = {
        'date_histogram': {'field': 'date', 'interval': 'month'},
        'aggs': {
            'total_sales': {'sum': {'field': 'price'}},
            'sales_bucket_sort': {'bucket_sort': {'sort': sort_spec, 'size': 3}},
        },
    }
    assert expected == histogram.to_dict()
# NOTE(review): the extracted source showed a bare '.parametrize(...)' --
# restored the '@pytest.mark' prefix (the file already uses pytest.approx).
@pytest.mark.parametrize('iin, jin, xcor, ycor, xinc, yinc, ncol, nrow, yflip, rota, exp_xori, exp_yori', [(0, 0, 0.0, 0.0, 50.0, 50.0, 2, 2, 1, 0.0, 0.0, 0.0), (0, 0, 0.0, 0.0, 50.0, 50.0, 2, 2, 1, 90.0, 0.0, 0.0), (0, 0, 100.0, 300.0, 50.0, 50.0, 2, 2, 1, 0.0, 100.0, 300.0), (1, 1, 100.0, 300.0, 50.0, 50.0, 2, 2, 1, 0.0, 50.0, 250.0), (1, 1, 100.0, 300.0, 50.0, 50.0, 2, 2, 1, 360.0, 50.0, 250.0), (1, 1, 100.0, 300.0, 50.0, 50.0, 2, 2, 1, 90.0, 150.0, 250.0), (1, 1, 100.0, 300.0, 50.0, 50.0, 2, 2, 1, 180.0, 150.0, 350.0), (2, 2, 100.0, 300.0, 50.0, 50.0, 3, 3, 1, 0.0, 0.0, 200.0), (2, 2, 100.0, 300.0, 50.0, 100.0, 3, 3, 1, 0.0, 0.0, 100.0), (2, 2, 100.0, 300.0, 50.0, 100.0, 3, 3, 1, 360.0, 0.0, 100.0), (2, 2, 100.0, 300.0, 50.0, 100.0, 3, 3, 1, 720.0, 0.0, 100.0), (0, 2, 0.0, 0.0, 50.0, 50.0, 3, 3, 1, 45.0, 70.7107, (- 70.7107)), (0, 2, 0.0, 0.0, 50.0, 50.0, 3, 3, 1, 225.0, (- 70.7107), 70.7107), (2, 2, 0.0, 0.0, 100.0, 50.0, 3, 3, 1, 30.0, (- 123.2051), (- 186.6025)), (2, 0, 0.0, 0.0, 50.0, 50.0, 3, 3, 1, 45.0, (- 70.7107), (- 70.7107))])
def test_xyori_from_ij(iin, jin, xcor, ycor, xinc, yinc, ncol, nrow, yflip, rota, exp_xori, exp_yori):
    """xyori_from_ij must recover the grid origin from a known cell corner,
    for various rotations, flips and increments."""
    (xori, yori) = xcalc.xyori_from_ij(iin, jin, xcor, ycor, xinc, yinc, ncol, nrow, yflip, rota)
    assert (xori == pytest.approx(exp_xori, rel=0.001))
    assert (yori == pytest.approx(exp_yori, rel=0.001))
def test_paramters_with_custom_init():
    """A Record subclass with a hand-written __init__ still round-trips
    through the JSON serializer."""
    class Point(Record, include_metadata=False):
        x: int
        y: int

        def __init__(self, x, y, **kwargs):
            self.x = x
            self.y = y

    original = Point(30, 10)
    assert original.x == 30
    assert original.y == 10
    serialized = original.dumps(serializer='json')
    assert serialized == b'{"x":30,"y":10}'
    restored = Point.from_data(json.loads(serialized))
    assert restored.x == 30
    assert restored.y == 10
def test_error_handler_after_processor_error(app, client):
    """A 500 error handler must fire for exceptions raised in both the
    before_request and after_request processors.

    NOTE(review): the extracted source had stripped decorators ('_request',
    '(\'/\')', '(500)'); restored the flask registration decorators.
    """
    # Disable testing mode so exceptions are converted to 500 responses
    # instead of propagating.
    app.testing = False

    @app.before_request
    def before_request():
        if (_trigger == 'before'):
            (1 // 0)

    @app.after_request
    def after_request(response):
        if (_trigger == 'after'):
            (1 // 0)
        return response

    @app.route('/')
    def index():
        return 'Foo'

    @app.errorhandler(500)
    def internal_server_error(e):
        return ('Hello Server Error', 500)

    # _trigger is read by the processors via closure over this scope.
    for _trigger in ('before', 'after'):
        rv = client.get('/')
        assert (rv.status_code == 500)
        assert (rv.data == b'Hello Server Error')
class TargetingGeoLocationLocationExpansion(AbstractObject):
    """Ads-API targeting geo-location 'location expansion' object
    (generated-code style: Field constants plus _field_types map)."""

    def __init__(self, api=None):
        super(TargetingGeoLocationLocationExpansion, self).__init__()
        self._isTargetingGeoLocationLocationExpansion = True
        self._api = api

    class Field(AbstractObject.Field):
        allowed = 'allowed'

    _field_types = {'allowed': 'bool'}

    # BUGFIX: this takes 'cls' and is invoked on the class in the SDK's
    # generated pattern, but the @classmethod decorator was missing.
    @classmethod
    def _get_field_enum_info(cls):
        """Return enum metadata for fields (none for this object)."""
        field_enum_info = {}
        return field_enum_info
def deform_to_native(native_mesh, dest_mesh, dscalars, expected_labels, subject_id, sphere='sphere', scale=2.5):
    """Resample a subject's surfaces, metrics and labels from *native_mesh*
    to *dest_mesh*, then build inflated surfaces and a dense mapping.

    Steps (order matters -- later steps consume earlier outputs):
      1. resample surfaces and register them in the spec file,
      2. generate inflated surfaces (iterations scaled by *scale*),
      3. resample the dscalar metrics and expected labels,
      4. build the dense map for the destination mesh.

    sphere: name of the registration sphere surface used for resampling.
    """
    resample_surfs_and_add_to_spec(subject_id, native_mesh, dest_mesh, current_sphere=sphere)
    make_inflated_surfaces(subject_id, dest_mesh, iterations_scale=scale)
    resample_metric_and_label(subject_id, dscalars, expected_labels, native_mesh, dest_mesh, sphere)
    make_dense_map(subject_id, dest_mesh, dscalars, expected_labels)
# NOTE(review): the extracted source showed a bare '_converter(acc_ops.permute)';
# restored as the fx2ait registration decorator '@ait_converter' -- confirm.
@ait_converter(acc_ops.permute)
def acc_ops_permute(target: Target, args: Tuple[(Argument, ...)], kwargs: Dict[(str, Argument)], name: str) -> ConverterOutput:
    """Convert acc_ops.permute to an AIT permute op.

    Raises:
        ValueError: if the 'input' kwarg is not an AITTensor.
    """
    input_val = kwargs['input']
    if (not isinstance(input_val, AITTensor)):
        raise ValueError(f'Unexpected input for {name}: {input_val}')
    permutation = kwargs['permutation']
    return permute()(input_val, permutation)
def scatter_optimizer_state_dict(optimizer, optim_state_dict, model: FSDPWrapper):
    """Convert a loaded optimizer state dict to the layout expected by this
    rank's FSDP model (per the wrapper's load_state_dict_type), then load it."""
    load_type = model.load_state_dict_type
    if load_type == StateDictType.FULL_STATE_DICT:
        # Consolidated checkpoint: shard the full state across ranks.
        optim_state_dict = FSDP.shard_full_optim_state_dict(optim_state_dict, model, optim=optimizer)
    elif load_type == StateDictType.SHARDED_STATE_DICT:
        # Sharded checkpoint: flatten into this rank's layout.
        optim_state_dict = FSDP.flatten_sharded_optim_state_dict(optim_state_dict, model, optim=optimizer)
    optimizer.load_state_dict(optim_state_dict)
def generate_password_based_auth_token(asset_name, fledge_url):
    """Log in to Fledge with the default user credentials and store the
    returned auth token in the PASSWORD_TOKEN module global.

    NOTE(review): the right-hand side of the 'conn = ' assignment was missing
    in the extracted source; restored as an HTTPConnection per the
    surrounding request/getresponse usage -- confirm.
    """
    import http.client
    conn = http.client.HTTPConnection(fledge_url)
    conn.request('POST', '/fledge/login', json.dumps({'username': 'user', 'password': 'fledge'}))
    r = conn.getresponse()
    assert (200 == r.status)
    r = r.read().decode()
    jdoc = json.loads(r)
    assert (LOGIN_SUCCESS_MSG == jdoc['message'])
    # The default 'user' account must not have admin rights.
    assert (not jdoc['admin'])
    global PASSWORD_TOKEN
    PASSWORD_TOKEN = jdoc['token']
class CRawEye(ctypes.Structure):
    """ctypes mirror of a native raw-eye-tracking struct: 3D eyeball
    position, 3D gaze vector, pupil diameter and 2D gaze position, each with
    an associated confidence value (all C doubles). Field order and types
    must match the native layout exactly."""
    _fields_ = [('eyeballPosX', ctypes.c_double), ('eyeballPosY', ctypes.c_double), ('eyeballPosZ', ctypes.c_double), ('eyeballConfidence', ctypes.c_double), ('gazeVectorX', ctypes.c_double), ('gazeVectorY', ctypes.c_double), ('gazeVectorZ', ctypes.c_double), ('gazeVectorConfidence', ctypes.c_double), ('pupilDiameter', ctypes.c_double), ('gazePositionX', ctypes.c_double), ('gazePositionY', ctypes.c_double), ('gazePositionConfidence', ctypes.c_double)]
def test_wf_docstring():
    """Workflow docstrings must be propagated into the serialized interface:
    both outputs and the single input carry descriptions from the docstring."""
    model_wf = get_serializable(OrderedDict(), serialization_settings, my_wf_example)
    assert (len(model_wf.template.interface.outputs) == 2)
    assert (model_wf.template.interface.outputs['o0'].description == 'outputs')
    assert (model_wf.template.interface.outputs['o1'].description == 'outputs')
    assert (len(model_wf.template.interface.inputs) == 1)
    assert (model_wf.template.interface.inputs['a'].description == 'input a')
def _check_if_user_can_edit_copr(ownername, projectname):
    """Fetch the copr project and ensure the current user may edit it.

    Returns the copr; raises AccessRestricted when the user lacks admin
    rights on the project.
    """
    copr = get_copr(ownername, projectname)
    if flask.g.user.can_edit(copr):
        return copr
    full_name = '/'.join([ownername, projectname])
    raise AccessRestricted(
        "User '{0}' can not see permissions for project '{1}' (missing admin rights)".format(
            flask.g.user.name, full_name))
class CustomFontEditor(Editor):
    """wx font editor with separate facename and point-size choices, plus
    optional style and weight choices controlled by the factory."""

    def init(self, parent):
        """Build the editor's wx controls and wire their change events."""
        self.control = panel = TraitsUIPanel(parent, (- 1))
        sizer = wx.BoxSizer(wx.VERTICAL)
        sizer2 = wx.BoxSizer(wx.HORIZONTAL)
        facenames = self.factory.all_facenames()
        control = self._facename = wx.Choice(panel, (- 1), wx.Point(0, 0), wx.Size((- 1), (- 1)), facenames)
        sizer2.Add(control, 4, wx.EXPAND)
        panel.Bind(wx.EVT_CHOICE, self.update_object_parts, id=control.GetId())
        control = self._point_size = wx.Choice(panel, (- 1), wx.Point(0, 0), wx.Size((- 1), (- 1)), PointSizes)
        sizer2.Add(control, 1, (wx.EXPAND | wx.LEFT), 3)
        panel.Bind(wx.EVT_CHOICE, self.update_object_parts, id=control.GetId())
        # Style and weight choices only exist when the factory enables them.
        if self.factory.show_style:
            self._style = wx.Choice(panel, (- 1), wx.Point(0, 0), wx.Size((- 1), (- 1)), Styles)
            sizer2.Add(self._style, 1, (wx.EXPAND | wx.LEFT), 3)
            panel.Bind(wx.EVT_CHOICE, self.update_object_parts, id=self._style.GetId())
        if self.factory.show_weight:
            self._weight = wx.Choice(panel, (- 1), wx.Point(0, 0), wx.Size((- 1), (- 1)), Weights)
            sizer2.Add(self._weight, 1, (wx.EXPAND | wx.LEFT), 3)
            panel.Bind(wx.EVT_CHOICE, self.update_object_parts, id=self._weight.GetId())
        sizer.Add(sizer2, 0, wx.EXPAND)
        panel.SetSizer(sizer)
        self.set_tooltip()

    def dispose(self):
        """Unhook event handlers before the editor is destroyed."""
        disconnect(self._facename, wx.EVT_CHOICE)
        disconnect(self._point_size, wx.EVT_CHOICE)
        if self.factory.show_style:
            disconnect(self._style, wx.EVT_CHOICE)
        if self.factory.show_weight:
            disconnect(self._weight, wx.EVT_CHOICE)
        super().dispose()

    def update_object_parts(self, event):
        """Rebuild the font from the current control selections and push it
        back to the edited object via the factory."""
        point_size = int(self._point_size.GetStringSelection())
        facename = self._facename.GetStringSelection()
        style = wx.FONTSTYLE_NORMAL
        if self.factory.show_style:
            style += self._style.GetCurrentSelection()
        weight = wx.FONTWEIGHT_NORMAL
        if self.factory.show_weight:
            weight += self._weight.GetCurrentSelection()
        font = wx.Font(point_size, wx.FONTFAMILY_DEFAULT, style, weight, faceName=facename)
        self.value = self.factory.from_wx_font(font)

    def update_editor(self):
        """Sync the choice controls to the object's current font value."""
        font = self.factory.to_wx_font(self)
        try:
            self._facename.SetStringSelection(font.GetFaceName())
        except Exception:
            # BUGFIX: was a bare 'except:', which would also swallow
            # KeyboardInterrupt/SystemExit. Fall back to the first entry
            # when the face name is not in the choice list.
            self._facename.SetSelection(0)
        try:
            self._point_size.SetStringSelection(str(font.GetPointSize()))
        except Exception:
            self._point_size.SetSelection(0)

    def string_value(self, font):
        """Return the user-readable text form of *font*."""
        return self.factory.str_font(font)
class OptionPlotoptionsArearangeSonificationTracksMappingTremolo(Options):
    """Tremolo mapping options for arearange sonification tracks."""

    def depth(self) -> 'OptionPlotoptionsArearangeSonificationTracksMappingTremoloDepth':
        """Sub-options object for the tremolo depth mapping."""
        sub = self._config_sub_data(
            'depth', OptionPlotoptionsArearangeSonificationTracksMappingTremoloDepth)
        return sub

    def speed(self) -> 'OptionPlotoptionsArearangeSonificationTracksMappingTremoloSpeed':
        """Sub-options object for the tremolo speed mapping."""
        sub = self._config_sub_data(
            'speed', OptionPlotoptionsArearangeSonificationTracksMappingTremoloSpeed)
        return sub
class ReplaygainLimiter(ElementBin):
    """GStreamer element bin applying replaygain clipping protection."""

    index = 80
    name = 'rglimiter'

    def __init__(self):
        ElementBin.__init__(self, name=self.name)
        # Limiter followed by an audioconvert, ordered by element index.
        self.rglimit = Gst.ElementFactory.make('rglimiter', None)
        self.audioconvert = Gst.ElementFactory.make('audioconvert', None)
        self.elements[50] = self.rglimit
        self.elements[60] = self.audioconvert
        self.setup_elements()
        # Track the clipping-protection option and apply its current value now.
        event.add_ui_callback(self._on_option_set, 'replaygain_option_set')
        self._on_option_set('replaygain_option_set', None, 'replaygain/clipping-protection')

    def _on_option_set(self, name, object, data):
        """Toggle the limiter whenever the clipping-protection option changes."""
        if data != 'replaygain/clipping-protection':
            return
        enabled = settings.get_option('replaygain/clipping-protection', True)
        self.rglimit.set_property('enabled', enabled)
def test_bytearray(use_builtin_types):
    """Round-trip a bytearray through plistlib and verify value and bytes stability."""
    DataClass = bytes if use_builtin_types else plistlib.Data
    pl = DataClass(b'<binary gunk\x00\x01\x02\x03>')
    raw = pl if use_builtin_types else pl.data
    array = bytearray(raw)
    data = plistlib.dumps(array)
    pl2 = plistlib.loads(data, use_builtin_types=use_builtin_types)
    assert isinstance(pl2, DataClass)
    assert pl2 == pl
    # Serializing the loaded value must reproduce the identical plist bytes.
    data2 = plistlib.dumps(pl2, use_builtin_types=use_builtin_types)
    assert data == data2
def get_contract_names(full_source: str) -> List:
    """Extract (name, type) pairs for every contract-like definition in a
    Solidity source string.

    Line and block comments are stripped first so commented-out definitions
    are ignored. Returns a list of ``(contract_name, definition_type)``
    tuples, where definition_type is one of 'abstract contract', 'contract',
    'library' or 'interface'.
    """
    comment_regex = '(?:\\s*\\/\\/[^\\n]*)|(?:\\/\\*[\\s\\S]*?\\*\\/)'
    uncommented_source = re.sub(comment_regex, '', full_source)
    # Each match captures one full definition, delimited by the next
    # top-level keyword (or the end of the source).
    contracts = re.findall('((?:abstract contract|contract|library|interface)\\s[^;{]*{[\\s\\S]*?})\\s*(?=(?:abstract contract|contract|library|interface|pragma|struct|enum)\\s|$)', uncommented_source)
    names = []
    for definition in contracts:
        header = re.findall('(abstract contract|contract|library|interface)\\s+(\\S*)\\s*(?:is\\s+([\\s\\S]*?)|)(?:{)', definition)
        if not header:
            continue
        kind, contract_name, _parents = header[0]
        names.append((contract_name, kind))
    return names
_op([GapCursorA, PosIntA, BoolA])
def fission(proc, gap_cursor, n_lifts=1, unsafe_disable_checks=False):
    """Fission the statement block at *gap_cursor* into two blocks, lifting
    the split point through *n_lifts* enclosing scopes.

    Raises ValueError when the cursor sits at the edge of a block rather
    than strictly between two statements.
    """
    # Resolve the statement immediately preceding the gap.
    if gap_cursor.type() == ic.GapType.Before:
        stmt = gap_cursor.anchor().prev()
    else:
        stmt = gap_cursor.anchor()
    # A valid fission point needs a statement on both sides of the gap.
    if not stmt or not stmt.next():
        raise ValueError('expected cursor to point to a gap between statements, not at an edge')
    ir, fwd = scheduling.DoFissionAfterSimple(stmt._impl, n_lifts, unsafe_disable_checks)
    return Procedure(ir, _provenance_eq_Procedure=proc, _forward=fwd)
class TestExamples(unittest.TestCase):
    """End-to-end tests for the bundled bbopt examples.

    Pattern shared by every test: run the example through the bbopt CLI
    (``call_bbopt``) against a fresh data file (``using``), parse the best
    value from the CLI output (``get_nums``), then import the example module
    and check its recorded optimum, value range, and trial count.
    """
    def test_random(self):
        """Random search example: best x over the trials."""
        print('\ntest random:')
        with using(random_data):
            results = call_bbopt(random_file, procs=1)
            want = max(get_nums(results, numtype=int))
            assert os.path.exists(random_data)
            from bbopt.examples import random_example
            random_example.bb.get_data(print_data=True)
            assert (random_example.x == want)
            assert (1 < random_example.x <= 10)
            assert (random_example.bb.num_examples == NUM_TRIALS)
    def test_skopt(self):
        """scikit-optimize backend: minimizes y."""
        print('\ntest skopt:')
        with using(skopt_data):
            results = call_bbopt(skopt_file)
            want = min(get_nums(results, numtype=float))
            assert os.path.exists(skopt_data)
            from bbopt.examples import skopt_example
            assert_improving(skopt_example.bb.get_data(print_data=True), ave_func=middle_mean)
            assert (skopt_example.y == want)
            assert ((- 9) <= skopt_example.y < 21)
            assert (skopt_example.bb.num_examples == NUM_TRIALS)
    def test_conditional_skopt(self):
        """skopt backend with conditional parameters: maximizes x."""
        print('\ntest conditional_skopt:')
        with using(conditional_skopt_data):
            results = call_bbopt(conditional_skopt_file)
            want = max(get_nums(results, numtype=int))
            assert os.path.exists(conditional_skopt_data)
            from bbopt.examples import conditional_skopt_example
            assert_improving(conditional_skopt_example.bb.get_data(print_data=True))
            assert (conditional_skopt_example.x == want)
            assert (0 < conditional_skopt_example.x <= 20)
            assert (conditional_skopt_example.bb.num_examples == NUM_TRIALS)
    # Only defined on Python >= 3.7 (gate on the backend's requirements).
    if (sys.version_info >= (3, 7)):
        def test_bask(self):
            """bayes-skopt backend: maximizes x."""
            print('\ntest bask:')
            with using(bask_data):
                results = call_bbopt(bask_file)
                want = max(get_nums(results, numtype=float))
                assert os.path.exists(bask_data)
                from bbopt.examples import bask_example
                assert_improving(bask_example.bb.get_data(print_data=True), ave_func=median)
                assert (0 < want <= 20)
                assert (0 < bask_example.x <= 20)
                assert (bask_example.bb.num_examples == NUM_TRIALS)
    # Only defined on Python 3 (gate on the backend's requirements).
    if (sys.version_info >= (3,)):
        def test_pysot(self):
            """pySOT backend: minimizes best_y; runs only 2 trials (20 examples total)."""
            print('\ntest pysot:')
            with using(pysot_data):
                results = call_bbopt(pysot_file, trials=2)
                want = min(get_nums(results, numtype=float))
                assert os.path.exists(pysot_data)
                from bbopt.examples import pysot_example
                assert_improving(pysot_example.bb.get_data(print_data=True))
                assert (pysot_example.best_y == want)
                assert ((- 9) <= pysot_example.best_y < 21)
                assert (pysot_example.bb.num_examples == 20)
    def test_hyperopt(self):
        """hyperopt backend: minimizes y."""
        print('\ntest hyperopt:')
        with using(hyperopt_data):
            results = call_bbopt(hyperopt_file)
            want = min(get_nums(results, numtype=float))
            assert os.path.exists(hyperopt_data)
            from bbopt.examples import hyperopt_example
            assert_improving(hyperopt_example.bb.get_data(print_data=True), ave_func=None)
            assert (hyperopt_example.y == want)
            assert (hyperopt_example.bb.num_examples == NUM_TRIALS)
    def test_conditional_hyperopt(self):
        """hyperopt backend with conditional parameters: maximizes x."""
        print('\ntest conditional_hyperopt:')
        with using(conditional_hyperopt_data):
            results = call_bbopt(conditional_hyperopt_file)
            want = max(get_nums(results, numtype=int))
            assert os.path.exists(conditional_hyperopt_data)
            from bbopt.examples import conditional_hyperopt_example
            assert_improving(conditional_hyperopt_example.bb.get_data(print_data=True))
            assert (conditional_hyperopt_example.x == want)
            assert (0 < conditional_hyperopt_example.x <= 20)
            assert (conditional_hyperopt_example.bb.num_examples == NUM_TRIALS)
    def test_numpy(self):
        """numpy example: imported first to check the pre-run baseline (y == 0),
        then reloaded so the module-level example code reruns on the new data."""
        print('\ntest numpy:')
        with using(numpy_data):
            from bbopt.examples import numpy_example
            assert (numpy_example.y == 0)
            results = call_bbopt(numpy_file)
            want = min(get_nums(results, numtype=float))
            assert os.path.exists(numpy_data)
            reload(numpy_example)
            assert_improving(numpy_example.bb.get_data(print_data=True))
            assert (numpy_example.y == want)
            assert (numpy_example.bb.num_examples == NUM_TRIALS)
    def test_meta(self):
        """meta example: minimizes u within [0, 1)."""
        print('\ntest meta:')
        with using(meta_data):
            results = call_bbopt(meta_file)
            want = min(get_nums(results, numtype=float))
            assert os.path.exists(meta_data)
            from bbopt.examples import meta_example
            assert_improving(meta_example.bb.get_data(print_data=True))
            assert (meta_example.u == want)
            assert (0 <= meta_example.u < 1)
            assert (meta_example.bb.num_examples == NUM_TRIALS)
    def test_any_fast(self):
        """any_fast example: minimizes u."""
        print('\ntest any_fast:')
        with using(any_fast_data):
            results = call_bbopt(any_fast_file)
            want = min(get_nums(results, numtype=float))
            assert os.path.exists(any_fast_data)
            from bbopt.examples import any_fast_example
            assert_improving(any_fast_example.bb.get_data(print_data=True), ave_func=None)
            assert (any_fast_example.u == want)
            assert (any_fast_example.u < 1)
            assert (any_fast_example.bb.num_examples == NUM_TRIALS)
    def test_mixture(self):
        """mixture-of-algorithms example: checks the pre-run baseline loss,
        then reloads and verifies that more than one algorithm was used."""
        print('\ntest mixture:')
        with using(mixture_data):
            from bbopt.examples import mixture_example
            assert (mixture_example.loss == abs((sum([3, 4, 5, 6, 7]) - 10)))
            results = call_bbopt(mixture_file)
            want = min(get_nums(results, numtype=float))
            assert os.path.exists(mixture_data)
            reload(mixture_example)
            assert_improving(mixture_example.bb.get_data(print_data=True))
            assert (mixture_example.loss == want)
            assert (0 <= mixture_example.loss < 85)
            # Coconut-compiled composition: appears to map each stored example
            # to example['memo']['alg']; asserts the mixture tried >1 algorithm.
            assert (len(set(map(_coconut_base_compose(_coconut.operator.itemgetter('memo'), (_coconut.operator.itemgetter('alg'), 0, False)), mixture_example.bb.get_data()['examples']))) > 1)
            assert (mixture_example.bb.num_examples == NUM_TRIALS)
    def test_json(self):
        """json-backend example: checks pre-run baseline (y ~= 6), then reloads."""
        print('\ntest json:')
        with using(json_data):
            from bbopt.examples import json_example
            assert (round(json_example.y, 5) == 6)
            results = call_bbopt(json_file)
            want = min(get_nums(results, numtype=float))
            assert os.path.exists(json_data)
            reload(json_example)
            assert_improving(json_example.bb.get_data(print_data=True))
            assert (json_example.y == want)
            assert (json_example.bb.num_examples == NUM_TRIALS)
class TestActionFileAllocation(CuratorTestCase):
    """Integration tests for the curator 'allocation' action driven by an action file."""

    IDX1 = 'my_index'
    IDX2 = 'not_my_index'

    def _write_alloc_configs(self, alloc, value, wait_for_completion=False):
        """Write the client config and an allocation action file."""
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(
            self.args['actionfile'],
            testvars.allocation_test.format(KEY, value, alloc, wait_for_completion),
        )

    def _make_indices(self):
        """Create the two test indices and return their names."""
        for index_name in (self.IDX1, self.IDX2):
            self.create_index(index_name)
        return self.IDX1, self.IDX2

    def _routing(self, index):
        """Return the routing settings block for *index*."""
        return self.client.indices.get_settings(index=index)[index]['settings']['index']['routing']

    def test_include(self):
        alloc = 'include'
        self._write_alloc_configs(alloc, VALUE)
        idx1, idx2 = self._make_indices()
        self.invoke_runner()
        assert VALUE == self._routing(idx1)['allocation'][alloc][KEY]
        assert TIEREDROUTING == self._routing(idx2)

    def test_require(self):
        alloc = 'require'
        self._write_alloc_configs(alloc, VALUE)
        idx1, idx2 = self._make_indices()
        self.invoke_runner()
        assert VALUE == self._routing(idx1)['allocation'][alloc][KEY]
        assert TIEREDROUTING == self._routing(idx2)

    def test_exclude(self):
        alloc = 'exclude'
        self._write_alloc_configs(alloc, VALUE)
        idx1, idx2 = self._make_indices()
        self.invoke_runner()
        assert VALUE == self._routing(idx1)['allocation'][alloc][KEY]
        assert TIEREDROUTING == self._routing(idx2)

    def test_remove_exclude_with_none_value(self):
        alloc = 'exclude'
        self._write_alloc_configs(alloc, '')
        idx1, idx2 = self._make_indices()
        # Pre-set an exclude value so we can observe the runner removing it.
        self.client.indices.put_settings(
            index=idx1, settings={f'index.routing.allocation.{alloc}.{KEY}': 'bar'}
        )
        assert 'bar' == self._routing(idx1)['allocation'][alloc][KEY]
        self.invoke_runner()
        assert TIEREDROUTING == self._routing(idx1)
        assert TIEREDROUTING == self._routing(idx2)

    def test_invalid_allocation_type(self):
        self._write_alloc_configs('invalid', VALUE)
        self._make_indices()
        self.invoke_runner()
        assert 1 == self.result.exit_code

    def test_extra_option(self):
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'], testvars.bad_option_proto_test.format('allocation'))
        self._make_indices()
        self.invoke_runner()
        assert 1 == self.result.exit_code

    def test_skip_closed(self):
        self._write_alloc_configs('include', VALUE)
        # Close the first index before running; it must be left untouched.
        self.create_index(self.IDX1)
        self.client.indices.close(index=self.IDX1)
        self.create_index(self.IDX2)
        self.invoke_runner()
        assert TIEREDROUTING == self._routing(self.IDX1)
        assert TIEREDROUTING == self._routing(self.IDX2)

    def test_wait_for_completion(self):
        alloc = 'require'
        self._write_alloc_configs(alloc, VALUE, wait_for_completion=True)
        idx1, idx2 = self._make_indices()
        self.invoke_runner()
        assert VALUE == self._routing(idx1)['allocation'][alloc][KEY]
        assert TIEREDROUTING == self._routing(idx2)
def set_verified(verifying_client: VerifyingClient) -> VerificationsModel:
    """Persist a verification record for *verifying_client* and return it.

    The record expires VERIFICATION_DURATION from now and is committed
    before being returned.
    """
    with session() as s:
        expires = now() + VERIFICATION_DURATION
        model = VerificationsModel.from_id_ip4_expires(
            verifying_client.verification_id, verifying_client.ip4, expires
        )
        orm_model = model.to_orm_model()
        s.add(orm_model)
        s.flush()
        # Re-read through the flushed ORM object before committing.
        verified = VerificationsModel.from_orm_model(orm_model)
        s.commit()
        return verified
def _get_filetype_parser(file_path, parser_type):
    """Look up the parser callable for *file_path*'s extension and *parser_type*.

    parser_type is 'string' or 'file'. Raises InvalidFileExtensionError for
    an unsupported extension and InvalidParserTypeError for an unsupported
    parser type.
    """
    filetype_handlers = {
        'json': {'string': _parse_json_string, 'file': _parse_json_file},
        'yaml': {'string': _parse_yaml, 'file': _parse_yaml},
    }
    file_ext = file_path.split('.')[-1]
    handlers = filetype_handlers.get(file_ext)
    if handlers is None:
        raise util_errors.InvalidFileExtensionError('Unsupported file type: {}'.format(file_ext))
    parser = handlers.get(parser_type)
    if parser is None:
        raise util_errors.InvalidParserTypeError('Unsupported parser type: {}'.format(parser_type))
    return parser
class TestActionFileSnapshot(CuratorTestCase):
    """Integration tests for the curator 'snapshot' action driven by an action file."""

    def _write_snapshot_configs(self, repository, snap_name, ignore=None):
        """Write the client config plus a snapshot action file.

        With ignore=None the plain snapshot template is used; otherwise the
        test_682 template with the given ignore_empty_list flag.
        """
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        if ignore is None:
            actionfile = testvars.snapshot_test.format(repository, snap_name, 1, 30)
        else:
            actionfile = testvars.test_682.format(repository, snap_name, ignore, 1, 30)
        self.write_config(self.args['actionfile'], actionfile)

    def _snapshots(self):
        """Return all snapshots currently in the test repository."""
        return get_snapshot(self.client, self.args['repository'], '*')['snapshots']

    def test_snapshot(self):
        self.create_indices(5)
        self.create_repository()
        snap_name = 'snapshot1'
        self._write_snapshot_configs(self.args['repository'], snap_name)
        self.invoke_runner()
        snapshots = self._snapshots()
        assert 1 == len(snapshots)
        assert snap_name == snapshots[0]['snapshot']

    def test_snapshot_datemath(self):
        self.create_indices(5)
        self.create_repository()
        # Date-math name should resolve to yesterday's date.
        snap_name = '<snapshot-{now-1d/d}>'
        yesterday = (datetime.utcnow() - timedelta(days=1)).strftime('%Y.%m.%d')
        self._write_snapshot_configs(self.args['repository'], snap_name)
        self.invoke_runner()
        snapshots = self._snapshots()
        assert 1 == len(snapshots)
        assert f'snapshot-{yesterday}' == snapshots[0]['snapshot']

    def test_snapshot_ignore_empty_list(self):
        self.create_indices(5)
        self.create_repository()
        self._write_snapshot_configs(self.args['repository'], 'snapshot1', ignore=True)
        self.invoke_runner()
        assert 0 == len(self._snapshots())
        assert 0 == len(get_indices(self.client))

    def test_snapshot_do_not_ignore_empty_list(self):
        self.create_indices(5)
        self.create_repository()
        self._write_snapshot_configs(self.args['repository'], 'snapshot1', ignore=False)
        self.invoke_runner()
        assert 0 == len(self._snapshots())
        assert 5 == len(get_indices(self.client))

    def test_no_repository(self):
        self.create_indices(5)
        # A blank repository name must make the run fail.
        self._write_snapshot_configs(' ', 'snap_name')
        self.invoke_runner()
        assert 1 == self.result.exit_code

    def test_extra_option(self):
        self.create_indices(5)
        self.write_config(self.args['configfile'], testvars.client_config.format(HOST))
        self.write_config(self.args['actionfile'], testvars.bad_option_proto_test.format('snapshot'))
        self.invoke_runner()
        assert 1 == self.result.exit_code
class PlaylistExportDialog(FileOperationDialog):
    """Save dialog for exporting the current playlist to a playlist file.

    Outcomes are reported through the 'message' signal
    (Gtk.MessageType, text); the default handler (do_message) shows an
    info or error popup.
    """
    # Signal signature: run-last, returns bool, args (MessageType, str);
    # the true-handled accumulator stops emission once a handler returns True.
    __gsignals__ = {'message': (GObject.SignalFlags.RUN_LAST, GObject.TYPE_BOOLEAN, (Gtk.MessageType, GObject.TYPE_STRING), GObject.signal_accumulator_true_handled)}
    def __init__(self, playlist, parent=None):
        """Build the save dialog for *playlist*.

        Starts in the last-used export directory (falling back to the user's
        home), adds a 'relative paths' checkbox (on by default), and offers
        one file-extension filter per registered playlist-format-converter.
        """
        FileOperationDialog.__init__(self, title=_('Export Current Playlist'), parent=parent, action=Gtk.FileChooserAction.SAVE, buttons=(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_SAVE, Gtk.ResponseType.OK))
        self.set_current_folder_uri((settings.get_option('gui/playlist_export_dir') or GLib.filename_to_uri(xdg.homedir, None)))
        self.set_local_only(False)
        self.relative_checkbox = Gtk.CheckButton(_('Use relative paths to tracks'))
        self.relative_checkbox.set_active(True)
        self.extras_box.pack_start(self.relative_checkbox, False, False, 3)
        self.relative_checkbox.show()
        self.playlist = playlist
        extensions = {}
        for provider in providers.get('playlist-format-converter'):
            extensions[provider.name] = provider.title
        self.add_extensions(extensions)
        self.set_current_name(('%s.m3u' % playlist.name))
        self.connect('response', self.on_response)
    def run(self):
        """Run the dialog, then emit 'response' again with the result.

        NOTE(review): Gtk dialogs emit 'response' during run() already; this
        extra emit looks deliberate (on_response hides the dialog) — confirm.
        """
        response = FileOperationDialog.run(self)
        self.emit('response', response)
    def do_message(self, message_type, message):
        """Default 'message' handler: show an info or error popup."""
        if (message_type == Gtk.MessageType.INFO):
            info(self.get_transient_for(), markup=message)
        elif (message_type == Gtk.MessageType.ERROR):
            error(self.get_transient_for(), markup=message)
    def on_response(self, dialog, response):
        """Perform the export when the user confirmed the save."""
        self.hide()
        if (response == Gtk.ResponseType.OK):
            gfile = self.get_file()
            # Remember the chosen directory for the next export.
            settings.set_option('gui/playlist_export_dir', gfile.get_parent().get_uri())
            path = gfile.get_uri()
            # Append .m3u when the chosen path isn't a recognized playlist name.
            if (not is_valid_playlist(path)):
                path = ('%s.m3u' % path)
            options = PlaylistExportOptions(relative=self.relative_checkbox.get_active())
            try:
                export_playlist(self.playlist, path, options)
            except InvalidPlaylistTypeError as e:
                self.emit('message', Gtk.MessageType.ERROR, str(e))
            else:
                self.emit('message', Gtk.MessageType.INFO, (_('Playlist saved as <b>%s</b>.') % path))
def commands():
    """rez package commands: configure the CGRU/Afanasy environment and any
    DCC integrations (Maya, Houdini, Blender, Fusion) resolved alongside it."""
    import os
    unsetenv('PYTHONHOME')
    env.CGRU_LOCATION.set('/opt/cgru-{}.{}.{}'.format(env.REZ_CGRU_MAJOR_VERSION, env.REZ_CGRU_MINOR_VERSION, env.REZ_CGRU_PATCH_VERSION))
    env.PATH.prepend('${CGRU_LOCATION}/bin')
    env.PATH.prepend('${CGRU_LOCATION}/software_setup/bin')
    env.CGRU_PYTHON = '${CGRU_LOCATION}/lib/python'
    env.PYTHONPATH.prepend('${CGRU_PYTHON}')
    env.CGRU_VERSION = '{}.{}.{}'.format(env.REZ_CGRU_MAJOR_VERSION, env.REZ_CGRU_MINOR_VERSION, env.REZ_CGRU_PATCH_VERSION)
    # Afanasy render manager lives inside the CGRU tree.
    env.AF_ROOT = '${CGRU_LOCATION}/afanasy'
    env.PATH.prepend('${AF_ROOT}/bin')
    env.AF_PYTHON = '${AF_ROOT}/python'
    env.PYTHONPATH.prepend(env.AF_PYTHON)
    if ('CGRU_PYTHONEXE' not in env.keys()):
        env.CGRU_PYTHONEXE = 'python'
    # Prefer the Python interpreter bundled with CGRU when present.
    python_dir = '${CGRU_LOCATION}/python'
    if os.path.isdir(os.path.expandvars(python_dir)):
        print(f'Using CGRU Python: {python_dir}')
        env.PYTHONHOME = python_dir
        env.PATH.prepend(f'{python_dir}/bin')
        env.CGRU_PYTHONDIR = f'{python_dir}'
        pythonexe = f'{python_dir}/bin/python3'
        # Fix: expand variables before the file check (matching the isdir
        # checks above); previously the unexpanded '${...}' path never matched.
        if os.path.isfile(os.path.expandvars(pythonexe)):
            # Fix: point directly at the interpreter. The old code appended
            # '/bin/python3' to a path already ending in '/bin/python3'.
            env.CGRU_PYTHONEXE = pythonexe
        else:
            env.CGRU_PYTHONEXE = 'python3'
    # Bundled SIP / PyQt utilities, if shipped with this CGRU build.
    sip = '$CGRU_LOCATION/utilities/python/sip'
    if os.path.isdir(os.path.expandvars(sip)):
        env.PYTHONPATH.prepend(f'{sip}')
    pyqt = '$CGRU_LOCATION/utilities/python/pyqt'
    if os.path.isdir(os.path.expandvars(pyqt)):
        env.PYTHONPATH.prepend(f'{pyqt}')
    # DCC-specific setup, keyed on which package variant was resolved.
    if ('maya' in this.root):
        env.MAYA_CGRU_LOCATION = '$CGRU_LOCATION/plugins/maya'
        env.PYTHONPATH.prepend('${MAYA_CGRU_LOCATION}')
        env.MAYA_LOCATION = '/usr/autodesk/maya{}'.format(env.REZ_MAYA_MAJOR_VERSION)
        env.MAYA_VERSION = env.REZ_MAYA_MAJOR_VERSION
        env.MAYA_EXEC = '${MAYA_LOCATION}/bin/maya${MAYA_VERSION}'
        env.MAYA_CGRU_MENUS_NAME = 'CGRU'
        env.TMPDIR = '/tmp'
        env.XBMLANGPATH.prepend('${MAYA_CGRU_LOCATION}/icons/%B')
        env.MAYA_SCRIPT_PATH.prepend('${MAYA_CGRU_LOCATION}/mel/AETemplates')
        env.MAYA_PLUG_IN_PATH.prepend('${MAYA_CGRU_LOCATION}/mll/${MAYA_VERSION}')
        env.MAYA_SCRIPT_PATH.prepend('${MAYA_CGRU_LOCATION}/afanasy')
        env.APP_DIR = '${MAYA_LOCATION}'
        env.APP_EXE = '${MAYA_EXEC}'
    if ('houdini' in this.root):
        env.HOUDINI_LOCATION = '/opt/hfs{}.{}.{}'.format(env.REZ_HOUDINI_MAJOR_VERSION, env.REZ_HOUDINI_MINOR_VERSION, env.REZ_HOUDINI_PATCH_VERSION)
        env.APP_DIR = env.HOUDINI_LOCATION
        env.HOUDINI_CGRU_PATH = '$CGRU_LOCATION/plugins/houdini'
        env.PYTHONPATH.prepend('$HOUDINI_CGRU_PATH')
        # OTL scan path: user dir first, then CGRU plugins, then Houdini's own.
        env.HOUDINI_CGRU_OTLSCAN_PATH.append('$HIH/otls')
        env.HOUDINI_CGRU_OTLSCAN_PATH.append('$HOUDINI_CGRU_PATH')
        env.HOUDINI_CGRU_OTLSCAN_PATH.append('$HH/otls')
        if ('HOUDINI_OTLSCAN_PATH' not in env.keys()):
            env.HOUDINI_OTLSCAN_PATH = '$HOUDINI_CGRU_OTLSCAN_PATH'
        else:
            env.HOUDINI_OTLSCAN_PATH.prepend('${HOUDINI_CGRU_OTLSCAN_PATH}')
        env.APP_EXE = 'houdini'
    if ('blender' in this.root):
        env.BLENDER_LOCATION = '/opt/blender-{}.{}.{}'.format(env.REZ_BLENDER_MAJOR_VERSION, env.REZ_BLENDER_MINOR_VERSION, env.REZ_BLENDER_PATCH_VERSION)
        env.BLENDER_CGRU_PATH = '${CGRU_LOCATION}/plugins/blender'
        env.BLENDER_USER_SCRIPTS.prepend('${BLENDER_CGRU_PATH}')
        env.APP_DIR = '${BLENDER_LOCATION}'
        env.APP_EXE = '${BLENDER_LOCATION}/blender'
    if ('fusion' in this.root):
        env.FUSION_LOCATION = '/opt/BlackmagicDesign/Fusion{}/'.format(env.REZ_FUSION_MAJOR_VERSION)
        env.FUSION_EXEC = 'Fusion'
        env.FUSION_RENDERNODE_LOCATION = '/opt/BlackmagicDesign/FusionRenderNode{}/'.format(env.REZ_FUSION_MAJOR_VERSION)
        env.FUSION_RENDERNODE_EXEC = 'FusionRenderNode'
        env.FUSION_CGRU_PATH = '${CGRU_LOCATION}/plugins/fusion'
        env.APP_DIR = '${FUSION_LOCATION}'
        env.APP_EXE = 'Fusion'
        env.RENDER_DIR = '${FUSION_LOCATION}'
        env.RENDER_EXE = 'FusionRenderNode'
class TestSuperFencesCustomDefault(util.MdCase):
    """superfences with a wildcard ('*') custom fence acting as the default formatter."""
    extension = ['pymdownx.superfences']
    # '*' routes every otherwise-unmatched fence through default_format;
    # 'math' fences are still handled by the generic arithmatex formatter.
    extension_configs = {'pymdownx.superfences': {'custom_fences': [{'name': '*', 'class': '', 'format': default_format}, {'name': 'math', 'class': 'arithmatex', 'format': arithmatex.arithmatex_fenced_format(mode='generic')}]}}
    def test_default_override(self):
        """Math fence renders via arithmatex; the python fence uses the custom default."""
        self.check_markdown('\n    ```math\n    E(\\mathbf{v}, \\mathbf{h}) = -\\sum_{i,j}w_{ij}v_i h_j - \\sum_i b_i v_i - \\sum_j c_j h_j\n    ```\n\n    ```python\n    test\n    ```\n    ', '\n    <div class="arithmatex">\\[\n    E(\\mathbf{v}, \\mathbf{h}) = -\\sum_{i,j}w_{ij}v_i h_j - \\sum_i b_i v_i - \\sum_j c_j h_j\n    \\]</div>\n    <p><custom lang="python" class_name="class-">test</custom></p>\n    ', True)
class KLDivergence:
    """Kullback-Leibler divergence KL(y_true || y_pred), averaged over the batch.

    Both inputs are clamped to [epsilon, 1] before the log/ratio to avoid
    log(0) and division by zero.

    Args:
        epsilon: lower clamp bound used for numerical stability.
    """

    def __init__(self, epsilon: float = 1e-10):
        self.epsilon = epsilon

    def __call__(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor:
        """Return the mean KL divergence between *y_true* and *y_pred*.

        Both arguments are converted to tensors and must share a shape.
        """
        y_pred = convert_to_tensor(y_pred)
        y_true = convert_to_tensor(y_true)
        check_same_shape(y_pred, y_true)
        # Clamp to avoid log(0) / division by zero.
        y_pred = torch.clamp(y_pred, self.epsilon, 1)
        y_true = torch.clamp(y_true, self.epsilon, 1)
        # Sum over the last dimension; `dim` is the canonical torch keyword
        # (the original used the NumPy-compat alias `axis`).
        kld = torch.sum(y_true * torch.log(y_true / y_pred), dim=-1)
        return torch.mean(kld)
class OptionPlotoptionsPolygonSonificationContexttracksMappingLowpassFrequency(Options):
    """Lowpass-filter frequency mapping options for polygon sonification
    context tracks (Highcharts option wrapper; all defaults are None).

    NOTE(review): each option is defined twice — getter then same-named
    setter — so the setter shadows the getter at class-creation time. This
    matches the generated getter/setter pattern used throughout this module
    (see sibling Option* classes); presumably the _config machinery handles
    access — confirm before relying on the getters.
    """
    def mapFunction(self):
        # Getter: no default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # Setter.
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_records_stats():
    """Five sampled steps of a wrapped env must yield a total_step_count of 5
    for both the original-reward and base-reward epoch statistics."""
    env = LogStatsWrapper.wrap(build_dummy_maze_env())
    env.reset()
    n_steps = 5
    for _ in range(n_steps):
        env.step(env.action_space.sample())
    env.write_epoch_stats()
    for stats_event in (RewardEvents.reward_original, BaseEnvEvents.reward):
        assert env.get_stats_value(stats_event, LogStatsLevel.EPOCH, name='total_step_count') == n_steps
def edit_catfruit(save_stats: dict[(str, Any)]) -> dict[(str, Any)]:
    """Interactively edit the catfruit amounts in *save_stats* and return it.

    For game version codes >= 110400 no per-item maximum is applied;
    otherwise amounts are capped at 128.
    """
    cap = None if save_stats['game_version']['Value'] >= 110400 else 128
    catfruit = item.IntItemGroup.from_lists(
        names=get_fruit_names(helper.check_data_is_jp(save_stats)),
        values=save_stats['cat_fruit'],
        maxes=cap,
        group_name='Catfruit',
    )
    catfruit.edit()
    save_stats['cat_fruit'] = catfruit.get_values()
    return save_stats
class InstagramShoppingMerchantReviewMessage(AbstractObject):
    """Graph API object for an Instagram Shopping merchant review message.

    Generated-SDK style wrapper exposing two string fields: 'help_url'
    and 'message'.
    """
    def __init__(self, api=None):
        super(InstagramShoppingMerchantReviewMessage, self).__init__()
        # Type marker checked by the SDK's object machinery.
        self._isInstagramShoppingMerchantReviewMessage = True
        self._api = api
    class Field(AbstractObject.Field):
        # Field-name constants for this object type.
        help_url = 'help_url'
        message = 'message'
    _field_types = {'help_url': 'string', 'message': 'string'}
    def _get_field_enum_info(cls):
        # NOTE(review): takes `cls` but carries no @classmethod decorator —
        # likely stripped during extraction; confirm against the generated SDK.
        field_enum_info = {}
        return field_enum_info
class TestDevice(object):
    """Tests for the Prime Wireless (wired mode) mouse handler: each test
    invokes a setter and compares the raw HID report bytes it produced.

    NOTE(review): the bare `.parametrize(...)` lines below are not valid
    Python — they read like `@pytest.mark.parametrize` decorators whose
    `@pytest.mark` prefix was lost (and `mouse` looks like a pytest
    fixture missing `@pytest.fixture`). Restore the decorators before use.
    """
    def mouse(self):
        # NOTE(review): presumably a @pytest.fixture — decorator missing.
        settings = mouse_settings.FakeMouseSettings(4152, 47789, prime_wireless_wired.profile)
        return mouse.Mouse(usbhid.FakeDevice(), prime_wireless_wired.profile, settings)
    .parametrize('value,expected_hid_report', [(100, b'\x02\x00-\x01\x00\x00'), (200, b'\x02\x00-\x01\x00\x01'), (300, b'\x02\x00-\x01\x00\x02'), (18000, b'\x02\x00-\x01\x00\xd6'), ('200,400', b'\x02\x00-\x02\x00\x01\x03'), ('200,400,800,1600', b'\x02\x00-\x04\x00\x01\x03\x08\x11')])
    def test_set_sensitivity(self, mouse, value, expected_hid_report):
        """DPI / sensitivity setter emits the expected report."""
        mouse.set_sensitivity(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    .parametrize('value,expected_hid_report', [(125, b'\x02\x00+\x03'), (250, b'\x02\x00+\x02'), (500, b'\x02\x00+\x01'), (1000, b'\x02\x00+\x00')])
    def test_set_polling_rate(self, mouse, value, expected_hid_report):
        """Polling-rate setter emits the expected report."""
        mouse.set_polling_rate(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    .parametrize('value,expected_hid_report', [('#ABCDEF', b'\x02\x00!\x01\x00\xab\xcd\xef'), ('red', b'\x02\x00!\x01\x00\xff\x00\x00')])
    def test_set_color(self, mouse, value, expected_hid_report):
        """Color setter accepts hex and named colors."""
        mouse.set_color(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    .parametrize('value,expected_hid_report', [('default', b'\x02\x00*\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x03\x00\x00\x00\x00\x04\x00\x00\x00\x00\x05\x00\x00\x00\x000\x00\x00\x00\x001\x00\x00\x00\x002\x00\x00\x00\x00'), ('buttons(button2=button6)', b'\x02\x00*\x01\x00\x00\x00\x00\x06\x00\x00\x00\x00\x03\x00\x00\x00\x00\x04\x00\x00\x00\x00\x05\x00\x00\x00\x000\x00\x00\x00\x001\x00\x00\x00\x002\x00\x00\x00\x00'), ({'buttons': {'button2': 'button6'}}, b'\x02\x00*\x01\x00\x00\x00\x00\x06\x00\x00\x00\x00\x03\x00\x00\x00\x00\x04\x00\x00\x00\x00\x05\x00\x00\x00\x000\x00\x00\x00\x001\x00\x00\x00\x002\x00\x00\x00\x00'), ('buttons(ScrollUp=ScrollDown; ScrollDown=ScrollUp)', b'\x02\x00*\x01\x00\x00\x00\x00\x02\x00\x00\x00\x00\x03\x00\x00\x00\x00\x04\x00\x00\x00\x00\x05\x00\x00\x00\x000\x00\x00\x00\x002\x00\x00\x00\x001\x00\x00\x00\x00')])
    def test_set_buttons_mapping(self, mouse, value, expected_hid_report):
        """Button-mapping setter accepts string and dict specs."""
        mouse.set_buttons_mapping(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    .parametrize('value,expected_hid_report', [(0, b'\x02\x00)\x00\x00\x00'), (1, b'\x02\x00)`\xea\x00'), (5, b'\x02\x00)\xe0\x93\x04'), (20, b'\x02\x00)\x80O\x12')])
    def test_set_sleep_timer(self, mouse, value, expected_hid_report):
        """Sleep-timer setter emits the expected report."""
        mouse.set_sleep_timer(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    .parametrize('value,expected_hid_report', [(0, b'\x02\x00#\x0f\x01\x00\x00\x00\x00\x00'), (30, b'\x02\x00#\x0f\x01\x00\x000u\x00'), (60, b'\x02\x00#\x0f\x01\x00\x00`\xea\x00'), (300, b'\x02\x00#\x0f\x01\x00\x00\xe0\x93\x04'), (1200, b'\x02\x00#\x0f\x01\x00\x00\x80O\x12')])
    def test_set_dim_timer(self, mouse, value, expected_hid_report):
        """Dim-timer setter emits the expected report."""
        mouse.set_dim_timer(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    def test_battery_level(self, mouse):
        """Battery query sends the status request and returns charge info keys."""
        battery_info = mouse.battery
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == b'\x02\x00\x92')
        assert ('is_charging' in battery_info)
        assert ('level' in battery_info)
    .parametrize('value,expected_hid_report', [('off', b"\x02\x00'\x00"), ('rainbow', b"\x02\x00'\x01")])
    def test_set_default_lighting(self, mouse, value, expected_hid_report):
        """Default-lighting setter emits the expected report."""
        mouse.set_default_lighting(value)
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == expected_hid_report)
    def test_save(self, mouse):
        """Save command emits the persist-settings report."""
        mouse.save()
        mouse._hid_device.bytes.seek(0)
        hid_report = mouse._hid_device.bytes.read()
        assert (hid_report == b'\x02\x00\x11\x00')
class OptionSeriesTimelineSonificationTracksPointgrouping(Options):
    """Point-grouping options for timeline sonification tracks.

    Defaults (per the getters): algorithm 'minmax', enabled True,
    groupTimespan 15, prop 'y'.

    NOTE(review): getter/setter pairs share a name and the setter shadows
    the getter at class-creation time — generated pattern used throughout
    this module; confirm access goes through the _config machinery.
    """
    def algorithm(self):
        # Getter: default 'minmax'.
        return self._config_get('minmax')
    def algorithm(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        # Getter: default True.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Getter: default 15.
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Getter: default 'y'.
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
def extract96RoryWordpressCom(item):
    """Build a release message for items from this WordPress feed.

    Returns None for preview posts or items without volume/chapter info,
    False when no known series tag matches, otherwise the built message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items lacking chapter/volume info, and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('Jikuu Mahou', 'Jikuu Mahou de Isekai to Chikyuu wo Ittarikitari', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
            )
    return False
class OptionSeriesBellcurveDragdropGuideboxDefault(Options):
    """Default drag-guide-box style options for bellcurve series drag & drop.

    Defaults (per the getters): className 'highcharts-drag-box-default',
    color 'rgba(0, 0, 0, 0.1)', cursor 'move', lineColor '#888',
    lineWidth 1, zIndex 900.

    NOTE(review): getter/setter pairs share a name and the setter shadows
    the getter at class-creation time — generated pattern used throughout
    this module (see the identical class in this file's head); confirm
    access goes through the _config machinery.
    """
    def className(self):
        return self._config_get('highcharts-drag-box-default')
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get('rgba(0, 0, 0, 0.1)')
    def color(self, text: str):
        self._config(text, js_type=False)
    def cursor(self):
        return self._config_get('move')
    def cursor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        return self._config_get('#888')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(1)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(900)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def test_clean_up_ast():
    """clean_up() must flatten the intermediate sequence node into the root and
    fold a condition node that has only a false branch into a (negated)
    true branch, per the final assertion."""
    asgraph = AbstractSyntaxInterface()
    # Three code nodes: u = 9, break, v = 9.
    code_node_1 = asgraph._add_code_node([Assignment(var('u'), const(9))])
    code_node_2 = asgraph._add_code_node([Break()])
    code_node_3 = asgraph._add_code_node([Assignment(var('v'), const(9))])
    root_seq_node = asgraph.factory.create_seq_node()
    asgraph._add_node(root_seq_node)
    seq_node_1 = asgraph.factory.create_seq_node()
    asgraph._add_node(seq_node_1)
    seq_node_2 = asgraph.factory.create_seq_node()
    asgraph._add_node(seq_node_2)
    # Condition node on symbol 'a' with ONLY a false branch (seq_node_2).
    condition_node = asgraph._add_condition_node_with(LogicCondition.initialize_symbol('a', asgraph.factory.logic_context), false_branch=seq_node_2)
    # root -> (seq_node_1, condition_node); seq_node_1 -> (code 1, code 2); seq_node_2 -> code 3.
    asgraph._add_edges_from(((root_seq_node, seq_node_1), (root_seq_node, condition_node), (seq_node_1, code_node_1), (seq_node_1, code_node_2), (seq_node_2, code_node_3)))
    # Reachability order: code 1 before 2 and 3; code 2 before 3.
    asgraph._code_node_reachability_graph.add_reachability_from(((code_node_1, code_node_3), (code_node_1, code_node_2), (code_node_2, code_node_3)))
    root_seq_node.sort_children()
    asgraph.clean_up()
    # Expected post-clean-up shape: 6 nodes; code 1 and 2 hoisted directly under
    # the root; the false branch moved to the true branch with condition ~a.
    assert ((len(asgraph) == 6) and (root_seq_node.children == (code_node_1, code_node_2, condition_node)) and (condition_node.false_branch is None) and (condition_node.true_branch_child == code_node_3) and (condition_node.condition == (~ LogicCondition.initialize_symbol('a', asgraph.factory.logic_context))))
class BeamSyncer(Service):
    """Coordinates "beam" sync: sync headers up to a launch point, fetch the
    recent block bodies needed for uncle validation, then import blocks while
    state is downloaded on demand and (optionally) backfilled in the
    background.
    """
    def __init__(self, chain: AsyncChainAPI, db: AtomicDatabaseAPI, chain_db: BaseAsyncChainDB, peer_pool: ETHPeerPool, event_bus: EndpointAPI, metrics_registry: MetricsRegistry, checkpoint: Checkpoint=None, force_beam_block_number: BlockNumber=None, enable_backfill: bool=True, enable_state_backfill: bool=True) -> None:
        self.logger = get_logger('trinity.sync.beam.chain.BeamSyncer')
        self.metrics_registry = metrics_registry
        self._body_for_header_exists = body_for_header_exists(chain_db, chain)
        # Launch from genesis unless an explicit checkpoint is supplied.
        if (checkpoint is None):
            self._launch_strategy: SyncLaunchStrategyAPI = FromGenesisLaunchStrategy(chain_db)
        else:
            self._launch_strategy = FromCheckpointLaunchStrategy(chain_db, chain, checkpoint, peer_pool)
        self._header_syncer = ETHHeaderChainSyncer(chain, chain_db, peer_pool, self._launch_strategy)
        self._header_persister = HeaderOnlyPersist(self._header_syncer, chain_db, force_beam_block_number, self._launch_strategy)
        self._backfiller = BeamStateBackfill(db, peer_pool)
        # The backfiller doubles as the queen tracker when state backfill is on.
        if enable_state_backfill:
            self._queen_queue: QueenTrackerAPI = self._backfiller
        else:
            self._queen_queue = QueeningQueue(peer_pool)
        self._state_downloader = BeamDownloader(db, peer_pool, self._queen_queue, event_bus)
        self._data_hunter = MissingDataEventHandler(self._state_downloader, event_bus, self.metrics_registry)
        self._block_importer = BeamBlockImporter(chain, db, self._state_downloader, self._backfiller, event_bus, self.metrics_registry)
        self._launchpoint_header_syncer = HeaderLaunchpointSyncer(self._header_syncer)
        self._body_syncer = RegularChainBodySyncer(chain, chain_db, peer_pool, self._launchpoint_header_syncer, self._block_importer)
        # Manual syncer + rigorous fast syncer are used only to pull the recent
        # bodies required for uncle validation (see _download_blocks).
        self._manual_header_syncer = ManualHeaderSyncer()
        self._fast_syncer = RigorousFastChainBodySyncer(chain, chain_db, peer_pool, self._manual_header_syncer)
        self._header_backfill = SequentialHeaderChainGapSyncer(chain, chain_db, peer_pool)
        self._block_backfill = BodyChainGapSyncer(chain, chain_db, peer_pool)
        self._chain = chain
        self._enable_backfill = enable_backfill
        self._enable_state_backfill = enable_state_backfill
    async def run(self) -> None:
        """Run the full beam-sync pipeline; cancels the service if the launch
        strategy cannot fulfill its prerequisites in time."""
        try:
            (await self._launch_strategy.fulfill_prerequisites())
        except asyncio.TimeoutError as exc:
            self.logger.exception(f'Timed out while trying to fulfill prerequisites of sync launch strategy: {exc} from {self._launch_strategy}')
            self.manager.cancel()
            return
        self.manager.run_daemon_child_service(self._block_importer)
        self.manager.run_daemon_child_service(self._header_syncer)
        self.manager.run_daemon_child_service(self._body_syncer)
        self.manager.run_daemon_child_service(self._data_hunter)
        # Wait until headers are persisted up to the launch point.
        async with background_asyncio_service(self._header_persister) as manager:
            (await manager.wait_finished())
        final_headers = self._header_persister.get_final_headers()
        # Pull the parent bodies needed for uncle validation before importing.
        (await self._download_blocks(final_headers[0]))
        self._data_hunter.minimum_beam_block_number = min((header.block_number for header in final_headers))
        self._launchpoint_header_syncer.set_launchpoint_headers(final_headers)
        if self._enable_backfill:
            # Header backfill is not a daemon: it may legitimately finish.
            self.manager.run_child_service(self._header_backfill)
            self.manager.run_daemon_child_service(self._block_backfill)
            self.manager.run_daemon_task(self._monitor_historical_backfill)
        self.manager.run_child_service(self._queen_queue)
        self.manager.run_daemon_child_service(self._state_downloader)
        (await self.manager.wait_finished())
    def get_block_count_lag(self) -> int:
        """Return how many blocks behind the tip the body syncer currently is."""
        return self._body_syncer.get_block_count_lag()
    async def _download_blocks(self, before_header: BlockHeaderAPI) -> None:
        """Fetch the block bodies preceding ``before_header`` that are required
        for uncle validation, unless they are already present locally."""
        parents_needed = FULL_BLOCKS_NEEDED_TO_START_BEAM
        self.logger.info('Downloading %d block bodies for uncle validation, before %s', parents_needed, before_header)
        parent_headers = tuple(reversed([header async for header in self._get_ancestors(parents_needed, header=before_header)]))
        if (len(parent_headers) < parents_needed):
            # Fewer ancestors than requested means we hit genesis.
            self.logger.info('Collecting %d blocks to genesis for uncle validation', len(parent_headers))
            sync_from_tip = (await self._chain.coro_get_canonical_block_header_by_number(BlockNumber(0)))
            uncle_conflict_headers = parent_headers
        else:
            sync_from_tip = parent_headers[0]
            uncle_conflict_headers = parent_headers[1:]
        if (await self._all_verification_bodies_present(uncle_conflict_headers)):
            self.logger.debug('All needed block bodies are already available')
        else:
            self._manual_header_syncer.emit(uncle_conflict_headers)
            self._fast_syncer.set_starting_tip(sync_from_tip)
            self.logger.info('Getting recent block data for uncle validation')
            async with background_asyncio_service(self._fast_syncer) as manager:
                (await manager.wait_finished())
        self.logger.info('Have all data needed for Beam validation, continuing...')
    async def _get_ancestors(self, limit: int, header: BlockHeaderAPI) -> AsyncIterator[BlockHeaderAPI]:
        """Yield up to ``limit`` ancestors of ``header``, newest first,
        stopping at the genesis parent hash."""
        headers_returned = 0
        while ((header.parent_hash != GENESIS_PARENT_HASH) and (headers_returned < limit)):
            parent = (await self._chain.coro_get_block_header_by_hash(header.parent_hash))
            (yield parent)
            headers_returned += 1
            header = parent
    async def _all_verification_bodies_present(self, headers_with_potential_conflicts: Iterable[BlockHeaderAPI]) -> bool:
        """Return True when every listed header already has its body locally."""
        for header in headers_with_potential_conflicts:
            if (not (await self._body_for_header_exists(header))):
                return False
        return True
    async def _monitor_historical_backfill(self) -> None:
        """Periodically pause/resume historical backfill based on how far the
        live body sync lags behind the chain tip."""
        while self.manager.is_running:
            (await asyncio.sleep(PREDICTED_BLOCK_TIME))
            if self._block_backfill.get_manager().is_cancelled:
                return
            else:
                lag = self.get_block_count_lag()
                if ((lag >= PAUSE_BACKFILL_AT_LAG) and (not self._block_backfill.is_paused)):
                    self.logger.debug('Pausing historical header/block sync because we lag %s blocks', lag)
                    self._block_backfill.pause()
                    self._header_backfill.pause()
                elif ((lag <= RESUME_BACKFILL_AT_LAG) and self._block_backfill.is_paused):
                    self.logger.debug('Resuming historical header/block sync because we lag %s blocks', lag)
                    self._block_backfill.resume()
                    self._header_backfill.resume()
class Dnf(Backend):
    """Update-notification backend based on the DNF Python bindings."""

    def updates(self):
        """Return ``(upgrade_count, notification_body)``.

        On any failure — missing bindings or a DNF error — returns
        ``('?', reason)`` instead of raising, so callers always get a tuple.
        """
        # Guard clause avoids one level of nesting for the error path.
        if not HAS_DNF_BINDINGS:
            return ('?', 'Failed to import DNF Python bindings')
        try:
            with dnf.Base() as base:
                base.read_all_repos()
                base.fill_sack()
                upgrades = base.sack.query().upgrades().run()
                # One "name: version-release" line per pending upgrade.
                notif_body = ''.join(('%s: %s-%s\n' % (pkg.name, pkg.version, pkg.release)) for pkg in upgrades)
                return (len(upgrades), notif_body)
        except Exception as exc:
            self.logger.error('DNF update check failed', exc_info=True)
            # str(exc) is the idiomatic form of exc.__str__().
            return ('?', str(exc))
class Keybindings(Options):
    """Keyboard-binding configuration: registers JavaScript actions on the
    component's keybindings module and maps key combinations to them."""

    def addRow(self, keys):
        """Bind ``keys`` to adding a new row to the table.

        :param keys: Key combination(s) triggering the action.
        """
        self.component.extendModule('keybindings', 'actions', 'addRow', 'function(event){event.preventDefault(); this.table.addRow()}')
        self._attrs['addRow'] = keys
        return self

    def deleteSelectedRows(self, keys):
        """Bind ``keys`` to deleting the currently selected rows.

        :param keys: Key combination(s) triggering the action.
        """
        # BUG FIX: the action was previously registered under 'addRow',
        # overwriting the add-row handler; register it under its own name.
        self.component.extendModule('keybindings', 'actions', 'deleteSelectedRows', 'function(){var rows = this.table.getSelectedRows(); rows.forEach(function(row){row.delete()})}')
        self._attrs['deleteSelectedRows'] = keys
        return self

    def bespoke(self, keys, func_name, js_funcs: types.JS_FUNCS_TYPES, profile: types.PROFILE_TYPE=None, prevent_default: bool=True):
        """Bind ``keys`` to a custom JavaScript action named ``func_name``.

        :param keys: Key combination(s) triggering the action.
        :param func_name: Name under which the action is registered.
        :param js_funcs: JavaScript statement(s) to run.
        :param profile: Optional profiling flag.
        :param prevent_default: Suppress the browser's default key handling.
        """
        if not isinstance(js_funcs, list):
            js_funcs = [js_funcs]
        if prevent_default:
            self.component.extendModule('keybindings', 'actions', func_name, ('function(event){event.preventDefault(); %s}' % JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)))
        else:
            self.component.extendModule('keybindings', 'actions', func_name, ('function(){%s}' % JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)))
        self._attrs[func_name] = keys
        return self
class StreamStream(_MultiCallable, grpc.StreamStreamMultiCallable):
    """In-process stream-stream callable: forwards the request iterator to the
    wrapped handler with a fake invocation context."""

    def __call__(self, request_iterator, timeout=None, metadata=None, *args, **kwargs):
        with _disable_close_old_connections():
            fake_context = FakeContext()
            if metadata:
                fake_context._invocation_metadata.extend(metadata)
            return self._handler.stream_stream(request_iterator, fake_context)
class TestDownload(unittest.TestCase):
    """Smoke test for download(): fetch a page into a unique file, verify it
    exists, and remove it afterwards."""

    # Unique per-run filename so concurrent runs do not collide.
    _filename = (('facebook_' + uuid.uuid4().hex) + '.html')

    def run(self, result=None):
        # Silence event-logger telemetry for the whole test run.
        with patch('iopath.common.event_logger.EventLogger.log_event'):
            super(TestDownload, self).run(result)

    def test_download(self) -> None:
        # NOTE(review): the original URL literal was corrupted in the source
        # (`download(' '.', ...)` is a syntax error). Restored with a plausible
        # placeholder based on the filename prefix — TODO confirm the real URL.
        download('https://www.facebook.com/', filename=self._filename, progress=False)
        self.assertTrue(os.path.isfile(self._filename))

    def tearDown(self) -> None:
        if os.path.isfile(self._filename):
            os.unlink(self._filename)
class FaucetUntaggedHairpinTest(FaucetUntaggedTest):
    """Verifies hairpin forwarding: traffic between two macvlans on the same
    physical port must be flooded/forwarded back out of its input port."""
    # A network namespace hosts the second macvlan so the two interfaces on the
    # same host actually traverse the switch.
    NETNS = True
    CONFIG = '\n    interfaces:\n        %(port_1)d:\n            hairpin: True\n            native_vlan: 100\n        %(port_2)d:\n            native_vlan: 100\n        %(port_3)d:\n            native_vlan: 100\n        %(port_4)d:\n            native_vlan: 100\n'
    def test_untagged(self):
        """Ping between two VEPA-mode macvlans on one port and check the
        hairpin (OUTPUT:IN_PORT) flows saw traffic."""
        (first_host, second_host) = self.hosts_name_ordered()[:2]
        macvlan1_intf = 'macvlan1'
        macvlan1_ipv4 = '10.0.0.100'
        macvlan2_intf = 'macvlan2'
        macvlan2_ipv4 = '10.0.0.101'
        # Both macvlans in VEPA mode so host-local traffic goes via the switch.
        self.add_macvlan(first_host, macvlan1_intf, ipa=macvlan1_ipv4, mode='vepa')
        self.add_macvlan(first_host, macvlan2_intf, mode='vepa')
        macvlan2_mac = self.get_host_intf_mac(first_host, macvlan2_intf)
        netns = self.hostns(first_host)
        # Move macvlan2 into the namespace, then address and bring it up there.
        setup_cmds = []
        setup_cmds.extend([('link set %s netns %s' % (macvlan2_intf, netns))])
        for exec_cmd in (('ip address add %s/24 brd + dev %s' % (macvlan2_ipv4, macvlan2_intf)), ('ip link set %s up' % macvlan2_intf)):
            setup_cmds.append(('netns exec %s %s' % (netns, exec_cmd)))
        first_host.run_ip_batch(setup_cmds)
        # Hairpin path: macvlan1 -> switch -> macvlan2 (same physical port).
        self.one_ipv4_ping(first_host, macvlan2_ipv4, intf=macvlan1_intf)
        # Regular path still works.
        self.one_ipv4_ping(first_host, second_host.IP())
        # Broadcast and unicast hairpin flows must both have matched packets.
        self.wait_nonzero_packet_count_flow({'in_port': self.port_map['port_1'], 'dl_dst': 'ff:ff:ff:ff:ff:ff'}, table_id=self._FLOOD_TABLE, actions=['OUTPUT:IN_PORT'])
        self.wait_nonzero_packet_count_flow({'in_port': self.port_map['port_1'], 'dl_dst': macvlan2_mac}, table_id=self._ETH_DST_HAIRPIN_TABLE, actions=['OUTPUT:IN_PORT'])
def extractAsterlislockedseriesWordpressCom(item):
    """Release-message extractor for asterlislockedseries.wordpress.com feeds.

    Returns None for previews or items without a parsed chapter/volume, a
    release message for a recognised tag, and False when nothing matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_release_number = bool(chp or vol)
    if (not has_release_number) or ('preview' in item['title'].lower()):
        return None
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_deterministic_hash(tmp_path):
    """Compressing the same script tree twice must produce identical digests,
    and the archive must contain the expected three top-level directories."""
    workflows_dir = (tmp_path / 'workflows')
    workflows_dir.mkdir()
    open((workflows_dir / '__init__.py'), 'a').close()
    workflow_file = (workflows_dir / 'hello_world.py')
    workflow_file.write_text(MAIN_WORKFLOW)
    imperative_workflow_file = (workflows_dir / 'imperative_wf.py')
    imperative_workflow_file.write_text(IMPERATIVE_WORKFLOW)
    t1_dir = (tmp_path / 'wf1')
    t1_dir.mkdir()
    open((t1_dir / '__init__.py'), 'a').close()
    t1_file = (t1_dir / 'test.py')
    t1_file.write_text(T1_TASK)
    t2_dir = (tmp_path / 'wf2')
    t2_dir.mkdir()
    open((t2_dir / '__init__.py'), 'a').close()
    t2_file = (t2_dir / 'test.py')
    t2_file.write_text(T2_TASK)
    destination = (tmp_path / 'destination')
    sys.path.append(str(workflows_dir.parent))
    compress_scripts(str(workflows_dir.parent), str(destination), 'workflows.hello_world')
    (digest, hex_digest, _) = hash_file(destination)
    destination2 = (tmp_path / 'destination2')
    compress_scripts(str(workflows_dir.parent), str(destination2), 'workflows.hello_world')
    # BUG FIX: the second digest must come from the second archive; hashing
    # `destination` again made the determinism assertions trivially true.
    (digest2, hex_digest2, _) = hash_file(destination2)
    assert (digest == digest2)
    assert (hex_digest == hex_digest2)
    # Extract the archive and check it holds exactly the three directories.
    test_dir = (tmp_path / 'test')
    test_dir.mkdir()
    result = subprocess.run(['tar', '-xvf', destination, '-C', test_dir], stdout=subprocess.PIPE)
    result.check_returncode()
    assert (len(next(os.walk(test_dir))[1]) == 3)
    compress_scripts(str(workflows_dir.parent), str(destination), 'workflows.imperative_wf')
class ReqType(ReqTagGeneric):
    """Handles the 'Type' requirement tag: validates the textual type and maps
    it onto the corresponding ``RequirementType`` member."""

    # (display name, RequirementType) pairs accepted in the 'Type' field.
    types = [['master requirement', RequirementType.master_requirement], ['initial requirement', RequirementType.initial_requirement], ['design decision', RequirementType.design_decision], ['requirement', RequirementType.requirement]]

    def __init__(self, config):
        ReqTagGeneric.__init__(self, config, 'Type', {InputModuleTypes.reqtag})
        # Valid textual type names; quoted in the error message of rewrite().
        self.type_keys = [ltype[0] for ltype in self.types]

    def find_type(self, tag):
        """Return the ``[name, RequirementType]`` entry matching ``tag``, or None."""
        for ltype in self.types:
            if tag == ltype[0]:
                return ltype
        return None

    def rewrite(self, rid, req):
        """Validate and consume the 'Type' tag of requirement ``rid``.

        :raises RMTException: when the tag value is not a known type (19) or
            the mandatory-tag check fails (18).
        :return: ``(tag_name, RequirementType)``
        """
        self.check_mandatory_tag(rid, req, 18)
        tag = req[self.get_tag()].get_content()
        req_tag = self.find_type(tag)
        if req_tag is None:
            raise RMTException(19, ("%s: invalid type field '%s': must be one of '%s'" % (rid, tag, self.type_keys)))
        # The tag is consumed: remove it from the requirement dict.
        del req[self.get_tag()]
        return (self.get_tag(), req_tag[1])
def encode_branch_node(left_child_node_hash, right_child_node_hash):
    """Encode a branch node: the branch type prefix followed by the left and
    right 32-byte child hashes. Both hashes are validated first (left, then
    right), matching the original check order."""
    for child_hash in (left_child_node_hash, right_child_node_hash):
        validate_is_bytes(child_hash)
        validate_length(child_hash, 32)
    return BRANCH_TYPE_PREFIX + left_child_node_hash + right_child_node_hash
class OptionPlotoptionsGaugeSonificationDefaultspeechoptionsPointgrouping(Options):
    """Point-grouping defaults for speech options of gauge-series sonification.

    NOTE(review): getter/setter pairs share a name; ``@property`` decorators
    are presumably applied elsewhere or were stripped — verify, since as
    written the second ``def`` shadows the first.
    """
    def algorithm(self):
        # Grouping algorithm; default here is 'last'.
        return self._config_get('last')
    def algorithm(self, text: str):
        self._config(text, js_type=False)
    def enabled(self):
        # Whether point grouping is active; default True.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def groupTimespan(self):
        # Timespan covered by each group; default 15.
        return self._config_get(15)
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        # Point property used for grouping; default 'y'.
        return self._config_get('y')
    def prop(self, text: str):
        self._config(text, js_type=False)
class OptionPlotoptionsSeriesSonificationContexttracksMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options for sonification context tracks.

    All options default to None (unset).

    NOTE(review): getter/setter pairs share a name; ``@property`` decorators
    are presumably applied elsewhere or were stripped — verify, since as
    written the second ``def`` shadows the first.
    """
    def mapFunction(self):
        # Custom mapping function; unset by default.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # Data property the tremolo speed is mapped to; unset by default.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # Upper bound of the mapped range; unset by default.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # Lower bound of the mapped range; unset by default.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # Scope the mapping applies within; unset by default.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def fontawesome_role(role, rawtext, text, lineno, inliner, options=None, content=None):
    """Docutils role: render ``text`` as a FontAwesome icon node.

    Parses ``text`` into call inputs and builds the icon node; on any parse or
    construction error, reports the problem via the inliner and returns a
    problematic node instead of raising.

    :return: ``([nodes], [messages])`` per the docutils role-function contract.
    """
    # Avoid mutable default arguments; docutils passes these when present,
    # and this role does not use them otherwise.
    options = {} if options is None else options
    content = [] if content is None else content
    try:
        (args, kwargs) = string_to_func_inputs(text)
        node = create_fa_node(*args, **kwargs)
    except Exception as err:
        msg = inliner.reporter.error(f'FontAwesome input is invalid: {err}', line=lineno)
        prb = inliner.problematic(rawtext, rawtext, msg)
        return ([prb], [msg])
    return ([node], [])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.