code stringlengths 281 23.7M |
|---|
def cifti_parcellate_to_meants(settings):
    """Parcellate a cifti functional file and write per-parcel mean values to csv.

    Runs ``wb_command -cifti-parcellate`` on ``settings.func.path`` using the
    ``settings.seed.path`` atlas, then dumps the parcellated result to
    ``settings.outputcsv`` via ``-cifti-convert -to-text``. If
    ``settings.outputlabels`` is set, also exports the atlas label table to
    that csv.

    Raises:
        ValueError: if the input is neither a .dtseries.nii nor a .dscalar.nii.
    """
    with ciftify.utils.TempDir() as tempdir:
        # Match the parcellated output type to the input cifti type.
        if settings.func.path.endswith('dtseries.nii'):
            tmp_parcellated = os.path.join(tempdir, 'parcellated.ptseries.nii')
        elif settings.func.path.endswith('dscalar.nii'):
            tmp_parcellated = os.path.join(tempdir, 'parcellated.pscalar.nii')
        else:
            # Previously an unsupported suffix fell through to a NameError on
            # the temp path variable; fail early with a clear message instead.
            raise ValueError(
                'Unsupported input {}: expected a .dtseries.nii or '
                '.dscalar.nii file'.format(settings.func.path))
        ciftify.utils.run(['wb_command', '-cifti-parcellate',
                           settings.func.path, settings.seed.path, 'COLUMN',
                           tmp_parcellated, '-include-empty'])
        # NOTE(review): '-col-delim ","' is a single argv element; this relies
        # on ciftify.utils.run joining the list into one shell string — confirm.
        ciftify.utils.run(['wb_command', '-cifti-convert', '-to-text',
                           tmp_parcellated, settings.outputcsv,
                           '-col-delim ","'])
        if settings.outputlabels:
            temp_wb_labels = os.path.join(tempdir, 'wb_labels.txt')
            ciftify.utils.run(['wb_command', '-cifti-label-export-table',
                               settings.seed.path, '1', temp_wb_labels])
            ciftify.niio.wb_labels_to_csv(temp_wb_labels,
                                          csv_out=settings.outputlabels)
class DeleteCategory(MethodView):
    """Admin-only view that deletes a category together with all its forums."""

    # Non-admins are flashed a danger message and bounced to the overview page.
    decorators = [allows.requires(IsAdmin, on_fail=FlashAndRedirect(message=_('You are not allowed to modify categories'), level='danger', endpoint='management.overview'))]

    def post(self, category_id):
        """Delete the category with ``category_id`` (404 if it doesn't exist)."""
        category = Category.query.filter_by(id=category_id).first_or_404()
        # Users who have posts in any forum of this category; handed to
        # category.delete() (presumably so per-user counters can be fixed up
        # after the cascade — confirm against Category.delete).
        # NOTE(review): this filters across Forum/Topic/Post without explicit
        # join() calls (implicit joins via the filter conditions) — verify the
        # generated SQL is the intended one.
        involved_users = User.query.filter((Forum.category_id == category.id), (Topic.forum_id == Forum.id), (Post.user_id == User.id)).all()
        category.delete(involved_users)
        flash(_('Category with all associated forums deleted.'), 'success')
        return redirect(url_for('management.forums'))
class OptionPlotoptionsTreegraphSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Generated Highcharts option wrapper for treegraph sonification
    defaultInstrumentOptions.mapping.tremolo.speed.

    NOTE(review): every accessor below is defined twice — a getter-style form
    then a setter-style form with the same name. In plain Python the second
    ``def`` shadows the first, so only the setter form is callable. This looks
    like ``@property`` / ``@<name>.setter`` decorators were stripped from the
    generated source — confirm against the code generator before relying on
    the getter forms.
    """

    def mapFunction(self):
        # Getter (shadowed by the setter definition below).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: custom mapping function for the tremolo speed.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter (shadowed).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter: data point property to map from.
        self._config(text, js_type=False)

    def max(self):
        # Getter (shadowed).
        return self._config_get(None)

    def max(self, num: float):
        # Setter: upper bound of the mapped range.
        self._config(num, js_type=False)

    def min(self):
        # Getter (shadowed).
        return self._config_get(None)

    def min(self, num: float):
        # Setter: lower bound of the mapped range.
        self._config(num, js_type=False)

    def within(self):
        # Getter (shadowed).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter: what data grouping to map within.
        self._config(value, js_type=False)
def table(state: StateBlock, startLine: int, endLine: int, silent: bool) -> bool:
    """Block rule for GFM-style pipe tables.

    Validates the header row (``startLine``) and the delimiter row
    (``startLine + 1``), then emits ``table``/``thead``/``tbody`` token
    streams for the body rows that follow. Returns True (and advances
    ``state.line``) when a table was consumed; in ``silent`` mode only
    reports whether a table starts here.
    """
    tbodyLines = None
    # A table needs at least a header line and a delimiter line.
    if ((startLine + 2) > endLine):
        return False
    nextLine = (startLine + 1)
    if (state.sCount[nextLine] < state.blkIndent):
        return False
    if state.is_code_block(nextLine):
        return False
    # First character of the delimiter row must begin a column spec.
    pos = (state.bMarks[nextLine] + state.tShift[nextLine])
    if (pos >= state.eMarks[nextLine]):
        return False
    first_ch = state.src[pos]
    pos += 1
    if (first_ch not in ('|', '-', ':')):
        return False
    if (pos >= state.eMarks[nextLine]):
        return False
    second_ch = state.src[pos]
    pos += 1
    if ((second_ch not in ('|', '-', ':')) and (not isStrSpace(second_ch))):
        return False
    # "- " would be a list item, not a table delimiter.
    if ((first_ch == '-') and isStrSpace(second_ch)):
        return False
    # The rest of the delimiter row may only contain | - : and whitespace.
    while (pos < state.eMarks[nextLine]):
        ch = state.src[pos]
        if ((ch not in ('|', '-', ':')) and (not isStrSpace(ch))):
            return False
        pos += 1
    # Parse per-column alignment from the delimiter row.
    lineText = getLine(state, (startLine + 1))
    columns = lineText.split('|')
    aligns = []
    for i in range(len(columns)):
        t = columns[i].strip()
        if (not t):
            # Leading/trailing pipes yield empty first/last fields; an empty
            # field anywhere else is malformed.
            if ((i == 0) or (i == (len(columns) - 1))):
                continue
            else:
                return False
        if (not headerLineRe.search(t)):
            return False
        if (charStrAt(t, (len(t) - 1)) == ':'):
            # ":---:" is center, "---:" is right.
            aligns.append(('center' if (charStrAt(t, 0) == ':') else 'right'))
        elif (charStrAt(t, 0) == ':'):
            aligns.append('left')
        else:
            aligns.append('')
    # Header row must contain a pipe and must not be indented as code.
    lineText = getLine(state, startLine).strip()
    if ('|' not in lineText):
        return False
    if state.is_code_block(startLine):
        return False
    columns = escapedSplit(lineText)
    # Drop the empty fields produced by leading/trailing pipes.
    if (columns and (columns[0] == '')):
        columns.pop(0)
    if (columns and (columns[(- 1)] == '')):
        columns.pop()
    # Header cell count defines the table width and must match the aligns.
    columnCount = len(columns)
    if ((columnCount == 0) or (columnCount != len(aligns))):
        return False
    if silent:
        return True
    oldParentType = state.parentType
    state.parentType = 'table'
    # A table is terminated by the same rules that can interrupt a blockquote.
    terminatorRules = state.md.block.ruler.getRules('blockquote')
    token = state.push('table_open', 'table', 1)
    # tableLines[1] is patched once the table's extent is known.
    token.map = tableLines = [startLine, 0]
    token = state.push('thead_open', 'thead', 1)
    token.map = [startLine, (startLine + 1)]
    token = state.push('tr_open', 'tr', 1)
    token.map = [startLine, (startLine + 1)]
    for i in range(len(columns)):
        token = state.push('th_open', 'th', 1)
        if aligns[i]:
            token.attrs = {'style': ('text-align:' + aligns[i])}
        token = state.push('inline', '', 0)
        token.map = [startLine, (startLine + 1)]
        token.content = columns[i].strip()
        token.children = []
        token = state.push('th_close', 'th', (- 1))
    token = state.push('tr_close', 'tr', (- 1))
    token = state.push('thead_close', 'thead', (- 1))
    # Consume body rows until an outdent, blank line, or terminator rule.
    nextLine = (startLine + 2)
    while (nextLine < endLine):
        if (state.sCount[nextLine] < state.blkIndent):
            break
        terminate = False
        for i in range(len(terminatorRules)):
            if terminatorRules[i](state, nextLine, endLine, True):
                terminate = True
                break
        if terminate:
            break
        lineText = getLine(state, nextLine).strip()
        if (not lineText):
            break
        if state.is_code_block(nextLine):
            break
        columns = escapedSplit(lineText)
        if (columns and (columns[0] == '')):
            columns.pop(0)
        if (columns and (columns[(- 1)] == '')):
            columns.pop()
        # tbody_open is deferred until the first body row actually exists,
        # so header-only tables emit no tbody at all.
        if (nextLine == (startLine + 2)):
            token = state.push('tbody_open', 'tbody', 1)
            token.map = tbodyLines = [(startLine + 2), 0]
        token = state.push('tr_open', 'tr', 1)
        token.map = [nextLine, (nextLine + 1)]
        for i in range(columnCount):
            token = state.push('td_open', 'td', 1)
            if aligns[i]:
                token.attrs = {'style': ('text-align:' + aligns[i])}
            token = state.push('inline', '', 0)
            token.map = [nextLine, (nextLine + 1)]
            # Rows may have fewer cells than the header; pad with empties.
            try:
                token.content = (columns[i].strip() if columns[i] else '')
            except IndexError:
                token.content = ''
            token.children = []
            token = state.push('td_close', 'td', (- 1))
        token = state.push('tr_close', 'tr', (- 1))
        nextLine += 1
    if tbodyLines:
        token = state.push('tbody_close', 'tbody', (- 1))
        tbodyLines[1] = nextLine
    token = state.push('table_close', 'table', (- 1))
    tableLines[1] = nextLine
    state.parentType = oldParentType
    state.line = nextLine
    return True
class OptionSeriesWindbarbSonificationContexttracksMappingLowpassFrequency(Options):
    """Generated Highcharts option wrapper for windbarb sonification
    contextTracks.mapping.lowpass.frequency.

    NOTE(review): each accessor is defined twice (getter then setter form);
    the second ``def`` shadows the first, so only the setter survives. This
    looks like stripped ``@property`` / ``@<name>.setter`` decorators from
    generated code — confirm against the generator.
    """

    def mapFunction(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: custom mapping function for the lowpass frequency.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter (shadowed).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter: data point property to map from.
        self._config(text, js_type=False)

    def max(self):
        # Getter (shadowed).
        return self._config_get(None)

    def max(self, num: float):
        # Setter: upper bound of the mapped range.
        self._config(num, js_type=False)

    def min(self):
        # Getter (shadowed).
        return self._config_get(None)

    def min(self, num: float):
        # Setter: lower bound of the mapped range.
        self._config(num, js_type=False)

    def within(self):
        # Getter (shadowed).
        return self._config_get(None)

    def within(self, value: Any):
        # Setter: what data grouping to map within.
        self._config(value, js_type=False)
class FullStackTest(unittest.TestCase, TestBot):
    """Integration-test base class: boots a full TestBot around each test."""

    def setUp(self, extra_plugin_dir=None, extra_test_file=None, loglevel=logging.DEBUG, extra_config=None) -> None:
        """Start the bot, optionally loading extra plugins.

        If only ``extra_test_file`` is given, the plugin directory is derived
        from it by dropping the last two path components (i.e. the directory
        two levels above the test file).
        """
        if ((extra_plugin_dir is None) and (extra_test_file is not None)):
            extra_plugin_dir = sep.join(abspath(extra_test_file).split(sep)[:(- 2)])
        self.setup(extra_plugin_dir=extra_plugin_dir, loglevel=loglevel, extra_config=extra_config)
        self.start()

    def tearDown(self) -> None:
        """Shut the bot down after each test."""
        self.stop()
class NiceExpander():
    """Expander built from two toggle buttons, a paned widget and a child.

    Mimics a Gtk.Expander: ``expanded_button`` is visible while the child is
    shown (clicking it collapses), ``unexpanded_button`` is visible while the
    child is hidden (clicking it expands). The paned divider is moved to fit
    whichever button is visible. Supports a single 'notify::expanded' callback.
    """

    def __init__(self, expanded_button, unexpanded_button, paned, child):
        self.expanded_button = expanded_button
        self.unexpanded_button = unexpanded_button
        self.paned = paned
        self.child = child
        self.sensitive = True
        self.expanded = False
        # Registered callbacks, keyed by signal name.
        self.callback = {}
        # Both buttons are assumed to share the same parent container.
        self.parent = self.expanded_button.get_parent()
        self.expanded_button.connect('clicked', self.expand_cb)
        self.unexpanded_button.connect('clicked', self.unexpand_cb)
        # Start in the expanded state.
        self.set_expanded(True)

    def expand_cb(self, *args):
        # Handler for the button shown while expanded: collapses the child.
        # NOTE(review): the expand/unexpand naming is inverted relative to the
        # resulting state (expand_cb -> expanded=False) — confirm intent.
        self.expanded = False
        self.expanded_button.hide()
        self.unexpanded_button.show()
        self.child.hide()
        # Shrink the pane to just fit the visible button plus borders.
        width = self.unexpanded_button.get_allocated_width()
        width += (self.parent.get_border_width() * 2)
        self.paned.set_position(width)
        self.call_notify_expanded()

    def unexpand_cb(self, *args):
        # Handler for the button shown while collapsed: expands the child.
        self.expanded = True
        self.expanded_button.show()
        self.unexpanded_button.hide()
        self.child.show()
        width = self.expanded_button.get_allocated_width()
        width += (self.parent.get_border_width() * 2)
        self.paned.set_position(width)
        self.call_notify_expanded()

    def set_expanded(self, flag):
        # The assignment is redundant (the callbacks set it again), but kept.
        self.expanded = flag
        if flag:
            self.unexpand_cb()
        else:
            self.expand_cb()

    def get_expanded(self):
        return self.expanded

    def connect(self, name, callback, *args):
        """Register ``callback`` for 'notify::expanded' (the only signal)."""
        if (name == 'notify::expanded'):
            self.callback[name] = (callback, args)
        else:
            raise ValueError(("Unknown callback name '%s'" % name))

    def call_notify_expanded(self):
        # Invoke the registered callback, if any; errors are printed, not raised.
        name = 'notify::expanded'
        if (name in self.callback):
            cb = self.callback[name]
            try:
                cb[0](*cb[1])
            except Exception as msg:
                print(msg)

    def set_sensitive(self, value):
        # Propagate sensitivity to both buttons and the child widget.
        self.expanded_button.set_sensitive(value)
        self.unexpanded_button.set_sensitive(value)
        self.child.set_sensitive(value)

    def get_sensitive(self):
        return self.expanded_button.get_sensitive()

    def is_sensitive(self):
        return self.expanded_button.is_sensitive()
class TestResolve(tests.LimitedTestCase):
    """Tests for greendns.resolve() against a mocked base resolver."""

    def setUp(self):
        # Swap in a mock resolver whose A record answers 1.2.3.4; restored
        # in tearDown so other tests see the real resolver.
        base_resolver = _make_mock_base_resolver()
        base_resolver.rr.address = '1.2.3.4'
        self._old_resolver = greendns.resolver
        greendns.resolver = base_resolver()

    def tearDown(self):
        greendns.resolver = self._old_resolver

    def test_A(self):
        # IPv4 lookups should query rdatatype.A and return the mocked address.
        ans = greendns.resolve('host.example.com', socket.AF_INET)
        assert (ans[0].address == '1.2.3.4')
        assert (greendns.resolver.args == ('host.example.com', dns.rdatatype.A))

    def test_AAAA(self):
        # IPv6 lookups should query rdatatype.AAAA.
        greendns.resolver.rr6.address = 'dead:beef::1'
        ans = greendns.resolve('host.example.com', socket.AF_INET6)
        assert (ans[0].address == 'dead:beef::1')
        assert (greendns.resolver.args == ('host.example.com', dns.rdatatype.AAAA))

    def test_unknown_rdtype(self):
        # An unsupported address family maps to socket.gaierror.
        with tests.assert_raises(socket.gaierror):
            greendns.resolve('host.example.com', (socket.AF_INET6 + 1))

    def test_timeout(self):
        # DNS timeouts are converted to socket.gaierror.
        greendns.resolver.raises = greendns.dns.exception.Timeout
        with tests.assert_raises(socket.gaierror):
            greendns.resolve('host.example.com')

    def test_exc(self):
        # Generic DNSExceptions are converted to socket.gaierror too.
        greendns.resolver.raises = greendns.dns.exception.DNSException
        with tests.assert_raises(socket.gaierror):
            greendns.resolve('host.example.com')

    def test_noraise_noanswer(self):
        # raises=False returns an empty answer instead of raising.
        greendns.resolver.rrset = None
        ans = greendns.resolve('example.com', raises=False)
        assert (not ans.rrset)

    def test_noraise_nxdomain(self):
        # raises=False also swallows NXDOMAIN.
        greendns.resolver.raises = greendns.dns.resolver.NXDOMAIN
        ans = greendns.resolve('example.com', raises=False)
        assert (not ans.rrset)
class Solution():
    def minimumLengthEncoding(self, words: List[str]) -> int:
        """Return the length of the shortest reference string encoding `words`
        (LeetCode 820: Short Encoding of Words).

        A word can be dropped when it is a suffix of another word. We insert
        the words into a trie of reversed characters, longest first, so any
        suffix is inserted after its superstring and creates no new node.
        Each word that does create a node contributes len(word) + 1 (for the
        trailing '#').

        Fix: no longer sorts the caller's list in place — the input is left
        unmodified.
        """
        total = 0
        trie: dict = {}
        # Longest first guarantees suffixes are seen after their superstrings.
        for word in sorted(words, key=len, reverse=True):
            node = trie
            added_node = False
            for ch in reversed(word):
                if ch not in node:
                    node[ch] = {}
                    added_node = True
                node = node[ch]
            # A word adding no node is a suffix of an earlier (longer) word.
            if added_node:
                total += (len(word) + 1)
        return total
class SupermeshProjectBlock(Block, Backend):
    """pyadjoint tape block for a supermesh projection between meshes.

    Records the projection of ``source`` into ``target_space`` using the
    supermesh mixed mass matrix, so that adjoint, tangent-linear and Hessian
    actions can be evaluated exactly (the projection is linear: its TLM is a
    re-projection and its adjoint is the transposed mass action followed by
    the mass-inverse solve).
    """

    def __init__(self, source, target_space, target, bcs=None, **kwargs):
        """Assemble the mixed mass matrix and register tape dependencies.

        Args:
            source: the Function being projected.
            target_space: the FunctionSpace projected into.
            target: the output Function (unused here; kept for interface).
            bcs: boundary conditions — not yet supported; must be empty.

        Raises:
            NotImplementedError: if source is not a Function or bcs is
                non-empty.
        """
        super(SupermeshProjectBlock, self).__init__(ad_block_tag=kwargs.pop('ad_block_tag', None))
        import firedrake.supermeshing as supermesh
        # Fix: previously used the mutable default argument bcs=[].
        if bcs is None:
            bcs = []
        if not isinstance(source, self.backend.Function):
            raise NotImplementedError(f'Source function must be a Function, not {type(source)}.')
        if bcs != []:
            raise NotImplementedError('Boundary conditions not yet considered.')
        mesh = kwargs.pop('mesh', None)
        if mesh is None:
            mesh = target_space.mesh()
        self.source_space = source.function_space()
        self.target_space = target_space
        self.projector = firedrake.Projector(source, target_space, **kwargs)
        # Assemble M_st without annotating it onto the tape.
        with stop_annotating():
            self.mixed_mass = supermesh.assemble_mixed_mass_matrix(source.function_space(), target_space)
        self.add_dependency(source, no_duplicates=True)
        # Dead today (bcs is always empty here); kept for future BC support.
        for bc in bcs:
            self.add_dependency(bc, no_duplicates=True)

    def apply_mixedmass(self, a):
        """Return M_st * a as a Function in the target space."""
        b = self.backend.Function(self.target_space)
        with a.dat.vec_ro as vsrc, b.dat.vec_wo as vrhs:
            self.mixed_mass.mult(vsrc, vrhs)
        return b

    def recompute_component(self, inputs, block_variable, idx, prepared):
        """Re-run the projection: solve M_t x = M_st * source."""
        if not isinstance(inputs[0], self.backend.Function):
            raise NotImplementedError(f'Source function must be a Function, not {type(inputs[0])}.')
        target = self.backend.Function(self.target_space)
        rhs = self.apply_mixedmass(inputs[0])
        self.projector.apply_massinv(target, rhs)
        return maybe_disk_checkpoint(target)

    def _recompute_component_transpose(self, inputs):
        """Transpose action: M_st^T * (M_t^{-1} * input), as a Cofunction."""
        if not isinstance(inputs[0], self.backend.Cofunction):
            raise NotImplementedError(f'Source function must be a Cofunction, not {type(inputs[0])}.')
        out = self.backend.Cofunction(self.source_space.dual())
        tmp = self.backend.Function(self.target_space)
        self.projector.apply_massinv(tmp, inputs[0])
        with tmp.dat.vec_ro as vtmp, out.dat.vec_wo as vout:
            self.mixed_mass.multTranspose(vtmp, vout)
        return out

    def evaluate_adj_component(self, inputs, adj_inputs, block_variable, idx, prepared=None):
        """Adjoint of a linear operation is its transpose."""
        if len(adj_inputs) != 1:
            raise NotImplementedError('SupermeshProjectBlock must have a single output')
        return self._recompute_component_transpose(adj_inputs)

    def evaluate_tlm_component(self, inputs, tlm_inputs, block_variable, idx, prepared=None):
        """TLM of a linear operation is the operation itself on the TLM input."""
        dJdm = self.backend.Function(self.target_space)
        for tlm_input in tlm_inputs:
            if tlm_input is None:
                continue
            dJdm += self.recompute_component([tlm_input], block_variable, idx, prepared)
        return dJdm

    def evaluate_hessian_component(self, inputs, hessian_inputs, adj_inputs, block_variable, idx, relevant_dependencies, prepared=None):
        """Hessian action of a linear operation reduces to the adjoint action."""
        if len(hessian_inputs) != 1:
            raise NotImplementedError('SupermeshProjectBlock must have a single output')
        return self.evaluate_adj_component(inputs, hessian_inputs, block_variable, idx)

    def __str__(self):
        target_string = f'{str(self.target_space.ufl_element().shortstr())}'
        return f'project({self.get_dependencies()[0]}, {target_string}))'
def reclassify_statutory_citation(title, section):
    """Map a pre-2014 2 U.S.C. citation onto its Title 52 renumbering.

    Citations outside Title 2, and Title 2 sections with no entry in
    CITATIONS_MAP, are returned unchanged.

    Returns:
        (title, section) tuple, possibly remapped to ('52', new_section).
    """
    MAPPED_TITLE = '52'
    # Only Title 2 citations are candidates for remapping.
    if title != '2':
        return (title, section)
    remapped = CITATIONS_MAP.get(section)
    if not remapped:
        logger.debug('Unmapped 2 U.S.C citation: %s', (title, section))
        return (title, section)
    logger.debug('Mapping 2 U.S.C statute citation %s -> %s', (title, section), (MAPPED_TITLE, remapped))
    return (MAPPED_TITLE, remapped)
def stats(ts, tlog, dbal, capital):
    """Build a pandas Series of summary statistics for a backtest.

    Args (assumptions from usage — confirm against callers):
        ts: timeseries DataFrame indexed by date; only its index and bar
            counts are used here (via the _avg_bars_* helpers).
        tlog: trade log DataFrame consumed by the per-trade helpers.
        dbal: daily balance DataFrame with 'close', 'high', 'low' columns.
        capital: starting capital.

    Returns:
        pd.Series mapping statistic name -> value.
    """
    start = ts.index[0]
    end = ts.index[(- 1)]
    # dtype='object' so mixed types (dates, floats, ints) coexist.
    stats = pd.Series(dtype='object')
    # --- overall results ---
    stats['start'] = start.strftime('%Y-%m-%d')
    stats['end'] = end.strftime('%Y-%m-%d')
    stats['beginning_balance'] = _beginning_balance(capital)
    stats['ending_balance'] = _ending_balance(dbal)
    stats['total_net_profit'] = _total_net_profit(tlog)
    stats['gross_profit'] = _gross_profit(tlog)
    stats['gross_loss'] = _gross_loss(tlog)
    stats['profit_factor'] = _profit_factor(tlog)
    stats['return_on_initial_capital'] = _return_on_initial_capital(tlog, capital)
    cagr = _annual_return_rate(dbal['close'].iloc[(- 1)], capital, start, end)
    stats['annual_return_rate'] = cagr
    stats['trading_period'] = _trading_period(start, end)
    stats['pct_time_in_market'] = _pct_time_in_market(dbal)
    # --- leverage ---
    stats['margin'] = _margin()
    stats['avg_leverage'] = _avg_leverage(dbal)
    stats['max_leverage'] = _max_leverage(dbal)
    stats['min_leverage'] = _min_leverage(dbal)
    # --- sums ---
    stats['total_num_trades'] = _total_num_trades(tlog)
    stats['trades_per_year'] = _trades_per_year(tlog, start, end)
    stats['num_winning_trades'] = _num_winning_trades(tlog)
    stats['num_losing_trades'] = _num_losing_trades(tlog)
    stats['num_even_trades'] = _num_even_trades(tlog)
    stats['pct_profitable_trades'] = _pct_profitable_trades(tlog)
    # --- cash profits and losses ---
    stats['avg_profit_per_trade'] = _avg_profit_per_trade(tlog)
    stats['avg_profit_per_winning_trade'] = _avg_profit_per_winning_trade(tlog)
    stats['avg_loss_per_losing_trade'] = _avg_loss_per_losing_trade(tlog)
    stats['ratio_avg_profit_win_loss'] = _ratio_avg_profit_win_loss(tlog)
    stats['largest_profit_winning_trade'] = _largest_profit_winning_trade(tlog)
    stats['largest_loss_losing_trade'] = _largest_loss_losing_trade(tlog)
    # --- points ---
    stats['num_winning_points'] = _num_winning_points(tlog)
    stats['num_losing_points'] = _num_losing_points(tlog)
    stats['total_net_points'] = _total_net_points(tlog)
    stats['avg_points'] = _avg_points(tlog)
    stats['largest_points_winning_trade'] = _largest_points_winning_trade(tlog)
    stats['largest_points_losing_trade'] = _largest_points_losing_trade(tlog)
    stats['avg_pct_gain_per_trade'] = _avg_pct_gain_per_trade(tlog)
    stats['largest_pct_winning_trade'] = _largest_pct_winning_trade(tlog)
    stats['largest_pct_losing_trade'] = _largest_pct_losing_trade(tlog)
    stats['expected_shortfall'] = _expected_shortfall(tlog)
    # --- streaks ---
    stats['max_consecutive_winning_trades'] = _max_consecutive_winning_trades(tlog)
    stats['max_consecutive_losing_trades'] = _max_consecutive_losing_trades(tlog)
    stats['avg_bars_winning_trades'] = _avg_bars_winning_trades(ts, tlog)
    stats['avg_bars_losing_trades'] = _avg_bars_losing_trades(ts, tlog)
    # --- drawdown ---
    dd = _max_closed_out_drawdown(dbal['close'])
    stats['max_closed_out_drawdown'] = dd['max']
    stats['max_closed_out_drawdown_peak_date'] = dd['peak_date']
    stats['max_closed_out_drawdown_trough_date'] = dd['trough_date']
    stats['max_closed_out_drawdown_recovery_date'] = dd['recovery_date']
    (stats['drawdown_loss_period'], stats['drawdown_recovery_period']) = _drawdown_loss_recovery_period(dd['peak_date'], dd['trough_date'], dd['recovery_date'])
    # Guard against division by zero when there was no drawdown at all.
    if (dd['max'] == 0):
        stats['annualized_return_over_max_drawdown'] = 0
    else:
        stats['annualized_return_over_max_drawdown'] = abs((cagr / dd['max']))
    dd = _max_intra_day_drawdown(dbal['high'], dbal['low'])
    stats['max_intra_day_drawdown'] = dd['max']
    # Rolling drawdowns over year/month/week windows (drawdowns are negative,
    # so min() picks the worst).
    dd = _rolling_max_dd(dbal['close'], TRADING_DAYS_PER_YEAR)
    stats['avg_yearly_closed_out_drawdown'] = np.average(dd)
    stats['max_yearly_closed_out_drawdown'] = min(dd)
    dd = _rolling_max_dd(dbal['close'], TRADING_DAYS_PER_MONTH)
    stats['avg_monthly_closed_out_drawdown'] = np.average(dd)
    stats['max_monthly_closed_out_drawdown'] = min(dd)
    dd = _rolling_max_dd(dbal['close'], TRADING_DAYS_PER_WEEK)
    stats['avg_weekly_closed_out_drawdown'] = np.average(dd)
    stats['max_weekly_closed_out_drawdown'] = min(dd)
    # --- runup (mirror image of drawdown) ---
    ru = _rolling_max_ru(dbal['close'], TRADING_DAYS_PER_YEAR)
    stats['avg_yearly_closed_out_runup'] = np.average(ru)
    stats['max_yearly_closed_out_runup'] = ru.max()
    ru = _rolling_max_ru(dbal['close'], TRADING_DAYS_PER_MONTH)
    stats['avg_monthly_closed_out_runup'] = np.average(ru)
    stats['max_monthly_closed_out_runup'] = max(ru)
    ru = _rolling_max_ru(dbal['close'], TRADING_DAYS_PER_WEEK)
    stats['avg_weekly_closed_out_runup'] = np.average(ru)
    stats['max_weekly_closed_out_runup'] = max(ru)
    # --- percent changes per period (skipped when the window is too short) ---
    pc = _pct_change(dbal['close'], TRADING_DAYS_PER_YEAR)
    if (len(pc) > 0):
        stats['pct_profitable_years'] = (((pc > 0).sum() / len(pc)) * 100)
        stats['best_year'] = pc.max()
        stats['worst_year'] = pc.min()
        stats['avg_year'] = np.average(pc)
        stats['annual_std'] = pc.std()
    pc = _pct_change(dbal['close'], TRADING_DAYS_PER_MONTH)
    if (len(pc) > 0):
        stats['pct_profitable_months'] = (((pc > 0).sum() / len(pc)) * 100)
        stats['best_month'] = pc.max()
        stats['worst_month'] = pc.min()
        stats['avg_month'] = np.average(pc)
        stats['monthly_std'] = pc.std()
    pc = _pct_change(dbal['close'], TRADING_DAYS_PER_WEEK)
    if (len(pc) > 0):
        stats['pct_profitable_weeks'] = (((pc > 0).sum() / len(pc)) * 100)
        stats['best_week'] = pc.max()
        stats['worst_week'] = pc.min()
        stats['avg_week'] = np.average(pc)
        stats['weekly_std'] = pc.std()
    pc = _pct_change(dbal['close'], 1)
    if (len(pc) > 0):
        stats['pct_profitable_days'] = (((pc > 0).sum() / len(pc)) * 100)
        stats['best_day'] = pc.max()
        stats['worst_day'] = pc.min()
        stats['avg_day'] = np.average(pc)
        stats['daily_std'] = pc.std()
    # --- risk-adjusted ratios; +/- 3 std gives a confidence band on Sharpe ---
    sr = _sharpe_ratio(dbal['close'].pct_change())
    sr_std = math.sqrt(((1 + (0.5 * (sr ** 2))) / len(dbal)))
    stats['sharpe_ratio'] = sr
    stats['sharpe_ratio_max'] = (sr + (3 * sr_std))
    stats['sharpe_ratio_min'] = (sr - (3 * sr_std))
    stats['sortino_ratio'] = _sortino_ratio(dbal['close'].pct_change())
    return stats
class where(Operator):
    """Elementwise select op: output[i] = input[i] if condition[i] else other[i].

    ``condition`` must be a bool Tensor; the two branches must share its shape
    and a common (normalized) dtype. Python int/float scalars are promoted to
    rank-0 constant tensors.
    """

    def __init__(self) -> None:
        super().__init__()
        self._attrs['op'] = 'where'

    def __call__(self, condition: Tensor, input_tensor: Tensor, other_tensor: Tensor, dtype: str='') -> Tensor:
        """Build the output tensor and wire this op into the graph.

        Args:
            condition: bool Tensor that selects between the branches.
            input_tensor: Tensor (or python scalar) taken where condition is True.
            other_tensor: Tensor (or python scalar) taken where condition is False.
            dtype: required only when both branches are scalars.
        """
        assert isinstance(condition, Tensor), f'condition needs to be a tensor, but got {type(condition)}'
        assert (condition.dtype() == 'bool'), f'condition needs to be a bool tensor, but got {condition.dtype()}'
        output_shape = condition.shape()
        args = []
        inputs = []
        common_dtype = None
        # NOTE(review): indentation reconstructed — inputs.append sits inside
        # the tensor branch (scalars are args but not graph inputs), which is
        # the only placement consistent with the len(inputs) == 0 check below.
        for tensor in [input_tensor, other_tensor]:
            if (isinstance(tensor, int) or isinstance(tensor, float)):
                # Promote a python scalar to a rank-0 constant tensor; its
                # dtype is patched in the arg loop once common_dtype is known.
                tensor = Tensor(shape=[], value=tensor, dtype=common_dtype)
            else:
                assert isinstance(tensor, Tensor), f'Unsupported data type: {type(tensor)}'
                assert (tensor.shape() == output_shape), f'Tensor shape should be the same, {tensor.shape()} != {output_shape}'
                if (common_dtype is None):
                    common_dtype = normalize_dtype(tensor.dtype())
                else:
                    assert (common_dtype == normalize_dtype(tensor.dtype())), f'Expect tensor of the same dtype, got {common_dtype} and {normalize_dtype(tensor.dtype())}'
                inputs.append(tensor)
            args.append(tensor)
        if (len(inputs) == 0):
            # Both branches were scalars: the dtype must be given explicitly.
            assert (dtype != ''), 'dtype needs to be provided for scalars'
            common_dtype = normalize_dtype(dtype)
        for arg in args:
            arg._attrs['dtype'] = common_dtype
        self._attrs['args'] = [condition, *args]
        self._attrs['inputs'] = [condition, *inputs]
        self._set_depth()
        output = Tensor(shape=output_shape, src_ops={self}, dtype=common_dtype)
        self._attrs['outputs'] = [output]
        return output

    def gen_function(self) -> str:
        """Look up and invoke the backend codegen function for this op."""
        target = backend.target.Target.current()
        func_key = f"{target.name()}.{self._attrs['op']}.gen_function"
        func = registry.get(func_key)
        return func(self._attrs)
def extractWwwDummytranslationsCom(item):
    """Parse a feed item from www.dummytranslations.com into a release message.

    Returns None for previews or items with no chapter/volume info, a release
    message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items that carry neither a chapter nor a volume.
    if 'preview' in item['title'].lower():
        return None
    if not (chp or vol):
        return None
    # (tag to look for, series name, translation type)
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@pytest.fixture(scope='module')
def android_app(default_app):
    """Module-scoped fixture yielding a test Android app in the project.

    Reuses an existing app whose display name carries the test prefix, and
    creates one otherwise. Restores the decorator that was stripped from the
    source — the bare ``(scope='module')`` line was a syntax error.
    Depends on ``default_app`` only for fixture ordering; its value is unused.
    """
    del default_app
    android_apps = project_management.list_android_apps()
    # Prefer an already-provisioned test app to avoid creating duplicates.
    for existing_app in android_apps:
        if _starts_with(existing_app.get_metadata().display_name, TEST_APP_DISPLAY_NAME_PREFIX):
            return existing_app
    return project_management.create_android_app(package_name=TEST_APP_PACKAGE_NAME, display_name=TEST_APP_DISPLAY_NAME_PREFIX)
class ComputeTaskType(Enum):
    """Kinds of compute task a worker can serve; values are wire identifiers."""

    NONE = 'None'  # placeholder / no task type specified
    LLM_COMPLETION = 'llm_completion'  # text completion with an LLM
    TEXT_2_IMAGE = 'text_2_image'  # image generation from a text prompt
    IMAGE_2_IMAGE = 'image_2_image'  # image-to-image transformation
    VOICE_2_TEXT = 'voice_2_text'  # speech recognition
    TEXT_2_VOICE = 'text_2_voice'  # speech synthesis
    TEXT_EMBEDDING = 'text_embedding'  # embedding vectors for text
    IMAGE_EMBEDDING = 'image_embedding'  # embedding vectors for images
class Style():
    """Console status-message helpers using colorama colors.

    Fix: the methods take no ``self`` and were previously plain functions in
    the class body — calling them on an instance would pass the instance as
    the message. They are now proper ``@staticmethod``s, so both
    ``Style.pos_sys_msg(...)`` and ``Style().pos_sys_msg(...)`` work.
    """

    @staticmethod
    def pos_sys_msg(string):
        """Print a green '[*]' positive status message."""
        print(((((('[' + Fore.GREEN) + '*') + St.RESET_ALL) + '] ') + string))

    @staticmethod
    def neg_sys_msg(string):
        """Print a red '[-]' negative status message."""
        print(((((('[' + Fore.RED) + '-') + St.RESET_ALL) + '] ') + string))

    @staticmethod
    def client_connect_msg():
        """Announce a client connection and re-print the listener prompt."""
        stdout.write((((((('\n[' + Fore.GREEN) + '*') + St.RESET_ALL) + '] ') + 'Client connected to the server') + '\nlistener > '))
class WorkflowExecutionPhase(object):
    """Int constants mirroring the WorkflowExecution phase enum from protobuf.

    This class is not a real enum; it simply re-exports the generated protobuf
    values so callers can reference them by name.
    """

    UNDEFINED = _execution_pb2.WorkflowExecution.UNDEFINED
    QUEUED = _execution_pb2.WorkflowExecution.QUEUED
    RUNNING = _execution_pb2.WorkflowExecution.RUNNING
    SUCCEEDING = _execution_pb2.WorkflowExecution.SUCCEEDING
    SUCCEEDED = _execution_pb2.WorkflowExecution.SUCCEEDED
    FAILING = _execution_pb2.WorkflowExecution.FAILING
    FAILED = _execution_pb2.WorkflowExecution.FAILED
    ABORTED = _execution_pb2.WorkflowExecution.ABORTED
    TIMED_OUT = _execution_pb2.WorkflowExecution.TIMED_OUT
    ABORTING = _execution_pb2.WorkflowExecution.ABORTING

    @classmethod
    def enum_to_string(cls, int_value):
        """Return the constant name for ``int_value``, or str(int_value).

        Fix: the method takes ``cls`` but was missing the @classmethod
        decorator, so ``WorkflowExecutionPhase.enum_to_string(x)`` raised a
        TypeError (``x`` bound as ``cls``).
        """
        for (name, value) in cls.__dict__.items():
            if (value == int_value):
                return name
        return str(int_value)
@pytest.mark.parametrize('degree', [1, 2])
def test_rtce_expansion(tpc_quad, degree):
    """RTCE on a quad tensor-product cell must equal the explicit expansion
    as a sum of HCurl'd interval tensor products.

    Restores the ``@pytest.mark`` decorator that was stripped — the bare
    ``.parametrize(...)`` line was a syntax error.
    """
    actual = FiniteElement('RTCE', tpc_quad, degree)
    C_elt = FiniteElement('CG', interval, degree)
    D_elt = FiniteElement('DG', interval, (degree - 1))
    expected = (HCurl(TensorProductElement(C_elt, D_elt)) + HCurl(TensorProductElement(D_elt, C_elt)))
    assert (expected == actual)
def firebase_config() -> (None | FirebaseConfig):
    """Load the FirebaseConfig from the FIREBASE_CONFIG environment variable.

    The variable may hold either a JSON document directly (detected by a
    leading '{') or a path to a JSON file. Returns None when the variable is
    unset or empty.

    Raises:
        ValueError: if the file cannot be read or the JSON cannot be parsed.
    """
    config_file = _os.getenv('FIREBASE_CONFIG')
    if not config_file:
        return None
    if config_file.startswith('{'):
        # The env var holds the JSON payload itself.
        json_str = config_file
    else:
        # The env var holds a path to a JSON file.
        try:
            with open(config_file, 'r', encoding='utf8') as json_file:
                json_str = json_file.read()
        except Exception as err:
            raise ValueError(f'Unable to read file {config_file}. {err}') from err
    try:
        json_data: dict = _json.loads(json_str)
    except Exception as err:
        raise ValueError(f'FIREBASE_CONFIG JSON string "{json_str}" is not valid json. {err}') from err
    return FirebaseConfig(storage_bucket=json_data.get('storageBucket'))
class PaymentRequestTable(BaseWalletStore):
    """Sqlite persistence for rows of the PaymentRequests table.

    Writes go through the database context's write queue (asynchronous, with
    an optional completion callback); reads are synchronous.
    """

    LOGGER_NAME = 'db-table-prequest'
    CREATE_SQL = 'INSERT INTO PaymentRequests (paymentrequest_id, keyinstance_id, state, value, expiration, description, date_created, date_updated) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'
    READ_ALL_SQL = 'SELECT P.paymentrequest_id, P.keyinstance_id, P.state, P.value, P.expiration, P.description, P.date_created FROM PaymentRequests P'
    # Restrict the read to payment requests owned by one account.
    READ_ACCOUNT_SQL = (READ_ALL_SQL + ' INNER JOIN KeyInstances K USING(keyinstance_id) WHERE K.account_id=?')
    UPDATE_SQL = 'UPDATE PaymentRequests SET date_updated=?, state=?, value=?, expiration=?, description=? WHERE paymentrequest_id=?'
    # Clears the state-mask bits then ORs in the new state value.
    UPDATE_STATE_SQL = f'''UPDATE PaymentRequests SET date_updated=?,
        state=(state&{(~ PaymentFlag.STATE_MASK)})|? WHERE keyinstance_id=?'''
    DELETE_SQL = 'DELETE FROM PaymentRequests WHERE paymentrequest_id=?'

    def create(self, entries: Iterable[PaymentRequestRow], completion_callback: Optional[CompletionCallbackType]=None) -> None:
        """Queue insertion of new payment request rows."""
        # Each row tuple ends with date_created; it is repeated so the final
        # date_updated column of the INSERT starts equal to date_created.
        datas = [(*t, t[(- 1)]) for t in entries]
        def _write(db: sqlite3.Connection):
            db.executemany(self.CREATE_SQL, datas)
        self._db_context.queue_write(_write, completion_callback)

    def read_one(self, request_id: Optional[int]=None, keyinstance_id: Optional[int]=None) -> Optional[PaymentRequestRow]:
        """Fetch a single row by request id or key instance id.

        Exactly one of the two ids must be given; raises otherwise.
        """
        query = self.READ_ALL_SQL
        if (request_id is not None):
            query += f' WHERE P.paymentrequest_id=?'
            params = [request_id]
        elif (keyinstance_id is not None):
            query += f' WHERE P.keyinstance_id=?'
            params = [keyinstance_id]
        else:
            raise Exception('bad read, no id')
        cursor = self._db.execute(query, params)
        t = cursor.fetchone()
        cursor.close()
        if (t is not None):
            return PaymentRequestRow(t[0], t[1], PaymentFlag(t[2]), t[3], t[4], t[5], t[6])
        return None

    def read(self, account_id: Optional[int]=None, flags: Optional[int]=None, mask: Optional[int]=None) -> List[PaymentRequestRow]:
        """Fetch rows, optionally filtered by account and/or state flags."""
        query = self.READ_ALL_SQL
        params: List[Any] = []
        conjunction = 'WHERE'
        if (account_id is not None):
            query = self.READ_ACCOUNT_SQL
            params.append(account_id)
            # READ_ACCOUNT_SQL already contains a WHERE clause.
            conjunction = 'AND'
        # flag_clause builds e.g. "state&?=?" from the flags/mask pair.
        (clause, extra_params) = flag_clause('state', flags, mask)
        if clause:
            query += f' {conjunction} {clause}'
            params.extend(extra_params)
            conjunction = 'AND'
        cursor = self._db.execute(query, params)
        rows = cursor.fetchall()
        cursor.close()
        return [PaymentRequestRow(t[0], t[1], PaymentFlag(t[2]), t[3], t[4], t[5], t[6]) for t in rows]

    def update(self, entries: Iterable[Tuple[(Optional[PaymentFlag], Optional[int], int, Optional[str], int)]], date_updated: Optional[int]=None, completion_callback: Optional[CompletionCallbackType]=None) -> None:
        """Queue full-field updates; entries are (state, value, expiration,
        description, paymentrequest_id) tuples."""
        if (date_updated is None):
            date_updated = self._get_current_timestamp()
        datas = [(date_updated, *entry) for entry in entries]
        def _write(db: sqlite3.Connection):
            db.executemany(self.UPDATE_SQL, datas)
        self._db_context.queue_write(_write, completion_callback)

    def update_state(self, entries: Iterable[Tuple[(Optional[PaymentFlag], int)]], date_updated: Optional[int]=None, completion_callback: Optional[CompletionCallbackType]=None) -> None:
        """Queue state-bit updates; entries are (state, keyinstance_id) tuples."""
        if (date_updated is None):
            date_updated = self._get_current_timestamp()
        datas = [(date_updated, *entry) for entry in entries]
        def _write(db: sqlite3.Connection):
            db.executemany(self.UPDATE_STATE_SQL, datas)
        self._db_context.queue_write(_write, completion_callback)

    def delete(self, entries: Iterable[Tuple[int]], completion_callback: Optional[CompletionCallbackType]=None) -> None:
        """Queue deletion of rows; entries are (paymentrequest_id,) tuples."""
        def _write(db: sqlite3.Connection):
            db.executemany(self.DELETE_SQL, entries)
        self._db_context.queue_write(_write, completion_callback)
def test_adding_a_secret_mount_with_default_mode():
    """A secretMount with defaultMode set should still render its volumeMount
    (defaultMode affects the volume, not the mount spec)."""
    # Raw chart values: one secret mount with an explicit 0755 defaultMode.
    config = '\nsecretMounts:\n - name: elastic-certificates\n secretName: elastic-certs\n path: /usr/share/elasticsearch/config/certs\n subPath: cert.crt\n defaultMode: 0755\n'
    r = helm_template(config)
    s = r['statefulset'][uname]['spec']['template']['spec']
    # The last container volumeMount is the secret mount; defaultMode must
    # not leak into it.
    assert (s['containers'][0]['volumeMounts'][(- 1)] == {'mountPath': '/usr/share/elasticsearch/config/certs', 'subPath': 'cert.crt', 'name': 'elastic-certificates'})
def extractWhitemoonlight74WordpressCom(item):
    """Parse a feed item from whitemoonlight74.wordpress.com.

    Returns None for previews or items with neither chapter nor volume info,
    a release message for a recognized tag, and False otherwise.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    bad = (not (chp or vol)) or ('preview' in title.lower())
    if bad:
        return None
    # Known tags: (tag, series name, translation type).
    for tagname, name, tl_type in [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Observation(BMGNode):
    """Graph node binding an observed value to a sampled node.

    Fix: restores the ``@property`` on ``observed`` — without it,
    ``__str__``'s ``str(self.observed)`` rendered a bound method instead of
    the observed node (the internal use of ``self.observed`` as an attribute
    shows the property was the original intent).
    """

    # The concrete observed value attached to the node.
    value: Any

    def __init__(self, observed: BMGNode, value: Any):
        self.value = value
        # The observed node is stored as this node's single input.
        BMGNode.__init__(self, [observed])

    @property
    def observed(self) -> BMGNode:
        """The node whose sample this observation constrains."""
        return self.inputs[0]

    def __str__(self) -> str:
        return ((str(self.observed) + '=') + str(self.value))
class GuildOps(gh.ObjectType):
    """GraphQL schema type exposing guild management operations."""

    # Create a guild with a name/slogan and optional totem image URL.
    GuCreate = gh.Field(Guild, name=gh.String(required=True, description=''), slogan=gh.String(required=True, description=''), totem=gh.String(description='(URL)'), description='')
    # Transfer guild ownership to another player.
    GuTransfer = gh.Boolean(guild_id=gh.Int(required=True, description='ID'), to=gh.Int(required=True, description='ID'), description='')
    # Request to join a guild.
    GuJoin = gh.Boolean(guild_id=gh.Int(required=True, description='ID'), description='')
    # Approve a pending join request.
    GuApprove = gh.Boolean(player_id=gh.Int(required=True, description='ID'), description='')
    # Remove a member from the guild.
    GuKick = gh.Boolean(player_id=gh.Int(required=True, description='ID'), description='')
    # Leave the current guild.
    GuQuit = gh.Boolean(description='')
    # Update the guild's slogan and/or totem.
    GuUpdate = gh.Field(Guild, slogan=gh.String(description=''), totem=gh.String(description='(URL)'), description='')
def run(build_dir, db_dir, pip_dir, intre, sides, l, r, pip_type, seg_type, exclude_re=None, balance_wire_re=None, balance_wire_direction=None, balance_wire_cnt=None, not_endswith=None, verbose=False):
    """Generate todo lists for each requested tile side via maketodo().

    ``db_dir`` and ``pip_dir`` default from the XRAY_* environment variables
    when not given. Side naming: 'l'/'r' are gated by the ``l``/``r`` flags;
    'xl'/'xr' map to 'l'/'r' file prefixes; any other non-empty side becomes a
    '_<side>' suffix; an empty side uses the bare type names.
    """
    if (db_dir is None):
        db_dir = ('%s/%s' % (os.getenv('XRAY_DATABASE_DIR'), os.getenv('XRAY_DATABASE')))
    if (pip_dir is None):
        pip_dir = ('%s/piplist/build/%s' % (os.getenv('XRAY_FUZZERS_DIR'), pip_type))
    assert intre, 'RE is required'
    for side in sides:
        # Skip sides the caller disabled via the l/r flags.
        if ((side == 'l') and (not l)):
            continue
        if ((side == 'r') and (not r)):
            continue
        # Derive the segbits/pip file names for this side.
        if (side == 'xl'):
            segfile = 'l{}'.format(seg_type)
            pipfile = 'l{}'.format(pip_type)
        elif (side == 'xr'):
            segfile = 'r{}'.format(seg_type)
            pipfile = 'r{}'.format(pip_type)
        elif (side != ''):
            segfile = '{}_{}'.format(seg_type, side)
            pipfile = '{}_{}'.format(pip_type, side)
        else:
            segfile = '{}'.format(seg_type)
            pipfile = '{}'.format(pip_type)
        maketodo(('%s/%s.txt' % (pip_dir, pipfile)), ('%s/segbits_%s.db' % (db_dir, segfile)), intre, exclude_re=exclude_re, balance_wire_re=balance_wire_re, balance_wire_direction=balance_wire_direction, balance_wire_cnt=balance_wire_cnt, not_endswith=not_endswith, verbose=verbose)
class GraphVizNode():
    """A node in a GraphViz graph: name, grouping, edges, and upstream link.

    Fixes: the stray ``_edge.setter`` / ``_node.setter`` lines were broken
    remnants of stripped decorators (a NameError at class creation); the
    accessors are restored as proper properties with setters. ``in_edges``
    was a class-level mutable set shared by every instance; it is now
    per-instance. The trailing space in the 'GraphVizNode ' hint is removed.
    """

    def __init__(self, name: str) -> None:
        self.__name = name
        self.__grouping: str = ''
        self.__in_edge: str = ''
        self.__out_edges: List[str] = []
        self.__upstream_node: 'GraphVizNode' = None
        # All edge names recorded via the in_edge setter. Previously a shared
        # class attribute, which leaked edges across nodes.
        self.in_edges: Set[str] = set()

    @property
    def name(self) -> str:
        return self.__name

    @property
    def grouping(self) -> str:
        return self.__grouping

    @grouping.setter
    def grouping(self, value) -> None:
        self.__grouping = value

    @property
    def in_edge(self) -> str:
        return self.__in_edge

    @in_edge.setter
    def in_edge(self, value) -> None:
        # Remember every in-edge ever assigned, not just the current one.
        self.__in_edge = value
        self.in_edges.add(value)

    @property
    def out_edges(self) -> List[str]:
        return self.__out_edges

    @property
    def upstream_node(self) -> 'GraphVizNode':
        return self.__upstream_node

    @upstream_node.setter
    def upstream_node(self, value) -> None:
        self.__upstream_node = value
class OptionSeriesScatterSonificationDefaultspeechoptionsMappingPitch(Options):
    """Generated Highcharts option wrapper for scatter sonification
    defaultSpeechOptions.mapping.pitch.

    NOTE(review): each accessor is defined twice (getter then setter form);
    the second ``def`` shadows the first, so only the setter survives. This
    looks like stripped ``@property`` / ``@<name>.setter`` decorators from
    generated code — confirm against the generator.
    """

    def mapFunction(self):
        # Getter (shadowed by the setter below).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: custom mapping function for the pitch.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter (shadowed); note the 'undefined' default, unlike the
        # sibling generated classes that default to None.
        return self._config_get('undefined')

    def mapTo(self, text: str):
        # Setter: data point property to map from.
        self._config(text, js_type=False)

    def max(self):
        # Getter (shadowed).
        return self._config_get('undefined')

    def max(self, text: str):
        # Setter: upper bound of the mapped range.
        self._config(text, js_type=False)

    def min(self):
        # Getter (shadowed).
        return self._config_get('undefined')

    def min(self, text: str):
        # Setter: lower bound of the mapped range.
        self._config(text, js_type=False)

    def within(self):
        # Getter (shadowed).
        return self._config_get('undefined')

    def within(self, text: str):
        # Setter: what data grouping to map within.
        self._config(text, js_type=False)
class Analytics(object):
    """Patient Appointment analytics report (Frappe / Healthcare).

    Builds report columns, per-period appointment counts and chart data,
    grouped by either Healthcare Practitioner or Medical Department
    depending on ``filters.tree_type``.  The date range is bucketed by
    ``filters.range`` (Weekly / Monthly / Quarterly / Half-Yearly / Yearly).
    """

    def __init__(self, filters=None):
        # frappe._dict gives attribute-style access to the report filters.
        self.filters = frappe._dict((filters or {}))
        self.months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
        self.get_period_date_ranges()

    def run(self):
        """Return (columns, data, message, chart) as expected by Frappe script reports."""
        self.get_columns()
        self.get_data()
        self.get_chart_data()
        return (self.columns, self.data, None, self.chart)

    def get_period_date_ranges(self):
        """Populate ``self.periodic_daterange`` with the end date of each period."""
        from dateutil.relativedelta import MO, relativedelta
        (from_date, to_date) = (getdate(self.filters.from_date), getdate(self.filters.to_date))
        # Months per bucket; Weekly is handled separately inside the loop.
        increment = {'Monthly': 1, 'Quarterly': 3, 'Half-Yearly': 6, 'Yearly': 12}.get(self.filters.range, 1)
        if (self.filters.range in ['Monthly', 'Quarterly']):
            from_date = from_date.replace(day=1)
        elif (self.filters.range == 'Yearly'):
            from_date = get_fiscal_year(from_date)[1]
        else:
            # Weekly (and default): snap back to the previous Monday.
            from_date = (from_date + relativedelta(from_date, weekday=MO((- 1))))
        self.periodic_daterange = []
        # Hard cap of 52 buckets (one year of weeks at the finest granularity).
        for dummy in range(1, 53):
            if (self.filters.range == 'Weekly'):
                period_end_date = add_days(from_date, 6)
            else:
                period_end_date = add_to_date(from_date, months=increment, days=(- 1))
            if (period_end_date > to_date):
                period_end_date = to_date
            self.periodic_daterange.append(period_end_date)
            from_date = add_days(period_end_date, 1)
            if (period_end_date == to_date):
                break

    def get_columns(self):
        """Build the entity column, one column per period, and a Total column."""
        self.columns = []
        if (self.filters.tree_type == 'Healthcare Practitioner'):
            self.columns.append({'label': _('Healthcare Practitioner'), 'options': 'Healthcare Practitioner', 'fieldname': 'practitioner', 'fieldtype': 'Link', 'width': 200})
        elif (self.filters.tree_type == 'Medical Department'):
            self.columns.append({'label': _('Medical Department'), 'fieldname': 'department', 'fieldtype': 'Link', 'options': 'Medical Department', 'width': 150})
        for end_date in self.periodic_daterange:
            period = self.get_period(end_date)
            self.columns.append({'label': _(period), 'fieldname': scrub(period), 'fieldtype': 'Int', 'width': 120})
        self.columns.append({'label': _('Total'), 'fieldname': 'total', 'fieldtype': 'Int', 'width': 120})

    def get_data(self):
        """Fetch appointments for the selected tree type and build the rows."""
        if (self.filters.tree_type == 'Healthcare Practitioner'):
            self.get_appointments_based_on_healthcare_practitioner()
            self.get_rows()
        elif (self.filters.tree_type == 'Medical Department'):
            self.get_appointments_based_on_medical_department()
            self.get_rows()

    def get_period(self, appointment_date):
        """Return the human-readable bucket label for *appointment_date*."""
        if (self.filters.range == 'Weekly'):
            period = ('Week ' + str(appointment_date.isocalendar()[1]))
        elif (self.filters.range == 'Monthly'):
            period = str(self.months[(appointment_date.month - 1)])
        elif (self.filters.range == 'Quarterly'):
            period = ('Quarter ' + str((((appointment_date.month - 1) // 3) + 1)))
        else:
            year = get_fiscal_year(appointment_date, company=self.filters.company)
            period = str(year[0])
        # Disambiguate labels when the filter range spans multiple years.
        if (getdate(self.filters.from_date).year != getdate(self.filters.to_date).year):
            period += (' ' + str(appointment_date.year))
        return period

    def get_appointments_based_on_healthcare_practitioner(self):
        """Load appointments into ``self.entries`` keyed by practitioner."""
        filters = self.get_common_filters()
        self.entries = frappe.db.get_all('Patient Appointment', fields=['appointment_date', 'name', 'patient', 'practitioner'], filters=filters)

    def get_appointments_based_on_medical_department(self):
        """Load appointments into ``self.entries``, requiring a department."""
        filters = self.get_common_filters()
        if (not filters.get('department')):
            # Exclude appointments without a department when none was filtered.
            filters['department'] = ('!=', '')
        self.entries = frappe.db.get_all('Patient Appointment', fields=['appointment_date', 'name', 'patient', 'practitioner', 'department'], filters=filters)

    def get_common_filters(self):
        """Translate report filters into frappe.db.get_all filter conditions."""
        filters = {}
        filters['appointment_date'] = ('between', [self.filters.from_date, self.filters.to_date])
        for entry in ['appointment_type', 'practitioner', 'department', 'status']:
            if self.filters.get(entry):
                filters[entry] = self.filters.get(entry)
        return filters

    def get_rows(self):
        """Assemble one row per entity with per-period counts and a total."""
        self.data = []
        self.get_periodic_data()
        for (entity, period_data) in self.appointment_periodic_data.items():
            if (self.filters.tree_type == 'Healthcare Practitioner'):
                row = {'practitioner': entity}
            elif (self.filters.tree_type == 'Medical Department'):
                row = {'department': entity}
            total = 0
            for end_date in self.periodic_daterange:
                period = self.get_period(end_date)
                amount = flt(period_data.get(period, 0.0))
                row[scrub(period)] = amount
                total += amount
            row['total'] = total
            self.data.append(row)

    def get_periodic_data(self):
        """Count appointments per (entity, period) into ``appointment_periodic_data``."""
        self.appointment_periodic_data = frappe._dict()
        for d in self.entries:
            period = self.get_period(d.get('appointment_date'))
            if (self.filters.tree_type == 'Healthcare Practitioner'):
                self.appointment_periodic_data.setdefault(d.practitioner, frappe._dict()).setdefault(period, 0.0)
                self.appointment_periodic_data[d.practitioner][period] += 1
            elif (self.filters.tree_type == 'Medical Department'):
                self.appointment_periodic_data.setdefault(d.department, frappe._dict()).setdefault(period, 0.0)
                self.appointment_periodic_data[d.department][period] += 1

    def get_chart_data(self):
        """Build a line-chart skeleton whose labels are the period columns
        (entity and Total columns are sliced off)."""
        length = len(self.columns)
        labels = [d.get('label') for d in self.columns[1:(length - 1)]]
        self.chart = {'data': {'labels': labels, 'datasets': []}, 'type': 'line'}
class CliTester():
    """Helper for testing brownie CLI entry points.

    Saves the real ``sys.argv`` on construction, rewrites it per invocation
    of ``cli_main.main``, and restores it in :meth:`close`.
    """

    def __init__(self, monkeypatch, mocker):
        # Preserve the caller's argv so close() can restore it.
        self.argv = sys.argv.copy()
        self.monkeypatch = monkeypatch
        self.mocker = mocker

    def mock_subroutines(self, *args, **kwargs):
        """No-op stand-in; individual tests replace it with a mock whose
        ``call_args`` is checked in :meth:`run_and_test_parameters`."""
        return

    def run_and_test_parameters(self, argv=None, parameters=None):
        """Run the CLI with *argv* and assert the mocked subroutine call args.

        ``parameters`` previously defaulted to a shared mutable ``{}``;
        a ``None`` sentinel avoids the mutable-default pitfall while keeping
        the same comparison behavior.
        """
        if parameters is None:
            parameters = {}
        sys.argv = ['brownie']
        if argv:
            sys.argv += argv.split(' ')
        cli_main.main()
        assert self.mock_subroutines.call_args == parameters

    def raise_type_error_exception(self, e):
        """Raise *e* wrapped in a TypeError (usable as a mock side effect)."""
        raise TypeError(e)

    def close(self):
        """Restore the original ``sys.argv``."""
        sys.argv = self.argv
class Solution():
    """LeetCode 435: minimum number of intervals to remove so that the
    remaining intervals are pairwise non-overlapping (shared endpoints do
    not count as overlap)."""

    def eraseOverlapIntervals(self, intervals: List[List[int]]) -> int:
        """Greedy sweep over start-sorted intervals.

        Keep an interval when it starts at or after the end of the last
        kept one; on a conflict, retain whichever interval ends earlier.
        Returns the count of intervals that could not be kept.
        """
        intervals.sort()
        kept = 0
        prev_end = None
        for start, end in intervals:
            if prev_end is None or start >= prev_end:
                # No conflict with the previously kept interval.
                kept += 1
                prev_end = end
            else:
                # Conflict: the earlier-ending interval leaves more room.
                prev_end = min(prev_end, end)
        return len(intervals) - kept
def exact_solution(X, t):
    """Exact / initial level-set value at point(s) *X* at time *t*.

    2D (``nd == 2``): test case 1 returns the signed distance to a circle of
    radius 0.15 centred at (0.5, 0.75); other test cases evaluate a Zalesak
    disk, either per point or over a dict-of-arrays grid.  3D: signed
    distance to a sphere whose centre depends on ``ct.test_case``.

    Relies on module globals ``nd``, ``ct`` and ``zalesak_disk_per_point``.
    """
    radius = 0.15
    if (nd == 2):
        xc = 0.5
        yc = 0.75
        if (ct.test_case == 1):
            r = np.sqrt((((X[0] - xc) ** 2) + ((X[1] - yc) ** 2)))
            # Positive inside the circle, negative outside.
            return (radius - r)
        elif (type(X) != dict):
            # Single point, non-grid input.
            # NOTE(review): evaluated at time 0 here while the grid branch
            # below uses t -- confirm this is intentional.
            return zalesak_disk_per_point(X, 0)
        else:
            # X is a dict of 2D coordinate arrays: evaluate point by point.
            x = X[0]
            y = X[1]
            z = np.zeros(x.shape, 'd')
            for i in range(len(x)):
                j = 0
                for (xq, yq) in zip(x[i], y[i]):
                    XX = {0: xq, 1: yq}
                    z[(i, j)] = zalesak_disk_per_point(XX, t)
                    j = (j + 1)
            return z
    else:
        # 3D: sphere centre depends on the test case.
        if (ct.test_case == 3):
            xc = 0.5
            yc = 0.75
            zc = 0.25
        else:
            xc = 0.35
            yc = 0.35
            zc = 0.35
        r = np.sqrt(((((X[0] - xc) ** 2) + ((X[1] - yc) ** 2)) + ((X[2] - zc) ** 2)))
        return (radius - r)
class OptionSeriesWordcloudSonificationDefaultspeechoptionsMapping(Options):
    """Generated wrapper for the Highcharts option
    ``series.wordcloud.sonification.defaultSpeechOptions.mapping``.

    Sub-option accessors lazily create their child option objects;
    ``text`` is a plain getter/setter pair sharing one name (decorators
    presumably stripped during generation -- confirm against generator).
    """

    def pitch(self) -> 'OptionSeriesWordcloudSonificationDefaultspeechoptionsMappingPitch':
        # Lazily-created sub-options for speech pitch mapping.
        return self._config_sub_data('pitch', OptionSeriesWordcloudSonificationDefaultspeechoptionsMappingPitch)

    def playDelay(self) -> 'OptionSeriesWordcloudSonificationDefaultspeechoptionsMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionSeriesWordcloudSonificationDefaultspeechoptionsMappingPlaydelay)

    def rate(self) -> 'OptionSeriesWordcloudSonificationDefaultspeechoptionsMappingRate':
        return self._config_sub_data('rate', OptionSeriesWordcloudSonificationDefaultspeechoptionsMappingRate)

    def text(self):
        # Getter: text spoken for each point (no stored default).
        return self._config_get(None)

    def text(self, text: str):
        self._config(text, js_type=False)

    def time(self) -> 'OptionSeriesWordcloudSonificationDefaultspeechoptionsMappingTime':
        return self._config_sub_data('time', OptionSeriesWordcloudSonificationDefaultspeechoptionsMappingTime)

    def volume(self) -> 'OptionSeriesWordcloudSonificationDefaultspeechoptionsMappingVolume':
        return self._config_sub_data('volume', OptionSeriesWordcloudSonificationDefaultspeechoptionsMappingVolume)
class SimpleInspectorOverlay(TextGridOverlay):
    """Text-grid overlay displaying values published by an inspector tool.

    Listens to the inspector's ``new_value`` events, formats the event dict
    through ``field_formatters`` into the text grid, and mirrors the tool's
    visibility when ``visibility`` is 'auto'.
    """

    # The tool this overlay observes for 'new_value' and 'visible' events.
    inspector = Any
    # Grid of callables; each formatter renders one cell from the event dict.
    field_formatters = List(List(Callable))
    # When True, position the grid at the last mouse position (tooltip-like).
    tooltip_mode = Bool(False)
    visible = True
    # 'auto' lets events drive visibility; True/False force it.
    visibility = Enum('auto', True, False)

    def _field_formatters_default(self):
        return [[basic_formatter('x', 2)], [basic_formatter('y', 2)]]

    def _new_value_updated(self, event):
        """Refresh the text grid from a new inspector value (or clear it)."""
        new_value_event = event.new
        if (new_value_event is None):
            self.text_grid = array()
            if (self.visibility == 'auto'):
                self.visibility = False
            # BUG FIX: must bail out here -- the formatting code below would
            # otherwise run formatter(**None) and raise TypeError.
            return
        elif (self.visibility == 'auto'):
            self.visible = True
        if self.tooltip_mode:
            self.alternate_position = self.inspector.last_mouse_position
        d = new_value_event
        self.text_grid.string_array = array([[formatter(**d) for formatter in row] for row in self.field_formatters])
        self.text_grid.request_redraw()

    def _visible_changed(self):
        if self.component:
            self.request_redraw()

    def _inspector_changed(self, old, new):
        """Re-wire observers when the inspector tool is swapped."""
        if old:
            old.observe(self._new_value_updated, 'new_value', remove=True)
            old.observe(self._tool_visible_changed, 'visible', remove=True)
        if new:
            new.observe(self._new_value_updated, 'new_value')
            new.observe(self._tool_visible_changed, 'visible')
            self._tool_visible_changed()

    def _tool_visible_changed(self, event=None):
        # Mirror the tool's visibility unless it is being auto-managed.
        self.visibility = self.inspector.visible
        if (self.visibility != 'auto'):
            self.visible = self.visibility
class Event(Reporter):
    """Reporter that forwards job state changes as CloudEvents to an ERT
    ensemble evaluator over a websocket connection.

    A state machine dispatches incoming messages (Init / Start / Running /
    Exited / Finish) to handlers; events are serialized and sent from a
    dedicated publisher thread so reporting does not block the job runner.
    """

    def __init__(self, evaluator_url, token=None, cert_path=None):
        self._evaluator_url = evaluator_url
        self._token = token
        if (cert_path is not None):
            with open(cert_path, encoding='utf-8') as f:
                self._cert = f.read()
        else:
            self._cert = None
        self._statemachine = StateMachine()
        self._statemachine.add_handler((Init,), self._init_handler)
        self._statemachine.add_handler((Start, Running, Exited), self._job_handler)
        self._statemachine.add_handler((Finish,), self._finished_handler)
        self._ens_id = None
        self._real_id = None
        self._event_queue = queue.Queue()
        self._event_publisher_thread = threading.Thread(target=self._event_publisher)
        # Unique sentinel pushed onto the queue to stop the publisher thread.
        self._sentinel = object()
        self._timeout_timestamp = None
        self._timestamp_lock = threading.Lock()
        # Seconds the publisher may keep draining the queue after Finish.
        self._reporter_timeout = 60

    def _event_publisher(self):
        """Publisher-thread loop: drain the queue and send events to the evaluator."""
        logger.debug('Publishing event.')
        with Client(url=self._evaluator_url, token=self._token, cert=self._cert) as client:
            event = None
            while True:
                with self._timestamp_lock:
                    # Stop once the post-Finish grace period has elapsed.
                    if ((self._timeout_timestamp is not None) and (datetime.datetime.now() > self._timeout_timestamp)):
                        self._timeout_timestamp = None
                        break
                # 'event' stays set across iterations so a failed send is retried.
                if (event is None):
                    event = self._event_queue.get()
                    if (event is self._sentinel):
                        break
                try:
                    client.send(to_json(event).decode())
                    event = None
                except ClientConnectionError as exception:
                    # Transient connection failure: log and retry the same event.
                    logger.error(str(exception))
                    pass
                except ClientConnectionClosedOK as exception:
                    # Evaluator closed the connection cleanly: stop publishing.
                    logger.debug(str(exception))
                    break

    def report(self, msg):
        """Feed *msg* into the state machine (entry point for the runner)."""
        self._statemachine.transition(msg)

    def _dump_event(self, attributes: Dict[(str, str)], data: Any=None):
        """Build a CloudEvent and queue it for the publisher thread."""
        if ((data is None) and (_CONTENT_TYPE in attributes)):
            # No payload: drop the content-type attribute to match.
            attributes.pop(_CONTENT_TYPE)
        event = CloudEvent(attributes=attributes, data=data)
        logger.debug(f"""Schedule {type(event)} "{event['type']}" for delivery""")
        self._event_queue.put(event)

    def _init_handler(self, msg):
        """Record ensemble/realization ids and start the publisher thread."""
        self._ens_id = msg.ens_id
        self._real_id = msg.real_id
        self._event_publisher_thread.start()

    def _job_handler(self, msg: Message):
        """Translate Start/Exited/Running messages into forward-model events."""
        job_name = msg.job.name()
        job_msg_attrs = {_JOB_SOURCE: f'/ert/ensemble/{self._ens_id}/real/{self._real_id}/forward_model/{msg.job.index}/index/{msg.job.index}', _CONTENT_TYPE: 'application/json'}
        if isinstance(msg, Start):
            logger.debug(f'Job {job_name} was successfully started')
            self._dump_event(attributes={_JOB_MSG_TYPE: _FORWARD_MODEL_START, **job_msg_attrs}, data={'stdout': str(Path(msg.job.std_out).resolve()), 'stderr': str(Path(msg.job.std_err).resolve())})
            if (not msg.success()):
                logger.error(f'Job {job_name} FAILED to start')
                self._dump_event(attributes={_JOB_MSG_TYPE: _FORWARD_MODEL_FAILURE, **job_msg_attrs}, data={'error_msg': msg.error_message})
        elif isinstance(msg, Exited):
            data = None
            if msg.success():
                logger.debug(f'Job {job_name} exited successfully')
                attributes = {_JOB_MSG_TYPE: _FORWARD_MODEL_SUCCESS, **job_msg_attrs}
            else:
                logger.error(_JOB_EXIT_FAILED_STRING.format(job_name=msg.job.name(), exit_code=msg.exit_code, error_message=msg.error_message))
                attributes = {_JOB_MSG_TYPE: _FORWARD_MODEL_FAILURE, **job_msg_attrs}
                data = {'exit_code': msg.exit_code, 'error_msg': msg.error_message}
            self._dump_event(attributes=attributes, data=data)
        elif isinstance(msg, Running):
            logger.debug(f'{job_name} job is running')
            self._dump_event(attributes={_JOB_MSG_TYPE: _FORWARD_MODEL_RUNNING, **job_msg_attrs}, data={'max_memory_usage': msg.max_memory_usage, 'current_memory_usage': msg.current_memory_usage})

    def _finished_handler(self, msg):
        """Signal the publisher to stop (after a grace period) and join it."""
        self._event_queue.put(self._sentinel)
        with self._timestamp_lock:
            self._timeout_timestamp = (datetime.datetime.now() + datetime.timedelta(seconds=self._reporter_timeout))
        if self._event_publisher_thread.is_alive():
            self._event_publisher_thread.join()
class flow_removed(message):
    """OpenFlow 1.0 OFPT_FLOW_REMOVED message (auto-generated, loxigen-style).

    Mirrors the on-wire ``struct ofp_flow_removed``; offsets, pad bytes and
    field order in pack/unpack must match the wire format exactly.
    """

    version = 1
    type = 11

    def __init__(self, xid=None, match=None, cookie=None, priority=None, reason=None, duration_sec=None, duration_nsec=None, idle_timeout=None, packet_count=None, byte_count=None):
        # Each field falls back to its zero-value (or empty match) when omitted.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (match != None):
            self.match = match
        else:
            self.match = ofp.match()
        if (cookie != None):
            self.cookie = cookie
        else:
            self.cookie = 0
        if (priority != None):
            self.priority = priority
        else:
            self.priority = 0
        if (reason != None):
            self.reason = reason
        else:
            self.reason = 0
        if (duration_sec != None):
            self.duration_sec = duration_sec
        else:
            self.duration_sec = 0
        if (duration_nsec != None):
            self.duration_nsec = duration_nsec
        else:
            self.duration_nsec = 0
        if (idle_timeout != None):
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if (packet_count != None):
            self.packet_count = packet_count
        else:
            self.packet_count = 0
        if (byte_count != None):
            self.byte_count = byte_count
        else:
            self.byte_count = 0
        return

    def pack(self):
        """Serialize to the wire format; the length field is back-patched."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder for total length, patched after all fields are packed.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(self.match.pack())
        packed.append(struct.pack('!Q', self.cookie))
        packed.append(struct.pack('!H', self.priority))
        packed.append(struct.pack('!B', self.reason))
        # 1 pad byte per the wire struct.
        packed.append(('\x00' * 1))
        packed.append(struct.pack('!L', self.duration_sec))
        packed.append(struct.pack('!L', self.duration_nsec))
        packed.append(struct.pack('!H', self.idle_timeout))
        # 2 pad bytes per the wire struct.
        packed.append(('\x00' * 2))
        packed.append(struct.pack('!Q', self.packet_count))
        packed.append(struct.pack('!Q', self.byte_count))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a flow_removed from *reader*.

        NOTE(review): takes ``reader`` (not ``self``) -- presumably wrapped
        as a staticmethod by the generator elsewhere; confirm.
        """
        obj = flow_removed()
        _version = reader.read('!B')[0]
        assert (_version == 1)
        _type = reader.read('!B')[0]
        assert (_type == 11)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.match = ofp.match.unpack(reader)
        obj.cookie = reader.read('!Q')[0]
        obj.priority = reader.read('!H')[0]
        obj.reason = reader.read('!B')[0]
        reader.skip(1)
        obj.duration_sec = reader.read('!L')[0]
        obj.duration_nsec = reader.read('!L')[0]
        obj.idle_timeout = reader.read('!H')[0]
        reader.skip(2)
        obj.packet_count = reader.read('!Q')[0]
        obj.byte_count = reader.read('!Q')[0]
        return obj

    def __eq__(self, other):
        """Field-by-field equality against another flow_removed."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.match != other.match):
            return False
        if (self.cookie != other.cookie):
            return False
        if (self.priority != other.priority):
            return False
        if (self.reason != other.reason):
            return False
        if (self.duration_sec != other.duration_sec):
            return False
        if (self.duration_nsec != other.duration_nsec):
            return False
        if (self.idle_timeout != other.idle_timeout):
            return False
        if (self.packet_count != other.packet_count):
            return False
        if (self.byte_count != other.byte_count):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via the pretty-printer *q*."""
        q.text('flow_removed {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('match = ')
                q.pp(self.match)
                q.text(',')
                q.breakable()
                q.text('cookie = ')
                q.text(('%#x' % self.cookie))
                q.text(',')
                q.breakable()
                q.text('priority = ')
                q.text(('%#x' % self.priority))
                q.text(',')
                q.breakable()
                q.text('reason = ')
                # Show the symbolic OFPRR_* name when the value is known.
                value_name_map = {0: 'OFPRR_IDLE_TIMEOUT', 1: 'OFPRR_HARD_TIMEOUT', 2: 'OFPRR_DELETE'}
                if (self.reason in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.reason], self.reason)))
                else:
                    q.text(('%#x' % self.reason))
                q.text(',')
                q.breakable()
                q.text('duration_sec = ')
                q.text(('%#x' % self.duration_sec))
                q.text(',')
                q.breakable()
                q.text('duration_nsec = ')
                q.text(('%#x' % self.duration_nsec))
                q.text(',')
                q.breakable()
                q.text('idle_timeout = ')
                q.text(('%#x' % self.idle_timeout))
                q.text(',')
                q.breakable()
                q.text('packet_count = ')
                q.text(('%#x' % self.packet_count))
                q.text(',')
                q.breakable()
                q.text('byte_count = ')
                q.text(('%#x' % self.byte_count))
                q.breakable()
        q.text('}')
def test():
    """Exercise checker (spaCy course): verifies that each doc has two
    entities and that the PER labels were assigned correctly, then prints a
    success message.  Assertion/feedback strings are user-facing German text
    and must remain unchanged."""
    assert ((len(doc1.ents) == 2) and (len(doc2.ents) == 2) and (len(doc3.ents) == 2)), 'Fur alle Beispiele werden zwei Entitaten erwartet.'
    assert any((((e.label_ == 'PER') and (e.text == 'PewDiePie')) for e in doc2.ents)), 'Hast du das Label PER korrekt zugeordnet?'
    assert any((((e.label_ == 'PER') and (e.text == 'Alexis Ohanian')) for e in doc3.ents)), 'Hast du das Label PER korrekt zugeordnet?'
    __msg__.good('Bravo! Nachdem wir sowohl Beispiele der neuen WEBSITE-Entitaten, als auch vorhandene Entitaten wie PERSON in unsere Daten mitaufgenommen haben, erzielt das Modell nun deutlich bessere Ergebnisse.')
class OptionSeriesColumnrangeSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
    """Generated wrapper for the Highcharts option
    ``series.columnrange.sonification.defaultInstrumentOptions.mapping.noteDuration``.

    Each option is exposed as a getter/setter pair sharing one name.
    NOTE(review): without ``@property``/``@<name>.setter`` decorators the
    second definition shadows the first -- presumably the decorators were
    stripped during generation/extraction; confirm against the generator.
    """

    def mapFunction(self):
        # Getter: custom JS mapping function (no stored default).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: data property the note duration is mapped to.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Getter: scope the min/max range applies within.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class AlarmClock(hass.Hass):
    """AppDaemon alarm-clock app.

    Schedules an alarm from an input_datetime entity, optionally fading a
    wake-up light in over a configurable period before sending a
    notification.  A Xiaomi button single-click can also start the light
    fade manually.
    """

    def initialize(self):
        """Read app arguments, cache alarm settings and register listeners."""
        self.timer_handle_list = []
        self.listen_event_handle_list = []
        self.listen_state_handle_list = []
        self.alarm_time = self.args['alarm_time']
        self.wakemeup = self.args['wakemeup']
        self.naturalwakeup = self.args['naturalwakeup']
        self.alarmweekday = self.args['alarmweekday']
        self.radiowakeup = self.args['radiowakeup']
        self.isweekday = self.args['isweekday']
        self.notify_name = self.args['notify_name']
        self.wakeup_light = self.args['wakeup_light']
        self.fade_in_time_multiplicator = self.args['fade_in_time_multiplicator']
        self.message = self.args['message']
        self.button = self.args['button']
        self.notifier = self.get_app('Notifier')
        self.brightness = 100
        self.rgb_color = [255, 120, 0]
        self.alarm_timer = None
        # Cache entity states so change callbacks can detect real changes.
        self.cached_alarm_time = self.get_state(self.alarm_time)
        self.cached_fade_in_time = self.get_state(self.naturalwakeup)
        self.add_timer()
        self.listen_state_handle_list.append(self.listen_state(self.alarm_change, self.alarm_time))
        self.listen_state_handle_list.append(self.listen_state(self.naturalwakeup_change, self.naturalwakeup))
        self.listen_event_handle_list.append(self.listen_event(self.button_clicked, 'xiaomi_aqara.click'))

    def alarm_change(self, entity, attributes, old, new, kwargs):
        """Reschedule the alarm when the alarm time entity changes."""
        if ((new is not None) and (new != old) and (new != self.cached_alarm_time)):
            if (self.alarm_timer is not None):
                if (self.alarm_timer in self.timer_handle_list):
                    self.timer_handle_list.remove(self.alarm_timer)
                self.cancel_timer(self.alarm_timer)
            self.log('Alarm time change: {}'.format(new))
            self.cached_alarm_time = new
            self.add_timer()

    def naturalwakeup_change(self, entity, attributes, old, new, kwargs):
        """Reschedule the alarm when the fade-in duration entity changes."""
        if ((new is not None) and (new != old) and (new != self.cached_fade_in_time)):
            if (self.alarm_timer is not None):
                if (self.alarm_timer in self.timer_handle_list):
                    self.timer_handle_list.remove(self.alarm_timer)
                self.cancel_timer(self.alarm_timer)
            self.log('Fade-In time change: {}'.format(new))
            self.cached_fade_in_time = new
            self.add_timer()

    def add_timer(self):
        """Schedule trigger_alarm at (alarm time - fade-in minutes)."""
        self.log('cached_alarm_time: {}'.format(self.cached_alarm_time))
        self.log('cached_fade_in_time: {}'.format(self.cached_fade_in_time))
        # Drop any fractional part of the fade-in minutes value.
        offset = self.cached_fade_in_time.split('.', 1)[0]
        if ((self.cached_alarm_time is not None) and (self.cached_alarm_time != '') and (self.cached_alarm_time != 'unknown')):
            run_datetime = datetime.datetime.strptime(self.cached_alarm_time, '%Y-%m-%d %H:%M:%S')
            event_time = (run_datetime - datetime.timedelta(minutes=int(offset)))
            try:
                # run_at raises ValueError when the time is already past.
                self.alarm_timer = self.run_at(self.trigger_alarm, event_time)
                self.timer_handle_list.append(self.alarm_timer)
                self.log('Alarm will trigger at {}'.format(event_time))
            except ValueError:
                self.log('New trigger time would be in the past: {}'.format(event_time))

    def trigger_alarm(self, kwargs):
        """Start the light fade-in and schedule the notification, honouring
        the wake-me-up and weekday-only switches."""
        if (self.get_state(self.wakemeup) == 'on'):
            if ((self.get_state(self.alarmweekday) == 'off') or ((self.get_state(self.alarmweekday) == 'on') and (self.get_state(self.isweekday) == 'on'))):
                if (float(self.cached_fade_in_time) > 0):
                    self.log('Turning on {}'.format(self.friendly_name(self.wakeup_light)))
                    self.call_service('light/turn_on', entity_id=self.wakeup_light, brightness_pct=1)
                    transition = int((float(self.cached_fade_in_time) * int(self.fade_in_time_multiplicator)))
                    self.log('Transitioning light in over {} seconds'.format(transition))
                    self.timer_handle_list.append(self.run_in(self.run_fade_in, 1, transition=transition, brightness_pct=1))
                self.timer_handle_list.append(self.run_in(self.run_alarm, float(self.cached_fade_in_time)))

    def button_clicked(self, event_name, data, kwargs):
        """Manually start the light fade-in on a single click of the button."""
        if (data['entity_id'] == self.button):
            if (data['click_type'] == 'single'):
                if (float(self.cached_fade_in_time) > 0):
                    self.log('Turning on {}'.format(self.friendly_name(self.wakeup_light)))
                    self.call_service('light/turn_on', entity_id=self.wakeup_light, brightness_pct=1)
                    transition = int((float(self.cached_fade_in_time) * int(self.fade_in_time_multiplicator)))
                    self.log('Transitioning light in over {} seconds'.format(transition))
                    self.timer_handle_list.append(self.run_in(self.run_fade_in, 1, transition=transition, brightness_pct=1))

    def run_fade_in(self, kwargs):
        """Step the light brightness up and reschedule itself until 100%.

        When the per-second increase would be below 1%, the step is clamped
        to 1% and the wait between steps is stretched instead.
        """
        wait_factor = 1
        transition = kwargs['transition']
        brightness_pct = kwargs['brightness_pct']
        pct_increase = (1 / transition)
        self.log('pct_increase: {}'.format(pct_increase), level='DEBUG')
        if (pct_increase < 0.01):
            wait_factor = math.ceil((0.01 / pct_increase))
            pct_increase = 0.01
            self.log('pct_increase smaller than 1% next run_in in {} seconds'.format(wait_factor), level='DEBUG')
        brightness_pct_old = brightness_pct
        self.log('brightness_pct_old: {}'.format(brightness_pct_old), level='DEBUG')
        brightness_pct_new = int((brightness_pct_old + (pct_increase * 100)))
        self.log('brightness_pct_new: {}'.format(brightness_pct_new), level='DEBUG')
        if (brightness_pct_new < 100):
            self.call_service('light/turn_on', entity_id=self.wakeup_light, rgb_color=self.rgb_color, brightness_pct=brightness_pct_new)
            self.timer_handle_list.append(self.run_in(self.run_fade_in, wait_factor, transition=transition, brightness_pct=brightness_pct_new))

    def run_alarm(self, kwargs):
        """Send the configured wake-up notification."""
        self.notifier.notify(self.notify_name, self.message)

    def terminate(self):
        """Cancel all timers and listeners registered by this app."""
        for timer_handle in self.timer_handle_list:
            self.cancel_timer(timer_handle)
        for listen_event_handle in self.listen_event_handle_list:
            self.cancel_listen_event(listen_event_handle)
        for listen_state_handle in self.listen_state_handle_list:
            self.cancel_listen_state(listen_state_handle)
class AutozoomDialog(Gtk.Window):
    """GTK modal dialog that repeatedly zooms the fractal *f* into its most
    "interesting" (most colorful) quadrant until a minimum size is reached."""

    def __init__(self, main_window, f):
        super().__init__(title=_('Autozoom'), transient_for=main_window, modal=True)
        self.f = f
        content = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
        self.set_child(content)
        table = Gtk.Grid(column_spacing=5, row_spacing=5)
        content.append(table)
        close_button = Gtk.Button.new_with_mnemonic(label=_('_Close'))
        close_button.set_halign(Gtk.Align.END)
        close_button.connect('clicked', self.quit)
        content.append(close_button)
        self.connect('close-request', self.quit)
        self.zoombutton = Gtk.ToggleButton(label=_('Start _Zooming'), tooltip_text=_('Zoom into interesting areas automatically'), use_underline=True)
        self.zoombutton.connect('toggled', self.onZoomToggle)
        f.connect('status-changed', self.onStatusChanged)
        table.attach(self.zoombutton, 0, 0, 2, 1)
        # Zooming stops once the fractal magnitude drops below this size.
        self.minsize = 1e-13
        self.minsize_entry = Gtk.Entry(tooltip_text=_('Stop zooming when size of fractal is this small'))
        minlabel = Gtk.Label(label=_('_Min Size'), use_underline=True, mnemonic_widget=self.minsize_entry)
        table.attach(minlabel, 0, 1, 1, 1)

        def set_entry(*args):
            # Reflect the current minsize back into the entry widget.
            self.minsize_entry.set_text(('%g' % self.minsize))

        def change_entry(*args):
            # Commit an edited minsize when the entry loses focus.
            m = float(self.minsize_entry.get_text())
            if ((m != 0.0) and (m != self.minsize)):
                self.minsize = m
            set_entry()
            return False
        focus_controller = Gtk.EventControllerFocus()
        focus_controller.connect('leave', change_entry)
        self.add_controller(focus_controller)
        set_entry()
        table.attach(self.minsize_entry, 1, 1, 1, 1)

    def onZoomToggle(self, *args):
        """Start or stop the autozoom loop and relabel the toggle button."""
        if self.zoombutton.get_active():
            self.zoombutton.get_child().set_text_with_mnemonic('Stop _Zooming')
            self.select_quadrant_and_zoom()
        else:
            self.zoombutton.get_child().set_text_with_mnemonic('Start _Zooming')

    def select_quadrant_and_zoom(self, *args):
        """Pick a quadrant (usually the most colorful) and recenter into it."""
        (wby2, hby2) = ((self.f.width / 2), (self.f.height / 2))
        (w, h) = (self.f.width, self.f.height)
        # The four screen quadrants as (x1, y1, x2, y2) regions.
        regions = [(0, 0, wby2, hby2), (wby2, 0, w, hby2), (0, hby2, wby2, h), (wby2, hby2, w, h)]
        counts = [self.f.count_colors(r) for r in regions]
        m = max(counts)
        i = counts.index(m)
        # Occasionally take a random quadrant if it is nearly as colorful,
        # to avoid always following the same path.
        j = random.randrange(0, 4)
        if ((float(counts[j]) / counts[i]) > 0.75):
            i = j
        # Quadrant centres expressed in quarter-of-width/height units.
        coords = [(1, 1), (3, 1), (1, 3), (3, 3)]
        (x, y) = coords[i]
        self.f.recenter(((x * self.f.width) / 4), ((y * self.f.height) / 4), 0.75)

    def onStatusChanged(self, f, status_val):
        """Continue zooming whenever the fractal finishes drawing.

        NOTE(review): status_val == 0 is treated as 'idle/done' -- confirm
        against the fractal widget's status constants.
        """
        if (status_val == 0):
            if self.zoombutton.get_active():
                if (self.f.get_param(self.f.MAGNITUDE) > self.minsize):
                    self.select_quadrant_and_zoom()
                else:
                    self.zoombutton.set_active(False)

    def quit(self, *args):
        """Stop zooming and close the dialog."""
        self.zoombutton.set_active(False)
        self.close()
class CORSMiddleware(object):
    """Django middleware adding CORS (cross-origin resource sharing) headers.

    Preflight requests (identified by the Access-Control-Request-Method
    header) are answered immediately with an empty response carrying the
    allow headers; all other responses get the allow-origin/method headers
    appended unless already present.  The XS_SHARING_* constants are
    module-level configuration.
    """

    def process_request(self, request):
        """Short-circuit CORS preflight requests with an empty response."""
        if ('HTTP_ACCESS_CONTROL_REQUEST_METHOD' in request.META):
            # BUG FIX: the response object was never constructed here (bare
            # `response =` was a syntax error).  Assumes HttpResponse is
            # imported from django.http at the top of the file -- confirm.
            response = HttpResponse()
            response['Access-Control-Allow-Origin'] = XS_SHARING_ALLOWED_ORIGINS
            response['Access-Control-Allow-Methods'] = ','.join(XS_SHARING_ALLOWED_METHODS)
            response['Access-Control-Allow-Headers'] = ','.join(XS_SHARING_ALLOWED_HEADERS)
            return response
        # Not a preflight: let normal request processing continue.
        return None

    def process_response(self, request, response):
        """Attach CORS headers to *response* unless they are already set."""
        if response.has_header('Access-Control-Allow-Origin'):
            return response
        response['Access-Control-Allow-Origin'] = XS_SHARING_ALLOWED_ORIGINS
        response['Access-Control-Allow-Methods'] = ','.join(XS_SHARING_ALLOWED_METHODS)
        return response
def poisson3D(h, degree=2):
    """Solve a Poisson problem on a Netgen-meshed [0, pi]^3 box and return
    the L2 error of the solution against sin(x)*sin(y)*sin(z).

    Parameters:
        h: target mesh size passed to Netgen.
        degree: polynomial degree of the CG function space.

    Skips the test when Netgen is not installed.  Mesh generation happens
    only on MPI rank 0; other ranks receive an empty 3D mesh and the
    boundary labels via broadcast.
    """
    try:
        from netgen.csg import CSGeometry, OrthoBrick, Pnt
        import netgen
    except ImportError:
        pytest.skip(reason='Netgen unavailable, skipping Netgen test.')
    comm = COMM_WORLD
    if (comm.Get_rank() == 0):
        box = OrthoBrick(Pnt(0, 0, 0), Pnt(np.pi, np.pi, np.pi))
        box.bc('bcs')
        geo = CSGeometry()
        geo.Add(box)
        ngmesh = geo.GenerateMesh(maxh=h)
        # Region names are 1-indexed; collect the ids of the 'bcs' boundary.
        labels = [(i + 1) for (i, name) in enumerate(ngmesh.GetRegionNames(codim=1)) if (name == 'bcs')]
    else:
        # Non-root ranks construct an empty 3D mesh shell.
        ngmesh = netgen.libngpy._meshing.Mesh(3)
        labels = None
    labels = comm.bcast(labels, root=0)
    msh = Mesh(ngmesh)
    V = FunctionSpace(msh, 'CG', degree)
    u = TrialFunction(V)
    v = TestFunction(V)
    f = Function(V)
    (x, y, z) = SpatialCoordinate(msh)
    # RHS chosen so the exact solution is sin(x)*sin(y)*sin(z).
    f.interpolate((((3 * sin(x)) * sin(y)) * sin(z)))
    a = (inner(grad(u), grad(v)) * dx)
    l = (inner(f, v) * dx)
    u = Function(V)
    bc = DirichletBC(V, 0.0, labels)
    A = assemble(a, bcs=bc)
    b = assemble(l)
    bc.apply(b)
    # Direct LU solve keeps the test deterministic.
    solve(A, u, b, solver_parameters={'ksp_type': 'preonly', 'pc_type': 'lu'})
    f.interpolate(((sin(x) * sin(y)) * sin(z)))
    S = sqrt(assemble((inner((u - f), (u - f)) * dx)))
    return S
def parse_url(url):
    """Split *url* into urlparse components plus (is_url, is_absolute) flags.

    Returns (scheme, netloc, path, params, query, fragment, is_url,
    is_absolute).  Handles several Windows / file-URL oddities: drive
    letters that urlparse reports as a scheme, UNC paths landing in the
    netloc, and scheme-less '//server/share' forms.  Relies on module-level
    RE_URL, RE_WIN_DRIVE_PATH and RE_WIN_DRIVE_LETTER patterns.
    """
    is_url = False
    is_absolute = False
    # Unescape HTML entities before parsing.
    (scheme, netloc, path, params, query, fragment) = urlparse(html_parser.unescape(url))
    if RE_URL.match(scheme):
        # Recognised remote scheme (http, https, ...).
        is_url = True
    elif ((scheme == '') and (netloc == '') and (path == '')):
        # Completely empty target (e.g. fragment-only link).
        is_url = True
    elif ((scheme == 'file') and RE_WIN_DRIVE_PATH.match(netloc)):
        # file://c:/... -- drive path ended up in the netloc.
        path = ('/' + (netloc + path).replace('\\', '/'))
        netloc = ''
        is_absolute = True
    elif ((scheme == 'file') and netloc.startswith('\\')):
        # file:// UNC path (\\server\share).
        path = (netloc + path).replace('\\', '/')
        netloc = ''
        is_absolute = True
    elif (scheme == 'file'):
        # Plain file: URL.
        is_absolute = True
    elif RE_WIN_DRIVE_LETTER.match(scheme):
        # 'c:\...' -- urlparse mistook the drive letter for a scheme.
        path = ('/%s:%s' % (scheme, path.replace('\\', '/')))
        scheme = 'file'
        netloc = ''
        is_absolute = True
    elif ((scheme == '') and (netloc != '') and url.startswith('//')):
        # Scheme-less '//server/share': treat as a local file path.
        path = (('//' + netloc) + path)
        scheme = 'file'
        netloc = ''
        is_absolute = True
    elif ((scheme != '') and (netloc != '')):
        # Any other scheme with a host is an external URL.
        is_url = True
    elif path.startswith(('/', '\\')):
        # Rooted local path.
        is_absolute = True
    return (scheme, netloc, path, params, query, fragment, is_url, is_absolute)
class OptionSeriesFunnel3dSonificationContexttracksMappingTremolo(Options):
    """Generated wrapper for the Highcharts option
    ``series.funnel3d.sonification.contextTracks.mapping.tremolo``.

    Exposes the tremolo depth and speed sub-option objects, created lazily.
    """

    def depth(self) -> 'OptionSeriesFunnel3dSonificationContexttracksMappingTremoloDepth':
        # Lazily-created sub-options for tremolo depth mapping.
        return self._config_sub_data('depth', OptionSeriesFunnel3dSonificationContexttracksMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesFunnel3dSonificationContexttracksMappingTremoloSpeed':
        # Lazily-created sub-options for tremolo speed mapping.
        return self._config_sub_data('speed', OptionSeriesFunnel3dSonificationContexttracksMappingTremoloSpeed)
class load(bsn_tlv):
    """BSN 'load' TLV (type 213) -- auto-generated, loxigen-style wire
    serializer carrying a single 32-bit value."""

    type = 213

    def __init__(self, value=None):
        # Value defaults to 0 when not supplied.
        if (value != None):
            self.value = value
        else:
            self.value = 0
        return

    def pack(self):
        """Serialize as type(2) + length(2, back-patched) + value(4)."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        # Placeholder for the TLV length, patched below.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.value))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a load TLV from *reader*.

        NOTE(review): takes ``reader`` (not ``self``) -- presumably wrapped
        as a staticmethod by the generator elsewhere; confirm.
        """
        obj = load()
        _type = reader.read('!H')[0]
        assert (_type == 213)
        _length = reader.read('!H')[0]
        # Restrict further reads to this TLV's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.value = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        """Equality on type and value."""
        if (type(self) != type(other)):
            return False
        if (self.value != other.value):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump via the pretty-printer *q*."""
        q.text('load {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('value = ')
                q.text(('%#x' % self.value))
            q.breakable()
        q.text('}')
@click.command()
@click.argument('infile', type=click.File('r'), default='-')
@click.option('-j', '--json', 'json_output', is_flag=True, default=False, help='JSON output')
@click.option('-u', '--unique', 'unique', is_flag=True, default=False, help='Remove duplicates')
@click.option('-v', 'verbose', is_flag=True, default=False, help='Verbose output')
def cmd_data_extract_ipv4(infile, json_output, unique, verbose):
    """Extract IPv4 addresses from INFILE (stdin by default).

    Prints one address per line, or a JSON list of
    ``{"ipv4_address": ...}`` objects with -j.  -u sorts and de-duplicates.

    BUG FIX: the click decorators above had been mangled into bare tuples
    (a syntax error); reconstructed from the argument/option payloads.
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
    data = infile.read()
    result = extract_ipv4(data)
    if unique:
        # sorted() already returns a list; the extra list() wrapper was redundant.
        result = sorted(set(result))
    if (not json_output):
        print('\n'.join(result))
        return True
    result_dict = [{'ipv4_address': ip} for ip in result]
    print(json.dumps(result_dict, indent=4))
@common.requires_os(*metadata.platforms)
def main():
    """Masquerading RTA: run a renamed binary whose version-resource
    OriginalFilename claims to be MSBuild.exe.

    BUG FIX: the decorator line had been mangled to a bare
    ``_os(*metadata.platforms)`` call; reconstructed as the usual
    ``@common.requires_os(...)`` platform guard -- confirm against the
    original RTA template.
    """
    msbuild = 'C:\\Users\\Public\\posh.exe'
    rcedit = 'C:\\Users\\Public\\rcedit.exe'
    # Stage local copies of the test binary and the resource editor.
    common.copy_file(RENAMER, rcedit)
    common.copy_file(EXE_FILE, msbuild)
    common.log('Modifying the OriginalFileName attribute')
    common.execute([rcedit, msbuild, '--set-version-string', 'OriginalFilename', 'MSBuild.exe'])
    common.log('Executing modified binary with extexport.exe original file name')
    common.execute([msbuild, '-Version'], timeout=10, kill=True)
    # Clean up the staged files.
    common.remove_files(msbuild, rcedit)
@pytest.mark.skipcomplex
def test_consecutive_nonlinear_solves():
    """Taylor-test the adjoint of three consecutive nonlinear solves.

    BUG FIX: the marker line had been mangled to a bare ``.skipcomplex``;
    reconstructed as the ``@pytest.mark.skipcomplex`` marker used by the
    firedrake test suite for tests that cannot run in complex mode.
    """
    from firedrake.adjoint import ReducedFunctional, Control, taylor_test
    mesh = UnitSquareMesh(1, 1)
    V = FunctionSpace(mesh, 'CG', 1)
    uic = Constant(2.0, domain=mesh)
    u1 = Function(V).assign(uic)
    u0 = Function(u1)
    v = TestFunction(V)
    # Residual: v*u1^2 - v*u0, so each solve maps u0 -> sqrt(u0).
    F = (((v * (u1 ** 2)) * dx) - ((v * u0) * dx))
    problem = NonlinearVariationalProblem(F, u1)
    solver = NonlinearVariationalSolver(problem)
    for i in range(3):
        u0.assign(u1)
        solver.solve()
    J = assemble(((u1 ** 16) * dx))
    rf = ReducedFunctional(J, Control(uic))
    h = Constant(0.01, domain=mesh)
    # A convergence rate close to 2 confirms the adjoint gradient.
    assert (taylor_test(rf, uic, h) > 1.9)
def validate_exchange(item, k) -> None:
    """Validate a parsed exchange datapoint *item* for zone-pair key *k*.

    Raises ValidationError on any malformed or physically implausible field;
    returns None when the datapoint passes all checks.
    """
    validate_datapoint_format(datapoint=item, kind='exchange', zone_key=k)
    if item.get('sortedZoneKeys', None) != k:
        raise ValidationError(f"Sorted country codes {item.get('sortedZoneKeys', None)} and {k} don't match")
    if 'datetime' not in item:
        raise ValidationError(f'datetime was not returned for {k}')
    # isinstance (not an exact type() comparison) so datetime subclasses
    # such as pandas.Timestamp are accepted as valid timestamps.
    if not isinstance(item['datetime'], datetime):
        raise ValidationError(f"datetime {item['datetime']} is not valid for {k}")
    validate_reasonable_time(item, k)
    if 'netFlow' not in item:
        raise ValidationError(f'netFlow was not returned for {k}')
    if item.get('sortedZoneKeys', None) and item.get('netFlow', None):
        zone_names: list[str] = item['sortedZoneKeys']
        # Hard physical ceiling: no interconnector carries more than 100 GW.
        if abs(item.get('netFlow', 0)) > 100000:
            raise ValidationError(f"netFlow {item['netFlow']} exceeds physical plausibility (>100GW) for {k}")
        if len(zone_names) == 2:
            # NOTE(review): if sortedZoneKeys really is a list (as annotated),
            # using it as a dict key below raises TypeError; presumably it is
            # the canonical 'A->B' string at runtime -- confirm upstream.
            if (zone_names in EXCHANGES_CONFIG) and ('capacity' in EXCHANGES_CONFIG[zone_names]):
                interconnector_capacities = EXCHANGES_CONFIG[zone_names]['capacity']
                margin = 0.1
                # Allow 10% headroom beyond the configured capacity range.
                if not (min(interconnector_capacities) * (1 - margin) <= item['netFlow'] <= max(interconnector_capacities) * (1 + margin)):
                    raise ValidationError(f"netFlow {item['netFlow']} exceeds interconnector capacity for {k}")
class TestOefSearchHandler(ERC1155DeployTestCase):
    """Unit tests for the skill's OefSearchHandler using mocked dialogues."""

    # Handler tests only; no real agent-to-agent traffic is exchanged.
    is_agent_to_agent_messages = False

    def test_setup(self):
        """setup() is a no-op and enqueues nothing."""
        assert (self.oef_search_handler.setup() is None)
        self.assert_quantity_in_outbox(0)

    def test_handle_unidentified_dialogue(self):
        """A message with an unknown dialogue reference is logged as invalid."""
        incorrect_dialogue_reference = ('', '')
        incoming_message = cast(OefSearchMessage, self.build_incoming_message(message_type=OefSearchMessage, dialogue_reference=incorrect_dialogue_reference, performative=OefSearchMessage.Performative.OEF_ERROR, oef_error_operation=OefSearchMessage.OefErrorOperation.REGISTER_SERVICE))
        with patch.object(self.logger, 'log') as mock_logger:
            self.oef_search_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received invalid oef_search message={incoming_message}, unidentified dialogue.')

    def test_handle_error(self):
        """OEF_ERROR replies within a known dialogue are logged."""
        oef_search_dialogue = cast(OefSearchDialogue, self.prepare_skill_dialogue(dialogues=self.oef_search_dialogues, messages=self.list_of_messages_register_location[:1]))
        incoming_message = cast(OefSearchMessage, self.build_incoming_message_for_skill_dialogue(dialogue=oef_search_dialogue, performative=OefSearchMessage.Performative.OEF_ERROR, oef_error_operation=OefSearchMessage.OefErrorOperation.REGISTER_SERVICE))
        with patch.object(self.logger, 'log') as mock_logger:
            self.oef_search_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received oef_search error message={incoming_message} in dialogue={oef_search_dialogue}.')

    def test_handle_success_i(self):
        """SUCCESS after register_location triggers the next step: register_service."""
        oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_search_dialogues, messages=self.list_of_messages_register_location[:1])
        incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.SUCCESS, agents_info=OefSearchMessage.AgentsInfo({'address': {'key': 'value'}}))
        with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
            with patch.object(self.registration_behaviour, 'register_service') as mock_reg:
                self.oef_search_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received oef_search success message={incoming_message} in dialogue={oef_dialogue}.')
        mock_reg.assert_called_once()

    def test_handle_success_ii(self):
        """SUCCESS after register_service triggers register_genus."""
        oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_search_dialogues, messages=self.list_of_messages_register_service[:1])
        incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.SUCCESS, agents_info=OefSearchMessage.AgentsInfo({'address': {'key': 'value'}}))
        with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
            with patch.object(self.registration_behaviour, 'register_genus') as mock_reg:
                self.oef_search_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received oef_search success message={incoming_message} in dialogue={oef_dialogue}.')
        mock_reg.assert_called_once()

    def test_handle_success_iii(self):
        """SUCCESS after register_genus triggers register_classification."""
        oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_search_dialogues, messages=self.list_of_messages_register_genus[:1])
        incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.SUCCESS, agents_info=OefSearchMessage.AgentsInfo({'address': {'key': 'value'}}))
        with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
            with patch.object(self.registration_behaviour, 'register_classification') as mock_reg:
                self.oef_search_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received oef_search success message={incoming_message} in dialogue={oef_dialogue}.')
        mock_reg.assert_called_once()

    def test_handle_success_iv(self):
        """SUCCESS after register_classification completes the registration flow."""
        oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_search_dialogues, messages=self.list_of_messages_register_classification[:1])
        incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.SUCCESS, agents_info=OefSearchMessage.AgentsInfo({'address': {'key': 'value'}}))
        with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
            self.oef_search_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received oef_search success message={incoming_message} in dialogue={oef_dialogue}.')
        assert (self.registration_behaviour.is_registered is True)
        assert (self.registration_behaviour.registration_in_progress is False)
        mock_logger.assert_any_call(logging.INFO, 'the agent, with its genus and classification, and its service are successfully registered on the SOEF.')

    def test_handle_success_v(self):
        """SUCCESS replying to an unexpected message is logged as a warning."""
        oef_dialogue = self.prepare_skill_dialogue(dialogues=self.oef_search_dialogues, messages=self.list_of_messages_register_invalid[:1])
        incoming_message = self.build_incoming_message_for_skill_dialogue(dialogue=oef_dialogue, performative=OefSearchMessage.Performative.SUCCESS, agents_info=OefSearchMessage.AgentsInfo({'address': {'key': 'value'}}))
        with patch.object(self.oef_search_handler.context.logger, 'log') as mock_logger:
            self.oef_search_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.INFO, f'received oef_search success message={incoming_message} in dialogue={oef_dialogue}.')
        mock_logger.assert_any_call(logging.WARNING, f'received soef SUCCESS message as a reply to the following unexpected message: {oef_dialogue.get_message_by_id(incoming_message.target)}')

    def test_handle_invalid(self):
        """A performative the handler cannot process is logged as a warning."""
        incoming_message = cast(OefSearchMessage, self.build_incoming_message(message_type=OefSearchMessage, performative=OefSearchMessage.Performative.REGISTER_SERVICE, service_description=self.mocked_proposal))
        with patch.object(self.logger, 'log') as mock_logger:
            self.oef_search_handler.handle(incoming_message)
        mock_logger.assert_any_call(logging.WARNING, f'cannot handle oef_search message of performative={incoming_message.performative} in dialogue={self.oef_search_dialogues.get_dialogue(incoming_message)}.')

    def test_teardown(self):
        """teardown() is a no-op and enqueues nothing."""
        assert (self.oef_search_handler.teardown() is None)
        self.assert_quantity_in_outbox(0)
def extractAdjacentaisleHomeBlog(item):
    """Parser for 'Adjacentaisle Home Blog' release titles.

    Returns a release message for recognized tags, None for previews or
    untitled chapters, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    release_map = [
        ('otome survival', 'The Otome Game Heroines Strongest Survival', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in release_map:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def compiled_query(output_variables=None, group_by=None):
    """Decorator factory turning a function into an LMQLQueryFunction.

    output_variables: names of query variables to expose in the result.
    group_by: optional key; adds a GroupByPostprocessor to the pipeline.
    """
    if (output_variables is None):
        output_variables = []
    postprocessors = []
    # Capture the CALLER's frame so the query can resolve names from the
    # scope where @compiled_query(...) was written. Do not move this call:
    # stack depth matters (stack()[1] must be the decorating module/function).
    calling_frame = inspect.stack()[1]
    if (group_by is not None):
        postprocessors.append(GroupByPostprocessor(group_by))

    def func_transformer(fct):
        # Wrap the decorated function together with the captured scope.
        return LMQLQueryFunction(fct, output_variables=output_variables, postprocessors=postprocessors, scope=LMQLInputVariableScope(fct, calling_frame))
    return func_transformer
def test_proper_name_argument():
    """ProperNameArgument accepts decorated name forms and rejects others."""
    argument = ProperNameArgument()
    for accepted in ('NAME', '__NAME__', '<NAME>', '-NAME-'):
        assert argument.validate(accepted)
    for rejected in ('-NA ME-', 'NAME*'):
        assert not argument.validate(rejected)
def cbFun(snmpEngine, sendRequestHandle, errorIndication, errorStatus, errorIndex, varBinds, cbCtx):
    """SNMP query callback: print transport errors, PDU errors, or results."""
    if errorIndication:
        print(errorIndication)
        return
    if errorStatus:
        # Point at the offending var-bind when the agent reports an index.
        offender = (errorIndex and varBinds[int(errorIndex) - 1][0]) or '?'
        print('%s at %s' % (errorStatus.prettyPrint(), offender))
        return
    for oid, val in varBinds:
        print('%s = %s' % (oid.prettyPrint(), val.prettyPrint()))
def run_migrations_online():
    """Run Alembic migrations in 'online' mode against a live connection."""
    configuration = config.get_section(config.config_ini_section)
    configuration['sqlalchemy.url'] = get_url()
    # NullPool: a migration needs one short-lived connection, no pooling.
    connectable = engine_from_config(configuration, prefix='sqlalchemy.', poolclass=pool.NullPool)

    def process_revision_directives(context, revision, directives):
        # With --autogenerate, drop the revision entirely when no schema
        # changes were detected (avoids empty migration files).
        if config.cmd_opts.autogenerate:
            script = directives[0]
            if script.upgrade_ops.is_empty():
                directives[:] = []

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata, process_revision_directives=process_revision_directives)
        with context.begin_transaction():
            context.run_migrations()
class clamp(Operator):
    """Clamp elementwise: y = min(max(x, min_value), max_value).

    Either bound may be omitted, but not both.
    """

    def __init__(self) -> None:
        super().__init__()
        self._attrs['op'] = 'clamp'
        self._attrs['has_profiler'] = False

    def __call__(self, x: Tensor, min_value: Any = None, max_value: Any = None) -> Tensor:
        """Apply the clamp, degenerating to a single MIN/MAX when one bound is absent."""
        if min_value is None and max_value is not None:
            return elementwise(FuncEnum.MIN)(x, max_value)
        if max_value is None and min_value is not None:
            return elementwise(FuncEnum.MAX)(x, min_value)
        # Reject the no-bound call. The original compared max_value with
        # itself (a typo); by coincidence it behaved the same, but this is
        # the intended condition.
        assert not (min_value is None and max_value is None)
        return elementwise(FuncEnum.MIN)(elementwise(FuncEnum.MAX)(x, min_value), max_value)
class RadioList():
    """Named list of radio stations; base class whose item source is empty."""

    def __init__(self, name, station=None):
        self.name = name          # display name of the list
        self.station = station    # optional currently associated station

    def set_name(self, name):
        """Replace the display name."""
        self.name = name

    def get_name(self):
        """Return the display name."""
        return self.name

    def get_items(self, no_cache=False):
        """Return the stations in this list; the base implementation has none."""
        return []

    def __str__(self):
        return self.name
def extractIterations(item):
    """Parser for 'Iterations' release titles.

    Returns a release message for SaeKano-tagged items, None for previews or
    untitled chapters, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    if 'SaeKano' in item['tags'] and (chp or vol):
        return buildReleaseMessageWithType(item, 'Saenai Heroine no Sodatekata', vol, chp, frag=frag, postfix=postfix)
    return False
def init_oidc_app(app):
    """Create the OAuth object for *app* and register the 'copr' OIDC client.

    Registration only happens when OIDC is enabled in the app config. With
    OIDC_METADATA set, endpoints come from the discovery document; otherwise
    each endpoint URL is configured explicitly.
    """
    oidc = OAuth(app)
    if oidc_enabled(app.config):
        client_id = app.config.get('OIDC_CLIENT')
        secret = app.config.get('OIDC_SECRET')
        client_kwargs = {'scope': app.config.get('OIDC_SCOPES'), 'token_endpoint_auth_method': app.config.get('OIDC_TOKEN_AUTH_METHOD')}
        if app.config.get('OIDC_METADATA'):
            # Discovery document supplies token/auth/userinfo endpoints.
            oidc.register(name='copr', server_metadata_url=app.config.get('OIDC_METADATA'), client_id=client_id, client_secret=secret, client_kwargs=client_kwargs)
        else:
            oidc.register(name='copr', client_id=client_id, client_secret=secret, access_token_url=app.config.get('OIDC_TOKEN_URL'), authorize_url=app.config.get('OIDC_AUTH_URL'), userinfo_endpoint=app.config.get('OIDC_USERINFO_URL'), client_kwargs=client_kwargs)
    return oidc
class BsModals(html.Html.Html):
    """Bootstrap modal component: dialog > content > (header, body, footer).

    NOTE(review): dom/header/body/footer are restored as properties -- the
    original lost their @property decorators, and __init__ already calls
    ``self.header.__add__(title)``, which only works if ``header`` is a
    property returning a Section.
    """
    name = 'Bootstrap Modals'

    def __init__(self, report, components, title, width, height, options, profile):
        super(BsModals, self).__init__(report, [])
        self.style.clear_all()
        self.attr['class'].add('modal fade')
        self.attr['role'] = 'dialog'
        self.attr['tabindex'] = -1
        self.aria.hidden = True
        self.aria.labelledby = ''
        # Outer .modal-dialog wrapper.
        self.dialog = Section(self.page, 'div')
        self.dialog.attr['class'].add('modal-dialog')
        self.dialog.attr['role'] = 'document'
        if options.get('vertical-align') == 'middle':
            self.dialog.attr['class'].add('modal-dialog-centered')
        self.dialog += Section(self.page, 'div')
        self._content = self.dialog[0]
        self._content.attr['class'].add('modal-content')
        # Fixed three-slot layout: header, body, footer.
        header = Section(self.page, 'div')
        header.attr['class'].add('modal-header')
        body = Section(self.page, 'div')
        body.attr['class'].add('modal-body')
        footer = Section(self.page, 'div')
        footer.attr['class'].add('modal-footer')
        self._content += header
        self._content += body
        self._content += footer
        if title is not None:
            self.header.__add__(title)
        for c in components:
            self.body.__add__(c)

    def __add__(self, comp):
        """Append a component to the modal body."""
        if hasattr(comp, 'options'):
            # The modal takes over rendering of the component.
            comp.options.managed = False
        self.body.__add__(comp)
        return self

    def __getitem__(self, i):
        if not self.body.val:
            return []
        return self.body.val[i]

    @property
    def dom(self):
        """Lazily-built DOM helper for this modal."""
        if self._dom is None:
            self._dom = BsDom.Modal(self, page=self.page)
        return self._dom

    @property
    def header(self):
        """The .modal-header Section."""
        return self._content[0]

    @property
    def body(self):
        """The .modal-body Section."""
        return self._content[1]

    @property
    def footer(self):
        """The .modal-footer Section."""
        return self._content[2]

    def __str__(self):
        return '<div %s>%s</div>' % (self.get_attrs(css_class_names=self.style.get_classes()), self.dialog.html())
def chunk_scaffolds(log, target, size):
    """Split a 2bit genome *target* into temporary FASTA files of ~*size* bp.

    Returns the list of temp-file paths; callers own their cleanup.
    """
    chromos = []
    # mkstemp hands back an open fd; close it and reopen by name for text writes.
    (temp_fd, temp_out) = tempfile.mkstemp(suffix='.fasta')
    os.close(temp_fd)
    temp_out_handle = open(temp_out, 'w')
    with open(target, 'rb') as f:
        tb = twobit.TwoBitFile(f)
        sequence_length = 0
        tb_key_len = (len(tb.keys()) - 1)  # index of the last sequence
        log.info('Running against {}'.format(os.path.basename(target)))
        log.info('Running with the --huge option. Chunking files into {0} bp...'.format(size))
        for (sequence_count, seq) in enumerate(tb.keys()):
            sequence = tb[seq][0:]  # materialize the full sequence
            sequence_length += len(sequence)
            # seq is presumably bytes in this twobit API -- hence the decode.
            temp_out_handle.write('>{0}\n{1}\n'.format(seq.decode('utf-8'), sequence))
            if (sequence_length > size):
                # Chunk is full: close it, record it, start a fresh temp file.
                # NOTE(review): if the LAST sequence takes this branch, the
                # freshly created empty temp file is closed at the end but
                # never appended to `chromos` -- confirm that is intended.
                temp_out_handle.close()
                chromos.append(temp_out)
                (temp_fd, temp_out) = tempfile.mkstemp(suffix='.fasta')
                os.close(temp_fd)
                temp_out_handle = open(temp_out, 'w')
                sequence_length = 0
            elif (sequence_count >= tb_key_len):
                # Last sequence: flush the final (possibly undersized) chunk.
                temp_out_handle.close()
                chromos.append(temp_out)
            else:
                pass
    temp_out_handle.close()
    return chromos
def test_pyscf():
    """Compare PySCF forces/Hessian for the HCN iso TS against Gaussian16."""
    pyscf_ = PySCF(basis='3-21g', pal=4)
    geom = geom_from_library('hcn_iso_ts.xyz')
    geom.set_calculator(pyscf_)
    f = geom.forces.reshape((- 1), 3)
    print('PySCF')
    print(f)
    H = geom.hessian
    print('PySCF hessian')
    print(H.reshape((- 1), 9))
    ref_geom = geom.copy()
    # Local import keeps Gaussian16 optional for the rest of the module.
    from pysisyphus.calculators.Gaussian16 import Gaussian16
    g16 = Gaussian16('hf/3-21G', pal=4)
    ref_geom.set_calculator(g16)
    f_ref = ref_geom.forces.reshape((- 1), 3)
    print('Gaussian16')
    print(f_ref)
    H_ref = ref_geom.hessian
    print('G16 Hess')
    print(H_ref)
    # NOTE(review): values are printed but never asserted -- this "test"
    # passes as long as both calculators run without error.
def _validate_enum_symbols(schema):
    """Validate an enum schema's symbols: name-shaped, unique, default present."""
    symbols = schema['symbols']
    for symbol in symbols:
        if not (isinstance(symbol, str) and SYMBOL_REGEX.fullmatch(symbol)):
            raise SchemaParseException('Every symbol must match the regular expression [A-Za-z_][A-Za-z0-9_]*')
    if len(set(symbols)) != len(symbols):
        raise SchemaParseException('All symbols in an enum must be unique')
    if 'default' in schema and schema['default'] not in symbols:
        raise SchemaParseException('Default value for enum must be in symbols list')
def tree_gen(mock_config_helper, tmp_path):
    """Fixture-style helper: build a small directory tree and a TreeGenerator over it."""
    (tmp_path / 'dir1').mkdir()
    (tmp_path / 'dir1' / 'file1.txt').touch()
    (tmp_path / 'dir2').mkdir()
    # NOTE(review): the original line was corrupted
    # (`repo_url=' repo_name='TestProject'`, a syntax error); a placeholder
    # URL is substituted here -- TODO confirm the intended value upstream.
    tree_gen = TreeGenerator(
        conf_helper=mock_config_helper,
        root_dir=tmp_path,
        repo_url='https://github.com/test/TestProject',
        repo_name='TestProject',
        max_depth=3,
    )
    return tree_gen
def aggregate_and_return_observation_data(observations):
    """Group raw observation rows for display.

    Returns (out_data, obs_length): out_data mixes simple
    {'observation': obs} entries with grouped child-observation dicts;
    obs_length counts non-composite observations having a permitted data type.
    """
    out_data = []
    obs_length = 0
    for obs in observations:
        if (not obs.get('has_component')):
            if obs.get('permitted_data_type'):
                obs_length += 1
                if ((obs.get('permitted_data_type') == 'Select') and obs.get('options')):
                    # Options are stored newline-separated; expose as a list.
                    obs['options_list'] = obs.get('options').split('\n')
            if (obs.get('observation_template') and obs.get('specimen')):
                # Attach the specimen's received time for display.
                obs['received_time'] = frappe.get_value('Specimen', obs.get('specimen'), 'received_time')
            out_data.append({'observation': obs})
        else:
            # Composite observation: fetch and aggregate its children.
            child_observations = get_child_observations(obs)
            obs_dict = return_child_observation_data_as_dict(child_observations, obs, obs_length)
            if (len(obs_dict) > 0):
                out_data.append(obs_dict)
    return (out_data, obs_length)
def forward(model, X, is_train):
    """Select row `model.attrs['index']` from X.

    The backward pass scatters dY into a zero array of X's original shape
    and dtype (both captured by the closure before slicing).
    """
    index = model.attrs['index']
    shape, dtype = X.shape, X.dtype

    def backprop_get_column(dY):
        dX = model.ops.alloc(shape, dtype=dtype)
        dX[index] = dY
        return dX

    if len(X) == 0:
        # Nothing to select from; pass X through unchanged.
        return X, backprop_get_column
    return X[index], backprop_get_column
class OpenAIFunction():
    """Declarative description of an OpenAI function-calling tool."""

    def __init__(self, name: str, description: str, properties: Dict[str, Dict[str, str]], required: Optional[List[str]] = None):
        self.name = name
        self.description = description
        self.properties = properties
        self.required = required or []

    def to_dict(self) -> Dict[str, Any]:
        """Return the function spec for the OpenAI `functions` API.

        Fix: per the OpenAI function-calling schema (JSON Schema), `required`
        belongs INSIDE the `parameters` object; the original emitted it as a
        sibling of `parameters`, where the API ignores it.
        """
        return {
            'name': self.name,
            'description': self.description,
            'parameters': {
                'type': 'object',
                'properties': self.properties,
                'required': self.required,
            },
        }

    def prompt_format(self) -> str:
        """Render a TypeScript-like signature suitable for prompt injection."""
        def param_signature(properties: Dict[str, Dict[str, str]]) -> str:
            # One "name:type," line per declared parameter.
            return '\n'.join([f"{property_name}:{fields['type']}," for (property_name, fields) in properties.items()])

        return '\n'.join([f'// {self.description}', f'type {self.name} = (_ :{{', param_signature(self.properties), '}) => any;'])
class clsvof_init_cond(object):
    """Initial condition for the CLSVOF solver around the strip
    [x=0..0.01, 0.3 <= y <= 0.35].

    Behavior switches on the module-level IC_type: 0 gives a distance-like
    field; any other value gives a three-valued indicator (-1 inside,
    0 on the boundary, 1 outside).
    """

    def uOfXT(self, x, t):
        # NOTE: the elif chain below continues the OUTER `if IC_type == 0`,
        # so the -1/0/1 branches apply only when IC_type != 0.
        if (IC_type == 0):
            # Distance to the segment {x=0, 0.3 <= y <= 0.35}.
            if ((x[0] == 0) and (x[1] >= 0.3) and (x[1] <= 0.35)):
                return 0.0
            elif ((x[1] >= 0.3) and (x[1] <= 0.35)):
                # Beside the segment: horizontal distance only.
                return x[0]
            elif (x[1] >= 0.35):
                # Above: distance to the top endpoint (0, 0.35).
                return np.sqrt(((x[0] ** 2) + ((x[1] - 0.35) ** 2)))
            else:
                # Below: distance to the bottom endpoint (0, 0.3).
                return np.sqrt(((x[0] ** 2) + ((x[1] - 0.3) ** 2)))
        elif ((x[0] < 0.01) and (x[1] >= 0.3) and (x[1] <= 0.35)):
            # Strictly inside the strip.
            return (- 1.0)
        elif (((x[0] == 0.01) and (x[1] >= 0.3) and (x[1] <= 0.35)) or ((x[0] <= 0.01) and ((x[1] == 0.3) or (x[1] == 0.35)))):
            # Exactly on the strip boundary.
            return 0.0
        else:
            return 1.0
class EngineRichView(EngineView):
    """Traits view of the Mayavi engine: pipeline tree plus a pane editing
    the current selection."""

    def default_traits_view(self):
        """Build the two-pane (pipeline / current selection) default view."""
        view = View(HSplit(Item('engine', id='mayavi.engine_rich_view.pipeline_view', springy=True, resizable=True, editor=self.tree_editor, dock='tab', label='Pipeline'), Item('engine', id='mayavi.engine_rich_view.current_selection', editor=InstanceEditor(view='current_selection_view'), springy=True, resizable=True, style='custom'), show_labels=False, id='mayavi.engine_rich_view_group'), id='mayavi.engine_rich_view', help=False, resizable=True, undo=False, revert=False, ok=False, cancel=False, title='Mayavi pipeline', icon=self.icon, toolbar=self.toolbar, handler=EngineRichViewHandler)
        return view

    def _actions_default(self):
        """Extend the base toolbar actions with a preferences button."""
        preferences_action = Action(image=ImageResource('preferences.png', search_path=self._image_path), tooltip="Modify Mayavi's preferences", checked=False, defined_when='True', perform=preference_manager_view.dialog_view)
        actions = super(EngineRichView, self)._actions_default()
        actions.extend((Separator(), preferences_action))
        return actions

    def scene_editing_view(self, scene):
        """Make the mayavi scene wrapping *scene* the current selection, then
        open this view's editor."""
        for mayavi_scene in self.engine.scenes:
            sc = mayavi_scene.scene
            # scene_editor may wrap the actual scene object; fall back to sc.
            s = getattr(sc, 'scene_editor', sc)
            if (s is scene):
                self.engine.current_selection = mayavi_scene
        return self.edit_traits()
@common.requires_os(*metadata.platforms)
def main():
    """Emulate writing an NTFS Alternate Data Stream via PowerShell Set-Content.

    NOTE(review): the decorator was truncated to '_os(*metadata.platforms)';
    restored as @common.requires_os(*metadata.platforms) -- confirm upstream.
    """
    powershell = 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe'
    exe = 'C:\\Users\\Public\\a.exe'
    common.copy_file(EXE_FILE, exe)
    # Set-Content -Stream writes the payload into an alternate data stream.
    common.execute([powershell, '/c', f'Set-Content -Stream RtaTest -value Heyo -Path {exe}'], timeout=10)
    common.remove_files(exe)
def test_raises_if_username_too_long():
    """A two-character username must fail validation when max length is 1."""
    requirements = UsernameRequirements(min=0, max=1, blacklist=set())
    validator = UsernameValidator(requirements)
    registration = UserRegistrationInfo(username='no', password='no', email='', group=4, language='no')
    with pytest.raises(ValidationError) as excinfo:
        validator(registration)
    error = excinfo.value
    assert error.attribute == 'username'
    assert 'must be between' in error.reason
def config_init(args):
    """Initialize the notification server config under NOTIFICATION_HOME.

    Idempotent: an existing config file is reported, never overwritten.
    """
    # exist_ok avoids the check-then-create race of the original
    # `if not exists: makedirs` sequence.
    os.makedirs(NOTIFICATION_HOME, exist_ok=True)
    config_file = os.path.join(NOTIFICATION_HOME, 'notification_server.yaml')
    if os.path.exists(config_file):
        logger.info('Notification server config has already initialized at {}.'.format(config_file))
    else:
        create_server_config(NOTIFICATION_HOME, {'NOTIFICATION_HOME': NOTIFICATION_HOME})
        logger.info('Notification server config generated at {}.'.format(config_file))
def populate_textarray(fname, feats_dir, feats_dict):
    """Map every character of every line in *fname* through *feats_dict*.

    Returns a numpy array of the mapped values in file order.
    feats_dir is unused but kept for interface compatibility.
    NOTE(review): iterating a line yields its CHARACTERS; presumably this is
    intentional per-character featurization -- confirm with callers.
    """
    feats_array = []
    # `with` guarantees the handle is closed (the original leaked it).
    with open(fname) as f:
        for line in f:
            line = line.rstrip('\n')  # same effect as split('\n')[0] per file line
            for feat in line:
                feats_array.append(feats_dict[feat])
    return np.array(feats_array)
class BaseAutocompleteViewSet(APIView):
    """Base view for autocomplete endpoints over the agency matview."""

    @staticmethod
    def get_request_payload(request):
        """Extract and validate (search_text, limit) from the request body.

        Restored @staticmethod: the method is declared without `self` yet is
        invoked as `self.get_request_payload(request)` below, which only
        works when it is a staticmethod.
        Raises InvalidParameterException on a missing search_text or a
        non-integer limit.
        """
        json_request = request.data
        search_text = json_request.get('search_text', None)
        try:
            limit = int(json_request.get('limit', 10))
        except ValueError:
            raise InvalidParameterException('Limit request parameter is not a valid, positive integer')
        if not search_text:
            raise InvalidParameterException('Missing one or more required request parameters: search_text')
        return search_text, limit

    def agency_autocomplete(self, request):
        """Return up to `limit` agencies whose subtier name/abbreviation matches."""
        search_text, limit = self.get_request_payload(request)
        agency_filter = Q(**{self.filter_field: True}) & (Q(subtier_name__icontains=search_text) | Q(subtier_abbreviation__icontains=search_text))
        # fema_sort pins FEMA results first, then toptier agencies, then names.
        agencies = AgencyAutocompleteMatview.objects.filter(agency_filter).annotate(fema_sort=Case(When(toptier_abbreviation='FEMA', subtier_abbreviation='FEMA', then=1), When(toptier_abbreviation='FEMA', then=2), default=0, output_field=IntegerField())).order_by('fema_sort', '-toptier_flag', Upper('toptier_name'), Upper('subtier_name')).values('agency_autocomplete_id', 'toptier_flag', 'toptier_code', 'toptier_abbreviation', 'toptier_name', 'subtier_abbreviation', 'subtier_name')
        results = [{'id': agency['agency_autocomplete_id'], 'toptier_flag': agency['toptier_flag'], 'toptier_agency': {'toptier_code': agency['toptier_code'], 'abbreviation': agency['toptier_abbreviation'], 'name': agency['toptier_name']}, 'subtier_agency': {'abbreviation': agency['subtier_abbreviation'], 'name': agency['subtier_name']}} for agency in agencies[:limit]]
        return Response({'results': results})
class OptionSeriesScatter3dSonificationContexttracksMappingHighpassFrequency(Options):
    """Highcharts sonification highpass-frequency mapping options.

    The duplicated method definitions in the original were @property/@setter
    pairs whose decorators were stripped; without them each second `def`
    silently shadowed its getter. Restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def upgrade():
    """Make comments.update_id NOT NULL, purging orphaned rows first."""
    # Delete comments without an update, then their dangling association
    # rows, so the NOT NULL constraint can be applied cleanly. Order matters.
    op.execute('DELETE FROM comments WHERE update_id IS NULL')
    op.execute('DELETE FROM comment_bug_assoc WHERE comment_id IS NULL')
    op.execute('DELETE FROM comment_testcase_assoc WHERE comment_id IS NULL')
    op.alter_column('comments', 'update_id', existing_type=sa.INTEGER(), nullable=False)
def init_env_with_observation_normalization(env_factory: Callable[[], StructuredEnv], normalization_config: Dict) -> ObservationNormalizationWrapper:
    """Build an env, wrap it for observation normalization, and estimate statistics."""
    env = ObservationNormalizationWrapper.wrap(env_factory(), **normalization_config)
    return estimate_normalization_statistics(env)
@click.command()
@click.option('-j', 'json_output', is_flag=True, default=False, help='Output in JSON format')
def cmd_net_interfaces(json_output):
    """List network interfaces as a fixed-width table or as JSON.

    NOTE(review): the decorator names were truncated in the original;
    restored as standard click decorators -- confirm.
    """
    interfaces = get_ifaces()
    if json_output:
        print(json.dumps(interfaces, indent=4))
        return True
    # Table header, then one row per interface.
    print('{:<3}{:<32}{:<19}{:<17}{}'.format('#', 'NAME', 'MAC', 'INET', 'INET6'))
    for interface in interfaces.values():
        print('{index:<3}{name:<32}{mac:<19}{inet:<17}{inet6}'.format(index=str(interface['index']), name=str(interface['name']), mac=str(interface['mac']), inet=str(interface['inet']), inet6=str(interface['inet6'])))
def _all_dims_in_graph(sorted_graph: List[Tensor]):
    """Yield (dim_idx, dim) for every shape dim reachable from the graph.

    Walks tensor shapes first, then the `original_shapes` recorded on each
    op's input/output tensor accessors. dim_idx is a single running counter
    across the whole traversal, NOT an index within one shape.
    """
    dim_idx = 0
    for node in sorted_graph:
        for dim in node._attrs['shape']:
            (yield (dim_idx, dim))
            dim_idx += 1
    sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
    for op in sorted_ops:
        input_accessors = op._attrs.get('input_accessors', None)
        output_accessors = op._attrs.get('output_accessors', None)
        for accessors in (input_accessors, output_accessors):
            if (accessors is None):
                continue
            for ta in accessors:
                if ta.original_shapes:
                    # Accessors remember pre-transformation shapes; those dims
                    # must be visited too.
                    for dim in ta.original_shapes:
                        (yield (dim_idx, dim))
                        dim_idx += 1
def main(arguments):
    """Entry point: dispatch the generate / terminal / session subcommands.

    'generate' writes an obfuscated agent and returns; the other two build a
    session and hand control to the Terminal (interactive or one-shot).
    """
    if (arguments.command == 'generate'):
        obfuscated = generate.generate(password=arguments.password, obfuscator=arguments.obfuscator, agent=arguments.agent)
        generate.save_generated(obfuscated, arguments.path)
        # '-' means the agent went to stdout; skip the file-written log line.
        if (arguments.path != '-'):
            log.info((messages.generate.generated_backdoor_with_password_s_in_s_size_i % (arguments.path, arguments.password, len(obfuscated))))
        return
    elif (arguments.command == 'terminal'):
        session = SessionURL(url=arguments.url, password=arguments.password)
    elif (arguments.command == 'session'):
        session = SessionFile(arguments.path)
    dlog.debug(pprint.pformat(session))
    modules.load_modules(session)
    if (not arguments.cmd):
        # Interactive shell.
        Terminal(session).cmdloop()
    else:
        # One-shot command execution.
        Terminal(session).onecmd(arguments.cmd)
def create_panel_permissions():
    """Ensure the Sales panel exists and attach its custom system roles."""
    sales_panel, _ = get_or_create(PanelPermission, panel_name=SALES)
    for role_name in ('Sales Admin', 'Marketer'):
        role, _ = get_or_create(CustomSysRole, name=role_name)
        sales_panel.custom_system_roles.append(role)
# NOTE(review): this bare `_test` looks like a truncated decorator
# (e.g. `@local_test`) -- confirm against upstream before relying on it.
_test
def test_zmq_send_and_poll() -> None:
    """End-to-end: source -> ZMQ sender -> ZMQ poller -> sink, run in parallel."""
    class MyZMQGraphConfig(Config):
        # Connection address, topic, and where the sink writes its samples.
        addr: str
        zmq_topic: str
        output_filename: str

    class MyZMQGraph(Graph):
        DF_SOURCE: MySource
        ZMQ_SENDER: ZMQSenderNode
        ZMQ_POLLER: ZMQPollerNode
        DF_SINK: MySink

        def setup(self) -> None:
            """Configure each node from the graph-level config."""
            self.DF_SOURCE.configure(MySourceConfig(should_terminate=False))
            self.ZMQ_SENDER.configure(ZMQSenderConfig(write_addr=self.config.addr, zmq_topic=self.config.zmq_topic))
            self.ZMQ_POLLER.configure(ZMQPollerConfig(read_addr=self.config.addr, zmq_topic=self.config.zmq_topic))
            self.DF_SINK.configure(MySinkConfig(output_filename=self.config.output_filename))

        def connections(self) -> Connections:
            """Wire source->sender and poller->sink (sender/poller meet over ZMQ)."""
            return ((self.DF_SOURCE.TOPIC, self.ZMQ_SENDER.topic), (self.ZMQ_POLLER.topic, self.DF_SINK.TOPIC))

        def process_modules(self) -> Sequence[Module]:
            return (self.DF_SOURCE, self.ZMQ_SENDER, self.ZMQ_POLLER, self.DF_SINK)

    output_filename = get_test_filename()
    graph = MyZMQGraph()
    # Bind to a free port so parallel test runs don't collide.
    address = f'{ZMQ_ADDR}:{get_free_port()}'
    graph.configure(MyZMQGraphConfig(addr=address, zmq_topic=ZMQ_TOPIC, output_filename=output_filename))
    runner = ParallelRunner(graph=graph)
    runner.run()
    with open(output_filename, 'br') as f:
        data = f.read()
    # Every sample the source produced must have reached the sink.
    assert (set(graph.DF_SOURCE.samples) == set(data.strip(DATA_DELIMITER).split(DATA_DELIMITER)))
def getvcs(vcstype, remote, local):
    """Instantiate the VCS handler for *vcstype* over (remote, local).

    Raises VCSException for deprecated ('svn'), misconfigured srclib paths,
    or unknown types.
    """
    builders = {'git': vcs_git, 'git-svn': vcs_gitsvn, 'hg': vcs_hg, 'bzr': vcs_bzr}
    if vcstype in builders:
        return builders[vcstype](remote, local)
    if vcstype == 'srclib':
        if str(local) != os.path.join('build', 'srclib', str(remote)):
            raise VCSException('Error: srclib paths are hard-coded!')
        return getsrclib(remote, os.path.join('build', 'srclib'), raw=True)
    if vcstype == 'svn':
        raise VCSException("Deprecated vcs type 'svn' - please use 'git-svn' instead")
    raise VCSException('Invalid vcs type ' + vcstype)
def upgrade():
    """Create the `activity` audit table (id PK; actor, time, namespace, detail)."""
    op.create_table('activity', sa.Column('id', sa.Integer(), nullable=False), sa.Column('actor', sa.String(), nullable=True), sa.Column('time', sa.DateTime(), nullable=True), sa.Column('namespace', sa.String(), nullable=True), sa.Column('detail', sa.String(), nullable=True), sa.PrimaryKeyConstraint('id'))
class ApodizationSpec(Tidy3dBaseModel):
    """Gaussian apodization window applied to time-domain data.

    NOTE(review): the validator decorators and the plot decorator were
    stripped in the original (bare argument tuples / `_ax_if_none`);
    restored as @pd.validator(...) and @add_ax_if_none -- confirm upstream.
    """

    start: pd.NonNegativeFloat = pd.Field(None, title='Start Interval', description='Defines the time at which the start apodization ends.', units=SECOND)
    end: pd.NonNegativeFloat = pd.Field(None, title='End Interval', description='Defines the time at which the end apodization begins.', units=SECOND)
    width: pd.PositiveFloat = pd.Field(None, title='Apodization Width', description='Characteristic decay length of the apodization function.', units=SECOND)

    @pd.validator('end', always=True, allow_reuse=True)
    def end_greater_than_start(cls, val, values):
        """Forbid the end-apodization interval starting before the start interval ends."""
        start = values.get('start')
        if val is not None and start is not None and val < start:
            raise SetupError('End apodization begins before start apodization ends.')
        return val

    @pd.validator('width', always=True, allow_reuse=True)
    def width_provided(cls, val, values):
        """Require a width whenever either apodization side is configured."""
        start = values.get('start')
        end = values.get('end')
        if (start is not None or end is not None) and val is None:
            raise SetupError('Apodization width must be set.')
        return val

    @add_ax_if_none
    def plot(self, times: ArrayFloat1D, ax: Ax = None) -> Ax:
        """Plot the apodization amplitude over *times* on *ax* and return it."""
        times = np.array(times)
        amp = np.ones_like(times)
        if self.start is not None:
            # Gaussian ramp-up before `start`.
            start_ind = times < self.start
            time_scaled = (times[start_ind] - self.start) / self.width
            amp[start_ind] *= np.exp(-0.5 * time_scaled ** 2)
        if self.end is not None:
            # Gaussian ramp-down after `end`.
            end_ind = times > self.end
            time_scaled = (times[end_ind] - self.end) / self.width
            amp[end_ind] *= np.exp(-0.5 * time_scaled ** 2)
        ax.plot(times, amp, color='blueviolet')
        ax.set_xlabel('time (s)')
        ax.set_title('apodization function')
        ax.set_aspect('auto')
        return ax
class OptionSeriesStreamgraphStatesHover(Options):
    """Streamgraph hover-state options.

    The duplicated method definitions in the original were @property/@setter
    pairs whose decorators were stripped; without them each second `def`
    silently shadowed its getter. Restored here; sub-config accessors
    (animation/halo/marker) are read-only properties.
    """

    @property
    def animation(self) -> 'OptionSeriesStreamgraphStatesHoverAnimation':
        return self._config_sub_data('animation', OptionSeriesStreamgraphStatesHoverAnimation)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def halo(self) -> 'OptionSeriesStreamgraphStatesHoverHalo':
        return self._config_sub_data('halo', OptionSeriesStreamgraphStatesHoverHalo)

    @property
    def lineWidth(self):
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionSeriesStreamgraphStatesHoverMarker':
        return self._config_sub_data('marker', OptionSeriesStreamgraphStatesHoverMarker)
class TestServiceAsync(pb2_grpc.TestServiceServicer):
    """Async gRPC servicer for tests: happy path, abort, UNAVAILABLE, raise."""

    def __init__(self, *args, **kwargs) -> None:
        # Accept (and ignore) whatever constructor args the harness passes.
        pass

    async def GetServerResponse(self, request, context):
        """Echo the request message back inside a MessageResponse."""
        message = request.message
        result = f'Hello I am up and running received "{message}" message from you'
        result = {'message': result, 'received': True}
        return pb2.MessageResponse(**result)

    async def GetServerResponseAbort(self, request, context) -> None:
        """Abort the RPC with INTERNAL; abort() raises, so nothing is returned."""
        (await context.abort(grpc.StatusCode.INTERNAL, 'foo'))

    async def GetServerResponseUnavailable(self, request, context):
        """Answer with UNAVAILABLE status while still returning a payload."""
        context.set_code(grpc.StatusCode.UNAVAILABLE)
        context.set_details('Method not available')
        return pb2.MessageResponse(message='foo', received=True)

    async def GetServerResponseException(self, request, context):
        """Raise inside the handler to exercise server-side error mapping."""
        raise Exception('oh no')
class TestCentroidsMatrix():
    """Tests for CentroidsMatrix support-set construction and call dispatch."""

    class TestBuildSupportSet():
        # NOTE(review): these test methods are declared without `self`;
        # confirm how this suite is collected/run upstream.
        def test_should_raise_value_error_when_inputs_is_not_list():
            """Non-list input must raise ValueError with the exact message."""
            with pytest.raises(ValueError) as error:
                CentroidsMatrix(kernel=sentinel.kernel).build_support_set(sentinel.inputs)
            assert (str(error.value) == f'CentroidsMatrix should be called on a list of inputs [embeddings, labels_one_hot]')

        def test_should_raise_value_error_when_inputs_is_not_list_of_len_2():
            """A list with the wrong arity must raise the same ValueError."""
            with pytest.raises(ValueError) as error:
                CentroidsMatrix(kernel=sentinel.kernel).build_support_set([sentinel.inputs])
            assert (str(error.value) == f'CentroidsMatrix should be called on a list of inputs [embeddings, labels_one_hot]')

        def test_should_return_centroids_of_input_tensors_according_to_their_class():
            """Centroids are per-class means of the embedding rows."""
            # Rows 0..4, each repeated across 4 columns; classes 0,0,0,1,1.
            input_tensors = tf.cast(tf.tile(tf.expand_dims(tf.range(5), 1), [1, 4]), tf.float32)
            labels_one_hot = get_dummies(tf.constant([0, 0, 0, 1, 1]))[0]
            support_tensors = CentroidsMatrix(kernel=sentinel.kernel).build_support_set([input_tensors, labels_one_hot])
            # mean(0,1,2)=1 for class 0; mean(3,4)=3.5 for class 1.
            np.testing.assert_array_equal([[1, 1, 1, 1], [3.5, 3.5, 3.5, 3.5]], support_tensors.numpy())

    class TestCall():
        # NOTE(review): the bare string below looks like a truncated
        # @patch('keras_fsl.layers.centroids_matrix.SupportLayer.call')
        # decorator -- confirm against upstream.
        ('keras_fsl.layers.centroids_matrix.SupportLayer.call')
        def test_should_call_activation_on_super_call(mock_super_call):
            """call() must invoke the activation on the parent call's result."""
            layer = CentroidsMatrix(kernel=sentinel.kernel, activation=MagicMock(name='activation'))
            mock_super_call.return_value = sentinel.super_return
            layer.call(sentinel.inputs)
            mock_super_call.assert_called_once_with(sentinel.inputs)
            layer.activation.assert_called_once_with(sentinel.super_return)
def CreateBmmCCRBillinearOperator(manifest, c_element_op):
    """Create batched-GEMM (f16, ColMajor x ColMajor -> RowMajor) bilinear operators.

    Builds one `gemm.GemmOperation` per (gemm specialization, tile config)
    pair, appends each to ``manifest``, and returns the new operations.

    :param manifest: operation registry; every new operation is appended to it.
    :param c_element_op: epilogue / extra-kind tensor operation applied to C.
    :returns: list of the newly created `gemm.GemmOperation` instances.
    """
    operation_kind = library.GemmKind.BatchGemm
    # All operands are f16; A and B are column-major, C is row-major.
    a_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.ColumnMajor)
    b_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.ColumnMajor)
    c_element_desc = library.TensorDesc(library.DataType.f16, library.LayoutType.RowMajor)
    element_op = library.TensorOperation.PassThrough
    tile_descriptions = [
        gemm.TileDesc(256, 256, 128, 32, 2, 8, 32, 32, 4, 2),
        gemm.TileDesc(256, 256, 128, 32, 8, 8, 32, 32, 4, 2),
        gemm.TileDesc(256, 128, 256, 32, 2, 8, 32, 32, 2, 4),
        gemm.TileDesc(256, 128, 256, 32, 8, 8, 32, 32, 2, 4),
        gemm.TileDesc(128, 128, 128, 32, 2, 8, 32, 32, 4, 2),
        gemm.TileDesc(128, 128, 128, 32, 8, 8, 32, 32, 4, 2),
        gemm.TileDesc(256, 128, 128, 32, 2, 8, 32, 32, 2, 2),
        gemm.TileDesc(256, 128, 128, 32, 8, 8, 32, 32, 2, 2),
        gemm.TileDesc(128, 128, 64, 32, 2, 8, 32, 32, 2, 2),
        gemm.TileDesc(128, 128, 64, 32, 8, 8, 32, 32, 2, 2),
        gemm.TileDesc(128, 64, 128, 32, 2, 8, 32, 32, 2, 2),
        gemm.TileDesc(128, 64, 128, 32, 8, 8, 32, 32, 2, 2),
        gemm.TileDesc(256, 128, 64, 32, 2, 8, 32, 32, 2, 1),
        gemm.TileDesc(256, 128, 64, 32, 8, 8, 32, 32, 2, 1),
        gemm.TileDesc(256, 64, 128, 32, 2, 8, 32, 32, 1, 2),
        gemm.TileDesc(256, 64, 128, 32, 8, 8, 32, 32, 1, 2),
    ]
    # Derive the B and C block-transfer configs from each tile's block size
    # and n-per-block.
    b_block_descriptions = []
    c_block_descriptions = []
    for t in tile_descriptions:
        b_block_transfer = None
        c_block_transfer = None
        if t.block_size == 256:
            b_block_transfer = [4, 64, 1]
            c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 32, 1, 8], 8)
        if t.block_size == 128 and t.n_per_block != 64:
            b_block_transfer = [4, 32, 1]
            c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 16, 1, 8], 8)
        if t.block_size == 128 and t.n_per_block == 64:
            b_block_transfer = [4, 32, 1]
            c_block_transfer = gemm.CBlockTransferDesc(1, 1, [1, 32, 1, 4], 8)
        # Fixed: the original wrote `assert (cond1 and cond2 and 'message')`,
        # which ANDs the (always-truthy) message into the condition instead of
        # attaching it to the assert, so the message could never be displayed.
        assert b_block_transfer is not None and c_block_transfer is not None, (
            'Cannot determine block_transfer_size with block_size ' + str(t.block_size))
        b_block_descriptions.append(gemm.BlockTransferDesc(b_block_transfer, [1, 0, 2], [1, 0, 2], 2, 8, 8, 1, True))
        c_block_descriptions.append(c_block_transfer)
    # A-side block transfers are paired positionally with tile_descriptions.
    a_block_descriptions = [
        gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0, True),
        gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 4, 8, 1, True),
        gemm.BlockTransferDesc([8, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0, True),
        gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 2, 8, 1, True),
        gemm.BlockTransferDesc([4, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0, True),
        gemm.BlockTransferDesc([4, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 8, 1, True),
        gemm.BlockTransferDesc([8, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0, True),
        gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 2, 8, 1, True),
        gemm.BlockTransferDesc([4, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0, True),
        gemm.BlockTransferDesc([4, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 8, 1, True),
        gemm.BlockTransferDesc([8, 16, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0, True),
        gemm.BlockTransferDesc([4, 32, 1], [0, 2, 1], [0, 2, 1], 1, 2, 8, 1, True),
        gemm.BlockTransferDesc([8, 32, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0, True),
        gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 2, 8, 1, True),
        gemm.BlockTransferDesc([16, 16, 1], [0, 2, 1], [0, 2, 1], 1, 4, 2, 0, True),
        gemm.BlockTransferDesc([4, 64, 1], [0, 2, 1], [0, 2, 1], 1, 1, 8, 1, True),
    ]
    gemm_specialization = [gemm.GemmSpecialization.GemmDefault, gemm.GemmSpecialization.MNKPadding]
    operations = []
    ds_dtype = [library.DataType.f16]
    ds_layout = [library.LayoutType.RowMajor]
    e_dtype = library.DataType.f16
    for gemm_spec in gemm_specialization:
        for (tile_desc, a_block_desc, b_block_desc, c_block_desc) in zip(
                tile_descriptions, a_block_descriptions, b_block_descriptions, c_block_descriptions):
            new_operation = gemm.GemmOperation(
                operation_kind=operation_kind,
                extra_kind=c_element_op,
                xdl_op_type=gemm.XdlOpType.DeviceBatchedGemmMultiD_Xdl,
                A=a_element_desc,
                B=b_element_desc,
                C=c_element_desc,
                a_elem_op=element_op,
                b_elem_op=element_op,
                epilogue_functor=c_element_op,
                gemm_specialization=gemm_spec,
                tile_desc=tile_desc,
                a_block_transfer=a_block_desc,
                b_block_transfer=b_block_desc,
                c_block_transfer=c_block_desc,
                ds_dtype=ds_dtype,
                ds_layout=ds_layout,
                e_dtype=e_dtype,
            )
            manifest.append(new_operation)
            operations.append(new_operation)
    return operations
# NOTE(review): the two bare expressions below look like stripped click
# decorators — presumably @<group>.command('normalize-data') and
# @click.argument('events-file', type=click.File('r')). As written they are
# no-op expression statements; restore the decorators from upstream.
('normalize-data')
('events-file', type=click.File('r'))
def normalize_data(events_file):
    """Load newline-delimited JSON events from *events_file* and re-save them
    as a normalized RtaEvents dump next to the input file.

    The events are keyed by the input file's base name (without extension).
    """
    file_name = os.path.splitext(os.path.basename(events_file.name))[0]
    # One JSON object per line of the input file.
    events = RtaEvents({file_name: [json.loads(e) for e in events_file.readlines()]})
    events.save(dump_dir=os.path.dirname(events_file.name))
def get_aggregate_flow_stats(dp, waiters, flow=None, to_user=True):
    """Send an OFPAggregateStatsRequest to ``dp`` and return the decoded
    replies, optionally converted to user-facing form via ``wrap_dpid_dict``.

    ``flow`` is an optional filter dict (table_id, flags, out_port, out_group,
    cookie, cookie_mask, match); missing keys fall back to wildcard defaults.
    """
    flow = flow or {}
    ofp = dp.ofproto
    # Resolve each filter field, defaulting to "match everything".
    table = UTIL.ofp_table_from_user(flow.get('table_id', ofp.OFPTT_ALL))
    req_flags = str_to_int(flow.get('flags', 0))
    port = UTIL.ofp_port_from_user(flow.get('out_port', ofp.OFPP_ANY))
    group = UTIL.ofp_group_from_user(flow.get('out_group', ofp.OFPG_ANY))
    cookie_val = str_to_int(flow.get('cookie', 0))
    cookie_msk = str_to_int(flow.get('cookie_mask', 0))
    match_fields = to_match(dp, flow.get('match', {}))
    request = dp.ofproto_parser.OFPAggregateStatsRequest(
        dp, req_flags, table, port, group, cookie_val, cookie_msk, match_fields)
    replies = []
    ofctl_utils.send_stats_request(dp, request, waiters, replies, LOG)
    # Decode each reply body into a JSON-friendly dict.
    results = []
    for reply in replies:
        body = reply.body
        entry = body.to_jsondict()[body.__class__.__name__]
        entry['stats'] = stats_to_str(body.stats)
        results.append(entry)
    return wrap_dpid_dict(dp, results, to_user)
def cs_getBeaconInfo(teamserver, user, password, cobaltstrike_directory, bid):
    """Fetch metadata for a single beacon (``bid``) from a Cobalt Strike
    teamserver via an Aggressor-script query."""
    # Suffix the username so this query connection is distinguishable.
    query_user = f'{user}_beacon_query'
    with CSConnector(
        cs_host=teamserver,
        cs_user=query_user,
        cs_pass=password,
        cs_directory=cobaltstrike_directory,
    ) as cs:
        # binfo() is evaluated server-side; the result object is returned.
        return cs.ag_get_object(f'return binfo({bid})')
def get_connection():
    """Yield a database connection for migrations.

    Reuses a connection injected via ``config.attributes['connection']`` when
    present; otherwise builds an engine from the alembic config and yields a
    connection inside a fresh transaction.
    """
    injected = config.attributes.get('connection', None)
    if injected is not None:
        yield injected
    else:
        engine = engine_from_config(
            config.get_section(config.config_ini_section),
            prefix='sqlalchemy.',
            url=config.get_main_option('sqlalchemy.url'),
            poolclass=pool.NullPool,
        )
        with engine.begin() as conn:
            yield conn
def extractWatermelonHelmets(item):
    """Parse a feed item from the WatermelonHelmets group into a release.

    Returns None for previews or items with no volume/chapter info, a release
    message for recognized series, and False for everything else.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title = item['title']
    # Skip previews and items that carry neither a chapter nor a volume.
    if 'preview' in title.lower() or not (chp or vol):
        return None
    if 'Dragon Life' in item['tags'] or 'Dragon Life: Chapter' in title:
        return buildReleaseMessageWithType(item, 'Dragon Life', vol, chp, frag=frag, postfix=postfix)
    return False
class CanValidate(object):
    """Mixin adding a server-side "dry-run" validation to saveable resources."""

    def remote_validate(self, params=None):
        """Run a validate-only save without altering local state.

        Sends the current (unsaved) state with the ``validate_only`` execution
        option, then restores ``_data``/``_changes`` snapshots so the
        in-memory object is unchanged by the round trip.

        :param params: optional dict of extra request parameters. It is
            copied before use, so the caller's dict is never mutated (the
            original implementation injected ``execution_options`` directly
            into the caller's dict).
        :returns: ``self``, for chaining.
        """
        params = dict(params) if params else {}
        # Snapshot local state; self.save may mutate it.
        data_cache = dict(self._data)
        changes_cache = dict(self._changes)
        params['execution_options'] = ['validate_only']
        self.save(params=params)
        # Restore the snapshots: validation must be side-effect free locally.
        self._data = data_cache
        self._changes = changes_cache
        return self
def _fuse_expand_elementwise(sorted_graph: List[Tensor]) -> List[Tensor]:
    """Graph pass: remove `expand` ops whose output is consumed only by
    `elementwise` ops that can reproduce the expansion via broadcasting.

    For each eligible `expand`, rewires every consumer to read the expand's
    input directly and drops the expand output from the graph (in-place via
    the transform utils).
    NOTE(review): the function body has no explicit `return`, so it returns
    None despite the annotated `List[Tensor]` — presumably callers rely on
    the in-place mutation of `sorted_graph`; confirm the intended contract.
    """
    def _is_compatible_with_broadcasting(expand_output_dim: IntVar, elementwise_input_dim: IntVar) -> bool:
        # Dims match exactly, or the expanded dim is a singleton (broadcastable).
        return ((expand_output_dim == elementwise_input_dim) or is_singleton_dimension(expand_output_dim))
    def _replace_jagged_int_var(shape: List[IntVar]):
        # Jagged leading dim: compare against its maximal dense shape instead.
        if (shape and isinstance(shape[0], JaggedIntVar)):
            return (shape[0].get_max_dense_shape() + shape[1:])
        return shape
    for op in graph_utils.get_sorted_ops(sorted_graph):
        if (op._attrs['op'] != 'expand'):
            continue
        outputs = op._attrs['outputs']
        assert (len(outputs) == 1), 'expand must only have 1 output'
        expand_output = outputs[0]
        # Graph outputs must be preserved as-is; never fuse them away.
        if expand_output._attrs['is_output']:
            continue
        expand_output_shape = _replace_jagged_int_var(expand_output._attrs['shape'])
        def _can_fuse_with(dst_op: Operator) -> bool:
            # Fusable only into elementwise consumers whose *other* inputs
            # broadcast against the expanded shape.
            if (dst_op._attrs['op'] != 'elementwise'):
                return False
            for elementwise_input in dst_op._attrs['inputs']:
                if (elementwise_input is expand_output):
                    continue
                elementwise_input_shape = _replace_jagged_int_var(elementwise_input._attrs['shape'])
                if (not all((_is_compatible_with_broadcasting(dim_a, dim_b) for (dim_a, dim_b) in zip(expand_output_shape, elementwise_input_shape)))):
                    return False
            return True
        # Every consumer must be fusable, or the expand has to stay.
        if (not all((_can_fuse_with(dst) for dst in expand_output._attrs['dst_ops']))):
            continue
        inputs = op._attrs['inputs']
        assert (len(inputs) >= 1), 'expand must have at least 1 input'
        expand_input = inputs[0]
        # Bypass the expand: each consumer reads its input tensor directly.
        for dst in list(expand_output.dst_ops()):
            transform_utils.replace_tensor_for_op(dst, expand_output, expand_input)
        transform_utils.remove_tensor_from_sorted_graph(expand_output)
class _CRG(LiteXModule):
    """Clock/reset generator: system clock from the board's clk300 input via
    an UltraScale+ MMCM, plus a SATA reference clock sourced either from the
    same PLL ('internal') or from FMC pads through an IBUFDS_GTE4
    ('external')."""
    def __init__(self, platform, sys_clk_freq, sata_refclk_src='internal'):
        self.cd_sys = ClockDomain()
        self.cd_sata_refclk = ClockDomain()
        self.pll = pll = USPMMCM(speedgrade=(- 2))
        # NOTE(review): the numeric literals below are all `.0` (0.0), which
        # cannot be right: register_clkin/create_clkout need real frequencies
        # (the input is presumably 300e6 given the 'clk300' pad name), and
        # `(.0 / .0)` raises ZeroDivisionError at elaboration time. These
        # constants look scrubbed/garbled — recover the intended values from
        # the upstream source before using this module.
        pll.register_clkin(platform.request('clk300'), .0)
        pll.create_clkout(self.cd_sys, sys_clk_freq)
        assert (sata_refclk_src in ['internal', 'external'])
        if (sata_refclk_src == 'internal'):
            pll.create_clkout(self.cd_sata_refclk, .0)
        elif (sata_refclk_src == 'external'):
            sata_refclk_pads = platform.request('fmc2sata_refclk', 0)
            # Differential GT reference-clock input buffer.
            self.specials += Instance('IBUFDS_GTE4', i_CEB=0, i_I=sata_refclk_pads.p, i_IB=sata_refclk_pads.n, o_O=self.cd_sata_refclk.clk)
            platform.add_period_constraint(platform.lookup_request('fmc2sata_refclk', 0, loose=True), (.0 / .0))
def get_registrable_entities(ctx: flyte_context.FlyteContext, options: typing.Optional[Options]=None) -> typing.List[FlyteControlPlaneEntity]:
    """Serialize every tracked task, workflow, and launch plan and return the
    subset that should be registered with Admin.

    Workflows additionally get their default launch plan serialized.
    """
    serialized = OrderedDict()
    for entity in flyte_context.FlyteEntities.entities.copy():
        if not isinstance(entity, (PythonTask, WorkflowBase, LaunchPlan)):
            continue
        get_serializable(serialized, ctx.serialization_settings, entity, options=options)
        if isinstance(entity, WorkflowBase):
            # Each workflow implicitly carries a default launch plan.
            default_lp = LaunchPlan.get_default_launch_plan(ctx, entity)
            get_serializable(serialized, ctx.serialization_settings, default_lp, options)
    # Keep only the entities Admin should actually see.
    return [model for model in serialized.values() if _should_register_with_admin(model)]
class OptionSeriesArcdiagramNodesDatalabels(Options):
    """Highcharts `series.arcdiagram.nodes.dataLabels` option group.

    NOTE(review): every scalar option appears as two same-named `def`s (a
    getter returning `_config_get(<default>)` and a setter calling
    `_config(...)`); as written the second definition shadows the first, so
    these presumably carried `@property` / `@<name>.setter` decorators that
    were lost during extraction — confirm against the generator that produced
    this file. The `_config_get(...)` argument documents each option's
    default value; `js_type=False` marks plain (non-JavaScript) values.
    """
    def align(self):
        return self._config_get('undefined')
    def align(self, text: str):
        self._config(text, js_type=False)
    def allowOverlap(self):
        return self._config_get(False)
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self) -> 'OptionSeriesArcdiagramNodesDatalabelsAnimation':
        # Sub-configuration: data-label animation options.
        return self._config_sub_data('animation', OptionSeriesArcdiagramNodesDatalabelsAnimation)
    def backgroundColor(self):
        return self._config_get('none')
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)
    def borderColor(self):
        return self._config_get(None)
    def borderColor(self, text: str):
        self._config(text, js_type=False)
    def borderRadius(self):
        return self._config_get(0)
    def borderRadius(self, num: float):
        self._config(num, js_type=False)
    def borderWidth(self):
        return self._config_get(0)
    def borderWidth(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def crop(self):
        return self._config_get(False)
    def crop(self, flag: bool):
        self._config(flag, js_type=False)
    def defer(self):
        return self._config_get(True)
    def defer(self, flag: bool):
        self._config(flag, js_type=False)
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def filter(self) -> 'OptionSeriesArcdiagramNodesDatalabelsFilter':
        # Sub-configuration: declarative filter for showing/hiding labels.
        return self._config_sub_data('filter', OptionSeriesArcdiagramNodesDatalabelsFilter)
    def format(self):
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def formatter(self):
        return self._config_get(None)
    def formatter(self, value: Any):
        self._config(value, js_type=False)
    def inside(self):
        return self._config_get(True)
    def inside(self, flag: bool):
        self._config(flag, js_type=False)
    def nodeFormat(self):
        return self._config_get('undefined')
    def nodeFormat(self, text: str):
        self._config(text, js_type=False)
    def nodeFormatter(self):
        return self._config_get(None)
    def nodeFormatter(self, value: Any):
        self._config(value, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def overflow(self):
        return self._config_get('justify')
    def overflow(self, text: str):
        self._config(text, js_type=False)
    def padding(self):
        return self._config_get(5)
    def padding(self, num: float):
        self._config(num, js_type=False)
    def position(self):
        return self._config_get('center')
    def position(self, text: str):
        self._config(text, js_type=False)
    def rotation(self):
        return self._config_get(0)
    def rotation(self, num: float):
        self._config(num, js_type=False)
    def shadow(self):
        return self._config_get(False)
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)
    def shape(self):
        return self._config_get('square')
    def shape(self, text: str):
        self._config(text, js_type=False)
    def style(self):
        return self._config_get(None)
    def style(self, value: Any):
        self._config(value, js_type=False)
    def textPath(self) -> 'OptionSeriesArcdiagramNodesDatalabelsTextpath':
        # Sub-configuration: render the label along a text path.
        return self._config_sub_data('textPath', OptionSeriesArcdiagramNodesDatalabelsTextpath)
    def useHTML(self):
        return self._config_get(False)
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)
    def verticalAlign(self):
        return self._config_get('undefined')
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)
    def x(self):
        return self._config_get(0)
    def x(self, num: float):
        self._config(num, js_type=False)
    def y(self):
        return self._config_get('undefined')
    def y(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        return self._config_get(6)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): this bare call looks like a mangled decorator/registration
# hook (e.g. `@ui_meta(characters.keine.Teach)`) referencing the class below
# before it is defined — confirm and restore from upstream.
_meta(characters.keine.Teach)
class Teach():
    """UI metadata for Keine's "Teach" skill.

    NOTE(review): the human-readable text in `name`, `description`, and the
    message strings appears to have had its CJK characters stripped during
    extraction — only punctuation and markup remain. Recover the original
    strings from upstream; they are kept byte-identical here.
    """
    name = ''
    description = ',,,:<style=Desc.Li>,</style><style=Desc.Li></style>'
    def clickable(self):
        # Usable on our own turn, at most once (tracked via the teach_used tag).
        return (self.my_turn() and (not ttags(self.me)['teach_used']))
    def is_action_valid(self, sk, tl):
        # Requires exactly one associated card and exactly one target.
        cards = sk.associated_cards
        if ((not cards) or (len(cards) != 1)):
            return (False, '()')
        if ((not tl) or (len(tl) != 1)):
            return (False, '')
        return (True, '<style=Skill.Name></style>')
    def effect_string(self, act):
        # Battle-log line for the skill's effect (text garbled — see class note).
        return f'{N.char(act.target)},{N.char(act.source)},196,'
    def sound_effect(self, act):
        # One of two voice lines, picked at random.
        return random.choice(['thb-cv-keine_teach1', 'thb-cv-keine_teach2'])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.