# Dataset header residue (not Python code): "code stringlengths 281 23.7M |---|"
class TestTimeSeriesOOFModel():
    """Tests for TimeSeriesOOFModel: time-ordered out-of-fold predictions."""

    def test_fit_predict(self):
        n_folds = 20
        X, y = gen_ts_data(10000)
        model = TimeSeriesOOFModel(TsTestModel(), time_column='date', fold_cnt=n_folds)
        model.fit(X, y['y'])

        info = X.copy()
        info['pred'] = model.predict(X)
        fold_size = len(X) // n_folds

        # The first fold has no earlier model to predict it, so it stays NaN.
        assert info['pred'][:fold_size].isnull().min()
        assert len(model.base_models) == n_folds
        assert len(model.time_bounds) == n_folds + 1

        # Per-fold mean and max of the predictions must grow strictly with time.
        fold_means, fold_maxs = [], []
        for fold_id in range(1, n_folds):
            chunk = info[fold_id * fold_size:(fold_id + 1) * fold_size]
            fold_means.append(chunk['pred'].mean())
            fold_maxs.append(chunk['pred'].max())
        fold_means = np.array(fold_means)
        fold_maxs = np.array(fold_maxs)
        assert np.diff(fold_means).min() > 0
        assert np.diff(fold_maxs).min() > 0

        # A far-future timestamp must be served by the final model.
        X['date'] = np.datetime64('2050-01-01')
        pred = model.predict(X)
        assert np.abs(pred.mean() - 5000) < 20
        assert np.abs(pred.max() - 10000) < 20

        # Also works with the time column as the index, and with a classifier
        # producing probabilities in [0, 1].
        X, y = gen_ts_data(10000)
        X = X.set_index('date')
        model = TimeSeriesOOFModel(lgbm.sklearn.LGBMClassifier(), time_column='date', fold_cnt=n_folds)
        model.fit(X, np.random.randint(0, 2, len(X)))
        pred = model.predict(X)
        assert (pred[len(X) // n_folds:] >= 0).min()
        assert (pred[len(X) // n_folds:] <= 1).min()
class FullSyncStrategy(BaseSyncStrategy):
    """Sync strategy that downloads and fully executes every block."""

    # NOTE(review): the extracted source lost this decorator; the `cls`
    # parameter implies a @classmethod — restored.
    @classmethod
    def get_sync_mode(cls) -> str:
        return SYNC_FULL

    async def sync(self, args: Namespace, logger: logging.Logger, chain: AsyncChainAPI, base_db: AtomicDatabaseAPI, peer_pool: BasePeerPool, event_bus: EndpointAPI, metrics_service: MetricsServiceAPI) -> None:
        """Run a full-chain syncer against the given db/peer pool until it exits."""
        syncer = FullChainSyncer(chain, AsyncChainDB(base_db), base_db, cast(ETHPeerPool, peer_pool))
        await syncer.run()
def valve_flowreorder(input_ofmsgs, use_barriers=True):
    """Reorder and dedupe OpenFlow messages into a safe apply order.

    Global deletes subsume per-flow deletes of the same type, each message
    kind is deduped, deletes overlapping later add/mods are dropped, and the
    kinds are emitted in _OFMSG_ORDER with optional barriers between them.
    """
    by_kind = _partition_ofmsgs(input_ofmsgs)

    # Any type covered by a global delete makes its per-flow deletes redundant.
    global_deletes = by_kind.get('deleteglobal', [])
    if global_deletes:
        covered_types = {type(msg) for msg in global_deletes}
        by_kind['delete'] = [
            msg for msg in by_kind.get('delete', [])
            if type(msg) not in covered_types]

    # Dedupe each kind with its kind-specific dedupe function.
    for kind, random_order, _suggest_barrier, flowkey, dedupe_func in _OFMSG_ORDER:
        deduped = dedupe_func(by_kind.get(kind, []), random_order, flowkey)
        if deduped:
            by_kind[kind] = deduped

    # A delete that overlaps a subsequent add/mod is pointless churn.
    deletes = by_kind.get('delete', None)
    addmod = by_kind.get('flowaddmod', None)
    if deletes and addmod:
        by_kind['delete'] = remove_overlap_ofmsgs(deletes, addmod)

    # Emit the kinds in canonical order, inserting barriers where suggested.
    output_ofmsgs = []
    for kind, _random_order, suggest_barrier, _flowkey, dedupe_func in _OFMSG_ORDER:
        msgs = by_kind.get(kind, [])
        if msgs:
            output_ofmsgs.extend(msgs)
            if use_barriers and suggest_barrier:
                output_ofmsgs.append(barrier())
    return output_ofmsgs
def test_constraints_expression():
    """Validity, evaluation, data-model fit and protobuf round-trip for
    And / Or / Not / Constraint expressions; invalid types are rejected."""

    def roundtrip(expr):
        # Encoding then decoding must yield an equal expression.
        assert ConstraintExpr._decode(ConstraintExpr._encode(expr)) == expr

    def number_model():
        return DataModel('some_name', [Attribute('number', int, True)])

    and_expression = And([
        Constraint('number', ConstraintType(ConstraintTypes.LESS_THAN, 15)),
        Constraint('number', ConstraintType(ConstraintTypes.GREATER_THAN, 10)),
    ])
    and_expression.check_validity()
    assert and_expression.check(Description({'number': 12}))
    assert and_expression.is_valid(number_model())
    roundtrip(and_expression)

    or_expression = Or([
        Constraint('number', ConstraintType(ConstraintTypes.EQUAL, 12)),
        Constraint('number', ConstraintType(ConstraintTypes.EQUAL, 13)),
    ])
    or_expression.check_validity()
    assert or_expression.check(Description({'number': 12}))
    assert or_expression.is_valid(number_model())
    roundtrip(or_expression)

    not_expression = Not(And([
        Constraint('number', ConstraintType(ConstraintTypes.EQUAL, 12)),
        Constraint('number', ConstraintType(ConstraintTypes.EQUAL, 12)),
    ]))
    not_expression.check_validity()
    assert not_expression.check(Description({'number': 13}))
    assert not_expression.is_valid(number_model())
    roundtrip(not_expression)

    constraint_expression = Constraint('author', ConstraintType('==', 'Stephen King'))
    constraint_expression.check_validity()
    assert constraint_expression.check(Description({'author': 'Stephen King'}))
    assert constraint_expression.is_valid(DataModel('some_name', [Attribute('author', str, True)]))
    roundtrip(constraint_expression)

    # Anything that is not an expression must be rejected by the encoder.
    incorrect_expression = Location(1.1, 2.2)
    with pytest.raises(ValueError, match=f"Invalid expression type. Expected either of 'And', 'Or', 'Not', 'Constraint'. Found {type(incorrect_expression)}."):
        ConstraintExpr._encode(incorrect_expression)
class ExpGen(object):
    """Generates C-style conditional (ternary) expression strings.

    Bug fix: the original defined max/min/parse without `self` and without
    `@staticmethod`, so calling them on an instance bound the instance to
    the first argument. They are now proper static methods; class-level
    calls (``ExpGen.max(a, b)``) behave exactly as before.
    """

    def __init__(self):
        pass

    @staticmethod
    def max(a, b):
        """Return a ternary expression selecting the larger of *a* and *b*."""
        return '({a}>{b}?{a}:{b})'.format(a=a, b=b)

    @staticmethod
    def min(a, b):
        """Return a ternary expression selecting the smaller of *a* and *b*."""
        return '({a}<{b}?{a}:{b})'.format(a=a, b=b)

    @staticmethod
    def parse(expr):
        """Evaluate *expr* with the SvalEE expression evaluator."""
        return SvalEE().eval(expr)
class TestReadCommands(EfuseTestCase):
    """Smoke tests for the read-only espefuse.py commands."""

    def test_help(self):
        self.espefuse_not_virt_py('--help', check_msg='usage: __main__.py [-h]')
        self.espefuse_not_virt_py(f'--chip {arg_chip} --help')

    def test_help2(self):
        # With no arguments espefuse prints usage and exits non-zero.
        self.espefuse_not_virt_py('', check_msg='usage: __main__.py [-h]', ret_code=1)

    def test_dump(self):
        self.espefuse_py('dump -h')
        self.espefuse_py('dump')

    def test_summary(self):
        self.espefuse_py('summary -h')
        self.espefuse_py('summary')

    def test_summary_json(self):
        self.espefuse_py('summary --format json')

    # NOTE(review): the extracted source had a bare `.skipif(...)` line here;
    # restored as a pytest skip marker — confirm against the upstream file.
    @pytest.mark.skipif(arg_chip == 'esp32p4', reason='No Custom MAC Address defined yet')
    def test_get_custom_mac(self):
        self.espefuse_py('get_custom_mac -h')
        if arg_chip == 'esp32':
            right_msg = 'Custom MAC Address is not set in the device.'
        else:
            right_msg = 'Custom MAC Address: 00:00:00:00:00:00 (OK)'
        self.espefuse_py('get_custom_mac', check_msg=right_msg)

    def test_adc_info(self):
        self.espefuse_py('adc_info -h')
        self.espefuse_py('adc_info')

    def test_check_error(self):
        self.espefuse_py('check_error -h')
        self.espefuse_py('check_error')
        self.espefuse_py('check_error --recovery')
class HelpDetailTest(EvenniaWebTest):
    """Tests for the help-entry detail view (db-backed and file-based entries)."""

    # URL route name under test.
    url_name = 'help-entry-detail'

    def setUp(self):
        super().setUp()
        # Seed one database-backed help entry for the view to resolve.
        create_help_entry('unit test db entry', 'unit test db entry text', category='General')

    def get_kwargs(self):
        # URL kwargs identifying the db entry created in setUp.
        return {'category': slugify('general'), 'topic': slugify('unit test db entry')}

    def test_view(self):
        response = self.client.get(reverse(self.url_name, kwargs=self.get_kwargs()), follow=True)
        self.assertEqual(response.context['entry_text'], 'unit test db entry text')

    def test_object_cache(self):
        # Rebuild the module-level file-help store from this test module, then
        # check that both the db entry and a file-based entry still resolve.
        global _FILE_HELP_ENTRIES
        if (_FILE_HELP_ENTRIES is None):
            # Lazily bind the shared file-help store into the module global.
            from evennia.help.filehelp import FILE_HELP_ENTRIES as _FILE_HELP_ENTRIES
        help_module = 'evennia.web.website.tests'
        # Re-initialize the store in place so it loads entries from help_module.
        # NOTE(review): __init__ returns None, so self.file_help_store is None;
        # the store itself is mutated in place — presumably intentional, verify.
        self.file_help_store = _FILE_HELP_ENTRIES.__init__(help_file_modules=[help_module])
        response = self.client.get(reverse(self.url_name, kwargs=self.get_kwargs()), follow=True)
        self.assertEqual(response.context['entry_text'], 'unit test db entry text')
        entry_two_args = {'category': slugify('general'), 'topic': slugify('unit test file entry')}
        response = self.client.get(reverse(self.url_name, kwargs=entry_two_args), follow=True)
        self.assertEqual(response.context['entry_text'], 'cache test file entry text')
def getchangeserver():
    """Pick a 115.com CDN server based on the 'serverchange' plugin setting.

    Returns the chosen hostname, '' (no selection), '-1' (dialog cancelled),
    or '0' (see the early return below).
    """
    # NOTE(review): this early return makes everything below unreachable —
    # looks like a deliberate kill-switch or leftover debug stub; confirm
    # before removing.
    return '0'
    dialog = xbmcgui.Dialog()
    changeserver = ''
    # CDN hostname groups, presumably one list per carrier network — verify.
    servers = [['cdntel.115.com', 'vipcdntel.115.com', 'mzvipcdntel.115.com', 'fscdntel.115.com', 'mzcdntel.115.com'], ['cdnuni.115.com', 'vipcdnuni.115.com', 'mzvipcdnuni.115.com', 'fscdnuni.115.com', 'mzcdnuni.115.com'], ['cdngwbn.115.com', 'vipcdngwbn.115.com', 'mzvipcdngwbn.115.com', 'mzcdngwbn.115.com', 'cdnogwbn.115.com'], ['cdnctt.115.com', 'vipcdnctt.115.com', 'mzvipcdnctt.115.com']]
    serverchange = int(plugin.get_setting('serverchange'))
    if ((serverchange >= 1) and (serverchange <= 5)):
        selectservers = []
        if (serverchange == 1):
            # 1 = choose interactively from all servers, flattened.
            selectservers = sum(servers, [])
        else:
            # 2..5 = choose interactively within one server group.
            selectservers = servers[(serverchange - 2)]
        # Leading empty entry lets the user pick "no server".
        selectservers.insert(0, '')
        sel = dialog.select('CDN', selectservers)
        if (sel < 0):
            # Dialog was cancelled.
            changeserver = '-1'
        if (sel > 0):
            changeserver = selectservers[sel]
    if (serverchange >= 6):
        # 6+ maps directly onto the flattened server list with no dialog.
        selectservers = sum(servers, [])
        changeserver = selectservers[(serverchange - 6)]
    return changeserver
class TlsBulkCertificateResponseAttributesAllOf(ModelNormal):
    """Auto-generated OpenAPI model for bulk TLS certificate response
    attributes (not_after / not_before / replace).

    NOTE(review): the extracted source contained bare `_property` and
    `_js_args_to_python_args` statements where decorators once stood;
    restored as `@cached_property` / `@convert_js_args_to_python_args`
    (plus `@classmethod` on `_from_openapi_data`) to match the standard
    OpenAPI-generator model layout — confirm against the generated file.
    """

    allowed_values = {}
    validations = {}

    @cached_property
    def additional_properties_type():
        # Types accepted for properties not declared in attribute_map.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        # Declared property name -> tuple of accepted types.
        return {'not_after': (datetime,), 'not_before': (datetime,), 'replace': (bool,)}

    @cached_property
    def discriminator():
        return None

    # JSON attribute name mapping (identity mapping here).
    attribute_map = {'not_after': 'not_after', 'not_before': 'not_before', 'replace': 'replace'}
    # Attributes that may only be set when deserializing server data.
    read_only_vars = {'not_after', 'not_before', 'replace'}
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from raw server data (read-only attrs allowed)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Silently drop unknown keys when the configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    # Attributes the framework sets directly, bypassing validation.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """User-facing constructor; rejects positional args and read-only attrs."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Silently drop unknown keys when the configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only come through _from_openapi_data.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class SafeDict(dict):
    """A dict that refuses to rebind an existing key to a different object.

    Re-assigning a key is a silent no-op when the new value *is* the stored
    one (identity, not equality), and raises KeyError otherwise.
    """

    def __setitem__(self, key, value):
        if key in self:
            if value is self[key]:
                # Same object: nothing to do.
                return
            raise KeyError("Attempted to override key '%s' in a SafeDict" % key)
        return dict.__setitem__(self, key, value)

    def update(self, *other_dicts):
        """Merge mappings through __setitem__ (enforcing the override rule).

        Returns self for chaining.
        """
        for mapping in other_dicts:
            for key, value in mapping.items():
                self[key] = value
        return self
def _build_node_section(tree):
for (cluster_name, cluster_conf) in tree['cluster'].items():
node_kind_config = dict(((key, value) for (key, value) in cluster_conf.items() if key.endswith('_nodes')))
if ('nodes' not in cluster_conf):
cluster_conf['nodes'] = {}
for key in node_kind_config.keys():
kind_name = key[:(- len('_nodes'))]
kind_values = _gather_node_kind_info(kind_name, cluster_name, cluster_conf)
cluster_conf['nodes'][kind_name] = kind_values
return tree |
def update_kpis(date, days=7):
    """Build the KPI donut row (sleep / readiness / activity plus either a
    stress-score or workout-count donut) for the trailing *days* window
    ending at *date*.

    Returns a Dash html.Div containing the assembled donut cards.
    """
    # All Strava activities up to the given date.
    df_summary = pd.read_sql(sql=app.session.query(stravaSummary).filter((stravaSummary.start_date_utc <= date)).statement, con=engine, index_col='start_date_local')
    athlete_info = app.session.query(athlete).filter((athlete.athlete_id == 1)).first()
    use_power = (True if (athlete_info.use_run_power or athlete_info.use_cycle_power) else False)
    # Oura summaries, pre-filtered to "good" days (score >= 85).
    df_sleep = pd.read_sql(sql=app.session.query(ouraSleepSummary.report_date, ouraSleepSummary.score).filter((ouraSleepSummary.report_date <= date), (ouraSleepSummary.score >= 85)).statement, con=engine)
    df_sleep = df_sleep.set_index(pd.to_datetime(df_sleep['report_date']))
    df_activity = pd.read_sql(sql=app.session.query(ouraActivitySummary.summary_date, ouraActivitySummary.score).filter((ouraActivitySummary.summary_date <= date), (ouraActivitySummary.score >= 85)).statement, con=engine)
    df_activity = df_activity.set_index(pd.to_datetime(df_activity['summary_date']))
    df_readiness = pd.read_sql(sql=app.session.query(ouraReadinessSummary.report_date, ouraReadinessSummary.score).filter((ouraReadinessSummary.report_date <= date), (ouraReadinessSummary.score >= 85)).statement, con=engine)
    df_readiness = df_readiness.set_index(pd.to_datetime(df_readiness['report_date']))
    # Trim each frame to the trailing window (date - days, date].
    df_sleep = df_sleep[((df_sleep.index.date <= pd.to_datetime(date).date()) & (df_sleep.index.date > pd.to_datetime((date - timedelta(days=days))).date()))]
    df_readiness = df_readiness[((df_readiness.index.date <= pd.to_datetime(date).date()) & (df_readiness.index.date > pd.to_datetime((date - timedelta(days=days))).date()))]
    df_activity = df_activity[((df_activity.index.date <= pd.to_datetime(date).date()) & (df_activity.index.date > pd.to_datetime((date - timedelta(days=days))).date()))]
    # Current/best goal streaks per metric.
    (current_sleep_streak, best_sleep_streak) = calculate_streak(date, df_sleep, athlete_info.weekly_sleep_score_goal)
    (current_readiness_streak, best_readiness_streak) = calculate_streak(date, df_readiness, athlete_info.weekly_readiness_score_goal)
    (current_activity_streak, best_activity_streak) = calculate_streak(date, df_activity, athlete_info.weekly_activity_score_goal)
    # weekly_workout_goal == 100 appears to be a sentinel meaning "track
    # weekly training stress instead of workout count" — verify.
    tss_goal = (True if (athlete_info.weekly_workout_goal == 100) else False)
    if tss_goal:
        tss_df = pd.read_sql(sql=app.session.query(stravaSummary.start_day_local, stravaSummary.tss, stravaSummary.hrss, stravaSummary.trimp).filter((stravaSummary.elapsed_time > athlete_info.min_non_warmup_workout_time)).statement, con=engine, index_col='start_day_local')
        if use_power:
            # Prefer power-based TSS, falling back to HR-based HRSS per row.
            tss_df['stress_score'] = tss_df.apply((lambda row: (row['hrss'] if np.isnan(row['tss']) else row['tss'])), axis=1).fillna(0)
        else:
            tss_df['stress_score'] = tss_df['trimp']
        tss_df = tss_df.set_index(pd.to_datetime(tss_df.index))
        (tss_streak, best_tss_streak) = calculate_streak(date, tss_df, athlete_info.weekly_tss_goal, sum_metric='stress_score')
        tss_df = tss_df[((tss_df.index.date <= pd.to_datetime(date).date()) & (tss_df.index.date > pd.to_datetime((date - timedelta(days=days))).date()))]
        # Orange donut when the weekly stress goal has not been met yet.
        tss_color = (orange if (tss_df['stress_score'].sum() < athlete_info.weekly_tss_goal) else None)
        class_name = 'col-lg-2'
        specific_donuts = [html.Div(id='tss-donut', className=class_name, children=generate_kpi_donut(kpi_name='Stress', metric=tss_df['stress_score'].sum(), goal=athlete_info.weekly_tss_goal, current_streak=tss_streak, best_streak=best_tss_streak, streak_unit='wk', color=tss_color))]
    else:
        # Count qualifying workouts (rides/runs/weights above minimum duration).
        df_workout = df_summary[(((df_summary['type'].str.lower().str.contains('ride') | df_summary['type'].str.lower().str.contains('run')) | df_summary['type'].str.lower().str.contains('weight')) & (df_summary['elapsed_time'] >= athlete_info.min_non_warmup_workout_time))]
        athlete_weekly_workout_goal = athlete_info.weekly_workout_goal
        # goal == 99 appears to be another sentinel: derive the goal from
        # Oura readiness instead of a fixed count — verify.
        if (athlete_info.weekly_workout_goal == 99):
            (current_workout_streak, best_workout_streak, athlete_weekly_workout_goal) = calculate_streak_off_oura_readiness(date, df_workout)
        else:
            (current_workout_streak, best_workout_streak) = calculate_streak(date, df_workout, athlete_weekly_workout_goal)
        df_workout = df_workout[((df_workout.index.date <= pd.to_datetime(date).date()) & (df_workout.index.date > pd.to_datetime((date - timedelta(days=days))).date()))]
        workout_color = (orange if ((df_workout.shape[0] < athlete_weekly_workout_goal) and (athlete_weekly_workout_goal != 99)) else None)
        class_name = 'col-lg-2 '
        specific_donuts = [html.Div(id='workout-donut', className=class_name, children=generate_kpi_donut(kpi_name='Workout', metric=df_workout.shape[0], goal=athlete_weekly_workout_goal, current_streak=current_workout_streak, best_streak=best_workout_streak, color=workout_color))]
    # The three always-present donuts: sleep, readiness, activity.
    main_donuts = [html.Div(id='sleep-donut', className=class_name, children=generate_kpi_donut(kpi_name='Sleep', metric=df_sleep.shape[0], goal=athlete_info.weekly_sleep_score_goal, current_streak=current_sleep_streak, best_streak=best_sleep_streak)), html.Div(id='readiness-donut', className=class_name, children=generate_kpi_donut(kpi_name='Readiness', metric=df_readiness.shape[0], goal=athlete_info.weekly_readiness_score_goal, current_streak=current_readiness_streak, best_streak=best_readiness_streak)), html.Div(id='activity-donut', className=class_name, children=generate_kpi_donut(kpi_name='Activity', metric=df_activity.shape[0], goal=athlete_info.weekly_activity_score_goal, current_streak=current_activity_streak, best_streak=best_activity_streak))]
    if withings_credentials_supplied:
        # Optional Withings weight / body-fat trend cards.
        main_donuts.extend([html.Div(id='weight-trend', className=class_name, children=generate_content_kpi_trend('withings', 'weight')), html.Div(id='body-fat-trend', className='col-lg-2', children=generate_content_kpi_trend('withings', 'fat_ratio'))])
    app.session.remove()
    return html.Div(className='col-lg-12', children=[dbc.Card([dbc.CardBody([html.Div(className='row', children=(specific_donuts + main_donuts))])])])
def _get_new_brunswick_flows(requests_obj):
    """Scrape the NB Power system-information page for interconnection flows.

    Returns a dict mapping flow names (table header text) to float MW values.
    """
    # NOTE(review): the URL string was truncated in the extracted source;
    # restored to the NB Power real-time system information page — confirm.
    url = 'https://tso.nbpower.com/Public/en/SystemInformation_realtime.asp'
    response = requests_obj.get(url)
    soup = BeautifulSoup(response.text, 'html.parser')
    # The flows table is identified only by its distinctive border color.
    table = soup.find('table', attrs={'bordercolor': '#191970'})
    rows = table.find_all('tr')
    headers = rows[1].find_all('td')
    values = rows[2].find_all('td')
    flows = {headers[i].text.strip(): float(cell.text.strip()) for (i, cell) in enumerate(values)}
    return flows
def evals(n, degree=1, mesh=None):
    """Solve the 1-D Laplace eigenproblem on (0, pi) with CG elements and
    compare against reference values.

    Returns (sorted reference values, sorted computed estimates), each of
    length ncov - 1 where ncov is the number of converged eigenvalues.
    """
    if (mesh is None):
        mesh = IntervalMesh(n, 0, pi)
    V = FunctionSpace(mesh, 'CG', degree)
    u = TrialFunction(V)
    v = TestFunction(V)
    # Stiffness (Laplacian) bilinear form with homogeneous Dirichlet BCs.
    a = (inner(grad(u), grad(v)) * dx)
    bc = DirichletBC(V, 0.0, 'on_boundary')
    # bc_shift pushes boundary-condition eigenvalues away from the spectrum.
    eigenprob = LinearEigenproblem(a, bcs=bc, bc_shift=(- 6666.0))
    eigensolver = LinearEigensolver(eigenprob, n, solver_parameters={'eps_largest_real': None})
    # Number of converged eigenpairs reported by the solver.
    ncov = eigensolver.solve()
    h = (pi / n)
    true_values = np.zeros((ncov - 1))
    estimates = np.zeros((ncov - 1))
    for k in range((ncov - 1)):
        # Presumably the closed-form discrete eigenvalues for this
        # discretization of the 1-D Laplacian — verify the formula's source.
        true_val = (6 / (h ** 2))
        true_val *= ((1 - cos(((k + 1) * h))) / (2 + cos(((k + 1) * h))))
        true_values[k] = true_val
        estimates[k] = eigensolver.eigenvalue(k).real
    # The last reference slot holds the shifted BC eigenvalue.
    true_values[(- 1)] = eigenprob.bc_shift
    return (sorted(true_values), sorted(estimates))
class OptionPlotoptionsTreegraphLevelsColorvariation(Options):
    """Option wrapper for plotOptions.treegraph.levels.colorVariation.

    NOTE(review): the extracted source showed duplicate bare methods where
    property getter/setter pairs once stood; restored the @property /
    @<name>.setter decorators.
    """

    @property
    def key(self):
        # The key identifying which property the variation applies to.
        return self._config_get(None)

    @key.setter
    def key(self, value: Any):
        self._config(value, js_type=False)

    @property
    def to(self):
        # The ending value of the color variation.
        return self._config_get(None)

    @to.setter
    def to(self, num: float):
        self._config(num, js_type=False)
class OptionSeriesWindbarbDragdropGuideboxDefault(Options):
    """Option wrapper for series.windbarb.dragDrop.guideBox.default.

    NOTE(review): the extracted source showed duplicate bare methods where
    property getter/setter pairs once stood; restored the @property /
    @<name>.setter decorators. Getter defaults mirror the Highcharts
    guide-box defaults.
    """

    @property
    def className(self):
        return self._config_get('highcharts-drag-box-default')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get('rgba(0, 0, 0, 0.1)')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def cursor(self):
        return self._config_get('move')

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        return self._config_get('#888')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(1)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(900)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): the extracted source had a bare `.parametrize(...)` line;
# restored as a pytest parametrize marker.
@pytest.mark.parametrize(
    'calcs, ref_energy, ref_force_norm',
    [pytest.param({'real': {'type': 'g16', 'route': 'hf sto-3g'}, 'high': {'type': 'g16', 'route': 'b3lyp d95v'}}, (- 153.), 0., marks=using('gaussian16')),
     pytest.param({'real': {'type': 'g16', 'route': 'hf sto-3g'}, 'high': {'type': 'g16', 'route': 'b3lyp 3-21g'}}, (- 152.), 0., marks=using('gaussian16')),
     pytest.param({'real': {'type': 'pyscf', 'basis': 'sto3g'}, 'high': {'type': 'pyscf', 'xc': 'b3lypg', 'basis': '321g'}}, (- 152.), 0., marks=using('pyscf'))])
def test_gradient(calcs, ref_energy, ref_force_norm):
    """Two-layer ONIOM: total energy and force norm match the references."""
    geom = geom_loader('lib:acetaldehyd_oniom.xyz', coord_type='redund')
    high = [4, 5, 6]  # atom indices forming the high-level (QM) region
    for key, calc in calcs.items():
        calc['pal'] = 2
        calc['mult'] = 1
        calc['charge'] = 0
    models = {'high': {'inds': high, 'calc': 'high'}}
    layers = None  # let ONIOM derive the layering from the models dict
    oniom = ONIOM(calcs, models, geom, layers)
    assert oniom.layer_num == 2
    geom.set_calculator(oniom)
    forces = geom.forces
    energy = geom.energy
    assert np.linalg.norm(forces) == pytest.approx(ref_force_norm, abs=1e-05)
    assert energy == pytest.approx(ref_energy, abs=1e-05)
def parse_arguments() -> argparse.Namespace:
    """Parse command-line arguments for the Ghidra firmware-analysis run.

    Bug fix: the return annotation previously *called* ``argparse.Namespace()``
    (building a throwaway instance at definition time); the annotation is now
    the class itself.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-g', '--ghidra_path', nargs='?', default='/opt/ghidra', help='path to Ghidra')
    parser.add_argument('file_path', help='path to binary/firmware')
    parser.add_argument('result_path', nargs='?', default='', help='result path')
    return parser.parse_args()
def print_comparatives(omorfi, word, upos, comp, outfile):
    """Print a comparison-degree section header and the word's nominal cases.

    *comp* must be 'POS' (positive), 'CMP' (comparative) or 'SUP'
    (superlative). Robustness fix: an unknown value previously left `tags`
    unbound and crashed later with a NameError; it now raises ValueError
    up front.
    """
    if (comp == 'POS'):
        print('#### Nominal cases', file=outfile)
        tags = '[CMP=POS]'
    elif (comp == 'CMP'):
        print('#### Comparative cases', file=outfile)
        tags = '[DRV=MPI][CMP=CMP]'
    elif (comp == 'SUP'):
        print('#### Superlative cases', file=outfile)
        tags = '[DRV=IN2][CMP=SUP]'
    else:
        raise ValueError(f'unknown comparison degree: {comp!r}')
    print(file=outfile)
    print_nominals_(omorfi, word, upos, outfile, tags)
class StridedConvolutionDenseBlock(PerceptionBlock):
    """Perception block chaining strided convolutions, flattening, and an
    optional dense head.

    NOTE(review): the extracted source had a bare `(PerceptionBlock)` line
    above `forward`; restored as an `@override(PerceptionBlock)` decorator —
    confirm the original decorator name.
    """

    def __init__(self, in_keys: Union[(str, List[str])], out_keys: Union[(str, List[str])], in_shapes: Union[(Sequence[int], List[Sequence[int]])], hidden_channels: List[int], hidden_kernels: List[Union[(int, Tuple[(int, ...)])]], convolution_dimension: int, hidden_strides: Optional[List[Union[(int, Tuple[(int, ...)])]]], hidden_dilations: Optional[List[Union[(int, Tuple[(int, ...)])]]], hidden_padding: Optional[List[Union[(int, Tuple[(int, ...)])]]], padding_mode: Optional[str], hidden_units: List[int], non_lin: Union[(str, type(nn.Module))]):
        """Wire up the conv -> flatten -> (optional) dense sub-blocks."""
        super().__init__(in_keys=in_keys, out_keys=out_keys, in_shapes=in_shapes)
        # The convolution stage writes to intermediate '<key>_conv' outputs.
        out_keys_conv = [f'{k}_conv' for k in self.out_keys]
        self.conv_block = StridedConvolutionBlock(in_keys=in_keys, out_keys=out_keys_conv, in_shapes=in_shapes, hidden_channels=hidden_channels, hidden_kernels=hidden_kernels, convolution_dimension=convolution_dimension, hidden_strides=hidden_strides, hidden_dilations=hidden_dilations, hidden_padding=hidden_padding, padding_mode=padding_mode, non_lin=non_lin)
        # With a dense head, flatten into '<key>_flat'; otherwise the flatten
        # stage writes the block's final out_keys directly.
        out_keys_flatten = ([f'{k}_flat' for k in out_keys_conv] if (len(hidden_units) > 0) else out_keys)
        self.flatten_block = FlattenBlock(in_keys=out_keys_conv, out_keys=out_keys_flatten, in_shapes=self.conv_block.out_shapes(), num_flatten_dims=3)
        if (len(hidden_units) > 0):
            self.dense_block = DenseBlock(in_keys=out_keys_flatten, out_keys=out_keys, in_shapes=self.flatten_block.out_shapes(), hidden_units=hidden_units, non_lin=non_lin)
        else:
            self.dense_block = None

    @override(PerceptionBlock)
    def forward(self, block_input: Dict[(str, torch.Tensor)]) -> Dict[(str, torch.Tensor)]:
        """Apply conv -> flatten -> (optional) dense to the input dict."""
        block_output = self.conv_block(block_input)
        block_output = self.flatten_block(block_output)
        if self.dense_block:
            block_output = self.dense_block(block_output)
        return block_output

    def __repr__(self):
        txt = f'{self.__class__.__name__}:'
        txt += f'''
{str(self.conv_block)}'''
        txt += f'''
{str(self.flatten_block)}'''
        txt += f'''
{str(self.dense_block)}'''
        return txt
def register_operators(op_dict: Dict[str, Type[OperatorInterface]]):
    """Register operator classes into the global op_map.

    Raises ValueError when a name is already registered.
    """
    global op_map
    for name, operator_class in op_dict.items():
        logger.debug(f'register op: {name}')
        if name in op_map:
            raise ValueError(f'Duplicate operator registration name: {name}')
        op_map[name] = operator_class
class Char_Array_Literal(Literal):
    """AST node for a MATLAB character-array literal (or a ! shell escape)."""

    def __init__(self, t_string):
        super().__init__()
        # Only character-array or bang (shell command) tokens are valid here.
        assert isinstance(t_string, MATLAB_Token)
        assert t_string.kind in ('CARRAY', 'BANG')
        self.t_string = t_string
        self.t_string.set_ast(self)

    def __str__(self):
        # Render with MATLAB single-quote delimiters.
        return "'" + self.t_string.value + "'"

    def loc(self):
        """Source location of the backing token."""
        return self.t_string.location
def drawcirc(r, w, du, dv, N):
w = np.maximum(w, 1)
x = (np.ones([N, 1]) * (((np.arange(0, N, 1, dtype='float') - ((N + 1) / 2)) - dv) / r))
y = ((((np.arange(0, N, 1, dtype='float') - ((N + 1) / 2)) - du) / r) * np.ones([1, N])).T
p = (0.5 + (0.5 * np.sin(np.minimum(np.maximum(((np.exp((np.array([(- 0.5)]) * ((x ** 2) + (y ** 2)))).T - np.exp((- 0.5))) * ((r * 3) / w)), (np.pi / (- 2))), (np.pi / 2)))))
return p |
def upgrade():
    """Alembic upgrade: make privacydeclaration.flexible_legal_basis_for_processing
    non-nullable with a true server default, backfilling NULLs to false first."""
    bind: Connection = op.get_bind()
    # Backfill: NULL rows must become false before adding the NOT NULL constraint.
    bind.execute(text('\n        UPDATE privacydeclaration \n        SET flexible_legal_basis_for_processing = false \n        WHERE flexible_legal_basis_for_processing IS NULL;\n        '))
    # server_default='t' is the Postgres text representation of boolean true.
    op.alter_column('privacydeclaration', 'flexible_legal_basis_for_processing', existing_type=sa.BOOLEAN(), server_default='t', nullable=False)
class _Model():
    """Declarative model mixin: carries the database manager / query class
    and renders a readable repr from the primary-key identity."""

    # Set by the framework; owns this model's database sessions.
    __db_manager__: ClassVar[DatabaseManager]
    query_class = BaseQuery

    def __repr__(self):
        identity = inspect(self).identity
        if identity is None:
            # Not yet persisted: fall back to the transient object id.
            pk = '(transient {0})'.format(id(self))
        else:
            pk = ', '.join(_to_str(part) for part in identity)
        return '<{0} {1}>'.format(type(self).__name__, pk)
def test_sync_checkpoint_save_file(tmpdir):
    """SyncCheckpoint.save copies a readable stream into its tmp destination
    and rejects non-stream arguments with ValueError."""
    workdir = Path(tmpdir)
    checkpoint = SyncCheckpoint(checkpoint_dest=tmpdir)
    destination = workdir.joinpath(SyncCheckpoint.TMP_DST_PATH)
    assert not destination.exists()

    source = workdir.joinpath('test')
    with source.open('wb') as out:
        out.write(b'blah')
    with source.open('rb') as stream:
        checkpoint.save(stream)
    assert destination.exists()

    # A class object is not a readable stream: save must raise.
    with pytest.raises(ValueError):
        checkpoint.save(SyncCheckpoint)
def test_read_arrow_optional_polars(requests_mock: Mocker):
    """Read a mocked Foundry SQL Arrow-stream response into pandas and,
    when the optional dependency is installed, into polars."""
    # Raw Arrow IPC stream bytes captured from a real query response.
    # NOTE(review): the literal contains `\x-RNA-5UG`, an invalid \x escape —
    # the payload appears corrupted by extraction; confirm against the
    # original fixture.
    response_data = b'A\xff\xff\xff\xff\xb0\x01\x00\x00\x10\x00\x00\x00\x00\x00\n\x00\x0e\x00\x06\x00\r\x00\x08\x00\n\x00\x00\x00\x00\x00\x04\x00\x10\x00\x00\x00\x00\x01\n\x00\x0c\x00\x00\x00\x08\x00\x04\x00\n\x00\x00\x00\x08\x00\x00\x00,\x01\x00\x00\x01\x00\x00\x00\x0c\x00\x00\x00\x08\x00\x0c\x00\x08\x00\x04\x00\x08\x00\x00\x00\x08\x00\x00\x00\x00\x01\x00\x00\xf5\x00\x00\x00{"computedTime":,"resultComputationMetadata":null,"computedVersion":null,"warningMessage":null,"resultId":null,"rowCount":null,"columns":["MATNR"],"columnTypes":[{"type":"STRING","name":"MATNR","nullable":true,"customMetadata":{}}]}\x00\x00\x00\x08\x00\x00\x00metadata\x00\x00\x00\x00\x01\x00\x00\x00\x18\x00\x00\x00\x00\x00\x12\x00\x18\x00\x14\x00\x13\x00\x12\x00\x0c\x00\x00\x00\x08\x00\x04\x00\x12\x00\x00\x00\x14\x00\x00\x00\x14\x00\x00\x00\x18\x00\x00\x00\x00\x00\x05\x01\x14\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x04\x00\x04\x00\x00\x00\x05\x00\x00\x00MATNR\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x98\x00\x00\x00\x14\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x16\x00\x0e\x00\x15\x00\x10\x00\x04\x00\x0c\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x10\x00\x00\x00\x00\x03\n\x00\x18\x00\x0c\x00\x08\x00\x04\x00\n\x00\x00\x00\x14\x00\x00\x00H\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x-RNA-5UG\xff\xff\xff\xff\x00\x00\x00\x00E'
    client = FoundryRestClient()
    # Any GET returns the canned Arrow payload.
    requests_mock.get(url=ANY, content=response_data)
    arrow_stream = client._read_fsql_query_results_arrow('queryId')
    pdf = arrow_stream.read_pandas()
    assert (pdf.shape == (1, 1))
    # Optionally round-trip the same stream through polars when installed.
    try:
        import polars as pl
        arrow_stream = client._read_fsql_query_results_arrow('queryId')
        pa_table = arrow_stream.read_all()
        df = pl.from_arrow(pa_table)
        assert (df.shape == (1, 1))
    except ImportError:
        pytest.skip('Polars not installed')
def evaluate(data: List[List[Tuple[str, str]]], *args):
    """Token-level tagging accuracy (%) of `predict` over tagged sentences.

    Each sentence is a list of (token, gold_tag) pairs; *args is forwarded
    to `predict` as a single tuple.
    """
    total = 0
    correct = 0
    for sentence in data:
        tokens, gold = tuple(zip(*sentence))
        predicted = [tag[0] for tag in predict(tokens, args)]
        total += len(tokens)
        correct += sum(1 for g, p in zip(gold, predicted) if g == p)
    return (100.0 * correct) / total
def start_command():
    """CLI entry point: parse host/port options and launch the WhatsApp bot."""
    parser = argparse.ArgumentParser(description='EmbedChain WhatsAppBot command line interface')
    parser.add_argument('--host', default='0.0.0.0', help='Host IP to bind')
    parser.add_argument('--port', default=5000, type=int, help='Port to bind')
    options = parser.parse_args()
    bot = WhatsAppBot()
    bot.start(host=options.host, port=options.port)
class ANTLRCommand(Command):
    """Setuptools command that regenerates the Hydra override-grammar
    parsers with ANTLR."""

    description = 'Run ANTLR'
    user_options: List[str] = []

    def run(self) -> None:
        root_dir = abspath(dirname(__file__))
        # NOTE(review): dirname(basename(__file__)) is always '' so this
        # resolves to the current working directory — presumably intended
        # to be the project root when invoked from there; confirm.
        project_root = abspath(dirname(basename(__file__)))
        for grammar in ['hydra/grammar/OverrideLexer.g4', 'hydra/grammar/OverrideParser.g4']:
            # -o sets the output dir for generated code; -visitor also emits
            # visitor classes alongside the default listener.
            command = ['java', '-jar', join(root_dir, 'bin/antlr-4.9.3-complete.jar'), '-Dlanguage=Python3', '-o', join(project_root, 'hydra/grammar/gen/'), '-Xexact-output-dir', '-visitor', join(project_root, grammar)]
            log.info(f'Generating parser for Python3: {command}')
            subprocess.check_call(command)

    def initialize_options(self) -> None:
        pass

    def finalize_options(self) -> None:
        pass
def fetch_production_capacity_for_all_zones(target_datetime: datetime, session: Session) -> (dict[(str, Any)] | None):
    """Fetch IRENA capacity data for all zones, keeping only known IRENA_ZONES."""
    raw_capacity = get_capacity_data_for_all_zones(target_datetime, session)
    filtered = {zone: capacity for zone, capacity in raw_capacity.items() if zone in IRENA_ZONES}
    logger.info(f'Fetched capacity data from IRENA for {target_datetime.year}')
    return filtered
class IntegrationRuleDetailMDX():
    """Renders a prebuilt detection rule as an MDX documentation page."""

    def __init__(self, rule_id: str, rule: dict, changelog: Dict[(str, dict)], package_str: str):
        """Store rule data and apply Kibana default values."""
        self.rule_id = rule_id
        self.rule = rule
        self.changelog = changelog
        self.package = package_str
        self.rule_title = f"prebuilt-rule-{self.package}-{name_to_title(self.rule['name'])}"
        # Kibana defaults applied when the rule omits these fields.
        self.rule.setdefault('max_signals', 100)
        self.rule.setdefault('interval', '5m')

    def generate(self) -> str:
        """Assemble the full MDX page: title, description, metadata, plus
        optional guide / query / threat-mapping sections."""
        page = [MDX.title(1, self.rule['name']), '', self.rule['description'], '', self.metadata_str(), '']
        if ('note' in self.rule):
            page.extend([self.guide_str(), ''])
        if ('query' in self.rule):
            page.extend([self.query_str(), ''])
        if ('threat' in self.rule):
            page.extend([self.threat_mapping_str(), ''])
        return '\n'.join(page)

    def metadata_str(self) -> str:
        """Render the bolded key/value metadata section."""
        # NOTE(review): both doc URLs were truncated in the extracted source;
        # restored to the Elastic documentation pages these links point at —
        # confirm against the upstream file.
        date_math_doc = 'https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#date-math'
        loopback_doc = 'https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-schedule'
        fields = {'type': 'Rule type', 'index': 'Rule indices', 'severity': 'Severity', 'risk_score': 'Risk score', 'interval': 'Runs every', 'from': 'Searches indices from', 'max_signals': 'Maximum alerts per execution', 'references': 'References', 'tags': 'Tags', 'version': 'Version', 'author': 'Rule authors', 'license': 'Rule license'}
        values = []
        for (field, friendly_name) in fields.items():
            # Fall back to the changelog when the rule itself lacks the field.
            value = (self.rule.get(field) or self.changelog.get(field))
            if isinstance(value, list):
                str_value = MDX.bulleted_list(value)
            else:
                str_value = str(value)
            if (field == 'from'):
                str_value += f' ([Date Math format]({date_math_doc}), [Additional look-back time]({loopback_doc}))'
            values.append(MDX.bold_kv(friendly_name, str_value))
        return '\n\n'.join(values)

    def guide_str(self) -> str:
        """Render the investigation guide as a markdown code block."""
        return f'''{MDX.title(2, 'Investigation guide')}
{MDX.code(self.rule['note'], 'markdown')}'''

    def query_str(self) -> str:
        """Render the rule query as an SQL-highlighted code block."""
        return f'''{MDX.title(2, 'Rule query')}
{MDX.code(self.rule['query'], 'sql')}'''

    def threat_mapping_str(self) -> str:
        """Render the MITRE ATT&CK tactic/technique/sub-technique mapping."""
        values = [MDX.bold_kv('Framework', 'MITRE ATT&CK^TM^')]
        for entry in self.rule['threat']:
            tactic = entry['tactic']
            entry_values = [MDX.bulleted(MDX.bold('Tactic:')), MDX.bulleted(f"Name: {tactic['name']}", depth=2), MDX.bulleted(f"ID: {tactic['id']}", depth=2), MDX.bulleted(f"Reference URL: {tactic['reference']}", depth=2)]
            techniques = entry.get('technique', [])
            for technique in techniques:
                entry_values.extend([MDX.bulleted('Technique:'), MDX.bulleted(f"Name: {technique['name']}", depth=3), MDX.bulleted(f"ID: {technique['id']}", depth=3), MDX.bulleted(f"Reference URL: {technique['reference']}", depth=3)])
                subtechniques = technique.get('subtechnique', [])
                for subtechnique in subtechniques:
                    entry_values.extend([MDX.bulleted('Sub-technique:'), MDX.bulleted(f"Name: {subtechnique['name']}", depth=3), MDX.bulleted(f"ID: {subtechnique['id']}", depth=3), MDX.bulleted(f"Reference URL: {subtechnique['reference']}", depth=4)])
            values.extend(entry_values)
        return '\n'.join(values)
# NOTE(review): the marker prefixes appear stripped in this copy — these were
# presumably `@pytest.mark.parametrize(...)` and `@pytest.mark.usefixtures(...)`.
.parametrize('cmd_args', [['generate', '--output-path', 'generated/'], ['generate', '--output-path', 'generated/', '--progress'], ['generate', '--output-path', 'generated/', '--export-matches', 'generated/matches.yml'], ['generate', '--output-path', 'generated/', '-m', 'examples/mapping.yml']])
.usefixtures('_mock_dag')
def test_generate(cmd_args):
    """The `generate` CLI command succeeds and prints the expected output."""
    result = runner.invoke(cli.app, cmd_args)
    assert (result.exit_code == 0)
    # Compare stdout whitespace-insensitively against the expected lines.
    assert (strip_white_space(*STDOUT_LINES) == strip_white_space(result.stdout))
class Migration(migrations.Migration):
    """Add nullable BNF-code filter array columns to the Measure model."""

    dependencies = [
        ('frontend', '0067_auto__1354'),
    ]

    operations = [
        migrations.AddField(
            model_name='measure',
            name='denominator_bnf_codes_filter',
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.CharField(max_length=16), null=True, size=None
            ),
        ),
        migrations.AddField(
            model_name='measure',
            name='numerator_bnf_codes_filter',
            field=django.contrib.postgres.fields.ArrayField(
                base_field=models.CharField(max_length=16), null=True, size=None
            ),
        ),
    ]
def get_clusters_from_doc(doc: Doc, *, use_heads: bool=False, prefix: Optional[str]=None) -> List[List[Tuple[(int, int)]]]:
    """Collect coreference clusters from ``doc.spans`` as character-offset pairs.

    Span-group keys are processed in sorted order; when *prefix* is given,
    only groups whose key matches the coref prefix are included.  With
    *use_heads*, each mention is reduced to its syntactic head token.
    Duplicate offset pairs within one cluster are removed.
    """
    clusters = []
    for span_key in sorted(doc.spans.keys()):
        if prefix is not None and not matches_coref_prefix(prefix, span_key):
            continue
        mentions = []
        for mention in doc.spans[span_key]:
            if use_heads:
                head = doc[mention.root.i]
                offsets = (head.idx, head.idx + len(head))
            else:
                last = mention[-1]
                offsets = (mention[0].idx, last.idx + len(last))
            mentions.append(offsets)
        clusters.append(list(set(mentions)))
    return clusters
# NOTE(review): the marker prefix appears stripped in this copy — this was
# presumably `@pytest.mark.django_db`.
.django_db
def test_uei_keyword_filter(client, monkeypatch, spending_by_award_test_data, elasticsearch_award_index):
    """spending_by_award keyword search for a UEI returns the matching award."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    # Search contract award types A-D for 'testuei' across FY2008-FY2020.
    resp = client.post('/api/v2/search/spending_by_award', content_type='application/json', data=json.dumps({'filters': {'award_type_codes': ['A', 'B', 'C', 'D'], 'keywords': ['testuei'], 'time_period': [{'start_date': '2007-10-01', 'end_date': '2020-09-30'}]}, 'fields': ['Award ID'], 'page': 1, 'limit': 60, 'sort': 'Award ID', 'order': 'desc', 'subawards': False}))
    expected_result = [{'internal_id': 1, 'Award ID': 'abc111', 'generated_internal_id': 'CONT_AWD_TESTING_1'}]
    assert (resp.status_code == status.HTTP_200_OK)
    assert (len(resp.json().get('results')) == 1)
    assert (resp.json().get('results') == expected_result), 'UEI filter does not match expected result'
def test_attention_sequential():
    """MultiHeadAttentionBlock over three (2, 7, 10) inputs keeps shape and param count."""
    in_dict = build_multi_input_dict(dims=[(2, 7, 10), (2, 7, 10), (2, 7, 10)])
    self_attn_block = MultiHeadAttentionBlock(in_keys=['in_key_0', 'in_key_1', 'in_key_2'], out_keys='self_attention', in_shapes=[(7, 10), (7, 10), (7, 10)], num_heads=10, dropout=0.0, bias=False, add_input_to_output=True, add_bias_kv=False, add_zero_attn=False, kdim=None, vdim=None, use_key_padding_mask=False)
    # __str__ must not raise.
    str(self_attn_block)
    out_dict = self_attn_block(in_dict)
    assert (self_attn_block.get_num_of_parameters() == 401)
    assert (len(out_dict.keys()) == len(self_attn_block.out_keys) == 1)
    # add_input_to_output (residual) keeps the original input shape.
    assert (out_dict[self_attn_block.out_keys[0]].shape == (2, 7, 10))
class OptionPlotoptionsDependencywheelSonificationTracksMappingGapbetweennotes(Options):
    """Sonification gap-between-notes mapping options for dependency-wheel tracks.

    BUG FIX: the original defined each accessor twice under the same name
    (`mapFunction`, `mapTo`, `max`, `min`, `within`); the second definition
    shadowed the first, so the getters were unreachable and every call was
    routed to the setter.  They are restored here as property getter/setter
    pairs, which keeps the underlying `_config_get`/`_config` storage calls
    unchanged.
    """

    @property
    def mapFunction(self):
        """Return the configured mapping function (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Return the configured map-to target (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Return the configured maximum (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Return the configured minimum (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Return the configured 'within' value (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class conn_tracking_zone_masked(oxm):
    """OXM TLV for the connection-tracking zone field with a mask.

    Generated-style (loxigen) serialization class: a 16-bit value plus a
    16-bit mask, preceded by the 32-bit type_len header.
    NOTE(review): `unpack` takes `reader` without `self` — the original
    generated code presumably carried a stripped `@staticmethod` decorator.
    NOTE(review): `pack` joins `struct.pack` results with a text string,
    which only works on Python 2 (where `str` is bytes); under Python 3 this
    would need `b''.join` — confirm the target interpreter.
    """
    type_len = 120068

    def __init__(self, value=None, value_mask=None):
        if (value != None):
            self.value = value
        else:
            self.value = 0
        if (value_mask != None):
            self.value_mask = value_mask
        else:
            self.value_mask = 0
        return

    def pack(self):
        """Serialize header, value and mask in network byte order."""
        packed = []
        packed.append(struct.pack('!L', self.type_len))
        packed.append(struct.pack('!H', self.value))
        packed.append(struct.pack('!H', self.value_mask))
        return ''.join(packed)

    def unpack(reader):
        """Deserialize an instance from *reader*; asserts type_len matches."""
        obj = conn_tracking_zone_masked()
        _type_len = reader.read('!L')[0]
        assert (_type_len == 120068)
        obj.value = reader.read('!H')[0]
        obj.value_mask = reader.read('!H')[0]
        return obj

    def __eq__(self, other):
        """Equality requires exact type plus matching value and mask."""
        if (type(self) != type(other)):
            return False
        if (self.value != other.value):
            return False
        if (self.value_mask != other.value_mask):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer *q*."""
        q.text('conn_tracking_zone_masked {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('value = ')
                q.text(('%#x' % self.value))
                q.text(',')
                q.breakable()
                q.text('value_mask = ')
                q.text(('%#x' % self.value_mask))
            q.breakable()
        q.text('}')
def test_data_model():
    """DataModel validity checks, equality, string form and protobuf round-trip."""
    attr_kwargs = dict(name='test', type_=str, is_required=True)
    data_model = DataModel('test', [Attribute(**attr_kwargs)])
    data_model._check_validity()
    # Duplicate attribute names must be rejected.
    with pytest.raises(ValueError, match="Invalid input value for type 'DataModel': duplicated attribute name."):
        data_model = DataModel('test', [Attribute(**attr_kwargs), Attribute(**attr_kwargs)])
        data_model._check_validity()
    # Equality is by name and attributes; inequality for a different name.
    assert data_model == DataModel('test', [Attribute(**attr_kwargs)])
    assert data_model != DataModel('not test', [Attribute(**attr_kwargs)])
    assert str(data_model) == 'DataModel(name=test,attributes={\'test\': "Attribute(name=test,type=<class \'str\'>,is_required=True)"},description=)'
    # Encoding then decoding must reproduce an equal model.
    encoded = data_model.encode()
    decoded = DataModel.decode(encoded)
    assert decoded == data_model
def extractBlancabloggingblockWordpressCom(item):
    """Map a 'blancabloggingblock' feed item to a release message.

    Returns None for previews or items without chapter/volume info, a
    release message when one of the known tags matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or 'preview' in item['title'].lower():
        return None
    # (tag in feed, canonical series name, translation type)
    tagmap = [
        ('Mahousekai', 'Mahousekai no Uketsukejou ni Naritaidesu', 'translated'),
        ('Matsurikaden', 'The Legend of Bureaucrat Matsurika', 'translated'),
        ('Matsurika Kanriden', 'The Legend of Bureaucrat Matsurika', 'translated'),
        ('Tsurugi no Joou to Rakuin no Ko', 'Tsurugi no Joou to Rakuin no Ko', 'translated'),
        ('shounen onmyouji', 'Shounen Onmyouji', 'translated'),
        ('Kyoto Holmes', 'Kyoto Teramachi Sanjou no Holmes', 'translated'),
        ('Kyoholmes', 'Kyoto Teramachi Sanjou no Holmes', 'translated'),
        ('Kaminai', 'Kamisama no Inai Nichiyoubi', 'translated'),
        ('Jungfrau', 'Kenkoku no Jungfrau', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesBubbleSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Tremolo mapping options for bubble-series sonification instruments.

    NOTE(review): in this wrapper family such accessors normally carry
    `@property` decorators, which appear stripped in this copy — confirm
    against the original source.
    """

    def depth(self) -> 'OptionSeriesBubbleSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        # Lazily-created sub-configuration for tremolo depth.
        return self._config_sub_data('depth', OptionSeriesBubbleSonificationDefaultinstrumentoptionsMappingTremoloDepth)

    def speed(self) -> 'OptionSeriesBubbleSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        # Lazily-created sub-configuration for tremolo speed.
        return self._config_sub_data('speed', OptionSeriesBubbleSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
def get_commands(wf, query):
    """List brew commands, filtered by the second word of *query* if present.

    Lines containing a space are dropped (they are not command names).
    """
    raw_lines = execute(wf, ['brew', 'commands']).splitlines()
    commands = [line for line in raw_lines if ' ' not in line]
    terms = query.split()
    if len(terms) > 1:
        # Fuzzy-filter on the word following the keyword itself.
        return wf.filter(terms[1], commands, match_on=MATCH_SUBSTRING)
    return commands
def test_ros2_decoder_msg_eq():
    """Decoded ROS2 messages support value equality and inequality."""
    with generate_sample_data() as m:
        reader = make_reader(m, decoder_factories=[DecoderFactory()])
        decoded_messages = reader.iter_decoded_messages('/chatter')
        # Pull the first two decoded messages from the '/chatter' topic.
        (_, _, _, msg0) = next(decoded_messages)
        (_, _, _, msg1) = next(decoded_messages)
        assert (msg0.data == 'string message 0')
        assert (msg1.data == 'string message 1')
        # Reflexive equality and symmetric inequality.
        assert ((msg0 == msg0) and (msg1 == msg1))
        assert ((msg0 != msg1) and (msg1 != msg0))
# NOTE(review): '_list' looks like the remnant of a stripped decorator
# (presumably `@to_list` from eth_utils) — confirm against the original.
_list
def block_hashes_in_range(w3: 'Web3', block_range: Tuple[(BlockNumber, BlockNumber)]) -> Iterable[Hash32]:
    """Yield the hash of every block in the inclusive range, oldest first.

    Yields nothing at all when either bound is None; yields None for a
    block whose header object lacks a 'hash' attribute (getattr default).
    """
    (from_block, to_block) = block_range
    if ((from_block is None) or (to_block is None)):
        return
    for block_number in range(from_block, (to_block + 1)):
        (yield getattr(w3.eth.get_block(BlockNumber(block_number)), 'hash', None))
def findCert():
    """Locate the bundled TLS certificate files next to this module.

    Returns a dict of ssl keyword arguments (CERT_REQUIRED plus CA cert,
    key file and certificate paths).  Raises AssertionError when any of
    the expected files is missing.
    """
    module_dir = os.path.split(os.path.abspath(__file__))[0]

    def _cert_path(filename):
        # Resolve a path under ./certs relative to this module.
        return os.path.abspath(os.path.join(module_dir, './certs/' + filename))

    ca_path = _cert_path('cacert.pem')
    cert_path = _cert_path('cert.pem')
    key_path = _cert_path('key.pem')
    # Check in the same order as before: CA cert, certificate, key.
    for path in (ca_path, cert_path, key_path):
        assert os.path.exists(path), ("No certificates found on path '%s'" % path)
    return {
        'cert_reqs': ssl.CERT_REQUIRED,
        'ca_certs': ca_path,
        'keyfile': key_path,
        'certfile': cert_path,
    }
def test_idv_transactions_csv_sources(db):
    """get_download_sources for an IDV transaction history yields one d1 source."""
    original = VALUE_MAPPINGS['idv_transaction_history']['filter_function']
    # BUG FIX: the keyword was misspelled 'returned_value', which silently set
    # an unused attribute on the mock instead of configuring its return value.
    VALUE_MAPPINGS['idv_transaction_history']['filter_function'] = MagicMock(return_value='')
    try:
        csv_sources = download_generation.get_download_sources({
            'download_types': ['idv_transaction_history'],
            'filters': {
                'award_id': 0,
                'award_type_codes': tuple(set(contract_type_mapping) | set(idv_type_mapping)),
            },
        })
        assert len(csv_sources) == 1
        assert csv_sources[0].file_type == 'd1'
        assert csv_sources[0].source_type == 'idv_transaction_history'
    finally:
        # Always restore the real filter_function, even if an assertion fails.
        VALUE_MAPPINGS['idv_transaction_history']['filter_function'] = original
def generate_aggregated_capacity_config_dict(capacity_config: list[dict[(str, Any)]], parent_zone: ZoneKey) -> (dict[(str, Any)] | None):
    """Aggregate per-subzone capacity configs into one config for *parent_zone*.

    All entries must share the same datetime; otherwise a warning is logged
    and None is returned.  Non-None sources are joined into a single
    comma-separated string and the result is key-sorted.
    """
    # BUG FIX: the original comprehensions shadowed the `capacity_config`
    # parameter with a loop variable of the same name, which was misleading
    # and fragile under later edits; distinct names are used here.
    datetime_values = {entry['datetime'] for entry in capacity_config}
    sources = {entry['source'] for entry in capacity_config}
    if len(datetime_values) != 1:
        logger.warning(f'{parent_zone} capacity could not be updated because all capacity configs must have the same datetime values')
        return None
    updated_capacity = {
        'datetime': next(iter(datetime_values)),
        'value': compute_aggregated_value(capacity_config),
        # NOTE: iteration order over the set is not deterministic (matches
        # the original behavior).
        'source': ', '.join(elem for elem in sources if elem is not None),
    }
    return sort_config_keys(updated_capacity)
class RankingHistory(models.Model):
    """One score-change record for a player in a game/category/season."""

    class Meta():
        verbose_name = ''
        verbose_name_plural = ''
        # At most one history row per (game, player, category).
        constraints = [models.UniqueConstraint(fields=['game', 'player', 'category'], name='ranking_history_uniq')]

    # NOTE(review): the `**_('...')` spreads below are unusual — `_` appears
    # to be a project helper returning field kwargs (e.g. verbose_name);
    # confirm against the project's definition of `_`.
    id = models.IntegerField(**_('ID'), primary_key=True)
    game = models.ForeignKey(Game, **_(''), on_delete=models.CASCADE, db_constraint=False, related_name='ranking')
    player = models.ForeignKey(Player, **_(''), on_delete=models.CASCADE, related_name='+')
    category = models.CharField(**_(''), max_length=20)
    season = models.IntegerField(**_(''))
    score_before = models.IntegerField(**_(''))
    score_after = models.IntegerField(**_(''))
    changes = models.IntegerField(**_(''), default=0)

    def __str__(self):
        # e.g. "[#1][42:alice:S3] 1000 -> 1020"
        return f'[#{self.id}][{self.game.id}:{self.player.name}:S{self.season}] {self.score_before} -> {self.score_after}'
# NOTE(review): marker prefixes appear stripped — presumably
# `@pytest.mark.parametrize(...)` for both decorators below.
.parametrize('sampling', ['None', 'bilinear'])
.parametrize('coords, expected_val', [pytest.param(((10.0 - 0.009), 10.0), None, id='(xori - 9e-3, yori)'), pytest.param((10.0, (20.0 + 0.009)), None, id='(xori, ymax - 9e-3)'), pytest.param(((20.0 + 0.009), 10.0), None, id='(xmax + 9e-3, yori)'), pytest.param(((20.0 + 0.009), 20.0), None, id='(xmax + 9e-3, ymax)'), pytest.param((9.0, 9.0), None, id='(xori - 1, yori - 1)'), pytest.param((21.0, (21.0 - 1e-10)), None, id='(xmax + 1, ymax + 1)')])
def test_ijk_outside(coords, expected_val, sampling):
    """Sampling a surface just outside its extent must return None for all samplings."""
    surface = Surface()
    result = xtgeo.surface.regular_surface._regsurf_oper.get_value_from_xy(surface, coords, sampling=sampling)
    assert (result == expected_val)
class CustomSysRole(db.Model):
    """A user-defined system role; panel access is resolved via PanelPermission."""
    __tablename__ = 'custom_sys_roles'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String, unique=True)

    def can_access(self, panel_name):
        """Return whether this role can access the panel named *panel_name*.

        Returns False when no such panel permission exists, or when this
        role is not attached to it.
        """
        panel = PanelPermission.query.filter_by(panel_name=panel_name).first()
        # BUG FIX: `.first()` returns None for an unknown panel name; the
        # original then raised AttributeError on `panel.custom_system_roles`.
        if panel is None:
            return False
        if any(role.id == self.id for role in panel.custom_system_roles):
            return panel.can_access
        return False

    def __repr__(self):
        return '<CustomSysRole %r>' % self.name
class TestIterativeMethods(proteus.test_utils.TestTools.BasicTest):
    """Tests for petsc4py/numpy conversion helpers and Chebyshev semi-iteration.

    NOTE(review): the bare `.LinearAlgebraTools` / `.LinearSolvers` lines
    below look like stripped marker decorators (presumably
    `@pytest.mark.LinearAlgebraTools` etc.) — confirm against the original.
    """

    def setup_method(self, method):
        # Start each test from a clean PETSc options database.
        self.petsc_options = PETSc.Options()
        self.petsc_options.clear()
        for k in self.petsc_options.getAll():
            self.petsc_options.delValue(k)
        self._scriptdir = os.path.dirname(__file__)
        # Reference mass-matrix fixture used by all tests below.
        self.quad_mass_matrix = np.load(os.path.join(self._scriptdir, 'import_modules/quad_mass_matrix.npy'))

    def teardown_method(self, method):
        pass

    .LinearAlgebraTools
    def test_dense_numpy_2_petsc4py(self):
        """Round-trip dense numpy -> petsc4py -> dense numpy must be exact."""
        A_petsc = LAT.dense_numpy_2_petsc4py(self.quad_mass_matrix)
        A_new = LAT.petsc4py_sparse_2_dense(A_petsc)
        assert (np.linalg.norm((self.quad_mass_matrix - A_new)) == 0)

    .LinearSolvers
    def test_chebyshev_iteration_1(self):
        """Chebyshev semi-iteration on the raw mass matrix matches a stored solution."""
        A = self.quad_mass_matrix
        n = self.quad_mass_matrix.shape[0]
        # Eigenvalue bounds for the Chebyshev polynomial.
        alpha = old_div(1.0, 4)
        beta = old_div(9.0, 4)
        x0 = np.zeros(n)
        # Right-hand side with zeros on even indices, ones on odd indices.
        b1 = np.ones(n)
        for i in range(0, n, 2):
            b1[i] = 0.0
        A_petsc = LAT.dense_numpy_2_petsc4py(A)
        x0_petsc = PETSc.Vec().createWithArray(x0)
        b1_petsc = PETSc.Vec().createWithArray(b1)
        solver = LS.ChebyshevSemiIteration(A_petsc, alpha, beta, True)
        solver.apply(b1_petsc, x0_petsc, 20)
        expected = np.load(os.path.join(self._scriptdir, 'import_modules/sol_10.npy'))
        actual = x0_petsc
        assert np.allclose(expected, actual.getArray())

    .LinearSolvers
    def test_chebyshev_iteration_2(self):
        """Chebyshev semi-iteration on the Jacobi-scaled matrix matches stored iterates."""
        # Diagonally precondition: D^{-1} A.
        A = np.diag(old_div(1.0, np.diag(self.quad_mass_matrix))).dot(self.quad_mass_matrix)
        n = self.quad_mass_matrix.shape[0]
        alpha = old_div(1.0, 4)
        beta = old_div(9.0, 4)
        x0 = np.zeros(n)
        # Right-hand side b1 = [0, 1, 2, ..., n-1].
        b1 = np.zeros(n)
        for i in range(0, n):
            b1[i] = i
        A_petsc = LAT.dense_numpy_2_petsc4py(A)
        x0_petsc = PETSc.Vec().createWithArray(x0)
        b1_petsc = PETSc.Vec().createWithArray(b1)
        solver = LS.ChebyshevSemiIteration(A_petsc, alpha, beta, save_iterations=True)
        solver.apply(b1_petsc, x0_petsc, 20)
        # Compare every saved iterate against the stored reference list.
        expected = np.load(os.path.join(self._scriptdir, 'import_modules/sol_20_lst.npy'))
        for (i, item) in enumerate(expected):
            assert np.allclose(item, solver.iteration_results[i], 1e-12)
class conn_tracking_state(oxm):
    """OXM TLV for the connection-tracking state field (32-bit, no mask).

    NOTE(review): `unpack` takes `reader` without `self` — presumably a
    stripped `@staticmethod` in the original generated code.
    NOTE(review): `pack` joins bytes with a text string, which only works
    on Python 2; Python 3 would need `b''.join` — confirm the interpreter.
    """
    type_len = 119300

    def __init__(self, value=None):
        if (value != None):
            self.value = value
        else:
            self.value = 0
        return

    def pack(self):
        """Serialize the 32-bit header followed by the 32-bit value."""
        packed = []
        packed.append(struct.pack('!L', self.type_len))
        packed.append(struct.pack('!L', self.value))
        return ''.join(packed)

    def unpack(reader):
        """Deserialize an instance from *reader*; asserts type_len matches."""
        obj = conn_tracking_state()
        _type_len = reader.read('!L')[0]
        assert (_type_len == 119300)
        obj.value = reader.read('!L')[0]
        return obj

    def __eq__(self, other):
        """Equality requires exact type plus matching value."""
        if (type(self) != type(other)):
            return False
        if (self.value != other.value):
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering to pretty-printer *q*."""
        q.text('conn_tracking_state {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('value = ')
                q.text(('%#x' % self.value))
            q.breakable()
        q.text('}')
class Case(object):
    """A single branch (`if_`/`elif_`) of a flytekit conditional section.

    Stores the branch's comparison/conjunction expression, the output
    promise(s) produced by `then()`, or the error message set by `fail()`.
    NOTE(review): `output_node`, `expr`, `output_promise` and `err` read
    like accessors that originally carried stripped `@property` decorators —
    confirm against the original flytekit source.
    """

    def __init__(self, cs: ConditionalSection, expr: Optional[Union[(ComparisonExpression, ConjunctionExpression)]], stmt: str='elif'):
        self._cs = cs
        if (expr is not None):
            # Validate the expression kind: only lazily-evaluated comparison
            # or conjunction expressions can be compiled into a branch.
            if isinstance(expr, bool):
                raise AssertionError(f'Logical (and/or/is/not) operations are not supported. Expressions Comparison (<,<=,>,>=,==,!=) or Conjunction (&/|) are supported.Received an evaluated expression with val {expr} in {cs.name}.{stmt}')
            if isinstance(expr, Promise):
                raise AssertionError(f'Flytekit does not support unary expressions of the form `if_(x) - where x is an input value or output of a previous node. Received var {expr} in condition {cs.name}.{stmt}')
            if (not (isinstance(expr, ConjunctionExpression) or isinstance(expr, ComparisonExpression))):
                raise AssertionError(f'Flytekit only supports Comparison (<,<=,>,>=,==,!=) or Conjunction (&/|) expressions, Received var {expr} in condition {cs.name}.{stmt}')
        self._expr = expr
        self._output_promise: Optional[Union[(Tuple[Promise], Promise)]] = None
        self._err: Optional[str] = None
        self._stmt = stmt
        # Node that produced this branch's output (set by then() when the
        # promise is not yet ready).
        self._output_node = None

    def output_node(self) -> Optional[Node]:
        return self._output_node

    def expr(self) -> Optional[Union[(ComparisonExpression, ConjunctionExpression)]]:
        return self._expr

    def output_promise(self) -> Optional[Union[(Tuple[Promise], Promise)]]:
        return self._output_promise

    def err(self) -> Optional[str]:
        return self._err

    def then(self, p: Union[(Promise, Tuple[Promise])]) -> Optional[Union[(Condition, Promise, Tuple[Promise], VoidPromise)]]:
        """Record the branch body's output and close the branch.

        Captures the producing node for single, void, and named-tuple
        promises so the conditional can wire the task graph correctly.
        """
        self._output_promise = p
        if isinstance(p, Promise):
            if (not p.is_ready):
                self._output_node = p.ref.node
        elif isinstance(p, VoidPromise):
            if (p.ref is not None):
                self._output_node = p.ref.node
        elif hasattr(p, '_fields'):
            # NamedTuple of promises: use the first not-ready member's node.
            for f in p._fields:
                prom = getattr(p, f)
                if (not prom.is_ready):
                    self._output_node = prom.ref.node
                    break
        return self._cs.end_branch()

    def fail(self, err: str) -> Promise:
        """Mark this branch as failing with *err* and close the branch."""
        self._err = err
        return cast(Promise, self._cs.end_branch())

    def __repr__(self):
        return f'{self._stmt}({self.expr.__repr__()})'

    def __str__(self):
        return self.__repr__()
class TestProposedPESQ(unittest.TestCase):
    """PESQ wrapper tests over a clean and a babble-noise speech file.

    NOTE(review): several tests begin with a bare `return`, which disables
    them (the path-based call variants); this looks intentional — confirm.
    """

    def setUp(self):
        # Both path- and array-based inputs for the same two files.
        self.ref_path = get_file_path('speech.wav')
        self.deg_path = get_file_path('speech_bab_0dB.wav')
        self.ref_array = pb.io.load_audio(self.ref_path)
        self.deg_array = pb.io.load_audio(self.deg_path)

    def test_wb_scores_with_lists_of_paths_length_one(self):
        return  # disabled (see class note)
        scores = pesq([self.ref_path], [self.deg_path], sample_rate=16000)
        np.testing.assert_allclose(scores, np.asarray([1.083]))

    def test_wb_scores_with_lists_of_paths_length_two(self):
        return  # disabled (see class note)
        scores = pesq([self.ref_path, self.ref_path], [self.deg_path, self.ref_path], sample_rate=16000)
        np.testing.assert_allclose(scores, np.asarray([1.083, 4.644]))

    def test_wb_scores_with_lists_of_arrays_length_one(self):
        scores = pesq([self.ref_array], [self.deg_array], sample_rate=16000)
        np.testing.assert_allclose(scores, np.asarray([1.083234]), rtol=1e-06)

    def test_wb_scores_with_lists_of_arrays_length_two(self):
        # Identical signals score near the PESQ maximum (~4.64).
        scores = pesq([self.ref_array, self.ref_array], [self.deg_array, self.ref_array], sample_rate=16000)
        np.testing.assert_allclose(scores, np.asarray([1.083234, 4.643888]), rtol=1e-06)

    def test_nb_scores_with_lists_of_paths_length_one(self):
        return  # disabled (see class note)
        scores = pesq([self.ref_path], [self.deg_path], sample_rate=16000, mode='nb')
        np.testing.assert_allclose(scores, np.asarray([1.607]))

    def test_nb_scores_with_lists_of_paths_length_two(self):
        return  # disabled (see class note)
        scores = pesq([self.ref_path, self.ref_path], [self.deg_path, self.ref_path], sample_rate=16000, mode='nb')
        np.testing.assert_allclose(scores, np.asarray([1.607, 4.549]))

    def test_nb_scores_with_lists_of_arrays_length_one(self):
        scores = pesq([self.ref_array], [self.deg_array], sample_rate=16000, mode='nb')
        np.testing.assert_allclose(scores, np.asarray([1.607208]))

    def test_nb_scores_with_lists_of_arrays_length_two(self):
        scores = pesq([self.ref_array, self.ref_array], [self.deg_array, self.ref_array], sample_rate=16000, mode='nb')
        np.testing.assert_allclose(scores, np.asarray([1.607208, 4.548638]))

    def test_wb_scores_with_paths_directly(self):
        return  # disabled (see class note)
        scores = pesq(self.ref_path, self.deg_path, sample_rate=16000)
        np.testing.assert_allclose(scores, np.asarray([1.083]))

    def test_wrong_file(self):
        return  # disabled (see class note)
        # Feeding a non-audio file must surface the PESQ binary's error.
        with self.assertRaisesRegex(ChildProcessError, 'An error of type 2 \\(Reference or Degraded below 1/4 second - processing stopped \\) occurred during processing.'):
            pesq(__file__, self.deg_path)
# NOTE(review): marker prefix appears stripped — presumably
# `@pytest.mark.parametrize(...)`.
.parametrize('max_count,max_size', [(3, None), (None, (3 * SIZEOF_TEST_TX_SMALL)), (3, (3 * SIZEOF_TEST_TX_SMALL))])
def test_lrucache_add_past_limit_lru_ordering(max_count: int, max_size: int, test_tx_small) -> None:
    """Evictions past the cache limit must follow least-recently-used order."""
    cache = LRUCache(max_count=max_count, max_size=max_size)
    cache.set(b'1', test_tx_small)
    cache.set(b'2', test_tx_small)
    cache.set(b'3', test_tx_small)
    # Touch 1 and 3 so that 2 becomes the least recently used entry.
    assert (cache.get(b'1') == test_tx_small)
    assert (cache.get(b'3') == test_tx_small)
    (added, removals) = cache.set(b'4', test_tx_small)
    assert added
    assert (removals == [(b'2', test_tx_small)])
    # Touch 3 again; 1 is now the oldest and is evicted next.
    assert (cache.get(b'3') == test_tx_small)
    (added, removals) = cache.set(b'5', test_tx_small)
    assert added
    assert (removals == [(b'1', test_tx_small)])
    # 4 was never touched after insertion, so it goes next.
    (added, removals) = cache.set(b'6', test_tx_small)
    assert added
    assert (removals == [(b'4', test_tx_small)])
# NOTE(review): `_icmp_type(...)` looks like a stripped class decorator
# (presumably `@icmp.register_icmp_type(ICMP_ECHO_REPLY, ICMP_ECHO_REQUEST)`).
_icmp_type(ICMP_ECHO_REPLY, ICMP_ECHO_REQUEST)
class echo(_ICMPv4Payload):
    """ICMPv4 echo request/reply payload: identifier, sequence number, data."""
    _PACK_STR = '!HH'
    _MIN_LEN = struct.calcsize(_PACK_STR)

    def __init__(self, id_=0, seq=0, data=None):
        super(echo, self).__init__()
        self.id = id_
        self.seq = seq
        self.data = data

    # NOTE(review): takes `cls` — presumably a stripped `@classmethod`.
    def parser(cls, buf, offset):
        """Parse id/seq at *offset*; any remaining buffer becomes the data."""
        (id_, seq) = struct.unpack_from(cls._PACK_STR, buf, offset)
        msg = cls(id_, seq)
        offset += cls._MIN_LEN
        if (len(buf) > offset):
            msg.data = buf[offset:]
        return msg

    def serialize(self):
        """Serialize the header (and optional data) into a bytearray."""
        hdr = bytearray(struct.pack(echo._PACK_STR, self.id, self.seq))
        if (self.data is not None):
            hdr += self.data
        return hdr

    def __len__(self):
        # Header length plus optional payload length.
        length = self._MIN_LEN
        if (self.data is not None):
            length += len(self.data)
        return length
class MyObject(event.Component):
    """Small Component with a single mutable integer property.

    NOTE(review): `set_foo`/`set_foo_add`/`increase_foo` look like they
    originally carried `@event.action` decorators, stripped in this copy —
    confirm against the original source.
    """
    foo = event.Property(0)

    def set_foo(self, v):
        # Mutate the property to exactly *v*.
        self._mutate_foo(v)

    def set_foo_add(self, *args):
        # Mutate the property to the sum of all positional arguments.
        self._mutate_foo(sum(args))

    def increase_foo(self):
        # Intentionally two separate +1 mutations, not a single +2.
        self.set_foo((self.foo + 1))
        self.set_foo((self.foo + 1))

    def do_silly(self):
        return 1
class PlatformDdosResponseData(ModelSimple):
    """OpenAPI-generator 'simple' model wrapping a list of PlatformDdosEntry.

    NOTE(review): the bare `_property` and `_js_args_to_python_args` lines
    below look like stripped decorators from the generated code (presumably
    `@cached_property` and `@convert_js_args_to_python_args`), and
    `_from_openapi_data` would normally also carry `@classmethod` — confirm
    against the original generated module.
    """
    # Enum constraints and validations for this model (none).
    allowed_values = {}
    validations = {}
    additional_properties_type = None
    _nullable = False

    _property
    def openapi_types():
        # Lazy import avoids circular references between generated models.
        lazy_import()
        return {'value': ([PlatformDdosEntry],)}

    _property
    def discriminator():
        return None

    attribute_map = {}
    read_only_vars = set()
    _composed_schemas = None
    # Instance attributes that must always exist on the model.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Build the model; 'value' may be passed positionally or by keyword."""
        _path_to_item = kwargs.pop('_path_to_item', ())
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError("value is required, but not passed in args or kwargs and doesn't have default", path_to_item=_path_to_item, valid_classes=(self.__class__,))
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Track visited classes to break cycles in composed-schema handling.
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing server data."""
        _path_to_item = kwargs.pop('_path_to_item', ())
        # Bypass __init__; this path performs its own initialization.
        self = super(OpenApiModel, cls).__new__(cls)
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError("value is required, but not passed in args or kwargs and doesn't have default", path_to_item=_path_to_item, valid_classes=(self.__class__,))
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        return self
class TestTurnBattleItemsFunc(BaseEvenniaTest):
    """End-to-end checks of the turn-battle item contrib's combat rules.

    NOTE(review): the parenthesized line before `setUp` looks like a
    stripped `@patch('evennia...tickerhandler', new=MagicMock())`
    decorator — confirm against the original source.
    """

    ('evennia.contrib.game_systems.turnbattle.tb_items.tickerhandler', new=MagicMock())
    def setUp(self):
        """Create a room, four characters, and a 3-use healing potion."""
        super().setUp()
        self.testroom = create_object(DefaultRoom, key='Test Room')
        self.attacker = create_object(tb_items.TBItemsCharacter, key='Attacker', location=self.testroom)
        self.defender = create_object(tb_items.TBItemsCharacter, key='Defender', location=self.testroom)
        self.joiner = create_object(tb_items.TBItemsCharacter, key='Joiner', location=self.testroom)
        self.user = create_object(tb_items.TBItemsCharacter, key='User', location=self.testroom)
        self.test_healpotion = create_object(key='healing potion')
        self.test_healpotion.db.item_func = 'heal'
        self.test_healpotion.db.item_uses = 3

    def tearDown(self):
        """Stop the turn handler and delete all created objects."""
        super().tearDown()
        self.turnhandler.stop()
        self.attacker.delete()
        self.defender.delete()
        self.joiner.delete()
        self.user.delete()
        self.testroom.delete()

    def test_tbitemsfunc(self):
        """Exercise rolls, damage, turn handling, items and conditions in order."""
        # Dice/roll helpers stay within their expected ranges.
        initiative = tb_items.COMBAT_RULES.roll_init(self.attacker)
        self.assertTrue(((initiative >= 0) and (initiative <= 1000)))
        attack_roll = tb_items.COMBAT_RULES.get_attack(self.attacker, self.defender)
        self.assertTrue(((attack_roll >= 0) and (attack_roll <= 100)))
        defense_roll = tb_items.COMBAT_RULES.get_defense(self.attacker, self.defender)
        self.assertTrue((defense_roll == 50))
        damage_roll = tb_items.COMBAT_RULES.get_damage(self.attacker, self.defender)
        self.assertTrue(((damage_roll >= 15) and (damage_roll <= 25)))
        # Damage application and attack resolution.
        self.defender.db.hp = 10
        tb_items.COMBAT_RULES.apply_damage(self.defender, 3)
        self.assertTrue((self.defender.db.hp == 7))
        self.defender.db.hp = 40
        tb_items.COMBAT_RULES.resolve_attack(self.attacker, self.defender, attack_value=20, defense_value=10)
        self.assertTrue((self.defender.db.hp < 40))
        # Cleanup removes combat attributes.
        # NOTE(review): set as 'Combat_attribute' but checked as
        # 'combat_attribute' — the assert passes either way because a
        # missing db attribute reads as None, but the case mismatch looks
        # unintended; confirm.
        self.attacker.db.Combat_attribute = True
        tb_items.COMBAT_RULES.combat_cleanup(self.attacker)
        self.assertFalse(self.attacker.db.combat_attribute)
        self.assertFalse(tb_items.COMBAT_RULES.is_in_combat(self.attacker))
        # Turn-handler lifecycle.
        self.attacker.location.scripts.add(tb_items.TBItemsTurnHandler)
        self.turnhandler = self.attacker.db.combat_turnHandler
        self.assertTrue(self.attacker.db.combat_turnHandler)
        # Long interval so the handler never fires on its own in this test.
        self.turnhandler.interval = 10000
        self.turnhandler.db.fighters = [self.attacker, self.defender]
        self.turnhandler.db.turn = 0
        self.assertTrue(tb_items.COMBAT_RULES.is_turn(self.attacker))
        self.attacker.db.Combat_ActionsLeft = 1
        tb_items.COMBAT_RULES.spend_action(self.attacker, 1, action_name='Test')
        self.assertTrue((self.attacker.db.Combat_ActionsLeft == 0))
        self.assertTrue((self.attacker.db.Combat_LastAction == 'Test'))
        # initialize_for_combat resets actions and the last-action marker.
        self.attacker.db.Combat_ActionsLeft = 983
        self.turnhandler.initialize_for_combat(self.attacker)
        self.assertTrue((self.attacker.db.Combat_ActionsLeft == 0))
        self.assertTrue((self.attacker.db.Combat_LastAction == 'null'))
        self.defender.db.Combat_ActionsLeft = 0
        self.turnhandler.start_turn(self.defender)
        self.assertTrue((self.defender.db.Combat_ActionsLeft == 1))
        self.turnhandler.db.fighters = [self.attacker, self.defender]
        self.turnhandler.db.turn = 0
        self.turnhandler.next_turn()
        self.assertTrue((self.turnhandler.db.turn == 1))
        # Spending the last action advances the turn.
        self.turnhandler.db.fighters = [self.attacker, self.defender]
        self.turnhandler.db.turn = 0
        self.attacker.db.Combat_ActionsLeft = 0
        self.turnhandler.turn_end_check(self.attacker)
        self.assertTrue((self.turnhandler.db.turn == 1))
        # Joining a fight inserts the joiner at the front of the order.
        self.turnhandler.db.fighters = [self.attacker, self.defender]
        self.turnhandler.db.turn = 0
        self.turnhandler.join_fight(self.joiner)
        self.assertTrue((self.turnhandler.db.turn == 1))
        self.assertTrue((self.turnhandler.db.fighters == [self.joiner, self.attacker, self.defender]))
        # Item usage and condition bookkeeping.
        tb_items.COMBAT_RULES.spend_item_use(self.test_healpotion, self.user)
        self.assertTrue((self.test_healpotion.db.item_uses == 2))
        self.user.db.hp = 2
        tb_items.COMBAT_RULES.use_item(self.user, self.test_healpotion, self.user)
        self.assertTrue((self.user.db.hp > 2))
        tb_items.COMBAT_RULES.add_condition(self.user, self.user, 'Test', 5)
        self.assertTrue((self.user.db.conditions == {'Test': [5, self.user]}))
        tb_items.COMBAT_RULES.condition_tickdown(self.user, self.user)
        self.assertEqual(self.user.db.conditions, {'Test': [4, self.user]})
        self.user.db.hp = 2
        tb_items.COMBAT_RULES.itemfunc_heal(self.test_healpotion, self.user, self.user)
        self.user.db.conditions = {}
        tb_items.COMBAT_RULES.itemfunc_add_condition(self.test_healpotion, self.user, self.user)
        self.assertTrue((self.user.db.conditions == {'Regeneration': [5, self.user]}))
        self.user.db.conditions = {'Poisoned': [5, self.user]}
        tb_items.COMBAT_RULES.itemfunc_cure_condition(self.test_healpotion, self.user, self.user)
        self.assertTrue((self.user.db.conditions == {}))
class BranchNotFoundError(FoundryAPIError):
    """Raised when a dataset (optionally at a transaction) lacks a branch."""

    def __init__(self, dataset_rid: str, branch: str, transaction_rid: (str | None)=None, response: (requests.Response | None)=None):
        # BUG FIX: the original message lacked the space between the optional
        # "on transaction <rid>" fragment and "has no branch", producing e.g.
        # "... on transaction ri.xyzhas no branch master.".
        transaction_part = f'on transaction {transaction_rid} ' if transaction_rid is not None else ''
        message = (
            f'Dataset {dataset_rid} '
            + transaction_part
            + f'has no branch {branch}.\n'
            + (response.text if response is not None else '')
        )
        super().__init__(message)
        self.dataset_rid = dataset_rid
        self.branch = branch
        self.transaction_rid = transaction_rid
        self.response = response
def test_lifespan_scope_asgi2app():
    """An ASGI2-style app must receive the expected lifespan scope and state."""

    def asgi2app(scope):
        # ASGI2: the first callable receives the scope and returns the app coroutine.
        assert (scope == {'type': 'lifespan', 'asgi': {'version': '2.0', 'spec_version': '2.0'}, 'state': {}})

        async def asgi(receive, send):
            pass
        return asgi

    async def test():
        # Drive a full startup/shutdown cycle through LifespanOn.
        config = Config(app=asgi2app, lifespan='on')
        lifespan = LifespanOn(config)
        (await lifespan.startup())
        (await lifespan.shutdown())

    loop = asyncio.new_event_loop()
    loop.run_until_complete(test())
    loop.close()
class PhantomClient(object):
    """Minimal client for the Phantom REST API."""

    def __init__(self, user, password, base_url, verify_ssl=True):
        """Store credentials, the endpoint base URL, and the TLS verify flag."""
        self._user = user
        self._password = password
        self._base_url = base_url
        self._verify_ssl = verify_ssl

    def create_container(self, container):
        """POST *container* to /rest/container and return it with its new id.

        Raises RuntimeError with the server's message on failure.
        """
        url = self._base_url + '/rest/container'
        response = requests.post(
            url,
            data=json.dumps(container),
            auth=(self._user, self._password),
            verify=self._verify_ssl,
        )
        payload = response.json()
        if 'success' not in payload:
            raise RuntimeError(payload['message'])
        created = container.copy()
        created['id'] = payload['id']
        return created
def eager(_fn=None, *, remote: Optional[FlyteRemote]=None, client_secret_group: Optional[str]=None, client_secret_key: Optional[str]=None, timeout: Optional[timedelta]=None, poll_interval: Optional[timedelta]=None, local_entrypoint: bool=False, **kwargs):
    """Decorator turning an async function into an eager Flyte task.

    Usable bare (``@eager``) or configured (``@eager(remote=...)``); remaining
    ``**kwargs`` are forwarded to :func:`task`.

    Raises:
        ValueError: when ``local_entrypoint`` is True without a ``remote``.
    """
    if (_fn is None):
        # Called with configuration only: return a decorator awaiting the function.
        return partial(eager, remote=remote, client_secret_group=client_secret_group, client_secret_key=client_secret_key, local_entrypoint=local_entrypoint, **kwargs)
    if (local_entrypoint and (remote is None)):
        raise ValueError('Must specify remote argument if local_entrypoint is True')
    # NOTE(review): this bare expression has no effect — it looks like a stripped
    # decorator (e.g. ``@functools.wraps(_fn)``); confirm against upstream history.
    (_fn)
    async def wrapper(*args, **kws):
        logger.debug('Starting')
        _remote = remote
        # `async_ctx` is injected by the eager runtime; absent in plain local calls.
        ctx = kws.pop('async_ctx', None)
        (task_id, execution_id) = (None, None)
        if ctx:
            exec_params = ctx.user_space_params
            task_id = exec_params.task_id
            execution_id = exec_params.execution_id
        async_stack = AsyncStack(task_id, execution_id)
        _remote = _prepare_remote(_remote, ctx, client_secret_group, client_secret_key, local_entrypoint)
        # Register cleanup of spawned child executions on SIGTERM.
        loop = asyncio.get_event_loop()
        node_cleanup_partial = partial(node_cleanup_async, async_stack=async_stack)
        cleanup_fn = partial(asyncio.ensure_future, node_cleanup_partial(signal.SIGTERM, loop))
        signal.signal(signal.SIGTERM, partial(node_cleanup, loop=loop, async_stack=async_stack))
        async with eager_context(_fn, _remote, ctx, async_stack, timeout, poll_interval, local_entrypoint):
            try:
                if (_remote is not None):
                    # Run the body with the remote client active so spawned
                    # entities execute on the configured backend.
                    with _remote.remote_context():
                        out = (await _fn(*args, **kws))
                else:
                    out = (await _fn(*args, **kws))
                (await render_deck(async_stack))
                return out
            finally:
                # Always schedule node cleanup, success or failure.
                (await cleanup_fn())
    # Forward secrets so the remote client can authenticate inside the task.
    secret_requests = (kwargs.pop('secret_requests', None) or [])
    if ((client_secret_group is not None) and (client_secret_key is not None)):
        secret_requests.append(Secret(group=client_secret_group, key=client_secret_key))
    return task(wrapper, secret_requests=secret_requests, enable_deck=True, execution_mode=PythonFunctionTask.ExecutionBehavior.EAGER, **kwargs)
def exceptHook(exc_type, exc_value, exc_traceback):
    """Global excepthook: log uncaught exceptions; hand Ctrl-C to the default hook."""
    if issubclass(exc_type, KeyboardInterrupt):
        # Let the interpreter print the usual KeyboardInterrupt traceback.
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
        return
    log = logging.getLogger('Main')
    log.critical('Uncaught exception!')
    log.critical('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback))
def SetTransform(kwargs: dict) -> OutgoingMessage:
    """Serialize a SetTransform command into an OutgoingMessage.

    Required kwargs: 'id'. Optional: 'position', 'rotation', 'scale'
    (each a 3-element list) and 'is_world' (bool, defaults to True).

    Raises:
        AssertionError: when a provided vector is not a 3-element list.
    """
    compulsory_params = ['id']
    optional_params = ['position', 'rotation', 'scale', 'is_world']
    utility.CheckKwargs(kwargs, compulsory_params)
    msg = OutgoingMessage()
    msg.write_int32(kwargs['id'])
    msg.write_string('SetTransform')

    def _vector3(name):
        # Returns (value, present); validates a 3-element list when present.
        if name not in kwargs:
            return (None, False)
        value = kwargs[name]
        # BUGFIX: the scale check previously reported "rotation" in its message.
        assert isinstance(value, list) and (len(value) == 3), f'Argument {name} must be a 3-d list.'
        return (value, True)

    (position, set_position) = _vector3('position')
    (rotation, set_rotation) = _vector3('rotation')
    (scale, set_scale) = _vector3('scale')
    # Presence flags are written first so the reader knows which vectors follow.
    msg.write_bool(set_position)
    msg.write_bool(set_rotation)
    msg.write_bool(set_scale)
    for (present, vector) in ((set_position, position), (set_rotation, rotation), (set_scale, scale)):
        if present:
            for component in vector:
                msg.write_float32(component)
    # Default to world-space coordinates unless the caller says otherwise.
    msg.write_bool(kwargs.get('is_world', True))
    return msg
class OptionAccessibilityKeyboardnavigationFocusborder(Options):
    """Generated wrapper for the Highcharts
    ``accessibility.keyboardNavigation.focusBorder`` option group.

    NOTE(review): each option appears as a getter/setter pair sharing one name
    with no ``@property`` / ``@<name>.setter`` decorators, so the later def
    shadows the earlier one — the decorators look stripped by whatever produced
    this file; confirm against the generator's output.
    """
    def enabled(self):
        # Default: the focus border is enabled.
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def hideBrowserFocusOutline(self):
        # Default: hide the browser's native focus outline.
        return self._config_get(True)
    def hideBrowserFocusOutline(self, flag: bool):
        self._config(flag, js_type=False)
    def margin(self):
        # Default margin (pixels) around the focused element.
        return self._config_get(2)
    def margin(self, num: float):
        self._config(num, js_type=False)
    def style(self) -> 'OptionAccessibilityKeyboardnavigationFocusborderStyle':
        """Sub-options object describing the focus border style."""
        return self._config_sub_data('style', OptionAccessibilityKeyboardnavigationFocusborderStyle)
def extractHidamarisoutranslationsWordpressCom(item):
    """Parse a release from hidamarisoutranslations.wordpress.com feed items.

    Returns None for previews or titles without volume/chapter info, a release
    message when a known tag matches, and False otherwise.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    if ('preview' in title.lower()) or not (chp or vol):
        return None
    for (tagname, name, tl_type) in [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionSeriesSankeySonificationDefaultspeechoptionsMappingPlaydelay(Options):
    """Generated wrapper for the Highcharts
    ``series.sankey.sonification.defaultSpeechOptions.mapping.playDelay`` options.

    NOTE(review): getter/setter pairs share a name without ``@property`` /
    ``@<name>.setter`` decorators, so the later def shadows the earlier one —
    the decorators appear stripped; confirm against the generator's output.
    """
    def mapFunction(self):
        # No default map function.
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        # No default mapping target.
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        # No default maximum.
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        # No default minimum.
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        # No default 'within' constraint.
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the leading `.django_db` line looks like a stripped
# `@pytest.mark.django_db` decorator; as written it is not valid syntax.
.django_db
def test_non_matching_schedule_is_removed(client):
    """Loading the schedule file removes rows whose id is absent from the file."""
    # Seed a schedule row that the fixture file does not contain.
    baker.make('submissions.DABSSubmissionWindowSchedule', id=2010121, submission_reveal_date='2010-12-23', submission_due_date='2010-12-23', submission_fiscal_year=2010)
    call_command('load_dabs_submission_window_schedule', file=SCHEDULE_FILE)
    # The loader should have deleted the non-matching row.
    schedule_count = DABSSubmissionWindowSchedule.objects.all().filter(id=2010121).count()
    assert (schedule_count == 0)
# NOTE(review): `.evm_tools` / `.parametrize(...)` look like stripped
# `@pytest.mark...` decorators; as written they are not valid syntax.
.evm_tools
.parametrize('test_case', find_test_fixtures(), ids=idfn)
def test_b11r(test_case: Dict) -> None:
    """Run one b11r tool fixture: xfail ignored cases, expect failure otherwise."""
    if (test_case['name'] in IGNORE_TESTS):
        pytest.xfail('Undefined behavior for specs')
    elif test_case['success']:
        b11r_tool_test(test_case)
    else:
        # Fixtures marked unsuccessful must make the tool raise.
        with pytest.raises(FatalException):
            b11r_tool_test(test_case)
def init_supervisor(ns, node, outputs=tuple(), state_outputs=tuple()):
    """Wire up the reactive (Rx) reset/registration pipeline for the supervisor node.

    Args:
        ns: namespace prefix used to build all channel addresses.
        node: the supervisor node; supplies `_get_states` and `ns_name`.
        outputs: accepted for API symmetry; an empty list is emitted instead.
        state_outputs: per-state dicts; mutated in place ('done'/'msg' subjects
            are added and 'address' is rewritten to its '/set' form).

    Returns:
        (rx_objects, env_subjects): channel descriptors plus the subjects the
        environment drives (currently only 'start_reset').
    """
    tp_scheduler = ThreadPoolScheduler(max_workers=5)
    reset_disp = CompositeDisposable()
    # Give every state output a '/done' companion channel and a '/set' input.
    done_outputs = []
    for s in state_outputs:
        s['done'] = Subject()
        done_outputs.append(dict(name=s['name'], address=(s['address'] + '/done'), dtype='bool', msg=s['done']))
        s['msg'] = Subject()
        s['address'] += '/set'
    # start_reset: external trigger that kicks off a reset cycle.
    SR = Subject()
    start_reset = dict(name='start_reset', address=(ns + '/start_reset'), msg=Subject(), dtype='int64')
    d = SR.subscribe(start_reset['msg'], scheduler=tp_scheduler)
    reset_disp.add(d)
    # end_register feeds real_reset: registration completion triggers the real reset.
    ER = Subject()
    end_register = dict(name='reset', address=(ns + '/end_register'), dtype='int64', msg=ER)
    real_reset = dict(name='real_reset', address=(ns + '/real_reset'), msg=Subject(), dtype='int64')
    d = ER.subscribe(real_reset['msg'])
    reset_disp.add(d)
    # Fan the node's state snapshot out to each state's 'done' and 'msg' streams.
    msgs = ER.pipe(ops.map(node._get_states), ops.share())
    for s in state_outputs:
        d = msgs.pipe(ops.pluck((s['name'] + '/done')), trace_observable('done', node), ops.share()).subscribe(s['done'])
        reset_disp.add(d)
        d = msgs.pipe(filter_dict_on_key(s['name']), ops.filter((lambda msg: (msg is not None))), convert(s['space'], s['processor'], s['name'], 'states', node, direction='out'), ops.share()).subscribe(s['msg'])
        reset_disp.add(d)
    # A reset signal marks this node's end_reset as True.
    R = Subject()
    reset = dict(name='reset', address=(ns + '/reset'), dtype='int64', msg=R)
    node_reset = dict(name=node.ns_name, address=(node.ns_name + '/end_reset'), dtype='bool', msg=Subject())
    d = R.pipe(ops.map((lambda x: True))).subscribe(node_reset['msg'], scheduler=tp_scheduler)
    reset_disp.add(d)
    # Engine tick channel (scalar int64 — dtype taken from an eagerx Space).
    space = eagerx.Space(shape=(), dtype='int64')
    dtype = space.to_dict()['dtype']
    tick = dict(name='tick', address=(ns + '/engine/outputs/tick'), msg=Subject(), dtype=dtype)
    node_inputs = [reset, end_register]
    node_outputs = [start_reset, tick, node_reset, real_reset]
    outputs = []
    env_subjects = dict(start_reset=SR)
    rx_objects = dict(node_inputs=node_inputs, node_outputs=node_outputs, outputs=outputs, state_outputs=(state_outputs + tuple(done_outputs)), disposable=reset_disp)
    return (rx_objects, env_subjects)
class stats_sprint_burndown1(StdOutputParams, ExecutorTopicContinuum, CreateMakeDependencies):
    """Output module that computes sprint burndown statistics files."""

    def __init__(self, oconfig):
        tracer.info('Called.')
        StdOutputParams.__init__(self, oconfig)
        CreateMakeDependencies.__init__(self)

    def cmad_topic_continuum_pre(self, _):
        tracer.debug('Called.')
        CreateMakeDependencies.write_reqs_dep(self._cmad_file, self._output_filename)

    def topic_continuum_sort(self, vcs_commit_ids, topic_sets):
        # Only the newest commit's topic set matters for the burndown.
        latest_commit = vcs_commit_ids[-1].get_commit()
        return [topic_sets[latest_commit]]

    def requirement_set_pre(self, requirement_set):
        units = Statistics.get_units_sprint(requirement_set, self._start_date, self._end_date)
        Statistics.output_stat_files(self._output_filename, self._start_date, units)
def gen_dim_calculator(dim_info: DimInfo, is_ptr: bool) -> str:
    """Build a C expression multiplying the dims described by *dim_info*.

    The variable prefix is a_dim/b_dim for the two inputs, c_dim for the
    output, optionally dereferenced when *is_ptr* is set.
    """
    deref = '*' if is_ptr else ''
    if dim_info.source == Source.INPUT:
        if dim_info.tensor_idx == 0:
            base = deref + 'a_dim'
        else:
            assert dim_info.tensor_idx == 1, f'Unsupported gemm dim: {dim_info}'
            base = deref + 'b_dim'
    else:
        assert (dim_info.source == Source.OUTPUT) and (dim_info.tensor_idx == 0), f'Unsupported gemm dim: {dim_info}'
        base = deref + 'c_dim'
    return ' * '.join(f'({base}{idx})' for idx in dim_info.dim_idx)
class OefSearchHandler(Handler):
    """Handle OEF search protocol messages, collecting found agents in shared state."""
    SUPPORTED_PROTOCOL = OefSearchMessage.protocol_id
    def setup(self) -> None:
        """Nothing to set up."""
    def handle(self, message: Message) -> None:
        """Dispatch an incoming OEF search message by its performative."""
        oef_search_msg = cast(OefSearchMessage, message)
        oef_search_dialogues = cast(OefSearchDialogues, self.context.oef_search_dialogues)
        oef_search_dialogue = cast(Optional[OefSearchDialogue], oef_search_dialogues.update(oef_search_msg))
        if (oef_search_dialogue is None):
            # Message does not extend any known dialogue; log and stop.
            self._handle_unidentified_dialogue(oef_search_msg)
            return
        if (oef_search_msg.performative is OefSearchMessage.Performative.OEF_ERROR):
            self._handle_error(oef_search_msg, oef_search_dialogue)
        elif (oef_search_msg.performative is OefSearchMessage.Performative.SEARCH_RESULT):
            self._handle_search(oef_search_msg)
        else:
            self._handle_invalid(oef_search_msg, oef_search_dialogue)
    def _handle_unidentified_dialogue(self, oef_search_msg: OefSearchMessage) -> None:
        """Log a message that does not belong to any known dialogue."""
        self.context.logger.info('received invalid oef_search message={}, unidentified dialogue.'.format(oef_search_msg))
    def _handle_error(self, oef_search_msg: OefSearchMessage, oef_search_dialogue: OefSearchDialogue) -> None:
        """Log an OEF error reply."""
        self.context.logger.info('received oef_search error message={} in dialogue={}.'.format(oef_search_msg, oef_search_dialogue))
    def _handle_search(self, oef_search_msg: OefSearchMessage) -> None:
        """Record the agents from a search result into the shared-state set."""
        agents = list(oef_search_msg.agents)
        nb_agents = len(agents)
        if (nb_agents == 0):
            self.context.logger.info(f'no agents found, search_response={oef_search_msg}')
            return
        strategy = cast(Strategy, self.context.strategy)
        self.context.logger.info(f'found number of agents={nb_agents}, search_response={oef_search_msg}')
        # Lazily create the shared set of discovered agents.
        if (self.context.shared_state.get(strategy.shared_storage_key, None) is None):
            self.context.shared_state[strategy.shared_storage_key] = set()
        for agent in agents:
            self.context.shared_state[strategy.shared_storage_key].add(agent)
    def _handle_invalid(self, oef_search_msg: OefSearchMessage, oef_search_dialogue: OefSearchDialogue) -> None:
        """Warn about an unsupported performative."""
        self.context.logger.warning('cannot handle oef_search message of performative={} in dialogue={}.'.format(oef_search_msg.performative, oef_search_dialogue))
    def teardown(self) -> None:
        """Nothing to tear down."""
def extractReondellWordpressCom(item):
    """Parse a release from reondell.wordpress.com feed items.

    Returns None for previews or titles without volume/chapter info, a release
    message when a known tag matches, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ('preview' in item['title'].lower()) or not (chp or vol):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    # Return the first matching tag's release message, if any.
    matches = (
        buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
        for (tagname, name, tl_type) in tagmap
        if tagname in item['tags']
    )
    return next(matches, False)
class FilterAction(Action):
    """Menu action that instantiates a filter and adds it to the mayavi pipeline."""

    # Metadata describing the filter this action creates.
    metadata = Instance(Metadata)
    mayavi = Instance('mayavi.plugins.script.Script')
    enabled = False

    def __init__(self, **traits):
        super(FilterAction, self).__init__(**traits)
        # Re-evaluate availability whenever the pipeline selection changes.
        self.mayavi.engine.on_trait_change(self._update_enabled, ['current_selection', 'current_object'])

    def perform(self, event):
        """Create the filter and make it the current pipeline selection."""
        factory = self.metadata.get_callable()
        filter_obj = factory()
        script = self.mayavi
        script.add_filter(filter_obj)
        script.engine.current_selection = filter_obj

    def _update_enabled(self, obj):
        # Only pipeline objects can tell us whether this filter applies.
        if isinstance(obj, PipelineBase):
            self.enabled = obj.menu_helper.check_active(self.metadata)
        else:
            self.enabled = False

    def _mayavi_default(self):
        return get_imayavi(self.window)
def test_simple_renaming_with_arguments(renaming_graph, arg1, arg2, variable_v, variable_v_new, variable_u, variable_u_new, variable_x, variable_x_new, variable_y, variable_y_new):
    """SimpleVariableRenamer must map each conflicting variable to its renamed copy."""
    (task, interference_graph) = renaming_graph
    renamer = SimpleVariableRenamer(task, interference_graph)
    arg1_new = [Variable('arg1', Integer.int32_t())] + [Variable(f'arg1_{i}', Integer.int32_t()) for i in range(1, 6)]
    arg2_new = [Variable('arg2', Integer.int32_t())] + [Variable(f'arg2_{i}', Integer.int32_t()) for i in range(1, 6)]
    expected = {
        arg1[0]: arg1_new[0],
        arg2[0]: arg2_new[0],
        arg2[2]: arg2_new[2],
        arg2[3]: arg2_new[3],
        arg2[4]: arg2_new[4],
        variable_v[1]: variable_v_new[1],
        variable_u[2]: variable_u_new[2],
        variable_x[2]: variable_x_new[2],
        variable_v[2]: variable_v_new[2],
        variable_u[5]: variable_u_new[5],
        variable_y[1]: variable_y_new[1],
    }
    assert renamer.renaming_map == expected
def test_valid_schema_decimal():
    """A DECIMAL foundry field maps onto a nullable Spark DecimalType(17, 2)."""
    field = {'type': 'DECIMAL', 'name': 'price', 'nullable': None, 'userDefinedTypeClass': None, 'customMetadata': {}, 'arraySubtype': None, 'precision': 17, 'scale': 2, 'mapKeyType': None, 'mapValueType': None, 'subSchemas': None}
    spark_schema = foundry_schema_to_spark_schema({'fieldSchemaList': [field]})
    expected = StructType([StructField('price', DecimalType(17, 2), True, {})])
    assert spark_schema == expected
def check_tflite_gcs_format(model, validation_error=None):
    """Assert that *model* is an unpublished, GCS-hosted TFLite model.

    When *validation_error* is truthy the size/hash metadata must be absent;
    otherwise it must be present.
    """
    assert model.validation_error == validation_error
    assert model.published is False
    assert model.model_format.model_source.gcs_tflite_uri.startswith('gs://')
    if validation_error:
        # Invalid models carry no computed metadata.
        assert model.model_format.size_bytes is None
        assert model.model_hash is None
    else:
        assert model.model_format.size_bytes is not None
        assert model.model_hash is not None
def test_replace_version():
    """replace_version rewrites only the top-level `version:` value."""
    # Straightforward replacement of the version value.
    # (a byte-identical duplicate of this assertion was removed)
    assert (replace_version('saas_config:\n version: 0.0.1\n key: example', '0.0.2') == 'saas_config:\n version: 0.0.2\n key: example')
    # No version line: the config is returned unchanged.
    # BUGFIX: this comparison was missing `assert`, so its result was discarded.
    assert (replace_version('saas_config:\n key: example', '1.0.0') == 'saas_config:\n key: example')
    # Only the version key is rewritten; other *_version keys are untouched.
    assert (replace_version('saas_config:\n version: 0.0.1\n key: example\n other_version: 0.0.2', '0.0.3') == 'saas_config:\n version: 0.0.3\n key: example\n other_version: 0.0.2')
def update_number(num: str, delta: float, precision=3):
    """Add *delta* to the numeric string *num* and return a minimal string form.

    Trailing zeros (and a trailing point) are stripped, and the leading zero is
    dropped when the input also omitted it ('.5' style stays '.75').

    Args:
        num: the number as text, e.g. '1.5', '.5', '-.5'.
        delta: amount to add.
        precision: decimal places used before trimming.

    Returns:
        The updated number as a string, or None when *num* cannot be parsed.
    """
    try:
        fmt = (('%.' + str(precision)) + 'f')
        value = (float(num) + delta)
        neg = (value < 0)
        result = (fmt % abs(value))
        result = result.rstrip('0').rstrip('.')
        # Preserve the caller's leading-dot style: '.5' + 0.25 -> '.75'.
        if (((num[0] == '.') or (num[0:2] == '-.')) and (result[0] == '0')):
            result = result[1:]
        return ('-{0}'.format(result) if neg else result)
    except (TypeError, ValueError, IndexError):
        # BUGFIX: was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit. Bad input still yields best-effort None.
        return None
# NOTE(review): the leading `.integration_postgres` / `.integration` lines look
# like stripped `@pytest.mark...` decorators; as written they are not valid syntax.
.integration_postgres
.integration
class TestPostgresConnector():
    """Integration checks for the Postgres connection config / connector."""
    def test_postgres_db_connector(self, api_client: TestClient, db: Session, generate_auth_header, connection_config, postgres_integration_db, postgres_example_secrets) -> None:
        """Connector succeeds with default secrets and a URL secret; fails on a bad host."""
        connector = get_connector(connection_config)
        assert (connector.__class__ == PostgreSQLConnector)
        client = connector.client()
        assert (client.__class__ == Engine)
        assert (connector.test_connection() == ConnectionTestStatus.succeeded)
        # Swap the discrete secrets for a single SQLAlchemy URL secret.
        connection_config.secrets = {'url': str(URL.create('postgresql', username=postgres_example_secrets['username'], password=postgres_example_secrets['password'], host=postgres_example_secrets['host'], database=postgres_example_secrets['dbname']))}
        connection_config.save(db)
        connector = get_connector(connection_config)
        assert (connector.test_connection() == ConnectionTestStatus.succeeded)
        # An unreachable host must surface as a ConnectionException.
        connection_config.secrets = {'host': 'bad_host'}
        connection_config.save(db)
        connector = get_connector(connection_config)
        with pytest.raises(ConnectionException):
            connector.test_connection()
# NOTE(review): the bare `(condition, 'reason')` tuples before the class and the
# test methods look like stripped `@unittest.skipIf(...)` decorators; as written
# they are no-op expressions (and the class-level one is outside any scope).
(no_gui_test_assistant, 'No GuiTestAssistant')
class TestConfirm(unittest.TestCase, GuiTestAssistant):
    """GUI tests exercising the `confirm` dialog's buttons, defaults and title."""
    def setUp(self):
        GuiTestAssistant.setUp(self)
    def tearDown(self):
        GuiTestAssistant.tearDown(self)
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_extras(self):
        """Custom labels/informative/detail still accept as OK."""
        tester = ModalDialogTester((lambda : confirm(None, 'message', default=NO, no_label='Reject', yes_label='Confirm', informative='informative', detail='detail')))
        tester.open_and_run(when_opened=(lambda x: x.close(accept=True)))
        self.assertEqual(tester.result, OK)
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_reject(self):
        """Closing without accepting yields CANCEL when cancel is allowed."""
        tester = ModalDialogTester((lambda : confirm(None, 'message', cancel=True)))
        tester.open_and_run(when_opened=(lambda x: x.close(accept=False)))
        self.assertEqual(tester.result, CANCEL)
    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_yes(self):
        """Clicking Yes yields YES."""
        tester = ModalDialogTester((lambda : confirm(None, 'message')))
        tester.open_and_wait(when_opened=(lambda x: x.click_button(YES)))
        self.assertEqual(tester.result, YES)
    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_no(self):
        """Clicking No yields NO."""
        tester = ModalDialogTester((lambda : confirm(None, 'message')))
        tester.open_and_wait(when_opened=(lambda x: x.click_button(NO)))
        self.assertEqual(tester.result, NO)
    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_cancel(self):
        """Clicking Cancel yields CANCEL when cancel is allowed."""
        tester = ModalDialogTester((lambda : confirm(None, 'message', cancel=True)))
        tester.open_and_wait(when_opened=(lambda x: x.click_button(CANCEL)))
        self.assertEqual(tester.result, CANCEL)
    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_title(self):
        """A custom title does not affect the clicked result."""
        tester = ModalDialogTester((lambda : confirm(None, 'message', title='Title')))
        tester.open_and_run(when_opened=(lambda x: x.click_button(NO)))
        self.assertEqual(tester.result, NO)
    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_default_yes(self):
        """With default=YES, clicking Yes yields YES."""
        tester = ModalDialogTester((lambda : confirm(None, 'message', default=YES)))
        tester.open_and_run(when_opened=(lambda x: x.click_button(YES)))
        self.assertEqual(tester.result, YES)
    (is_pyqt5, "Confirmation dialog click tests don't work on pyqt5.")
    (is_pyqt4_linux, "Confirmation dialog click tests don't work reliably on linux. Issue #282.")
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_default_cancel(self):
        """With cancel allowed and default=YES, clicking Cancel still yields CANCEL."""
        tester = ModalDialogTester((lambda : confirm(None, 'message', cancel=True, default=YES)))
        tester.open_and_run(when_opened=(lambda x: x.click_button(CANCEL)))
        self.assertEqual(tester.result, CANCEL)
def extractMulotranslationsWordpressCom(item):
    """Parse a release from mulotranslations.wordpress.com feed items.

    Returns None for previews or unparseable titles, a release message when a
    known series tag is present, and False otherwise.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    if ('preview' in title.lower()) or not (chp or vol or frag):
        return None
    tagmap = [('Aiming For Harem Queen in Different World', 'Aiming For Harem Queen in Different World', 'translated'), ('TS Reincarnated as the sub heroine', 'TS Reincarnated as the Sub Heroine.', 'translated'), ('I was just an only child boy, Now I became one of a four quadruplet sisters.', 'I was just an only child boy, Now I became one of a four quadruplet sisters.', 'translated'), ('When I became a Girl, an Unexpected Love Quarrel Occurred!', 'When I became a Girl, an Unexpected Love Quarrel Occurred!', 'translated'), ('They Said My Status Stayed the Same Even Though I Reincarnated in Another World!?', 'They Said My Status Stayed the Same Even Though I Reincarnated in Another World!?', 'translated'), ('emergency adaptation for a male high school put into ts.', 'emergency adaptation for a male high school put into ts.', 'translated'), ('the struggles of a young ts yuki-onna.', 'the struggles of a young ts yuki-onna.', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    # Return the first matching tag's release message, if any.
    matches = (
        buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
        for (tagname, name, tl_type) in tagmap
        if tagname in item['tags']
    )
    return next(matches, False)
def fortios_certificate(data, fos, check_mode):
    """Dispatch the certificate_ca task and normalize the FortiOS response.

    Returns the raw response in check mode, otherwise the
    (failed, changed, response, diff) tuple for the module exit.
    """
    fos.do_member_operation('certificate', 'ca')
    if data['certificate_ca']:
        resp = certificate_ca(data, fos, check_mode)
    else:
        fos._module.fail_json(msg=('missing task body: %s' % 'certificate_ca'))
    if check_mode:
        return resp
    failed = not is_successful_status(resp)
    # 'revision_changed' is authoritative when present; assume changed otherwise.
    changed = is_successful_status(resp) and (resp['revision_changed'] if 'revision_changed' in resp else True)
    return (failed, changed, resp, {})
# NOTE(review): the leading `.parametrize(...)` line looks like a stripped
# `@pytest.mark.parametrize` decorator; as written it is not valid syntax.
.parametrize('comparison, expected_value', maxauthtries_fail_params)
def test_integration_audit_sshd_config_option_fail_maxauthtries(setup_sshd_config, expected_value, comparison):
    """Each failing maxauthtries comparison must produce audit state 2 (fail)."""
    state = CISAudit().audit_sshd_config_option(parameter='maxauthtries', expected_value=expected_value, comparison=comparison)
    assert (state == 2)
class conv3d(Operator):
    """3D convolution op (NDHWC layout) with profiling-driven kernel selection."""
    def __init__(self, stride, pad, dilate=1, group=1) -> None:
        """Configure the conv; scalar stride/pad/dilate are broadcast to (d, h, w)."""
        super().__init__()
        self._attrs['op'] = 'conv3d'
        self._attrs['stride'] = stride
        if isinstance(stride, int):
            self._attrs['stride'] = (stride, stride, stride)
        self._attrs['pad'] = pad
        if isinstance(pad, int):
            self._attrs['pad'] = (pad, pad, pad)
        self._attrs['dilate'] = dilate
        if isinstance(dilate, int):
            self._attrs['dilate'] = (dilate, dilate, dilate)
        self._attrs['group'] = group
        self._attrs['has_profiler'] = True
        # Epilogue defaults; alignment is refined once the output shape is known.
        self._attrs['epilogue_alignment'] = 1
        self._attrs['epilogue'] = 'LinearCombination'
        self._attrs['workspace'] = 0
        self._attrs['split_k'] = None
        # Code-gen templates used for shape math and profiling keys.
        self.shape_eval_template = SHAPE_FUNC_TEMPLATE
        self.shape_save_template = SHAPE_ASSIGNMENT_TEMPLATE
        self.exec_key_template = EXEC_KEY_TEMPLATE
        self.exec_dyn_key_template = EXEC_DYN_KEY_TEMPLATE
        self.exec_cond_template = EXEC_COND_TEMPLATE
    def _infer_shape(self, x: List[int], w: List[int]) -> List[int]:
        """Compute the output NDHWC shape for one concrete input/weight shape.

        Renders the shape-eval template to Python source and exec()s it, so the
        same arithmetic as the generated code is reused.
        """
        # Input channels must equal weight channels times the group count.
        if (x[4] != (w[4] * self._attrs['group'])):
            raise RuntimeError('X/W Shape mismatch for conv3d')
        eval_func = self.shape_eval_template.render(indent='', dtype='', div='//', stride_d=self._attrs['stride'][0], stride_h=self._attrs['stride'][1], stride_w=self._attrs['stride'][2], pad_d=self._attrs['pad'][0], pad_h=self._attrs['pad'][1], pad_w=self._attrs['pad'][2], dilate_d=self._attrs['dilate'][0], dilate_h=self._attrs['dilate'][1], dilate_w=self._attrs['dilate'][2], x_dim0=x[0], x_dim1=x[1], x_dim2=x[2], x_dim3=x[3], x_dim4=x[4], w_dim0=w[0], w_dim1=w[1], w_dim2=w[2], w_dim3=w[3])
        output = {}
        # The rendered source assigns NO/DO/HO/WO/CO into `output`.
        exec(eval_func, output)
        return [int(output['NO']), int(output['DO']), int(output['HO']), int(output['WO']), int(output['CO'])]
    def _infer_shapes(self, x: Tensor, w: Tensor) -> List[int]:
        """Infer the (possibly dynamic) output shape from input and weight tensors."""
        x_shape_values = [var._attrs['values'] for var in x._attrs['shape']]
        # Enumerate every concrete input shape the dynamic dims allow.
        x_shapes = itertools.product(*x_shape_values)
        w_shape = [var._attrs['values'][0] for var in w._attrs['shape']]
        # Cache kernel geometry for later codegen and profiling.
        self._attrs['CO'] = w_shape[0]
        self._attrs['KD'] = w_shape[1]
        self._attrs['KH'] = w_shape[2]
        self._attrs['KW'] = w_shape[3]
        y_shapes = []
        for x_shape in x_shapes:
            y_shape = self._infer_shape(x_shape, w_shape)
            y_shapes.append(y_shape)
        def unique(vector):
            # Sorted, de-duplicated candidate values for one output dim.
            return sorted(set(vector))
        # Batch dim is passed through; the other dims become IntVars over candidates.
        output_shape = [x._attrs['shape'][0], shape_utils.gen_int_var(unique([d[1] for d in y_shapes])), shape_utils.gen_int_var(unique([d[2] for d in y_shapes])), shape_utils.gen_int_var(unique([d[3] for d in y_shapes])), shape_utils.gen_int_var(unique([d[4] for d in y_shapes]))]
        return output_shape
def _invert_exec_key(self, key):
tmp = re.findall('(\\d+)', key)
return [int(x) for x in tmp]
    def _gen_exec_key(self, shape: List[int]):
        """Render the single-line exec-path key for one concrete input shape."""
        return self.exec_key_template.render(x_dim0=shape[0], x_dim1=shape[1], x_dim2=shape[2], x_dim3=shape[3], x_dim4=shape[4]).replace('\n', '')
    def _gen_dyn_exec_key(self, dim0_lb, dim0_ub, dim1_lb, dim1_ub, dim2_lb, dim2_ub, dim3_lb, dim3_ub, dim4):
        """Render the exec-path key for ranged (lower/upper bound) dynamic dims."""
        return self.exec_dyn_key_template.render(x_dim0_lb=dim0_lb, x_dim0_ub=dim0_ub, x_dim1_lb=dim1_lb, x_dim1_ub=dim1_ub, x_dim2_lb=dim2_lb, x_dim2_ub=dim2_ub, x_dim3_lb=dim3_lb, x_dim3_ub=dim3_ub, x_dim4=dim4).replace('\n', '')
    def _extract_exec_path(self, x: Tensor):
        """Populate 'exec_path' with one profiling key per batch-size candidate.

        Non-batch dims are pinned to their maximum value; per-dim lower bounds
        are remembered separately in 'dim_lower_bounds'.
        """
        x_shape_values = [var._attrs['values'] for var in x._attrs['shape']]
        self._attrs['dim_lower_bounds'] = [min(vals) for vals in x_shape_values]
        # Keep all batch candidates, but only the max of every other dim.
        x_shape_values = ([x_shape_values[0]] + [[max(vs)] for vs in x_shape_values[1:]])
        x_shapes = itertools.product(*x_shape_values)
        self._attrs['exec_path'] = OrderedDict()
        for x_shape in x_shapes:
            key = self._gen_exec_key(x_shape)
            # Value (best algorithm) is filled in later by profiling.
            self._attrs['exec_path'][key] = ''
def _signature(self):
signature = 'conv3d: K=[{kd}, {kh}, {kw}], S=[{sd}, {sh}, {sw}], P=[{pd}, {ph}, {pw}], CO=[{co}]'.format(kd=self._attrs['KD'], kh=self._attrs['KH'], kw=self._attrs['KW'], sd=self._attrs['stride'][0], sh=self._attrs['stride'][1], sw=self._attrs['stride'][2], pd=self._attrs['pad'][0], ph=self._attrs['pad'][1], pw=self._attrs['pad'][2], co=self._attrs['CO'])
return signature
    def _extract_epilogue_alignment(self, output_shape: List[IntVar]) -> None:
        """Choose the widest vectorized epilogue alignment the output channels allow."""
        epilogue_dim = output_shape[(- 1)]
        # Alignment needs a concrete channel count, so the last dim must be static.
        if (not isinstance(epilogue_dim, IntImm)):
            raise RuntimeError('Conv output last dimension must be static!')
        self._attrs['epilogue_alignment'] = alignment.find_max_alignment(number=epilogue_dim._attrs['values'][0], dtype=self._attrs['inputs'][0]._attrs['dtype'])
    def __call__(self, x: Tensor, w: Tensor) -> List[Tensor]:
        """Apply conv3d to input *x* and weight *w*.

        NOTE(review): the annotation says List[Tensor] but a single Tensor is
        returned — confirm the intended return type.
        """
        self._attrs['inputs'] = [x, w]
        self._set_depth()
        output_shape = self._infer_shapes(x, w)
        # Profiling keys and epilogue alignment depend on the inferred shapes.
        self._extract_exec_path(x)
        self._extract_epilogue_alignment(output_shape)
        output = Tensor(output_shape, src_ops={self}, dtype=x._attrs['dtype'])
        self._attrs['outputs'] = [output]
        return output
def _get_op_attributes(self) -> Dict[(str, Any)]:
target_attrs = ['dilate', 'group', 'pad', 'stride']
attr = {}
for target_attr in target_attrs:
if (target_attr in self._attrs):
attr[target_attr] = self._attrs[target_attr]
return attr
    def _should_build_profiler(self) -> bool:
        """Decide whether profiler binaries must be generated and built.

        Returns False when every workload's best algorithm can be served from
        the profile cache (or dummy results are in use); True otherwise.
        Dynamic input dims always force a build.
        """
        force_cache = environ.force_profiler_cache()
        if self._has_dynamic_input_dims():
            if force_cache:
                raise RuntimeError('We cannot force to use the cache as dynamic dims require us to generate and build the profilers')
            return True
        if force_cache:
            return False
        target = backend.target.Target.current()
        workloads = list(self._attrs['exec_path'].keys())
        build_profiler = True
        if (not target.use_dummy_profiling_results()):
            # Any op instance supplies the dtype/layout fields of the cache key.
            tmp_key = next(iter(self._attrs['op_instance'].keys()))
            tmp_op = self._attrs['op_instance'][tmp_key]
            build_profiler = False
            for wkl in workloads:
                exec_entry_sha1 = sha1(wkl.encode('utf-8')).hexdigest()
                split_k = (1 if (self._attrs['split_k'] is None) else self._attrs['split_k'])
                query = Conv3dQueryEntry(dtype_a=(tmp_op.A.element.value - 1), dtype_b=(tmp_op.B.element.value - 1), dtype_c=(tmp_op.C.element.value - 1), dtype_acc=(tmp_op.tile_description.math_instruction.element_accumulator.value - 1), major_a=tmp_op.A.layout.value, major_b=tmp_op.B.layout.value, major_c=tmp_op.C.layout.value, kd=self._attrs['KD'], kh=self._attrs['KH'], kw=self._attrs['KW'], co=self._attrs['CO'], stride_d=self._attrs['stride'][0], stride_h=self._attrs['stride'][1], stride_w=self._attrs['stride'][2], pad_d=self._attrs['pad'][0], pad_h=self._attrs['pad'][1], pad_w=self._attrs['pad'][2], dilate_d=self._attrs['dilate'][0], dilate_h=self._attrs['dilate'][1], dilate_w=self._attrs['dilate'][2], op_type=self._attrs['op'], device=target._arch, epilogue=tmp_op.epilogue_functor.value, split_k=split_k, exec_entry_sha1=exec_entry_sha1)
                cache_value = target.query_profile_cache('conv3d', query.__dict__)
                if ((cache_value is not None) and (not target.force_profile())):
                    _LOGGER.info(f"Load profiling result for {self._attrs['name']} from cache: {cache_value}")
                    (best_algo, workspace) = cache_value
                    self._attrs['exec_path'][wkl] = best_algo
                    self._attrs['workspace'] = max(self._attrs['workspace'], workspace)
                else:
                    # A single cache miss is enough to require building profilers.
                    build_profiler = True
        return build_profiler
    def gen_profiler(self, workdir: str=None, dynamic_profiling_strategy=DynamicProfileStrategy.HINTS) -> None:
        """Generate profiler sources for this op when profiling is required.

        NOTE(review): despite the ``-> None`` annotation, the build path returns
        the value of generate_profiler_sources — confirm intended.
        """
        target = backend.target.Target.current()
        # Populate op_instance configs for the current backend target.
        func_key = '{target}.{op}.config'.format(target=target.name(), op=self._attrs['op'])
        func = registry.get(func_key)
        func(self._attrs, dtype=self._attrs['inputs'][0]._attrs['dtype'])
        if self._should_build_profiler():
            x_shapes = [self._invert_exec_key(exec_key) for exec_key in self._attrs['exec_path']]
            self._attrs['op_instance'] = filter_op_instances(func_attrs=self._attrs, x_shapes=x_shapes)
            return generate_profiler_sources(func_attrs=self._attrs, op_class='conv3d', workdir=workdir, shape_template=self.shape_eval_template)
def _gen_profile_cmd(self, profiler_prefix, cfg, x_shape):
exe_path = os.path.join(profiler_prefix, cfg)
if (not os.access(exe_path, os.X_OK)):
raise RuntimeError(('Profiler %s is not executable' % exe_path))
cmd = [exe_path]
cmd.append(x_shape[0])
cmd.append(x_shape[1])
cmd.append(x_shape[2])
cmd.append(x_shape[3])
cmd.append(x_shape[4])
cmd.append(self._attrs['KD'])
cmd.append(self._attrs['KH'])
cmd.append(self._attrs['KW'])
cmd.append(self._attrs['CO'])
cmd.append(self._attrs['stride'][0])
cmd.append(self._attrs['stride'][1])
cmd.append(self._attrs['stride'][2])
cmd.append(self._attrs['pad'][0])
cmd.append(self._attrs['pad'][1])
cmd.append(self._attrs['pad'][2])
cmd.append(self._attrs['dilate'][0])
cmd.append(self._attrs['dilate'][1])
cmd.append(self._attrs['dilate'][2])
cmd.append(self._attrs['group'])
command = [str(x) for x in cmd]
return command
def _profile_single_workload(self, profiler_prefix, exec_key, devices, force_cache):
    """Profile one workload, preferring the profile cache.

    Returns a ``(best_algo, workspace)`` tuple and records the result in
    the profile cache. Raises if a cache hit is required (force_cache or
    a dummy-profiling CI run) but missing, or if profiling produced no
    results.
    """
    target = backend.target.Target.current()
    # Any op instance works for the cache query: dtype/layout fields are
    # shared across instances of this op.
    tmp_key = next(iter(self._attrs['op_instance'].keys()))
    tmp_op = self._attrs['op_instance'][tmp_key]
    exec_entry_sha1 = sha1(exec_key.encode('utf-8')).hexdigest()
    split_k = (1 if (self._attrs['split_k'] is None) else self._attrs['split_k'])
    query = Conv3dQueryEntry(dtype_a=(tmp_op.A.element.value - 1), dtype_b=(tmp_op.B.element.value - 1), dtype_c=(tmp_op.C.element.value - 1), dtype_acc=(tmp_op.tile_description.math_instruction.element_accumulator.value - 1), major_a=tmp_op.A.layout.value, major_b=tmp_op.B.layout.value, major_c=tmp_op.C.layout.value, kd=self._attrs['KD'], kh=self._attrs['KH'], kw=self._attrs['KW'], co=self._attrs['CO'], stride_d=self._attrs['stride'][0], stride_h=self._attrs['stride'][1], stride_w=self._attrs['stride'][2], pad_d=self._attrs['pad'][0], pad_h=self._attrs['pad'][1], pad_w=self._attrs['pad'][2], dilate_d=self._attrs['dilate'][0], dilate_h=self._attrs['dilate'][1], dilate_w=self._attrs['dilate'][2], op_type=self._attrs['op'], device=target._arch, epilogue=tmp_op.epilogue_functor.value, split_k=split_k, exec_entry_sha1=exec_entry_sha1)
    cache_value = target.query_profile_cache('conv3d', query.__dict__)
    if ((cache_value is not None) and (not target.force_profile())):
        _LOGGER.info('Load profiling result from cache.')
        return cache_value
    if ((cache_value is None) and force_cache):
        op_type = self._attrs['op']
        # BUG FIX: the message parts were previously passed as separate
        # positional args, so the exception rendered as a tuple. Build a
        # single message string instead.
        raise RuntimeError(
            'force_cache is enabled but we could not find the following cache '
            f'available on device target._arch={target._arch!r}, '
            f'op_type={op_type!r}, exec_entry_sha1={exec_entry_sha1!r}'
        )
    if target.use_dummy_profiling_results():
        op_type = self._attrs['op']
        # Same fix as above: single message string instead of an args tuple.
        raise Exception(
            'This is a CI run but we could not find the following cache '
            f'available on device {target._arch}\n'
            f'{op_type} {exec_entry_sha1}.\n'
            'To bypass, you need to make it available in the db table.'
        )
    profiler_filename = get_profiler_filename(self._attrs, 'conv3d')
    runner = backend.profiler_runner.Runner(devices, self._attrs['name'])
    x_shape = self._invert_exec_key(exec_key)
    command = self._gen_profile_cmd(profiler_prefix, profiler_filename, x_shape)
    runner.push(profiler_filename, command)
    runner.join()
    result = runner.pull()
    if (len(result) == 0):
        raise RuntimeError(f'Profile workload: {exec_key} failed. Results: {result}.')
    # Each result entry is keyed by item 1 (the profiling record); pick the
    # fastest one.
    out = min(result, key=itemgetter(1))
    best_algo = out[1].op_config
    workspace = out[1].workspace
    cache_record = Conv3dRecordEntry(exec_entry=exec_key, exec_entry_sha1=exec_entry_sha1, dtype_a=(tmp_op.A.element.value - 1), dtype_b=(tmp_op.B.element.value - 1), dtype_c=(tmp_op.C.element.value - 1), dtype_acc=(tmp_op.tile_description.math_instruction.element_accumulator.value - 1), major_a=tmp_op.A.layout.value, major_b=tmp_op.B.layout.value, major_c=tmp_op.C.layout.value, kd=self._attrs['KD'], kh=self._attrs['KH'], kw=self._attrs['KW'], co=self._attrs['CO'], stride_d=self._attrs['stride'][0], stride_h=self._attrs['stride'][1], stride_w=self._attrs['stride'][2], pad_d=self._attrs['pad'][0], pad_h=self._attrs['pad'][1], pad_w=self._attrs['pad'][2], dilate_d=self._attrs['dilate'][0], dilate_h=self._attrs['dilate'][1], dilate_w=self._attrs['dilate'][2], op_type=self._attrs['op'], epilogue=tmp_op.epilogue_functor.value, device=target._arch, algo=best_algo, workspace=workspace, split_k=split_k)
    Target.current().insert_profile_cache('conv3d', cache_record.__dict__)
    return (best_algo, workspace)
def _has_dynamic_input_dims(self):
    """Return True if any input tensor has a non-static (dynamic) dim."""
    return any(
        not isinstance(dim, IntImm)
        for input_tensor in self._attrs['inputs']
        for dim in input_tensor._attrs['shape']
    )
def profile(self, workdir='./', devices=None, dynamic_profiling_strategy=DynamicProfileStrategy.HINTS):
    """Profile this op: static shapes first, then dynamic dims if present.

    Only the HINTS dynamic profiling strategy is supported for conv3d.
    """
    devices = [0] if devices is None else devices
    self._profile_static(workdir, devices)
    if not self._has_dynamic_input_dims():
        return
    if dynamic_profiling_strategy != DynamicProfileStrategy.HINTS:
        raise NotImplementedError('conv3d only supports HINTS dynamic profiling strategy for now! Current strategy: {}'.format(dynamic_profiling_strategy))
    self._profile_dynamic_dim(workdir)
def _profile_static(self, workdir, devices):
    """Profile every static workload in exec_path, filling in the best
    algo and the maximum required workspace across workloads."""
    workloads = list(self._attrs['exec_path'].keys())
    profiler_prefix = os.path.join(workdir, 'profiler', self._attrs['op'])
    target = backend.target.Target.current()
    if (('op_instance' not in self._attrs) or (len(self._attrs['op_instance']) == 0)):
        # Lazily build op instances when gen_profiler() was not called first.
        func_key = '{target}.{op}.config'.format(target=target.name(), op=self._attrs['op'])
        func = registry.get(func_key)
        func(self._attrs, dtype=self._attrs['inputs'][0]._attrs['dtype'])
    force_cache = environ.force_profiler_cache()
    for wkl in workloads:
        _LOGGER.info('Profile: {name}: {wkl}'.format(name=self._attrs['name'], wkl=wkl))
        if (target.use_dummy_profiling_results() and (not force_cache)):
            # CI runs: pick a deterministic minimal algo instead of profiling,
            # with a fixed dummy workspace size.
            algo = target.select_minimal_algo(list(self._attrs['op_instance'].keys()))
            _LOGGER.info(f'Select minimal algo {algo} for CI')
            self._attrs['exec_path'][wkl] = algo
            self._attrs['workspace'] = 102400
        elif (self._attrs['exec_path'][wkl] == ''):
            # Empty string marks a not-yet-profiled workload.
            (best_algo, workspace) = self._profile_single_workload(profiler_prefix, wkl, devices, force_cache)
            self._attrs['exec_path'][wkl] = best_algo
            self._attrs['workspace'] = max(self._attrs['workspace'], workspace)
def _profile_dynamic_dim(self, workdir):
    """Derive ranged exec_path entries for a dynamic batch dim (dim0).

    If all profiled shapes chose the same algo, they collapse into a single
    range key. Otherwise, for each pair of neighboring profiled batch sizes
    with different winning algos, binary-search the crossover batch size and
    emit two range keys split at that point.
    """

    def _extract_dynamic_dim(exec_keys):
        # Transpose the profiled shapes: var_dims[i] collects dim i across
        # all exec keys. Only dim0 (batch) is allowed to vary.
        _LOGGER.info('ONLY SUPPORT DYNAMIC BATCH (dim0)!')
        var_dims = [[], [], [], [], []]
        for key in exec_keys:
            dims = self._invert_exec_key(key)
            for (i, v) in enumerate(dims):
                var_dims[i].append(v)
        return var_dims

    dim_lbs = self._attrs['dim_lower_bounds']
    dims = _extract_dynamic_dim(self._attrs['exec_path'].keys())
    dim0_lb = dim_lbs[0]
    dim1_lb = dim_lbs[1]
    dim2_lb = dim_lbs[2]
    dim3_lb = dim_lbs[3]
    # Non-batch dims are assumed constant across keys; take the first value.
    dim1_ub = dims[1][0]
    dim2_ub = dims[2][0]
    dim3_ub = dims[3][0]
    dim4 = dims[4][0]
    num_exec_path = len(self._attrs['exec_path'])
    if (num_exec_path < 1):
        return
    algos = list(self._attrs['exec_path'].values())
    if ((num_exec_path == 1) or (len(set(algos)) <= 1)):
        # All profiled shapes picked the same algo: merge into one range key.
        new_exec_paths = OrderedDict()
        dim0_ub = max(dims[0])
        new_key = self._gen_dyn_exec_key(dim0_lb, dim0_ub, dim1_lb, dim1_ub, dim2_lb, dim2_ub, dim3_lb, dim3_ub, dim4)
        new_exec_paths[new_key] = algos[0]
        self._attrs['exec_path'] = new_exec_paths
        return
    target = backend.target.Target.current()
    if target.use_dummy_profiling_results():
        return
    profiler_prefix = os.path.join(workdir, 'profiler', self._attrs['op'])
    runner = backend.profiler_runner.Runner([0], self._attrs['name'])
    # Each region is [batch_lb, batch_ub, algo at lb, algo at ub].
    regions = []
    for i in range((len(dims[0]) - 1)):
        regions.append([dims[0][i], dims[0][(i + 1)], algos[i], algos[(i + 1)]])
    special_cases = OrderedDict()
    new_exec_paths = OrderedDict()
    for (lb, ub, lb_algo, ub_algo) in regions:
        mid = ((lb + ub) // 2)
        origin_lb = lb
        origin_ub = ub
        last_mid = mid
        # Binary-search the batch size where ub_algo starts beating lb_algo.
        while ((mid > lb) and (mid < ub)):
            mid = ((lb + ub) // 2)
            mid_shape = [mid, dim1_ub, dim2_ub, dim3_ub, dim4]
            _LOGGER.info('current: lb_algo: {lb_algo}, LB:{lb} MID:{mid} UB:{ub}'.format(lb_algo=lb_algo, lb=lb, mid=mid, ub=ub))
            profiler_filename = get_profiler_filename(self._attrs, 'conv3d')
            profiler_cmd = self._gen_profile_cmd(profiler_prefix, profiler_filename, mid_shape)
            # Only the two candidate algos are timed at the midpoint shape.
            runner.push(idx=profiler_filename, cmd=profiler_cmd, return_ops=[str(lb_algo), str(ub_algo)])
            runner.join()
            result = runner.pull()
            result_dict = {res.op_config: res for res in result[0][1]}
            assert (len(result_dict) >= 1)
            if (len(result_dict) == 1):
                # Only one algo ran at mid; presumably ub_algo failed here,
                # so the crossover must lie above mid.
                assert (str(ub_algo) not in result_dict)
                lb = (mid + 1)
            else:
                lb_time = result_dict[str(lb_algo)].duration
                ub_time = result_dict[str(ub_algo)].duration
                if (lb_time < ub_time):
                    lb = (mid + 1)
                else:
                    ub = (mid - 1)
            last_mid = mid
            mid = ((lb + ub) // 2)
        # Split the region at the last probed midpoint.
        lo_region_key = self._gen_dyn_exec_key(origin_lb, last_mid, dim1_lb, dim1_ub, dim2_lb, dim2_ub, dim3_lb, dim3_ub, dim4)
        up_region_key = self._gen_dyn_exec_key(last_mid, origin_ub, dim1_lb, dim1_ub, dim2_lb, dim2_ub, dim3_lb, dim3_ub, dim4)
        new_exec_paths[lo_region_key] = lb_algo
        new_exec_paths[up_region_key] = ub_algo
    special_cases.update(new_exec_paths)
    self._attrs['exec_path'] = special_cases
def gen_function(self) -> str:
    """Generate the backend source code for this conv3d op."""
    target = backend.target.Target.current()
    codegen = registry.get(f"{target.name()}.{self._attrs['op']}.gen_function")
    return codegen(self._attrs, self.exec_cond_template, self.shape_eval_template, self.shape_save_template)
class FacetSplitPC(PCBase):
    """Preconditioner rewriting the operator on a space decomposed into
    interior and facet degrees of freedom, delegating to an inner PC.

    Options prefix: ``facet_``.  The inner operator is either assembled
    with a chosen ``mat_type`` or taken as a (possibly permuted)
    submatrix view of the original operator (``mat_type == 'submatrix'``).
    """

    needs_python_pmat = False
    _prefix = 'facet_'
    # DOF permutations keyed by (V, W); class-level so it is shared
    # across instances (and across solves reusing the same spaces).
    _permutation_cache = {}

    def get_permutation(self, V, W):
        """Return the index permutation mapping V's DOFs to W's, or None
        when the ordering is already consistent on every MPI rank."""
        key = (V, W)
        if key not in self._permutation_cache:
            indices = get_permutation_map(V, W)
            # The permutation can be skipped only if indices are sorted on
            # all ranks, hence the MPI.PROD reduction of the boolean.
            if V._comm.allreduce(numpy.all(indices[:-1] <= indices[1:]), MPI.PROD):
                self._permutation_cache[key] = None
            else:
                self._permutation_cache[key] = indices
        return self._permutation_cache[key]

    def initialize(self, pc):
        """Build the interior/facet-decomposed operator and the inner PC."""
        from finat.ufl import RestrictedElement, MixedElement, TensorElement, VectorElement
        from firedrake import FunctionSpace, TestFunctions, TrialFunctions
        from firedrake.assemble import allocate_matrix, TwoFormAssembler
        _, P = pc.getOperators()
        appctx = self.get_appctx(pc)
        fcp = appctx.get('form_compiler_parameters')
        prefix = pc.getOptionsPrefix()
        options_prefix = prefix + self._prefix
        options = PETSc.Options(options_prefix)
        mat_type = options.getString('mat_type', 'submatrix')
        if P.getType() == 'python':
            ctx = P.getPythonContext()
            a = ctx.a
            bcs = tuple(ctx.row_bcs)
        else:
            ctx = dmhooks.get_appctx(pc.getDM())
            a = ctx.Jp or ctx.J
            bcs = tuple(ctx._problem.bcs)
        V = a.arguments()[-1].function_space()
        assert len(V) == 1, 'Interior-facet decomposition of mixed elements is not supported'

        def restrict(ele, restriction_domain):
            # Recurse through vector/tensor wrappers so the restriction is
            # applied to the underlying scalar element.
            if isinstance(ele, VectorElement):
                return type(ele)(restrict(ele._sub_element, restriction_domain), dim=ele.num_sub_elements)
            elif isinstance(ele, TensorElement):
                # BUG FIX: was `ele._symmety`, which raised AttributeError
                # for tensor elements.
                return type(ele)(restrict(ele._sub_element, restriction_domain), shape=ele._shape, symmetry=ele._symmetry)
            else:
                return RestrictedElement(ele, restriction_domain)

        W = FunctionSpace(V.mesh(), MixedElement([restrict(V.ufl_element(), d) for d in ('interior', 'facet')]))
        assert W.dim() == V.dim(), 'Dimensions of the original and decomposed spaces do not match'
        mixed_operator = a(sum(TestFunctions(W)), sum(TrialFunctions(W)), coefficients={})
        mixed_bcs = tuple(bc.reconstruct(V=W[-1], g=0) for bc in bcs)
        self.perm = None
        self.iperm = None
        indices = self.get_permutation(V, W)
        if indices is not None:
            self.perm = PETSc.IS().createGeneral(indices, comm=V._comm)
            self.iperm = self.perm.invertPermutation()
        if mat_type != 'submatrix':
            self.mixed_op = allocate_matrix(mixed_operator, bcs=mixed_bcs, form_compiler_parameters=fcp, mat_type=mat_type, options_prefix=options_prefix)
            self._assemble_mixed_op = TwoFormAssembler(mixed_operator, tensor=self.mixed_op, form_compiler_parameters=fcp, bcs=mixed_bcs).assemble
            self._assemble_mixed_op()
            mixed_opmat = self.mixed_op.petscmat

            def _permute_nullspace(nsp):
                # Transfer a nullspace from the original ordering to the
                # permuted one; pass through when no permutation is needed.
                if not (nsp.handle and self.iperm):
                    return nsp
                vecs = [vec.duplicate() for vec in nsp.getVecs()]
                for vec in vecs:
                    vec.permute(self.iperm)
                return PETSc.NullSpace().create(constant=nsp.hasConstant(), vectors=vecs, comm=nsp.getComm())

            mixed_opmat.setNullSpace(_permute_nullspace(P.getNullSpace()))
            mixed_opmat.setNearNullSpace(_permute_nullspace(P.getNearNullSpace()))
            mixed_opmat.setTransposeNullSpace(_permute_nullspace(P.getTransposeNullSpace()))
        elif self.perm:
            # Keep a factory so update() can re-extract the permuted view.
            self._permute_op = partial(PETSc.Mat().createSubMatrixVirtual, P, self.iperm, self.iperm)
            mixed_opmat = self._permute_op()
        else:
            mixed_opmat = P
        scpc = PETSc.PC().create(comm=pc.comm)
        scpc.incrementTabLevel(1, parent=pc)
        mixed_dm = W.dm
        self._dm = mixed_dm
        self._ctx_ref = self.new_snes_ctx(pc, mixed_operator, mixed_bcs, mat_type, fcp, options_prefix=options_prefix)
        scpc.setDM(mixed_dm)
        scpc.setOptionsPrefix(options_prefix)
        scpc.setOperators(A=mixed_opmat, P=mixed_opmat)
        with dmhooks.add_hooks(mixed_dm, self, appctx=self._ctx_ref, save=False):
            scpc.setFromOptions()
        self.pc = scpc

    def update(self, pc):
        """Reassemble (or re-extract) the decomposed operator."""
        if hasattr(self, 'mixed_op'):
            self._assemble_mixed_op()
        elif hasattr(self, '_permute_op'):
            for mat in self.pc.getOperators():
                mat.destroy()
            P = self._permute_op()
            self.pc.setOperators(A=P, P=P)
        self.pc.setUp()

    def apply(self, pc, x, y):
        """Apply the inner PC in the permuted ordering, restoring x/y."""
        if self.perm:
            x.permute(self.iperm)
        dm = self._dm
        with dmhooks.add_hooks(dm, self, appctx=self._ctx_ref):
            self.pc.apply(x, y)
        if self.perm:
            # Restore x and map y back to the original DOF ordering.
            x.permute(self.perm)
            y.permute(self.perm)

    def applyTranspose(self, pc, x, y):
        """Transpose counterpart of :meth:`apply`."""
        if self.perm:
            x.permute(self.iperm)
        dm = self._dm
        with dmhooks.add_hooks(dm, self, appctx=self._ctx_ref):
            self.pc.applyTranspose(x, y)
        if self.perm:
            x.permute(self.perm)
            y.permute(self.perm)

    def view(self, pc, viewer=None):
        super(FacetSplitPC, self).view(pc, viewer)
        if hasattr(self, 'pc'):
            viewer.printfASCII('PC using interior-facet decomposition\n')
            self.pc.view(viewer)

    def destroy(self, pc):
        """Release the inner PC, permuted operators, and permutation ISes."""
        if hasattr(self, 'pc'):
            if hasattr(self, '_permute_op'):
                for mat in self.pc.getOperators():
                    mat.destroy()
            self.pc.destroy()
        if hasattr(self, 'iperm'):
            if self.iperm:
                self.iperm.destroy()
        if hasattr(self, 'perm'):
            if self.perm:
                self.perm.destroy()
def run(fips_dir, proj_dir, args):
    """Install or set up a cross-compilation SDK.

    :param fips_dir: absolute path to the fips directory
    :param proj_dir: absolute path to the current project (unused here)
    :param args:     command-line args; args[0] selects the SDK
    """
    sdk_name = args[0] if args else None
    if sdk_name == 'emscripten':
        emsdk.install(fips_dir, None)
    elif sdk_name == 'android':
        android.setup(fips_dir)
    elif sdk_name == 'wasisdk':
        wasisdk.setup(fips_dir)
    else:
        # BUG FIX: the message previously omitted 'wasisdk' even though it
        # is an accepted SDK name above.
        log.error("invalid SDK name (must be 'emscripten', 'android' or 'wasisdk')")
class OptionSeriesGaugeSonificationDefaultspeechoptionsMappingRate(Options):
    """Apparently a generated wrapper for the Highcharts
    ``series.gauge.sonification.defaultSpeechOptions.mapping.rate`` options.

    NOTE(review): every getter below is immediately shadowed by a
    same-named setter definition (there are no ``@property`` /
    ``@x.setter`` decorators), so only the setter variants survive on the
    class. This looks like decorators were lost in generation — verify
    against the generator template before changing behavior.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class IGVideoCopyrightCheckMatchesInformation(AbstractObject):
    """Graph API node for IG video copyright-check match information."""

    def __init__(self, api=None):
        super(IGVideoCopyrightCheckMatchesInformation, self).__init__()
        self._isIGVideoCopyrightCheckMatchesInformation = True
        self._api = api

    class Field(AbstractObject.Field):
        copyright_matches = 'copyright_matches'
        status = 'status'

    _field_types = {
        'copyright_matches': 'list<Object>',
        'status': 'IGVideoCopyrightCheckStatus',
    }

    # BUG FIX: the method takes `cls` but lacked @classmethod, so calling
    # it on the class (the SDK's usual pattern) raised a TypeError.
    @classmethod
    def _get_field_enum_info(cls):
        """Return enum metadata for this node's fields (none defined)."""
        field_enum_info = {}
        return field_enum_info
class MappingForm(FlaskForm):
    """Form mapping a package name to a distribution.

    Pass ``distros=<iterable of names>`` to populate the distribution
    choices (sorted case-insensitively).
    """

    distro = SelectField('Distribution', [validators.DataRequired()], choices=[])
    package_name = StringField('Package name', [validators.DataRequired()])

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if 'distros' in kwargs:
            ordered = sorted(kwargs['distros'], key=lambda name: name.lower())
            self.distro.choices = [(name, name) for name in ordered]
class TestExpansionNode(TestCase):
    """Tests for ExpansionNode.to_strings with and without child expansions."""

    @staticmethod
    def _build_node(label):
        """Build an ExpansionNode whose fields all derive from *label*."""
        return ExpansionNode(node_type=label, name=label, source_field=(label + '_id'), destination_field=label, service=label, action=('get_' + label), request_field='id', response_field=label)

    def setUp(self):
        self.expansion_node = self._build_node('foo')

    def test_to_strings_with_expansions(self):
        for child_label in ('bar', 'baz'):
            self.expansion_node.add_expansion(self._build_node(child_label))
        self.assertEqual(set(self.expansion_node.to_strings()), {'foo.baz', 'foo.bar'})

    def test_to_strings_without_expansions(self):
        self.assertEqual(self.expansion_node.to_strings(), ['foo'])
def text_on_image(image, text, font_size, color, output_path='last_image.jpg'):
    """Draw *text* centered on *image* and save the result.

    :param image:       path (or file object) accepted by ``Image.open``
    :param text:        text to render
    :param font_size:   point size for the Arial font
    :param color:       fill color accepted by ``ImageDraw.text``
    :param output_path: where to save the result (default keeps the old
                        hard-coded ``'last_image.jpg'``)
    """
    img = Image.open(image)
    font = ImageFont.truetype('arial.ttf', font_size)
    draw = ImageDraw.Draw(img)
    iw, ih = img.size
    # BUG FIX: FreeTypeFont.getsize was deprecated in Pillow 9.2 and
    # removed in Pillow 10; textbbox is the supported way to measure text.
    left, top, right, bottom = draw.textbbox((0, 0), text, font=font)
    fw, fh = right - left, bottom - top
    draw.text(((iw - fw) / 2, (ih - fh) / 2), text, fill=color, font=font)
    img.save(output_path)
class JobKind():
    """Abstract interface describing one kind of job.

    Subclasses set the class-level constants and implement the filesystem
    and bookkeeping hooks below. ``Request``/``Result`` name the metadata
    classes used for this kind's requests and results.
    """
    # Identifier for this job kind; set by subclasses.
    NAME: Optional[str] = None
    # Rough expected runtime in seconds; set by subclasses.
    TYPICAL_DURATION_SECS: Optional[int] = None
    # File fields tracked in the request / result filesystems.
    REQFS_FIELDS = ['job_script', 'portal_script', 'ssh_okay']
    RESFS_FIELDS = ['pidfile', 'logfile']
    Request: Type[_utils.Metadata] = requests.Request
    Result: Type[_utils.Metadata] = requests.Result

    def set_request_fs(self, fs: 'JobRequestFS', context: str) -> None:
        """Populate kind-specific fields on the request filesystem."""
        raise NotImplementedError

    def set_work_fs(self, fs: 'JobWorkFS', context: str) -> None:
        """Populate kind-specific fields on the work filesystem."""
        raise NotImplementedError

    def set_result_fs(self, fs: 'JobResultFS', context: str) -> None:
        """Populate kind-specific fields on the result filesystem."""
        raise NotImplementedError

    def create(self, reqid: requests.ToRequestIDType, jobfs: '_job.JobFS', workerfs: '_workers.WorkerJobsFS', **req_kwargs):
        """Create a job of this kind for *reqid*."""
        raise NotImplementedError

    def as_row(self, req: requests.Request):
        """Render *req* as a display row."""
        raise NotImplementedError
class TensorAccessor():
    """Describes how a tensor's logical ("original") shape maps onto the
    actual underlying buffer, tracking the offset/stride information that
    arises when the tensor becomes a strided view into a larger tensor."""

    def __init__(self, original_tensor: Tensor) -> None:
        """Create a contiguous accessor for *original_tensor*."""
        super().__init__()
        # Element offset of this view inside the underlying buffer.
        self.offset = 0
        self.original_shapes = original_tensor._attrs['shape']
        self.tensor_dtype = original_tensor.dtype()
        self.is_contiguous = True
        self.is_from_strided_tensor = False
        # Dim along which this accessor strides into a larger tensor, if any.
        self.stride_dim = None
        # Shape of the underlying ("actual") tensor, once it differs.
        self.actual_shapes = None
        self.original_total_elements_from_stride_dim = None
        self.actual_total_elements_from_stride_dim = None
        # List of (original dim indices, actual dim indices) groups covering
        # equal element counts; starts as the identity mapping.
        self._dim_mapping = [([i], [i]) for i in range(len(self.original_shapes))]

    def __deepcopy__(self, memo):
        # Shallow-copy scalars; deep-copy only the shape/mapping containers.
        res = copy.copy(self)
        res.original_shapes = copy.deepcopy(self.original_shapes, memo)
        res.actual_shapes = copy.deepcopy(self.actual_shapes, memo)
        res._dim_mapping = copy.deepcopy(self._dim_mapping, memo)
        return res

    def __str__(self) -> str:
        return pformat(vars(self), indent=2)

    def __repr__(self) -> str:
        return self.__str__()

    def __eq__(self, other: Any) -> bool:
        """Field-by-field equality against another TensorAccessor."""
        if (not isinstance(other, TensorAccessor)):
            return False
        attrs = vars(self)
        for attr in attrs:
            self_attr = getattr(self, attr)
            other_attr = getattr(other, attr)
            if (self_attr != other_attr):
                return False
        return True

    def _try_gen_dim_mapping(self):
        """Try to group original dims and actual dims into pairs of index
        groups with equal element counts; leaves self._dim_mapping as None
        when no consistent grouping exists."""
        self._dim_mapping = None
        dim_mapping = []
        original_value = None
        actual_value = None
        original_idx = 0
        actual_idx = 0
        prev_original_idx = 0
        prev_actual_idx = 0
        # Sentinel dim appended to both shapes so the final group flushes.
        INT_MAX = int(('0x' + ('F' * 16)), 16)
        original_shapes = (list(self.original_shapes) + [IntImm(INT_MAX)])
        actual_shapes = (list(self.actual_shapes) + [IntImm(INT_MAX)])
        while ((original_idx < len(original_shapes)) and (actual_idx < len(actual_shapes))):
            original_d = original_shapes[original_idx]
            actual_d = actual_shapes[actual_idx]
            # Size-1 dims not already folded into a running product map to
            # an empty group on the other side.
            if ((original_d == IntImm(1)) and (original_value is None)):
                original_idx += 1
                dim_mapping.append((list(range(prev_original_idx, original_idx)), []))
                prev_original_idx = original_idx
                continue
            if ((actual_d == IntImm(1)) and (actual_value is None)):
                actual_idx += 1
                dim_mapping.append(([], list(range(prev_actual_idx, actual_idx))))
                prev_actual_idx = actual_idx
                continue
            if ((not isinstance(original_d, IntImm)) or (not isinstance(actual_d, IntImm))):
                # Dynamic dims must match exactly and cannot be merged with
                # neighboring static dims.
                if ((original_d != actual_d) or (original_value is not None) or (actual_value is not None)):
                    return
                else:
                    original_value = original_d
                    actual_value = actual_d
                    original_idx += 1
                    actual_idx += 1
            elif ((original_value is None) or (actual_value is None)):
                # Start accumulating element counts on whichever side is idle.
                if (original_value is None):
                    original_value = original_d.value()
                    original_idx += 1
                if (actual_value is None):
                    actual_value = actual_d.value()
                    actual_idx += 1
            elif (original_value < actual_value):
                # Multiply dims on the smaller side until the counts match.
                original_value *= original_d.value()
                original_idx += 1
            elif (original_value > actual_value):
                actual_value *= actual_d.value()
                actual_idx += 1
            else:
                raise AssertionError('This branch should never be reached.')
            if (original_value == actual_value):
                # Counts matched: close the current group on both sides.
                dim_mapping.append((list(range(prev_original_idx, original_idx)), list(range(prev_actual_idx, actual_idx))))
                prev_original_idx = original_idx
                prev_actual_idx = actual_idx
                original_value = None
                actual_value = None
        if ((original_value is not None) or (actual_value is not None) or (original_idx != len(original_shapes)) or (actual_idx != len(actual_shapes))):
            _LOGGER.debug(f'tail processing failed, dim_mapping: {dim_mapping}')
            return
        # Drop the group produced by the sentinel dims.
        dim_mapping = dim_mapping[:(- 1)]
        self._dim_mapping = dim_mapping
        _LOGGER.debug(f'generate dim_mapping: {dim_mapping}')

    def try_get_stride_strs(self, dim: int, dim_names: List[str]=None) -> Optional[List[str]]:
        """Return the list of dim-size strings whose product is the stride
        of *dim*, or None when it cannot be derived from the dim mapping.

        *dim_names* optionally overrides the names used for dynamic dims.
        """
        assert (dim < len(self.original_shapes)), f"dim {dim} must be smaller than original_shapes' rank, {len(self.original_shapes)}"
        if (dim_names is not None):
            assert (len(dim_names) == len(self.original_shapes)), f'dim_names must have the same length as shapes, dim_names: {dim_names}, shapes: {self.original_shapes}'

        def _get_value_or_names(shape: List[IntVar], indices: Iterable[int]) -> List[str]:
            # Static dims render as their value; dynamic dims as their name.
            res = []
            for index in indices:
                d = shape[index]
                if isinstance(d, IntImm):
                    res.append(str(d.value()))
                else:
                    res.append((dim_names[index] if (dim_names is not None) else d._attrs['name']))
            return res

        if (self.stride_dim is None):
            # Contiguous view: stride is the product of the trailing dims.
            return _get_value_or_names(self.original_shapes, range((dim + 1), len(self.original_shapes)))
        if (self._dim_mapping is None):
            _LOGGER.debug('Failed to get dim mapping.')
            return None
        found_original_dim_group = False
        res = []
        for (original_group, actual_group) in self._dim_mapping:
            if (not found_original_dim_group):
                if (dim in original_group):
                    found_original_dim_group = True
                    idx = original_group.index(dim)
                    if ((self.stride_dim in actual_group) and (idx != (len(original_group) - 1))):
                        _LOGGER.debug(f'Multiple dims in stride_dim group. dim_mapping: {self._dim_mapping}, dim: {dim}, stride_dim: {self.stride_dim}, self: {self}')
                        return None
                    res.extend(_get_value_or_names(self.original_shapes, original_group[(idx + 1):]))
            elif (self.stride_dim in actual_group):
                # Once past dim's group, the strided group contributes the
                # actual (underlying) sizes instead of the original ones.
                if (actual_group.index(self.stride_dim) != 0):
                    _LOGGER.debug(f'Stride dim {self.stride_dim} is not the first dim of the underlying group {actual_group}.')
                    return None
                res.extend(_get_value_or_names(self.actual_shapes, actual_group))
            else:
                res.extend(_get_value_or_names(self.original_shapes, original_group))
        _LOGGER.debug(f'dim: {dim}, stride_dim: {self.stride_dim}, mapping: {self._dim_mapping}, stride_strs: {res}, original: {self.original_shapes}, actual: {self.actual_shapes}')
        return res

    def stride(self, dim: int) -> int:
        """Return the static stride of *dim*; raises if any trailing dim is
        dynamic or the stride strings cannot be derived."""
        for (i, d) in enumerate(self.original_shapes[(dim + 1):], (dim + 1)):
            if (not isinstance(d, IntImm)):
                raise RuntimeError(f'Can only calculate static stride from static dim: {i}, original shapes: {self.original_shapes}.')
        strides = self.try_get_stride_strs(dim)
        if (strides is None):
            raise RuntimeError('Failed to get stride strs!')
        stride = 1
        for s in strides:
            stride *= int(s)
        return stride

    def gen_stride_str(self, dim: int, dim_names: List[str]) -> str:
        """Return the stride of *dim* as a '*'-joined codegen expression."""
        strides = self.try_get_stride_strs(dim, dim_names)
        if (strides is None):
            raise RuntimeError('Failed to get stride strs!')
        return ' * '.join(strides)

    def update_base_tensor(self, new_tensor: Tensor, stride_dim: int, stride_dim_offset: int) -> None:
        """Make this accessor a strided view into *new_tensor* along
        *stride_dim*, starting at *stride_dim_offset* within that dim."""
        assert (self.stride_dim is None), 'Tensor accessor cannot be updated once stride_dim is set!'
        original_shapes = (self.actual_shapes if (self.actual_shapes is not None) else self.original_shapes)
        self.actual_shapes = new_tensor._attrs['shape']
        self.stride_dim = stride_dim
        assert (len(self.actual_shapes) == len(original_shapes)), f'Original tensor and new tensor must have the same number of dims! Original tensor shape: {original_shapes}, new tensor shape: {self.actual_shapes}'
        assert (len(self.actual_shapes) > stride_dim), f'stride_dim {stride_dim} must be less than #dims {len(self.actual_shapes)}!'
        assert isinstance(original_shapes[stride_dim], IntImm), "Stride dim can't be dynamic!"
        assert isinstance(self.actual_shapes[stride_dim], IntImm), "Stride dim can't be dynamic!"
        # Fold all dims from stride_dim onward into flat element counts and
        # scale the offset accordingly; those trailing dims must be static
        # and identical between the two shapes.
        self.original_total_elements_from_stride_dim = original_shapes[stride_dim].value()
        self.actual_total_elements_from_stride_dim = self.actual_shapes[stride_dim].value()
        self.offset = stride_dim_offset
        for (original_shape, actual_shape) in zip(original_shapes[(stride_dim + 1):], self.actual_shapes[(stride_dim + 1):]):
            assert isinstance(original_shape, IntImm), 'Dims after the stride dim must have static shapes! Shapes: {}'.format(original_shapes)
            assert isinstance(actual_shape, IntImm), 'Dims after the stride dim must have static shapes! Shapes: {}'.format(self.actual_shapes)
            assert (original_shape._attrs['values'] == actual_shape._attrs['values']), 'original shapes {} and actual shapes {} after the stride dim must be equal! '.format(original_shapes, self.actual_shapes)
            value = actual_shape._attrs['values'][0]
            self.original_total_elements_from_stride_dim *= value
            self.actual_total_elements_from_stride_dim *= value
            self.offset *= value
        if ((stride_dim > 0) and (self.actual_total_elements_from_stride_dim > self.original_total_elements_from_stride_dim)):
            self.is_contiguous = False
        if (self.actual_total_elements_from_stride_dim > self.original_total_elements_from_stride_dim):
            self.is_from_strided_tensor = True

    def update_base_tensor_shape(self, new_tensor: Tensor) -> None:
        """Record that the underlying tensor is *new_tensor* (same elements,
        different shape) and rebuild the dim mapping."""
        assert (self.stride_dim is None), 'Tensor accessor cannot be updated once stride_dim is set!'
        self.actual_shapes = new_tensor._attrs['shape']
        original_dynamic_dims = {dim for dim in self.original_shapes if (not isinstance(dim, IntImm))}
        new_dynamic_dims = {dim for dim in self.actual_shapes if (not isinstance(dim, IntImm))}
        assert (original_dynamic_dims == new_dynamic_dims), f'Original tensor and actual tensor have different dynamic dimensions! Original tensor: {self.original_shapes}, actual tensor: {self.actual_shapes}!'
        self._try_gen_dim_mapping()

    def is_rightmost_dim_contiguous(self, cat_dim: int) -> bool:
        """Return True if concatenating along *cat_dim* keeps the memory
        right of that dim contiguous according to the dim mapping."""
        num_groups = len(self._dim_mapping)
        for group_idx in range(num_groups):
            (original_group, actual_group) = self._dim_mapping[group_idx]
            if (cat_dim in actual_group):
                # cat_dim must be the first dim of its group.
                if actual_group.index(cat_dim):
                    return False
                # Skip groups with no original dims; some original dims must
                # remain to absorb the concatenated data.
                while ((group_idx < num_groups) and (not len(self._dim_mapping[group_idx][0]))):
                    group_idx += 1
                if (group_idx >= num_groups):
                    return False
                return True
        return False
class Top():
    """Lattice top element: every Top instance compares and hashes equal."""

    def __init__(self):
        pass

    def copy(self):
        """Top carries no state, so a fresh instance is a full copy."""
        return Top()

    def __eq__(self, other):
        return isinstance(other, Top)

    def __hash__(self):
        # All Tops are equal, so they must share one hash value.
        return hash('Top')

    def __str__(self):
        return 'Top'

    def __repr__(self):
        return str(self)
class Solution():
    """Find the minimum of a rotated sorted array via binary search."""

    def findMin(self, nums: List[int]) -> int:
        lo, hi = 0, len(nums) - 1
        while lo < hi:
            if hi - lo == 1:
                # Two candidates left: pick the smaller directly.
                return min(nums[lo], nums[hi])
            if nums[lo] <= nums[hi]:
                # Window is already sorted, so its first element is minimal.
                return nums[lo]
            mid = (lo + hi) // 2
            if nums[mid] > nums[lo]:
                # Left half is sorted and all >= nums[lo] > nums[hi]:
                # the minimum lies strictly to the right of mid.
                lo = mid + 1
            else:
                hi = mid
        return nums[lo]
class OptionPlotoptionsItemSonificationTracksMappingTremoloDepth(Options):
    """Apparently a generated wrapper for the Highcharts
    ``plotOptions.<item>.sonification.tracks.mapping.tremolo.depth`` options.

    NOTE(review): every getter below is immediately shadowed by a
    same-named setter definition (there are no ``@property`` /
    ``@x.setter`` decorators), so only the setter variants survive on the
    class. This looks like decorators were lost in generation — verify
    against the generator template before changing behavior.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.