code stringlengths 281 23.7M |
|---|
# NOTE(review): restored the stripped `@pytest.mark` prefix — the bare
# `.usefixtures(...)` line was not valid Python.
@pytest.mark.usefixtures('use_tmpdir')
def test_load_forward_model_missing_raises():
    """A forward-model config whose EXECUTABLE cannot be found must fail validation."""
    with open('CONFIG', 'w', encoding='utf-8') as f:
        f.write('EXECUTABLE missing_script.sh\n')
    with pytest.raises(ConfigValidationError, match='Could not find executable'):
        _ = ForwardModel.from_config_file('CONFIG')
# NOTE(review): restored the stripped `@pytest.fixture` prefix — the bare
# `(scope='module')` line was not valid Python.
@pytest.fixture(scope='module')
def df():
    """Synthetic module-scoped dataset.

    Six numeric features from a fixed-seed classification problem, plus a
    categorical column and three columns that drift monotonically with the
    row index (useful for drift-detection tests).
    """
    X, _y = make_classification(n_samples=1000, n_features=6, n_redundant=2, n_clusters_per_class=1, weights=[0.5], class_sep=2, random_state=1)
    colnames = ['var_' + str(i) for i in range(6)]
    X = pd.DataFrame(X, columns=colnames)
    half = X.shape[0] // 2
    X['cat_1'] = ['A', 'B'] * half
    # Monotonic drift columns keyed to row order.
    X['drift_1'] = list(range(X.shape[0]))
    X['drift_2'] = [number / 2 for number in range(X.shape[0])]
    X['drift_cat_1'] = ['A'] * half + ['B'] * half
    X['drift_cat_1'] = X['drift_cat_1'].astype('category')
    return X
# NOTE(review): `_renderer(...)` looked like a stripped decorator; restored
# the evidently `@default_renderer` registration — confirm against upstream.
@default_renderer(wrap_type=ConflictTargetMetric)
class ConflictTargetMetricRenderer(MetricRenderer):
    """Renders a ConflictTargetMetric result as counter widgets."""

    def render_html(self, obj: ConflictTargetMetric) -> List[BaseWidgetInfo]:
        """Build the 'Conflicts in Target' panel: current counter, plus a
        reference counter when reference stats are present."""
        metric_result = obj.get_result()
        counters = [CounterData('number of conflicts (current)', self._get_string(metric_result.number_not_stable_target, metric_result.share_not_stable_target))]
        if (metric_result.number_not_stable_target_ref is not None) and (metric_result.share_not_stable_target_ref is not None):
            counters.append(CounterData('number of conflicts (reference)', self._get_string(metric_result.number_not_stable_target_ref, metric_result.share_not_stable_target_ref)))
        return [header_text(label='Conflicts in Target'), counter(counters=counters)]

    # bug fix: this was a plain `def _get_string(number, ratio)` with no
    # `self`, so every `self._get_string(a, b)` call above raised TypeError.
    # Restored as @staticmethod, matching the (number, ratio) signature.
    @staticmethod
    def _get_string(number: int, ratio: float) -> str:
        """Format a count with its share as a percentage, e.g. '3 (25.0%)'."""
        return f'{number} ({(ratio * 100)}%)'
def main(server, username, infile, infotype):
    """Replay a tab-separated scrobble log file against *server*.

    Prompts for the password, authenticates once, then submits one record
    per line (timestamp, track, artist, album and MusicBrainz ids),
    printing success/failure and throttling with a 0.5 s pause per line.
    """
    password = getpass.getpass()
    sessionkey = auth(server, username, password)
    # bug fix: the Python 2 builtin `file()` does not exist in Python 3;
    # use open() in a context manager so the handle is always closed.
    with open(infile, encoding='utf-8') as handle:
        for n, line in enumerate(handle, start=1):
            (timestamp, track, artist, album, trackmbid, artistmbid, albummbid) = line.strip('\n').split('\t')
            if submit(server, infotype, artist, track, sessionkey):
                print('%d: %s %s - %s' % (n, infotype, artist, track))
            else:
                print('FAILED: %s - %s' % (artist, track))
            # Throttle submissions to be polite to the server.
            time.sleep(0.5)
class Migration(migrations.Migration):
    """Swap the temporary DEFC columns into their final place.

    Drops the superseded `disaster_emergency_fund` / `code` fields, renames
    the `*_temp` replacements over them, then re-declares the final field
    types: a text primary key on DisasterEmergencyFundCode and a nullable
    FK (db column `disaster_emergency_fund_code`) on GTASSF133Balances.
    Depends on the prior drop/text-field migrations in all three apps.
    """
    dependencies = [('awards', '0088_drop_old_defc_field'), ('financial_activities', '0007_drop_old_defc_field'), ('references', '0056_use_new_defc_text_field')]
    operations = [migrations.RemoveField(model_name='gtassf133balances', name='disaster_emergency_fund'), migrations.RemoveField(model_name='disasteremergencyfundcode', name='code'), migrations.RenameField(model_name='gtassf133balances', old_name='disaster_emergency_fund_temp', new_name='disaster_emergency_fund'), migrations.RenameField(model_name='disasteremergencyfundcode', old_name='code_temp', new_name='code'), migrations.AlterField(model_name='disasteremergencyfundcode', name='code', field=models.TextField(primary_key=True, serialize=False)), migrations.AlterField(model_name='gtassf133balances', name='disaster_emergency_fund', field=models.ForeignKey(blank=True, db_column='disaster_emergency_fund_code', null=True, on_delete=models.deletion.DO_NOTHING, to='references.DisasterEmergencyFundCode'))]
class TableManager(Service, TableManagerT):
    """Service that tracks the app's tables and drives changelog recovery."""

    _channels: MutableMapping[CollectionT, ChannelT]
    _changelogs: MutableMapping[str, CollectionT]
    _tables_finalized: asyncio.Event
    # bug fix: this annotation was misspelled `_tables_registed`, while every
    # use site (__init__, wait_until_tables_registered, _update_channels)
    # spells `_tables_registered`.
    _tables_registered: asyncio.Event
    _recovery_started: asyncio.Event
    _changelog_queue: Optional[ThrowableQueue]
    _pending_persisted_offsets: MutableMapping[TP, Tuple[StoreT, int]]
    _recovery: Optional[Recovery] = None

    def __init__(self, app: AppT, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self.app = app
        self.data: MutableMapping = {}
        self._changelog_queue = None
        self._channels = {}
        self._changelogs = {}
        self._tables_finalized = asyncio.Event()
        self._tables_registered = asyncio.Event()
        self._recovery_started = asyncio.Event()
        self.actives_ready = False
        self.standbys_ready = False
        self._pending_persisted_offsets = {}

    def persist_offset_on_commit(self, store: StoreT, tp: TP, offset: int) -> None:
        """Remember the offset to persist for *tp* at the next commit."""
        existing_entry = self._pending_persisted_offsets.get(tp)
        if existing_entry is not None:
            _, existing_offset = existing_entry
            if offset < existing_offset:
                # Never move a pending persisted offset backwards.
                return
        self._pending_persisted_offsets[tp] = (store, offset)

    def on_commit(self, offsets: MutableMapping[TP, int]) -> None:
        """Flush pending persisted offsets for all committed partitions."""
        for tp in offsets:
            self.on_commit_tp(tp)

    def on_commit_tp(self, tp: TP) -> None:
        """Persist the pending offset (if any) for a single partition."""
        entry = self._pending_persisted_offsets.get(tp)
        if entry is not None:
            store, offset = entry
            store.set_persisted_offset(tp, offset)

    def on_rebalance_start(self) -> None:
        """Reset readiness flags when a rebalance begins."""
        self.actives_ready = False
        self.standbys_ready = False

    def on_actives_ready(self) -> None:
        """Called when active partitions have fully recovered."""
        self.actives_ready = True

    def on_standbys_ready(self) -> None:
        """Called when standby partitions have caught up."""
        self.standbys_ready = True

    def __hash__(self) -> int:
        # Identity hash: the manager is also a mapping of tables, which
        # would otherwise be unhashable.
        return object.__hash__(self)

    # NOTE(review): restored `@property` on the three accessors below — the
    # decorators appear to have been stripped: e.g. `on_start` awaits
    # `self.recovery.start()` and `_update_channels` passes
    # `self.changelog_queue` without calling them.
    @property
    def changelog_topics(self) -> Set[str]:
        """Names of all registered changelog topics."""
        return set(self._changelogs.keys())

    @property
    def changelog_queue(self) -> ThrowableQueue:
        """Lazily-created flow-control queue shared by changelog channels."""
        if self._changelog_queue is None:
            self._changelog_queue = self.app.FlowControlQueue(maxsize=self.app.conf.stream_buffer_maxsize, clear_on_resume=True)
        return self._changelog_queue

    @property
    def recovery(self) -> Recovery:
        """Lazily-created changelog recovery service."""
        if self._recovery is None:
            self._recovery = Recovery(self.app, self, beacon=self.beacon, loop=self.loop)
        return self._recovery

    def add(self, table: CollectionT) -> CollectionT:
        """Register a table and its changelog topic; rejects duplicates and
        registration after the table set has been finalized."""
        if self._tables_finalized.is_set():
            raise RuntimeError('Too late to add tables at this point')
        assert table.name is not None
        if table.name in self:
            raise ValueError(f'Table with name {table.name!r} already exists')
        self[table.name] = table
        self._changelogs[table.changelog_topic.get_topic_name()] = table
        return table

    async def on_start(self) -> None:
        """Give the app a moment to settle, then register channels and start recovery."""
        await self.sleep(1.0)
        if not self.should_stop:
            await self._update_channels()
            await self.recovery.start()

    async def wait_until_tables_registered(self) -> None:
        """Block until table channels are registered (no-op for producer/client-only apps)."""
        if not self.app.producer_only and not self.app.client_only:
            await self.wait_for_stopped(self._tables_registered)

    async def _update_channels(self) -> None:
        """Create changelog channels for all tables and pause their partitions."""
        self._tables_finalized.set()
        for table in self.values():
            # Yield to the event loop between tables to stay responsive.
            await asyncio.sleep(0)
            if table not in self._channels:
                chan = table.changelog_topic.clone_using_queue(self.changelog_queue)
                self.app.topics.add(chan)
                await asyncio.sleep(0)
                self._channels[table] = chan
            await table.maybe_start()
        # Changelog partitions are consumed by recovery, not by streams.
        self.app.consumer.pause_partitions({tp for tp in self.app.consumer.assignment() if tp.topic in self._changelogs})
        await asyncio.sleep(0)
        self._tables_registered.set()

    async def on_stop(self) -> None:
        """Stop the fetcher, recovery, and all tables."""
        await cast(_App, self.app)._fetcher.stop()
        if self._recovery:
            await self._recovery.stop()
        for table in self.values():
            await table.stop()

    def on_partitions_revoked(self, revoked: Set[TP]) -> None:
        """Forward partition revocation to recovery (traced)."""
        T = traced_from_parent_span()
        T(self.recovery.on_partitions_revoked)(revoked)

    async def on_rebalance(self, assigned: Set[TP], revoked: Set[TP], newly_assigned: Set[TP], generation_id: int = 0) -> None:
        """Propagate rebalance to every table, refresh channels, then recover."""
        self._recovery_started.set()
        T = traced_from_parent_span()
        for table in self.values():
            await T(table.on_rebalance)(assigned, revoked, newly_assigned, generation_id)
            await asyncio.sleep(0)
        await T(self._update_channels)()
        await asyncio.sleep(0)
        await T(self.recovery.on_rebalance)(assigned, revoked, newly_assigned, generation_id)

    async def wait_until_recovery_completed(self) -> bool:
        """Wait for recovery completion; True if we stopped while waiting."""
        if self.recovery.started and not self.app.producer_only and not self.app.client_only:
            return await self.wait_for_stopped(self.recovery.completed)
        return False
class TestNull(util.ColorAsserts, unittest.TestCase):
    """Cubehelix: hue must be NaN whenever it is undefined."""

    def test_null_input(self):
        """A literal NaN hue is preserved."""
        color = Color('cubehelix', [NaN, 0.5, 1], 1)
        self.assertTrue(color.is_nan('hue'))

    def test_none_input(self):
        """The CSS `none` keyword parses to a NaN hue."""
        color = Color('color(--cubehelix none 0% 75% / 1)')
        self.assertTrue(color.is_nan('hue'))

    def test_null_normalization_min_sat(self):
        """Zero saturation leaves hue undefined after normalize()."""
        color = Color('color(--cubehelix 270 0% 75% / 1)').normalize()
        self.assertTrue(color.is_nan('hue'))

    def test_null_normalization_min_intensity(self):
        """Zero intensity leaves hue undefined after normalize()."""
        color = Color('color(--cubehelix 270 20% 0% / 1)').normalize()
        self.assertTrue(color.is_nan('hue'))
class OracleInterface(SqlInterfaceCursor):
    """SQL interface for Oracle databases, connecting through cx_Oracle."""

    target = oracle
    # DDL fragment used for auto-increment id columns on Oracle.
    id_type_decl = 'NUMBER GENERATED BY DEFAULT ON NULL AS IDENTITY'

    def __init__(self, host, port, database, user, password, print_sql=False):
        # NOTE(review): `port` is accepted but never folded into the DSN —
        # confirm whether `host` is expected to already carry the port.
        self._print_sql = print_sql
        # DSN is "host/database" when a database (service name) is given,
        # otherwise just the host string.
        self.args = dict(dsn=(('%s/%s' % (host, database)) if database else host), user=user, password=password)
        super().__init__(print_sql)

    def _create_connection(self):
        """Open a cx_Oracle connection; wrap any failure in ConnectError."""
        import cx_Oracle
        try:
            return cx_Oracle.connect(**self.args)
        except Exception as e:
            raise ConnectError(*e.args) from e

    def list_tables(self):
        """Return all table names in the USERS tablespace as Id objects."""
        sql_code = "SELECT table_name FROM all_tables WHERE tablespace_name = 'USERS' ORDER BY table_name"
        names = self._execute_sql(T.list[T.string], sql_code)
        return list(map(Id, names))

    def import_table_type(self, name, columns_whitelist=None):
        """Reflect a table's columns from USER_TAB_COLUMNS into a T.table type.

        If *columns_whitelist* is given, only those columns are kept (matched
        case-insensitively via upper-casing) and the caller's original casing
        is used for the resulting keys.

        NOTE(review): the table name is interpolated directly into the SQL
        string — safe only for trusted identifiers, not untrusted input.
        """
        columns_t = T.table(dict(name=T.string, type=T.string, nullable=T.string, default=T.string))
        columns_q = ("SELECT column_name, data_type, nullable, data_default FROM USER_TAB_COLUMNS WHERE table_name = '%s'" % name.name.upper())
        sql_columns = self._execute_sql(columns_t, columns_q)
        wl = {}
        if columns_whitelist:
            # Map upper-cased names back to the caller-provided casing.
            wl = {c.upper(): c for c in columns_whitelist}
            sql_columns = [c for c in sql_columns if (c['name'] in wl)]
        cols = {wl.get(c['name'], c['name']): type_from_sql(c['type'], c['nullable']) for c in sql_columns}
        return T.table(cols, name=name)

    def quote_name(self, name):
        # Returns the name unquoted — presumably deliberate, since Oracle
        # upper-cases unquoted identifiers; TODO confirm this is intended.
        return f'{name}'

    def table_exists(self, name):
        """Case-insensitive check that *name* is among list_tables()."""
        assert isinstance(name, Id)
        tables = [t.lower() for t in self.list_tables()]
        return (name.lower() in tables)
def retryable(which_block_arg_name: str) -> Func:
    """Decorator factory that marks a method as retryable.

    The returned decorator tags the method with the retryable attribute and
    records *which_block_arg_name* as the argument naming the retry block.
    Raises if the wrapped callable has no parameter of that name.
    """
    def decorate(meth: Meth) -> Meth:
        params = inspect.signature(meth).parameters
        if which_block_arg_name not in params:
            raise Exception(f'"{which_block_arg_name}" does not name an argument to this function')
        setattr(meth, RETRYABLE_ATTRIBUTE_NAME, True)
        setattr(meth, AT_BLOCK_ATTRIBUTE_NAME, which_block_arg_name)
        return meth

    return decorate
class DE94(DeltaE):
    """Delta E 94 (CIE94) color distance."""

    NAME = '94'

    def __init__(self, kl: float = 1, k1: float = 0.045, k2: float = 0.015):
        """Store default weighting parameters (graphic-arts defaults)."""
        self.kl = kl
        self.k1 = k1
        self.k2 = k2

    def distance(self, color: 'Color', sample: 'Color', kl: Optional[float] = None, k1: Optional[float] = None, k2: Optional[float] = None, **kwargs: Any) -> float:
        """Return the CIE94 distance between *color* and *sample* in Lab."""
        kl = self.kl if kl is None else kl
        k1 = self.k1 if k1 is None else k1
        k2 = self.k2 if k2 is None else k2

        l1, a1, b1 = alg.no_nans(color.convert('lab')[:-1])
        l2, a2, b2 = alg.no_nans(sample.convert('lab')[:-1])

        # Chroma of each color.
        c1 = math.sqrt((a1 ** 2) + (b1 ** 2))
        c2 = math.sqrt((a2 ** 2) + (b2 ** 2))

        # Channel differences; note `dh` holds delta-H *squared*, so it is
        # divided by (kh*sh)**2 below rather than squared again.
        dl = l1 - l2
        dc = c1 - c2
        da = a1 - a2
        db = b1 - b2
        dh = ((da ** 2) + (db ** 2)) - (dc ** 2)

        # Weighting functions (sl, kc and kh are constant 1 in CIE94).
        sl = 1
        sc = 1 + (k1 * c1)
        sh = 1 + (k2 * c1)
        kc = 1
        kh = 1

        term_l = (dl / (kl * sl)) ** 2
        term_c = (dc / (kc * sc)) ** 2
        term_h = dh / ((kh * sh) ** 2)
        return math.sqrt(term_l + term_c + term_h)
def rem_and_collect_indents(inputstr):
    """Strip indent markers from *inputstr* and render the net indent change.

    Returns (stripped_text, indent_tokens) where indent_tokens is a run of
    open-indent or close-indent characters matching the level change, or an
    empty string when the level is unchanged.
    """
    non_indent_chars, change_in_level = rem_and_count_indents(inputstr)
    if change_in_level > 0:
        indents = openindent * change_in_level
    elif change_in_level < 0:
        indents = closeindent * (-change_in_level)
    else:
        indents = ''
    return (non_indent_chars, indents)
def test_advanced_package_override_simple(tmpdir: Path) -> None:
    """Run the simple package-override example and check its composed config."""
    cmd = ['examples/advanced/package_overrides/simple.py', ('hydra.run.dir=' + str(tmpdir)), 'hydra.job.chdir=True']
    (result, _err) = run_python_script(cmd)
    # The printed config must resolve to the expected db block.
    assert (OmegaConf.create(result) == {'db': {'driver': 'mysql', 'user': 'omry', 'pass': 'secret'}})
class OptionPlotoptionsSankeyLevelsDatalabelsTextpath(Options):
    """Highcharts `plotOptions.sankey.levels.dataLabels.textPath` options."""

    # NOTE(review): restored @property/@setter pairs — the original had
    # duplicate plain `def`s (the setter silently shadowed the getter),
    # which is only coherent as stripped property decorators.
    @property
    def attributes(self):
        """SVG attributes applied to the text path."""
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def enabled(self):
        """Whether the text path is enabled (default False)."""
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
def _stimulus_timing(exp):
    """Interactive visual stimulus presentation timing test.

    Measures presentation delays against requested wait times, estimates the
    screen refresh rate, and asks the participant how many of two squares
    flickered (probing the number of framebuffer pages).

    Returns (to_do_time, actual_time, refresh_rate, inaccuracy, delayed,
    response).
    """
    def _test1():
        # --- Intro screen; loop until RETURN is pressed. ---
        info = 'This will test the visual stimulus presentation timing specifics of your system.\nDuring the test, you will see two squares on the screen.\nAfter the test, you will be asked to indicate which (if any) of those two squares were flickering.\n\n[Press RETURN to continue]'
        text = stimuli.TextScreen('Visual stimulus presentation test', info)
        while True:
            text.present()
            (key, rt_) = exp.keyboard.wait([constants.K_RETURN])
            if (key is not None):
                break
        message = stimuli.TextScreen('Running', 'Please wait...')
        # Presented three times — presumably to paint all framebuffer pages
        # on multi-buffered displays; TODO confirm.
        message.present()
        message.present()
        message.present()
        # --- Build canvases: c1 = left square, c2 = right square, c3 = both.
        c1 = stimuli.Canvas((400, 400))
        c2 = stimuli.Canvas((400, 400))
        c3 = stimuli.Canvas((400, 400))
        frame1 = stimuli.Rectangle((100, 100), position=((- 100), 0))
        frame2 = stimuli.Rectangle((100, 100), position=(100, 0))
        # Background-coloured inset so only the square's outline is visible.
        bg = stimuli.Rectangle((90, 90), colour=exp.background_colour)
        bg.plot(frame1)
        bg.plot(frame2)
        frame1.plot(c1)
        frame2.plot(c2)
        frame1.plot(c3)
        frame2.plot(c3)
        c1.preload()
        c2.preload()
        c3.preload()
        c1.present(clear=False)
        c2.present(clear=False)
        c3.present(clear=False)
        # Invisible (background-coloured) one-pixel circles used as minimal
        # stimuli to trigger buffer flips during measurement.
        s1 = stimuli.Circle(1, colour=exp.background_colour)
        s2 = stimuli.Circle(1, colour=exp.background_colour)
        s1.preload()
        s2.preload()
        # --- Timing measurement: requested waits of 0..59 ms, 3 reps each,
        # in shuffled order; record the actually elapsed time (ms).
        to_do_time = (list(range(0, 60)) * 3)
        randomize.shuffle_list(to_do_time)
        actual_time = []
        for x in to_do_time:
            s1.present(clear=False)
            start = get_time()
            exp.clock.wait(x)
            s2.present(clear=False)
            actual_time.append(((get_time() - start) * 1000))
            exp.clock.wait(randomize.rand_int(30, 60))
        # --- Estimate refresh rate from 200 back-to-back presentations.
        tmp = []
        for _x in range(100):
            start = get_time()
            s1.present(clear=False)
            tmp.append((get_time() - start))
            start = get_time()
            s2.present(clear=False)
            tmp.append((get_time() - start))
        refresh_rate = (1000 / (statistics.mean(tmp) * 1000))
        def expected_delay(presentation_time, refresh_rate):
            # Delay expected purely from waiting for the next vertical
            # retrace after the requested presentation time.
            refresh_time = (1000 / refresh_rate)
            if (refresh_time >= 1):
                return (refresh_time - (presentation_time % refresh_time))
            else:
                return 0
        # Delay not explained by retrace alignment, histogrammed per ms.
        unexplained_delay = [((x[1] - x[0]) - expected_delay(x[0], refresh_rate)) for x in zip(to_do_time, actual_time)]
        (hist, hist_str) = _histogram(unexplained_delay)
        inaccuracies = []
        delayed_presentations = 0
        for key in list(hist.keys()):
            # Fold each histogram bin back into a within-refresh inaccuracy.
            inaccuracies.extend(([(key % max(1, (1000 // refresh_rate)))] * hist[key]))
            if (key != 0):
                delayed_presentations += hist[key]
        inaccuracy = int(misc.round((sum(inaccuracies) / len(inaccuracies))))
        # Percentage of the 180 trials with any unexplained delay.
        delayed = misc.round(((100 * delayed_presentations) / 180.0), 2)
        # --- Ask how many squares flickered (framebuffer-page probe).
        respkeys = {constants.K_F1: 0, constants.K_F2: 1, constants.K_F3: 2, constants.K_0: 0, constants.K_1: 1, constants.K_2: 2}
        text = stimuli.TextScreen('How many of the two squares were flickering?', '[Press 0 (or F1), 1 (or F2), 2 (or F3)]')
        while True:
            text.present()
            (key, _rt) = exp.keyboard.wait(respkeys)
            if (key is not None):
                break
        response = respkeys[key]
        # --- Results screen, colour-coded (red = bad, yellow = borderline,
        # green = good) for refresh rate, inaccuracy and delays.
        info = stimuli.TextScreen('Results', '')
        if ((int(misc.round(refresh_rate)) < 50) or (int(misc.round(refresh_rate)) > 360)):
            results1_colour = [255, 0, 0]
        elif (int(misc.round(refresh_rate)) not in (60, 75, 120, 144, 240)):
            results1_colour = [255, 255, 0]
        else:
            results1_colour = [0, 255, 0]
        results1 = stimuli.TextScreen('', 'Estimated Screen Refresh Rate: {0} Hz (~ every {1} ms)\n\n'.format(int(misc.round(refresh_rate)), misc.round((1000 / refresh_rate), 2)), text_font='freemono', text_size=16, text_bold=True, text_justification=0, text_colour=results1_colour, position=(0, 40))
        results2 = stimuli.TextScreen('', 'Detected Framebuffer Pages: {0}\n\n'.format((response + 1)), text_font='freemono', text_size=16, text_bold=True, text_justification=0, position=(0, 20))
        if (inaccuracy > 2):
            results3_colour = [255, 0, 0]
        elif (inaccuracy in (1, 2)):
            results3_colour = [255, 255, 0]
        else:
            results3_colour = [0, 255, 0]
        results3 = stimuli.TextScreen('', 'Average Reporting Inaccuracy: {0} ms\n\n'.format(inaccuracy), text_font='freemono', text_size=16, text_bold=True, text_justification=0, text_colour=results3_colour, position=(0, (- 20)))
        if (delayed > 10):
            results4_colour = [255, 0, 0]
        elif (10 > delayed > 1):
            results4_colour = [255, 255, 0]
        else:
            results4_colour = [0, 255, 0]
        results4 = stimuli.TextScreen('', 'Unexplained Presentation Delays: {0} %\n\n\n'.format(delayed), text_font='freemono', text_size=16, text_bold=True, text_justification=0, text_colour=results4_colour, position=(0, (- 40)))
        results5 = stimuli.TextScreen('', hist_str, text_font='freemono', text_size=16, text_bold=True, text_justification=0, position=(0, (- 100)))
        results1.plot(info)
        results2.plot(info)
        results3.plot(info)
        results4.plot(info)
        results5.plot(info)
        info2 = stimuli.TextLine('[Press RETURN to continue]', position=(0, (- 160)))
        info2.plot(info)
        while True:
            info.present()
            (key, rt_) = exp.keyboard.wait([constants.K_RETURN])
            if (key is not None):
                break
        return (to_do_time, actual_time, refresh_rate, inaccuracy, delayed, response)
    return _test1()
def test_create_plan_start_model_upstream_and_downstream():
    """A '+modelA+' selection must yield modelA plus the expected BEFORE/AFTER scripts."""
    parsed = Namespace(select=['+modelA+'])
    graph = _create_test_graph()
    execution_plan = ExecutionPlan.create_plan_from_graph(parsed, graph, MagicMock(project_name=PROJECT_NAME))
    # BEFORE scripts selected by the plan for this graph slice.
    assert_contains_only(execution_plan.before_scripts, ['script.model.BEFORE.scriptC.py', 'script.model.BEFORE.scriptD.py'])
    # Only modelA itself is selected as a dbt model.
    assert (execution_plan.dbt_models == ['model.test_project.modelA'])
    # AFTER scripts selected by the plan for this graph slice.
    assert_contains_only(execution_plan.after_scripts, ['script.model.AFTER.scriptA.py', 'script.model.AFTER.scriptB.py'])
def _parse(mod, buf, offset):
    """Parse one OXM/OXS TLV from *buf* at *offset*.

    Returns (type_num, value, mask, field_len); mask is None when the
    header's has-mask bit is not set.
    """
    (oxx_type_num, total_hdr_len, hasmask, value_len, field_len) = _parse_header_impl(mod, buf, offset)
    value_offset = offset + total_hdr_len
    pack_str = '!%ds' % value_len
    assert struct.calcsize(pack_str) == value_len
    (value,) = struct.unpack_from(pack_str, buf, value_offset)
    mask = None
    if hasmask:
        # The mask immediately follows the value and has the same width.
        (mask,) = struct.unpack_from(pack_str, buf, value_offset + value_len)
    return (oxx_type_num, value, mask, field_len)
def set_observation_idx(doc):
    """Copy the component index from the parent observation's template onto *doc*.

    No-op when the document has no parent observation or no matching
    Observation Component row is found.
    """
    if not doc.parent_observation:
        return
    parent_template = frappe.db.get_value('Observation', doc.parent_observation, 'observation_template')
    component_idx = frappe.db.get_value('Observation Component', {'parent': parent_template, 'observation_template': doc.observation_template}, 'idx')
    if component_idx:
        doc.observation_idx = component_idx
class ElasticsearchDisasterBase(DisasterBase):
    """Base view for disaster spending endpoints backed by Elasticsearch
    award aggregations, grouped by a subclass-provided aggregation key."""

    # Fields searched by the free-text `query` filter.
    query_fields: List[str]
    # Primary terms-aggregation key; set by subclasses.
    agg_key: str
    agg_group_name: str = 'group_by_agg_key'
    # Optional secondary aggregation key (annotation corrected to Optional).
    sub_agg_key: Optional[str] = None
    sub_agg_group_name: str = 'sub_group_by_sub_agg_key'
    filter_query: ES_Q
    bucket_count: int
    pagination: Pagination
    # Maps API sort keys to ES document fields.
    sort_column_mapping: Dict[(str, str)]
    # Maps API total keys to ES fields that get summed.
    sum_column_mapping: Dict[(str, str)]
    sub_top_hits_fields: Optional[List[str]] = None
    top_hits_fields: Optional[List[str]] = None

    # NOTE(review): `_response()` looks like the remnant of a stripped
    # decorator (possibly `@cache_response()`) — confirm against upstream.
    _response()
    def post(self, request: Request) -> Response:
        """Handle POST: build the filter query, size the aggregation, run it,
        and attach pagination metadata and advisory messages."""
        query = self.filters.pop('query', None)
        if query:
            self.filters['query'] = {'text': query, 'fields': self.query_fields}
        self.filter_query = QueryWithFilters.generate_awards_elasticsearch_query(self.filters)
        # Restrict to documents with at least one non-zero summed metric.
        non_zero_queries = []
        for field in self.sum_column_mapping.values():
            non_zero_queries.append(ES_Q('range', **{field: {'gt': 0}}))
            non_zero_queries.append(ES_Q('range', **{field: {'lt': 0}}))
        self.filter_query.must.append(ES_Q('bool', should=non_zero_queries, minimum_should_match=1))
        self.bucket_count = get_number_of_unique_terms_for_awards(self.filter_query, f"{self.agg_key.replace('.keyword', '')}.hash")
        messages = []
        if (self.pagination.sort_key in ('id', 'code')):
            messages.append(f"Notice! API Request to sort on '{self.pagination.sort_key}' field isn't fully implemented. Results were actually sorted using 'description' field.")
        # ES terms aggregations are capped at 10,000 buckets.
        if ((self.bucket_count > 10000) and (self.agg_key == settings.ES_ROUTING_FIELD)):
            self.bucket_count = 10000
            messages.append("Notice! API Request is capped at 10,000 results. Either download to view all results or filter using the 'query' attribute.")
        response = self.query_elasticsearch()
        response['page_metadata'] = get_pagination_metadata(self.bucket_count, self.pagination.limit, self.pagination.page)
        if messages:
            response['messages'] = messages
        return Response(response)

    def build_elasticsearch_result(self, info_buckets: List[dict]) -> List[dict]:
        """Transform aggregation buckets into API rows; subclasses override."""
        pass

    def build_elasticsearch_search_with_aggregations(self) -> Optional[AwardSearch]:
        """Assemble the AwardSearch with nested DEFC sums per agg-key bucket.

        Returns None when no buckets would match.
        """
        self.filter_query.must.append(ES_Q('exists', field=self.agg_key))
        search = AwardSearch().filter(self.filter_query)
        if (self.bucket_count == 0):
            return None
        elif (self.agg_key == settings.ES_ROUTING_FIELD):
            # Routing-field aggregation: exact counts, sort inside the terms agg.
            size = self.bucket_count
            shard_size = size
            group_by_agg_key_values = {'order': [{self.sort_column_mapping[self.pagination.sort_key]: self.pagination.sort_order}, {self.sort_column_mapping['id']: self.pagination.sort_order}]}
            bucket_sort_values = None
        else:
            # Generic key: pad shard_size for accuracy and sort via a
            # bucket_sort pipeline instead.
            size = self.bucket_count
            shard_size = (self.bucket_count + 100)
            group_by_agg_key_values = {}
            bucket_sort_values = {'sort': [{self.sort_column_mapping[self.pagination.sort_key]: {'order': self.pagination.sort_order}}, {self.sort_column_mapping['id']: {'order': self.pagination.sort_order}}]}
        if (shard_size > 10000):
            raise ForbiddenException('Current filters return too many unique items. Narrow filters to return results or use downloads.')
        group_by_agg_key_values.update({'field': self.agg_key, 'size': size, 'shard_size': shard_size})
        group_by_agg_key = A('terms', **group_by_agg_key_values)
        filter_agg_query = ES_Q('terms', **{'covid_spending_by_defc.defc': self.filters.get('def_codes')})
        filtered_aggs = A('filter', filter_agg_query)
        # Sums are scaled by 100 in ES ('_value * 100'); presumably undone
        # downstream when reading the aggregation — confirm.
        sum_covid_outlay = A('sum', field='covid_spending_by_defc.outlay', script='_value * 100')
        sum_covid_obligation = A('sum', field='covid_spending_by_defc.obligation', script='_value * 100')
        sum_loan_value = A('sum', field='total_loan_value', script='_value * 100')
        if self.top_hits_fields:
            dim_metadata = A('top_hits', size=1, sort=[{'update_date': {'order': 'desc'}}], _source={'includes': self.top_hits_fields})
        reverse_nested = A('reverse_nested', **{})
        # Per-bucket: nested into covid_spending_by_defc, filter to the
        # requested DEFCs, sum obligations/outlays, then reverse-nest to sum
        # loan value at the award level.
        search.aggs.bucket(self.agg_group_name, group_by_agg_key).bucket('nested', A('nested', path='covid_spending_by_defc')).bucket('filtered_aggs', A('filter', filter_agg_query)).metric('total_covid_obligation', sum_covid_obligation).metric('total_covid_outlay', sum_covid_outlay).bucket('reverse_nested', reverse_nested).metric('total_loan_value', sum_loan_value)
        if self.top_hits_fields:
            search.aggs[self.agg_group_name].metric('dim_metadata', dim_metadata)
        # Grand totals across all buckets, same nested/filter/sum structure.
        search.aggs.bucket('totals', A('nested', path='covid_spending_by_defc')).bucket('filtered_aggs', filtered_aggs).metric('total_covid_obligation', sum_covid_obligation).metric('total_covid_outlay', sum_covid_outlay).bucket('reverse_nested', reverse_nested).metric('total_loan_value', sum_loan_value)
        if bucket_sort_values:
            bucket_sort_aggregation = A('bucket_sort', **bucket_sort_values)
            search.aggs[self.agg_group_name].pipeline('pagination_aggregation', bucket_sort_aggregation)
        if self.sub_agg_key:
            self.extend_elasticsearch_search_with_sub_aggregation(search)
        # Hits themselves are not needed, only aggregations.
        search.update_from_dict({'size': 0})
        return search

    def extend_elasticsearch_search_with_sub_aggregation(self, search: AwardSearch):
        """Nest a secondary terms aggregation (sub_agg_key) under each
        primary bucket, mirroring the primary sum structure."""
        sub_bucket_count = get_number_of_unique_terms_for_awards(self.filter_query, f'{self.sub_agg_key}.hash')
        size = sub_bucket_count
        shard_size = (sub_bucket_count + 100)
        sub_group_by_sub_agg_key_values = {}
        if (shard_size > 10000):
            raise ForbiddenException('Current filters return too many unique items. Narrow filters to return results or use downloads.')
        if (sub_bucket_count == 0):
            return None
        sub_group_by_sub_agg_key_values.update({'field': self.sub_agg_key, 'size': size, 'shard_size': shard_size, 'order': [{self.sort_column_mapping[self.pagination.sort_key]: self.pagination.sort_order}, {self.sort_column_mapping['id']: self.pagination.sort_order}]})
        sub_group_by_sub_agg_key = A('terms', **sub_group_by_sub_agg_key_values)
        sum_covid_outlay = A('sum', field='covid_spending_by_defc.outlay', script='_value * 100')
        sum_covid_obligation = A('sum', field='covid_spending_by_defc.obligation', script='_value * 100')
        reverse_nested = A('reverse_nested', **{})
        sum_loan_value = A('sum', field='total_loan_value', script='_value * 100')
        filter_agg_query = ES_Q('terms', **{'covid_spending_by_defc.defc': self.filters.get('def_codes')})
        filtered_aggs = A('filter', filter_agg_query)
        if self.sub_top_hits_fields:
            sub_dim_metadata = A('top_hits', size=1, sort=[{'update_date': {'order': 'desc'}}], _source={'includes': self.sub_top_hits_fields})
        search.aggs[self.agg_group_name].bucket(self.sub_agg_group_name, sub_group_by_sub_agg_key).bucket('nested', A('nested', path='covid_spending_by_defc')).bucket('filtered_aggs', filtered_aggs).metric('total_covid_obligation', sum_covid_obligation).metric('total_covid_outlay', sum_covid_outlay).bucket('reverse_nested', reverse_nested).metric('total_loan_value', sum_loan_value)
        if self.sub_top_hits_fields:
            search.aggs[self.agg_group_name][self.sub_agg_group_name].metric('dim_metadata', sub_dim_metadata)

    def build_totals(self, response: dict) -> dict:
        """Extract grand totals (one per sum_column_mapping key, plus
        award_count) from the totals aggregation response."""
        totals = {key: 0 for key in self.sum_column_mapping.keys()}
        for key in totals.keys():
            # face_value_of_loan lives at the award level (reverse_nested);
            # the other sums live at the nested DEFC level.
            totals[key] += get_summed_value_as_float((response if (key != 'face_value_of_loan') else response.get('reverse_nested', {})), self.sum_column_mapping[key])
        totals['award_count'] = int(response.get('reverse_nested', {}).get('doc_count', 0))
        return totals

    def query_elasticsearch(self) -> dict:
        """Execute the search and shape {totals, results} for the response."""
        search = self.build_elasticsearch_search_with_aggregations()
        if (search is None):
            # No matching buckets: zeroed totals, empty results.
            totals = self.build_totals(response={})
            return {'totals': totals, 'results': []}
        response = search.handle_execute()
        response = response.aggs.to_dict()
        buckets = response.get('group_by_agg_key', {}).get('buckets', [])
        totals = self.build_totals(response.get('totals', {}).get('filtered_aggs', {}))
        # Paginate bucket slice before transforming into rows.
        results = self.build_elasticsearch_result(buckets[self.pagination.lower_limit:self.pagination.upper_limit])
        return {'totals': totals, 'results': results}
def apply_png_predictor(pred, colors, columns, bitspercomponent, data):
    """Reverse PNG predictor filters on *data*, returning the raw scanlines.

    Each scanline in *data* is one filter-type byte followed by `nbytes`
    filtered bytes.  Supports filter types 0 (None), 1 (Sub), 2 (Up),
    3 (Average) and 4 (Paeth) per the PNG specification.

    Fixes over the previous version:
    - Average: reconstruction is now ``raw = filt + (left + up) // 2``
      (it previously computed ``(left + up + filt) // 2``).
    - Left-neighbor distance honors bytes-per-pixel, so multi-channel data
      (colors > 1) decodes correctly (the prior per-byte carry assumed 1,
      and its `line0` was `columns` wide instead of `nbytes`).
    - The unsupported-predictor error no longer crashes formatting a bytes
      object with %d.
    - Paeth (4) is now supported; output is joined once instead of building
      a bytes object quadratically.

    Raises ValueError for unsupported bit depths or filter types.
    """
    if bitspercomponent != 8:
        raise ValueError("Unsupported `bitspercomponent': %d" % bitspercomponent)
    nbytes = colors * columns * bitspercomponent // 8
    bpp = colors * bitspercomponent // 8  # bytes per pixel: left-neighbor distance
    rows = []
    line_above = bytearray(nbytes)  # virtual all-zero line above the first row
    for i in range(0, len(data), nbytes + 1):
        ft = data[i]
        encoded = data[i + 1:i + 1 + nbytes]
        raw = bytearray()
        if ft == 0:  # None
            raw += encoded
        elif ft == 1:  # Sub: add the byte bpp positions to the left
            for j, x in enumerate(encoded):
                left = raw[j - bpp] if j >= bpp else 0
                raw.append((x + left) & 255)
        elif ft == 2:  # Up: add the byte directly above
            for x, up in zip(encoded, line_above):
                raw.append((x + up) & 255)
        elif ft == 3:  # Average of left and up neighbors
            for j, x in enumerate(encoded):
                left = raw[j - bpp] if j >= bpp else 0
                up = line_above[j]
                raw.append((x + (left + up) // 2) & 255)
        elif ft == 4:  # Paeth: neighbor closest to left + up - upper-left
            for j, x in enumerate(encoded):
                left = raw[j - bpp] if j >= bpp else 0
                up = line_above[j]
                upleft = line_above[j - bpp] if j >= bpp else 0
                p = left + up - upleft
                pa = abs(p - left)
                pb = abs(p - up)
                pc = abs(p - upleft)
                if pa <= pb and pa <= pc:
                    predicted = left
                elif pb <= pc:
                    predicted = up
                else:
                    predicted = upleft
                raw.append((x + predicted) & 255)
        else:
            raise ValueError('Unsupported predictor value: %d' % ft)
        rows.append(bytes(raw))
        line_above = raw
    return b''.join(rows)
# NOTE(review): the three bare `_type(...)` lines were remnants of stripped
# registration decorators; restored the Ryu `register_type` form — confirm
# against the original module.
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_NEXT_TABLES)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_NEXT_TABLES_MISS)
@OFPTableFeatureProp.register_type(ofproto.OFPTFPT_TABLE_SYNC_FROM)
class OFPTableFeaturePropNextTables(OFPTableFeatureProp):
    """Next-tables table-feature property: a list of 1-byte table ids."""

    _TABLE_ID_PACK_STR = '!B'

    def __init__(self, type_=None, length=None, table_ids=None):
        table_ids = table_ids if table_ids else []
        super(OFPTableFeaturePropNextTables, self).__init__(type_, length)
        self.table_ids = table_ids

    # bug fix: restored @classmethod — the first parameter is `cls` and the
    # parser is invoked on the class, not on an instance.
    @classmethod
    def parser(cls, buf):
        """Parse the property body into an instance with its table_ids list."""
        rest = cls.get_rest(buf)
        ids = []
        while rest:
            (i,) = struct.unpack_from(cls._TABLE_ID_PACK_STR, six.binary_type(rest), 0)
            rest = rest[struct.calcsize(cls._TABLE_ID_PACK_STR):]
            ids.append(i)
        return cls(table_ids=ids)

    def serialize_body(self):
        """Pack table_ids back into wire format."""
        bin_ids = bytearray()
        for i in self.table_ids:
            bin_id = bytearray()
            msg_pack_into(self._TABLE_ID_PACK_STR, bin_id, 0, i)
            bin_ids += bin_id
        return bin_ids
class OptionSeriesWindbarbSonificationContexttracksMappingHighpassFrequency(Options):
    """Highcharts `series.windbarb.sonification.contextTracks.mapping.highpass.frequency` options."""

    # NOTE(review): restored @property/@setter pairs — the original had
    # duplicate plain `def`s (each getter silently shadowed by its setter),
    # which is only coherent as stripped property decorators.
    @property
    def mapFunction(self):
        """Mapping function for the audio parameter."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data point property to map to the audio parameter."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum value for the audio parameter."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum value for the audio parameter."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range context for the min/max mapping."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): restored the stripped `@pytest.mark` prefixes — the bare
# `.parametrize(...)` / `.usefixtures(...)` lines were not valid Python.
@pytest.mark.parametrize('contents, expected_errors', [(dedent('\n QUEUE_OPTION DOCAL MAX_RUNNING 4\n STOP_LONG_RUNNING flase 0 1\n NUM_REALIZATIONS not_int\n ENKF_ALPHA not_float\n RUN_TEMPLATE dsajldkald/sdjkahsjka/wqehwqhdsa\n JOB_SCRIPT dnsjklajdlksaljd/dhs7sh/qhwhe\n JOB_SCRIPT non_executable_file\n NUM_REALIZATIONS 1 2 3 4 5\n NUM_REALIZATIONS\n '), [ExpectedErrorInfo(line=2, column=14, end_column=19, match='argument 1 must be one of'), ExpectedErrorInfo(line=3, column=19, end_column=24, match='must have a boolean value as argument 1'), ExpectedErrorInfo(line=3, column=1, end_column=18, match='must have maximum'), ExpectedErrorInfo(line=4, column=18, end_column=25, match='must have an integer value as argument 1'), ExpectedErrorInfo(line=5, column=12, end_column=21, match='must have a number as argument 1'), ExpectedErrorInfo(line=6, column=14, end_column=46, match='Cannot find file or directory "dsajldkald/sdjkahsjka/wqehwqhdsa". The configured value was'), ExpectedErrorInfo(line=7, column=12, end_column=41, match='Could not find executable'), ExpectedErrorInfo(other_files={'non_executable_file': FileDetail(contents='', is_executable=False)}, line=8, column=12, end_column=31, match='File not executable'), ExpectedErrorInfo(line=9, column=1, end_column=17, match='must have maximum'), ExpectedErrorInfo(line=10, column=1, end_column=17, match='must have at least')])])
@pytest.mark.usefixtures('use_tmpdir')
def test_that_multiple_keyword_specific_tokens_are_located(contents, expected_errors):
    """Every malformed config line must produce an error at the expected location."""
    for expected_error in expected_errors:
        assert_that_config_leads_to_error(config_file_contents=contents, expected_error=expected_error)
class ToolBarManager(pyface.ToolBarManager):
    """Tool-bar manager that routes actions through a workbench window."""

    # The workbench window hosting this tool bar.
    window = Instance('pyface.workbench.api.WorkbenchWindow')

    def create_tool_bar(self, parent, controller=None, **kwargs):
        """Create the tool bar; default the controller to one bound to our window."""
        if controller is None:
            controller = ActionController(window=self.window)
        return super().create_tool_bar(parent, controller=controller, **kwargs)
def test_config_file_dir_parsing_options():
    """The test server must pick up dir/suffix/path options from its config file."""
    (server, r) = unittest_server_init()
    assert (server.source_dirs == {'pp/**', 'subdir'})
    assert (server.incl_suffixes == {'.FF', '.fpc', '.h', 'f20'})
    assert (server.excl_suffixes == {'_tmp.f90', '_h5hut_tests.F90'})
    assert (server.excl_paths == {'excldir', 'hover/**'})
def _diff(trivial_changes: List[Tuple[(str, str, re.RegexFlag)]], ignore_in_name: List[str], old_path: str, new_path: str, diff_path: str, input_path: str, output_path: str, diff_index_path: str, pickles_len: int, count: int, pickle_path: str) -> None:
    """Diff one pickled doctree between *old_path* and *new_path* trees and
    write an rstdiff result pickle under *diff_path*.

    A missing old/new pickle is treated as an empty document (so additions
    and deletions still diff).  Changes matching *trivial_changes* are
    ignored; section-name fragments in *ignore_in_name* are skipped when
    matching sections.  Returns early (writing nothing) when there is no
    meaningful difference.

    NOTE(review): `output_path`, `diff_index_path`, `pickles_len` and
    `count` are unused here — presumably kept for signature parity with a
    parallel-map caller; confirm before removing.
    """
    old_pickle_path = os.path.join(old_path, pickle_path)
    new_pickle_path = os.path.join(new_path, pickle_path)
    # Load both sides; substitute an empty document (and /dev/null as the
    # reported path) when one side does not exist.
    try:
        with GzipFile(old_pickle_path, 'rb') as old_file:
            old_doc = pickle.load(cast(IO[bytes], old_file))
    except FileNotFoundError:
        old_doc = new_document(old_pickle_path)
        old_pickle_path = os.devnull
    try:
        with GzipFile(new_pickle_path, 'rb') as new_file:
            new_doc = pickle.load(cast(IO[bytes], new_file))
    except FileNotFoundError:
        new_doc = new_document(new_pickle_path)
        new_pickle_path = os.devnull
    if (not meaningful_diffs(old_doc, new_doc, trivial_changes)):
        return
    diff_pickle_path = os.path.join(diff_path, (pickle_path + '64'))
    diff_dir_path = os.path.dirname(diff_pickle_path)
    os.makedirs(diff_dir_path, exist_ok=True)
    print('diff', old_pickle_path[len(input_path):], '->', new_pickle_path[len(input_path):])
    # Configure an rstdiff publisher writing base64 pickles.
    pub = rstdiff.processCommandLine()
    pub.set_writer('picklebuilder.writers.pickle64')
    settings_spec = SettingsSpec()
    settings_spec.settings_spec = rstdiff.settings_spec
    settings_spec.settings_defaults = rstdiff.settings_defaults
    pub.process_command_line(usage=rstdiff.usage, description=rstdiff.description, settings_spec=settings_spec, config_section=rstdiff.config_section)
    pub.set_destination(destination_path=diff_pickle_path)
    pub.set_reader('standalone', None, 'restructuredtext')
    pub.settings.language_code = 'en'
    pub.settings.ignore_in_section_name = ignore_in_name
    # Both documents must share the publisher settings and fresh reporters
    # before transforms run.
    old_doc.settings = pub.settings
    old_doc.reporter = new_reporter('RSTDIFF', pub.settings)
    new_doc.settings = pub.settings
    new_doc.reporter = new_reporter('RSTDIFF', pub.settings)
    # Split text nodes into words so the diff works at word granularity.
    rstdiff.Text2Words(old_doc).apply()
    rstdiff.Text2Words(new_doc).apply()
    try:
        diff_doc = rstdiff.createDiff(pub, old_doc, new_doc)
    except rstdiff.DocumentUnchanged:
        return
    # Re-join words and inline generated nodes before writing the result.
    rstdiff.Words2Text(diff_doc).apply()
    rstdiff.Generated2Inline(diff_doc).apply()
    pub.writer.write(diff_doc, pub.destination)
    pub.writer.assemble_parts()
def render_query(query, engine_type, config=None):
    """Render a single EQL query as an anonymous analytic.

    Accepts either a raw query string or an already-parsed ``PipedQuery``.
    Raw input is parsed first and preserved in the analytic metadata under
    ``_source``. Returns whatever ``render_analytic`` produces for the
    given engine type.
    """
    meta = {}
    if not isinstance(query, PipedQuery):
        # Keep the original text around, then parse it into an AST.
        meta['_source'] = query
        query = parse_query(query)
    analytic = EqlAnalytic(query=query, metadata=meta)
    return render_analytic(analytic, engine_type=engine_type, config=config, analytics_only=False)
class OptionPlotoptionsTreegraphMarker(Options):
    """Point-marker options for the Highcharts *treegraph* series.

    Each option is a read/write property backed by the generic ``Options``
    config store (``_config_get`` for the documented default,
    ``_config`` to record a value).

    NOTE(review): in the extracted source every getter/setter pair appeared
    undecorated, so each getter ``def`` was silently shadowed by the setter
    of the same name. The ``@property`` / ``@<name>.setter`` decorators are
    restored here per the standard generated-options pattern -- confirm
    against the code generator.
    """

    @property
    def fillColor(self):
        """Fill color of the marker; default None (series/point color)."""
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def fillOpacity(self):
        """Fill opacity of the marker; default 1."""
        return self._config_get(1)

    @fillOpacity.setter
    def fillOpacity(self, num: float):
        self._config(num, js_type=False)

    @property
    def height(self):
        """Explicit marker height in pixels; default None."""
        return self._config_get(None)

    @height.setter
    def height(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineColor(self):
        """Color of the marker border; default '#ffffff'."""
        return self._config_get('#ffffff')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Width of the marker border in pixels; default 0."""
        return self._config_get(0)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        """Radius of the marker in pixels; default 10."""
        return self._config_get(10)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def states(self) -> 'OptionPlotoptionsTreegraphMarkerStates':
        """Sub-options object for marker hover/select states."""
        return self._config_sub_data('states', OptionPlotoptionsTreegraphMarkerStates)

    @property
    def symbol(self):
        """Marker symbol name; default 'circle'."""
        return self._config_get('circle')

    @symbol.setter
    def symbol(self, text: str):
        self._config(text, js_type=False)

    @property
    def width(self):
        """Explicit marker width in pixels; default None."""
        return self._config_get(None)

    @width.setter
    def width(self, num: float):
        self._config(num, js_type=False)
def find_object_given_state(needle, haystack, object):
    """Locate, within ``object``, the node occupying the same position as
    ``needle`` does within ``haystack``.

    ``haystack`` and ``object`` are assumed to be structurally parallel
    trees linked through ``filter`` / ``filters`` attributes. Returns the
    matching node of ``object``, or ``None`` when ``needle`` is not found.
    """
    if needle is haystack:
        return object
    if hasattr(object, 'filter'):
        # Single child: descend in lock-step on both trees.
        return find_object_given_state(needle, haystack.filter, object.filter)
    if hasattr(object, 'filters'):
        # Multiple children: search each parallel pair in order.
        for sub_haystack, sub_object in zip(haystack.filters, object.filters):
            found = find_object_given_state(needle, sub_haystack, sub_object)
            if found is not None:
                return found
    return None
class TestPriceList(unittest.TestCase):
    """Unit tests for PriceList event collection and validation logging."""

    @staticmethod
    def _make_price_list():
        # Fresh list with a throwaway logger for each test.
        return PriceList(logging.Logger('test'))

    def test_price_list(self):
        """A valid EUR price event is appended to the event list."""
        plist = self._make_price_list()
        plist.append(
            zoneKey=ZoneKey('AT'),
            datetime=datetime(2023, 1, 1, tzinfo=timezone.utc),
            price=1,
            source='trust.me',
            currency='EUR',
        )
        assert len(plist.events) == 1

    def test_append_to_list_logs_error(self):
        """An invalid currency code ('EURO') is rejected with a logged error."""
        plist = self._make_price_list()
        with patch.object(plist.logger, 'error') as mock_error:
            plist.append(
                zoneKey=ZoneKey('AT'),
                datetime=datetime(2023, 1, 1, tzinfo=timezone.utc),
                price=1,
                source='trust.me',
                currency='EURO',
            )
            mock_error.assert_called_once()
def extractLangyanirvanaWordpressCom(item):
    """Parser for posts from langyanirvana.wordpress.com.

    Returns a release message for 'Nirvana In Fire' chapters, ``None`` for
    previews or posts with no chapter/volume info, and ``False`` for
    anything unrecognised.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip preview posts and posts that carry neither chapter nor volume.
    if not (chp or vol) or 'preview' in title.lower():
        return None
    if title.startswith('Chapter ') and item['tags'] == ['Uncategorized']:
        return buildReleaseMessageWithType(item, 'Nirvana In Fire', vol, chp, frag=frag, postfix=postfix, tl_type='translated')
    return False
class Ui_DumpSoDialog(object):
    """Qt-Designer-style UI definition for the 'dump_so' dialog.

    Builds a dialog with a module-name line edit, a result list, and
    submit/clear buttons inside a 'match' group box.
    """

    def setupUi(self, Dialog):
        """Create and lay out all widgets on ``Dialog``."""
        Dialog.setObjectName('Dialog')
        Dialog.resize(372, 316)
        self.gridLayout_3 = QtWidgets.QGridLayout(Dialog)
        self.gridLayout_3.setObjectName('gridLayout_3')
        self.groupBox = QtWidgets.QGroupBox(Dialog)
        self.groupBox.setObjectName('groupBox')
        self.gridLayout = QtWidgets.QGridLayout(self.groupBox)
        self.gridLayout.setObjectName('gridLayout')
        self.label_3 = QtWidgets.QLabel(self.groupBox)
        self.label_3.setObjectName('label_3')
        self.gridLayout.addWidget(self.label_3, 0, 0, 1, 1)
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName('horizontalLayout')
        self.btnSubmit = QtWidgets.QPushButton(self.groupBox)
        # BUGFIX: original read QSize(120, ) -- QSize requires both width and
        # height, so construction raised TypeError. 16777215 is Qt's
        # QWIDGETSIZE_MAX, the Designer default for "unbounded" -- confirm
        # against the original .ui file.
        self.btnSubmit.setMaximumSize(QtCore.QSize(120, 16777215))
        self.btnSubmit.setObjectName('btnSubmit')
        self.horizontalLayout.addWidget(self.btnSubmit)
        self.btnClear = QtWidgets.QPushButton(self.groupBox)
        self.btnClear.setMaximumSize(QtCore.QSize(120, 16777215))
        self.btnClear.setObjectName('btnClear')
        self.horizontalLayout.addWidget(self.btnClear)
        self.gridLayout.addLayout(self.horizontalLayout, 2, 1, 1, 1)
        self.txtModule = QtWidgets.QLineEdit(self.groupBox)
        self.txtModule.setObjectName('txtModule')
        self.gridLayout.addWidget(self.txtModule, 0, 1, 1, 1)
        self.listModule = QtWidgets.QListWidget(self.groupBox)
        self.listModule.setObjectName('listModule')
        self.gridLayout.addWidget(self.listModule, 1, 1, 1, 1)
        self.gridLayout_3.addWidget(self.groupBox, 0, 0, 1, 1)
        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        """Apply translatable texts to the widgets.

        NOTE(review): the button labels are empty strings in the extracted
        source -- likely non-ASCII text lost in extraction; left unchanged.
        """
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate('Dialog', 'dump_so'))
        self.groupBox.setTitle(_translate('Dialog', 'match'))
        self.label_3.setText(_translate('Dialog', ':'))
        self.btnSubmit.setText(_translate('Dialog', ''))
        self.btnClear.setText(_translate('Dialog', ''))
class Container(containers.DeclarativeContainer):
    """Dependency-injection container wiring the application's providers."""
    # Auto-wire providers into the endpoints module on container init.
    wiring_config = containers.WiringConfiguration(modules=['.endpoints'])
    # Application settings loaded from config.yml.
    config = providers.Configuration(yaml_files=['config.yml'])
    # One shared Database instance built from the configured URL.
    db = providers.Singleton(Database, db_url=config.db.url)
    # New repository per injection, bound to the shared session factory.
    user_repository = providers.Factory(UserRepository, session_factory=db.provided.session)
    # New service per injection, wrapping the repository.
    user_service = providers.Factory(UserService, user_repository=user_repository)
class OptionSeriesGaugeAccessibilityPoint(Options):
    """Accessibility point-description options for the Highcharts *gauge*
    series.

    Each option is a read/write property backed by the generic ``Options``
    config store (``_config_get`` for the documented default, ``_config``
    to record a value).

    NOTE(review): in the extracted source every getter/setter pair appeared
    undecorated, so each getter ``def`` was silently shadowed by the setter
    of the same name. The ``@property`` / ``@<name>.setter`` decorators are
    restored here per the standard generated-options pattern -- confirm
    against the code generator.
    """

    @property
    def dateFormat(self):
        """Date format string for point dates; default None."""
        return self._config_get(None)

    @dateFormat.setter
    def dateFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def dateFormatter(self):
        """Callback formatting point dates; default None."""
        return self._config_get(None)

    @dateFormatter.setter
    def dateFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def describeNull(self):
        """Whether null points are described to screen readers; default True."""
        return self._config_get(True)

    @describeNull.setter
    def describeNull(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def descriptionFormat(self):
        """Format string for the point description; default None."""
        return self._config_get(None)

    @descriptionFormat.setter
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def descriptionFormatter(self):
        """Callback producing the point description; default None."""
        return self._config_get(None)

    @descriptionFormatter.setter
    def descriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueDecimals(self):
        """Number of decimals in announced values; default None."""
        return self._config_get(None)

    @valueDecimals.setter
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    @property
    def valueDescriptionFormat(self):
        """Template for the announced value description."""
        return self._config_get('{xDescription}{separator}{value}.')

    @valueDescriptionFormat.setter
    def valueDescriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def valuePrefix(self):
        """Prefix prepended to announced values; default None."""
        return self._config_get(None)

    @valuePrefix.setter
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueSuffix(self):
        """Suffix appended to announced values; default None."""
        return self._config_get(None)

    @valueSuffix.setter
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
def test_transaction_cost_valid(london_plus_miner, funded_address, funded_address_private_key):
    """A dynamic-fee transaction spending the entire balance minus the
    exact base-fee cost must be accepted on a post-London chain."""
    chain = london_plus_miner
    vm = chain.get_vm()
    base_fee = vm.get_header().base_fee_per_gas
    # London guarantees a non-zero base fee.
    assert base_fee > 0
    balance = vm.state.get_balance(funded_address)
    # Leave exactly enough to cover base_fee * gas for a plain transfer.
    fee_budget = base_fee * GAS_TX
    tx = new_dynamic_fee_transaction(
        vm,
        from_=funded_address,
        to=ADDRESS_A,
        private_key=funded_address_private_key,
        gas=GAS_TX,
        amount=balance - fee_budget,
        max_priority_fee_per_gas=1,
        max_fee_per_gas=base_fee,
    )
    assert vm.get_header().gas_used == 0
    chain.apply_transaction(tx)
    # The transaction executed, so gas was consumed.
    assert chain.get_vm().get_header().gas_used > 0
class TestLang(util.TestCase):
    """Tests for the CSS ``:lang()`` pseudo-class across HTML, HTML5,
    XML, and XHTML documents, including wildcard and quoted forms."""
    MARKUP = '\n <div lang="de-DE">\n <p id="1"></p>\n </div>\n <div lang="de-DE-1996">\n <p id="2"></p>\n </div>\n <div lang="de-Latn-DE">\n <p id="3"></p>\n </div>\n <div lang="de-Latf-DE">\n <p id="4"></p>\n </div>\n <div lang="de-Latn-DE-1996">\n <p id="5"></p>\n </div>\n <p id="6" lang="de-DE"></p>\n <div lang="a-DE">\n <p id="7"></p>\n </div>\n <!-- Singletons don\'t match implicit wildcards (* not at start are ignored and treated as implicit) -->\n <div lang="a-a-DE">\n <p id="8"></p>\n </div>\n <div lang="en-a-DE">\n <p id="9"></p>\n </div>\n '
    def test_lang(self):
        """Basic language-range match with implicit wildcards."""
        self.assert_selector(self.MARKUP, 'p:lang(de-DE)', ['1', '2', '3', '4', '5', '6'], flags=util.HTML)
    def test_lang_missing_range(self):
        """An empty subtag (de--DE) matches nothing."""
        self.assert_selector(self.MARKUP, 'p:lang(de--DE)', [], flags=util.HTML)
    def test_explicit_wildcard(self):
        """An explicit escaped '*' subtag behaves like the implicit wildcard."""
        self.assert_selector(self.MARKUP, 'p:lang(de-\\*-DE)', ['1', '2', '3', '4', '5', '6'], flags=util.HTML)
    def test_only_wildcard(self):
        """A lone '*' matches any element with a language."""
        self.assert_selector(self.MARKUP, "p:lang('*')", ['1', '2', '3', '4', '5', '6', '7', '8', '9'], flags=util.HTML)
    def test_wildcard_start_no_match(self):
        """Leading wildcard with a non-matching remainder matches nothing."""
        self.assert_selector(self.MARKUP, "p:lang('*-de-DE')", [], flags=util.HTML)
    def test_wildcard_start_collapse(self):
        """Consecutive leading wildcards collapse to a single one."""
        self.assert_selector(self.MARKUP, "p:lang('*-*-*-DE')", ['1', '2', '3', '4', '5', '6', '7'], flags=util.HTML)
    def test_wildcard_at_start_escaped(self):
        """Escaped leading wildcard in an unquoted range."""
        self.assert_selector(self.MARKUP, 'p:lang(\\*-DE)', ['1', '2', '3', '4', '5', '6', '7'], flags=util.HTML)
    def test_language_quoted(self):
        """Quoted language ranges behave like unquoted ones."""
        self.assert_selector(self.MARKUP, "p:lang('de-DE')", ['1', '2', '3', '4', '5', '6'], flags=util.HTML)
    def test_language_quoted_with_escaped_newline(self):
        """An escaped newline inside a quoted range is ignored."""
        self.assert_selector(self.MARKUP, "p:lang('de-\\\nDE')", ['1', '2', '3', '4', '5', '6'], flags=util.HTML)
    def test_wildcard_at_start_quoted(self):
        """Leading wildcard in a quoted range."""
        self.assert_selector(self.MARKUP, "p:lang('*-DE')", ['1', '2', '3', '4', '5', '6', '7'], flags=util.HTML)
    def test_avoid_implicit_language(self):
        """Combining [lang] restricts matches to explicit attributes."""
        self.assert_selector(self.MARKUP, 'p[lang]:lang(de-DE)', ['6'], flags=util.HTML)
    def test_language_und(self):
        """'*' does not match empty or absent language, only 'und'."""
        markup = '\n <div id="1" lang=""></div>\n <div id="2" lang="und"></div>\n <div id="3" lang=>\n <div id="4"></div>\n </div>\n <div id="5"></div>\n '
        self.assert_selector(markup, "div:lang('*')", ['2'], flags=util.HTML)
    def test_language_empty_string(self):
        """An empty range matches empty (or valueless) lang attributes."""
        markup = '\n <div id="1" lang=""></div>\n <div id="2" lang="und"></div>\n <div id="3" lang=>\n <div id="4"></div>\n </div>\n <div id="5"></div>\n '
        self.assert_selector(markup, "div:lang('')", ['1', '3', '4'], flags=util.HTML)
    def test_language_list(self):
        """Multiple comma-separated ranges are OR-ed together."""
        markup = '\n <div lang="de-DE">\n <p id="1"></p>\n </div>\n <div lang="en">\n <p id="2"></p>\n </div>\n <div lang="de-Latn-DE">\n <p id="3"></p>\n </div>\n <div lang="de-Latf-DE">\n <p id="4"></p>\n </div>\n <div lang="en-US">\n <p id="5"></p>\n </div>\n <p id="6" lang="de-DE"></p>\n '
        self.assert_selector(markup, "p:lang(de-DE, '*-US')", ['1', '3', '4', '5', '6'], flags=util.HTML)
    def test_undetermined_language(self):
        """No language anywhere in the document: nothing matches."""
        markup = '\n <div>\n <p id="1"></p>\n </div>\n '
        self.assert_selector(markup, 'p:lang(en)', [], flags=util.HTML)
    def test_language_in_header(self):
        """A content-language meta tag supplies the document language."""
        markup = '\n <!DOCTYPE html>\n <html>\n <head>\n <meta content="en-US">\n </head>\n <body>\n <div>\n <p id="1"></p>\n </div>\n <div>\n <p id="2"></p>\n </div>\n </body>\n '
        self.assert_selector(markup, "p:lang('*-US')", ['1', '2'], flags=util.HTML)
    def test_xml_style_language_in_html5(self):
        """In HTML5, xml:lang only applies inside foreign (e.g. MathML) content."""
        markup = '\n <math xml:lang="en">\n <mtext id="1"></mtext>\n </math>\n <div xml:lang="en">\n <mtext id="2"></mtext>\n </div>\n '
        self.assert_selector(markup, 'mtext:lang(en)', ['1'], flags=util.HTML5)
    def test_xml_style_language(self):
        """In pure XML, xml:lang carries the language."""
        markup = '\n <?xml version="1.0" encoding="UTF-8"?>\n <html>\n <head>\n </head>\n <body>\n <div xml:lang="de-DE">\n <p id="1"></p>\n </div>\n <div xml:lang="de-DE-1996">\n <p id="2"></p>\n </div>\n <div xml:lang="de-Latn-DE">\n <p id="3"></p>\n </div>\n <div xml:lang="de-Latf-DE">\n <p id="4"></p>\n </div>\n <div xml:lang="de-Latn-DE-1996">\n <p id="5"></p>\n </div>\n <p id="6" xml:lang="de-DE"></p>\n </body>\n </html>\n '
        self.assert_selector(markup, 'p:lang(de-DE)', ['1', '2', '3', '4', '5', '6'], flags=util.XML)
    def test_language_in_xhtml(self):
        """XHTML parsed as XML honors both lang and xml:lang.

        NOTE(review): the DOCTYPE/xmlns URLs in this markup string look
        truncated by extraction; left byte-identical.
        """
        markup = '\n <?xml version="1.0" encoding="UTF-8"?>\n <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"\n " <html lang="en" xmlns=" <head>\n </head>\n <body>\n <div lang="de-DE" xml:lang="de-DE">\n <p id="1"></p>\n </div>\n <div lang="de-DE-1996" xml:lang="de-DE-1996">\n <p id="2"></p>\n </div>\n <div lang="de-Latn-DE" xml:lang="de-Latn-DE">\n <p id="3"></p>\n </div>\n <div lang="de-Latf-DE" xml:lang="de-Latf-DE">\n <p id="4"></p>\n </div>\n <div lang="de-Latn-DE-1996" xml:lang="de-Latn-DE-1996">\n <p id="5"></p>\n </div>\n <p id="6" lang="de-DE" xml:lang="de-DE"></p>\n </body>\n </html>\n '
        self.assert_selector(markup, 'p:lang(de-DE)', ['1', '2', '3', '4', '5', '6'], flags=util.XML)
    def test_language_in_xhtml_without_html_style_lang(self):
        """In XHTML mode, plain xml:lang without lang does not match.

        NOTE(review): the DOCTYPE/xmlns URLs in this markup string look
        truncated by extraction; left byte-identical.
        """
        markup = '\n <?xml version="1.0" encoding="UTF-8"?>\n <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"\n " <html lang="en" xmlns=" <head>\n </head>\n <body>\n <div xml:lang="de-DE">\n <p id="1"></p>\n </div>\n <div xml:lang="de-DE-1996">\n <p id="2"></p>\n </div>\n <div xml:lang="de-Latn-DE">\n <p id="3"></p>\n </div>\n <div xml:lang="de-Latf-DE">\n <p id="4"></p>\n </div>\n <div xml:lang="de-Latn-DE-1996">\n <p id="5"></p>\n </div>\n <p id="6" xml:lang="de-DE"></p>\n </body>\n </html>\n '
        self.assert_selector(markup, 'p:lang(de-DE)', [], flags=util.XHTML)
class KeyboardControlsTester(flx.Widget):
    """Demo widget pairing a keyboard-controllable tree with a combobox."""

    def init(self):
        """Build the layout: tree on the left, combobox on the right, then
        populate the tree with three categories of three items each."""
        combo_options = ['Paris', 'New York', 'Enschede', 'Tokio']
        with flx.HBox():
            self.tree = TreeWithControls(flex=1, max_selected=1)
            with flx.VBox(flex=1):
                self.combo = flx.ComboBox(options=combo_options, editable=True)
                flx.Widget(flex=1)  # spacer
        with self.tree:
            for cat in ('foo', 'bar', 'spam'):
                with flx.TreeItem(text=cat):
                    for name in ('Martin', 'Kees', 'Hans'):
                        item = flx.TreeItem(title=name)
                        # Checked only in 'foo'; None means no checkbox at all.
                        item.set_checked(((cat == 'foo') or None))

    # BUGFIX: the extracted source had a bare string ('combo.text') here --
    # the decorator was evidently stripped. Restored per the standard flexx
    # reaction pattern so the handler actually fires on text changes.
    @flx.reaction('combo.text')
    def _combo_text_changed(self, *events):
        """Log every change of the combobox text."""
        for ev in events:
            print('combo text is now', ev.new_value)
def test_missing_units(tmpdir):
    """load_icgem_gdf must raise IOError when the units line is missing."""
    source_path = os.path.join(TEST_DATA_DIR, 'icgem-sample.gdf')
    corrupt_path = str(tmpdir.join('missing_units.gdf'))
    # Copy the sample file, dropping every line that declares units.
    with open(source_path) as src, open(corrupt_path, 'w') as dst:
        for line in src:
            if '[mgal]' not in line:
                dst.write(line)
    with raises(IOError):
        load_icgem_gdf(corrupt_path)
class LevelModel(proteus.Transport.OneLevelTransport):
nCalls = 0
def __init__(self, uDict, phiDict, testSpaceDict, matType, dofBoundaryConditionsDict, dofBoundaryConditionsSetterDict, coefficients, elementQuadrature, elementBoundaryQuadrature, fluxBoundaryConditionsDict=None, advectiveFluxBoundaryConditionsSetterDict=None, diffusiveFluxBoundaryConditionsSetterDictDict=None, stressTraceBoundaryConditionsSetterDict=None, stabilization=None, shockCapturing=None, conservativeFluxDict=None, numericalFluxType=None, TimeIntegrationClass=None, massLumping=False, reactionLumping=False, options=None, name='defaultName', reuse_trial_and_test_quadrature=True, sd=True, movingDomain=False, bdyNullSpace=False):
    """Set up the one-level transport model: store solution/test spaces and
    BCs, build quadrature rules, allocate quadrature-point storage, and
    construct the C++ mass-correction kernel (cMCorr_base).

    Follows the standard proteus OneLevelTransport constructor layout;
    statement order matters (e.g. NonlinearEquation.__init__ needs the DOF
    counts computed just above it).
    """
    self.hasCutCells = True
    self.useConstantH = coefficients.useConstantH
    from proteus import Comm
    # --- bookkeeping flags and solution containers -----------------------
    self.movingDomain = movingDomain
    self.tLast_mesh = None
    self.name = name
    self.sd = sd
    self.Hess = False
    self.lowmem = True
    self.timeTerm = True
    self.testIsTrial = True
    self.phiTrialIsTrial = True
    self.u = uDict
    self.ua = {}
    self.phi = phiDict
    self.dphi = {}
    self.matType = matType
    self.reuse_test_trial_quadrature = reuse_trial_and_test_quadrature
    if self.reuse_test_trial_quadrature:
        # Quadrature reuse only works if all components share a FEM space type.
        for ci in range(1, coefficients.nc):
            assert (self.u[ci].femSpace.__class__.__name__ == self.u[0].femSpace.__class__.__name__), 'to reuse_test_trial_quad all femSpaces must be the same!'
    self.mesh = self.u[0].femSpace.mesh
    self.testSpace = testSpaceDict
    self.dirichletConditions = dofBoundaryConditionsDict
    self.dirichletNodeSetList = None
    self.bdyNullSpace = bdyNullSpace
    self.coefficients = coefficients
    self.coefficients.initializeMesh(self.mesh)
    self.nc = self.coefficients.nc
    self.stabilization = stabilization
    self.shockCapturing = shockCapturing
    self.conservativeFlux = conservativeFluxDict
    self.fluxBoundaryConditions = fluxBoundaryConditionsDict
    self.advectiveFluxBoundaryConditionsSetterDict = advectiveFluxBoundaryConditionsSetterDict
    self.diffusiveFluxBoundaryConditionsSetterDictDict = diffusiveFluxBoundaryConditionsSetterDictDict
    # --- detect whether any coefficient term makes stabilization nonlinear
    self.stabilizationIsNonlinear = False
    if (self.stabilization is not None):
        for ci in range(self.nc):
            if (ci in coefficients.mass):
                for flag in list(coefficients.mass[ci].values()):
                    if (flag == 'nonlinear'):
                        self.stabilizationIsNonlinear = True
            if (ci in coefficients.advection):
                for flag in list(coefficients.advection[ci].values()):
                    if (flag == 'nonlinear'):
                        self.stabilizationIsNonlinear = True
            if (ci in coefficients.diffusion):
                for diffusionDict in list(coefficients.diffusion[ci].values()):
                    for flag in list(diffusionDict.values()):
                        if (flag != 'constant'):
                            self.stabilizationIsNonlinear = True
            if (ci in coefficients.potential):
                for flag in list(coefficients.potential[ci].values()):
                    if (flag == 'nonlinear'):
                        self.stabilizationIsNonlinear = True
            if (ci in coefficients.reaction):
                for flag in list(coefficients.reaction[ci].values()):
                    if (flag == 'nonlinear'):
                        self.stabilizationIsNonlinear = True
            if (ci in coefficients.hamiltonian):
                for flag in list(coefficients.hamiltonian[ci].values()):
                    if (flag == 'nonlinear'):
                        self.stabilizationIsNonlinear = True
    # --- which components need element-boundary integrals -----------------
    self.elementBoundaryIntegrals = {}
    for ci in range(self.nc):
        self.elementBoundaryIntegrals[ci] = ((self.conservativeFlux is not None) or (numericalFluxType is not None) or (self.fluxBoundaryConditions[ci] == 'outFlow') or (self.fluxBoundaryConditions[ci] == 'mixedFlow') or (self.fluxBoundaryConditions[ci] == 'setFlow'))
    # --- DOF counts per component; feed the NonlinearEquation base class ---
    self.nSpace_global = self.u[0].femSpace.nSpace_global
    self.nDOF_trial_element = [u_j.femSpace.max_nDOF_element for u_j in list(self.u.values())]
    self.nDOF_phi_trial_element = [phi_k.femSpace.max_nDOF_element for phi_k in list(self.phi.values())]
    self.n_phi_ip_element = [phi_k.femSpace.referenceFiniteElement.interpolationConditions.nQuadraturePoints for phi_k in list(self.phi.values())]
    self.nDOF_test_element = [femSpace.max_nDOF_element for femSpace in list(self.testSpace.values())]
    self.nFreeDOF_global = [dc.nFreeDOF_global for dc in list(self.dirichletConditions.values())]
    self.nVDOF_element = sum(self.nDOF_trial_element)
    self.nFreeVDOF_global = sum(self.nFreeDOF_global)
    NonlinearEquation.__init__(self, self.nFreeVDOF_global)
    # --- assemble element quadrature rules (per integral key) -------------
    elementQuadratureDict = {}
    elemQuadIsDict = isinstance(elementQuadrature, dict)
    if elemQuadIsDict:
        for I in self.coefficients.elementIntegralKeys:
            if (I in elementQuadrature):
                elementQuadratureDict[I] = elementQuadrature[I]
            else:
                elementQuadratureDict[I] = elementQuadrature['default']
    else:
        for I in self.coefficients.elementIntegralKeys:
            elementQuadratureDict[I] = elementQuadrature
    if (self.stabilization is not None):
        for I in self.coefficients.elementIntegralKeys:
            if elemQuadIsDict:
                if (I in elementQuadrature):
                    elementQuadratureDict[(('stab',) + I[1:])] = elementQuadrature[I]
                else:
                    elementQuadratureDict[(('stab',) + I[1:])] = elementQuadrature['default']
            else:
                elementQuadratureDict[(('stab',) + I[1:])] = elementQuadrature
    if (self.shockCapturing is not None):
        for ci in self.shockCapturing.components:
            if elemQuadIsDict:
                if (('numDiff', ci, ci) in elementQuadrature):
                    elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature[('numDiff', ci, ci)]
                else:
                    elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature['default']
            else:
                elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature
    # Lumping replaces mass/reaction quadrature with nodal (Lobatto) rules.
    if massLumping:
        for ci in list(self.coefficients.mass.keys()):
            elementQuadratureDict[('m', ci)] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
        for I in self.coefficients.elementIntegralKeys:
            elementQuadratureDict[(('stab',) + I[1:])] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
    if reactionLumping:
        for ci in list(self.coefficients.mass.keys()):
            elementQuadratureDict[('r', ci)] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
        for I in self.coefficients.elementIntegralKeys:
            elementQuadratureDict[(('stab',) + I[1:])] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
    # --- assemble element-boundary quadrature rules ------------------------
    elementBoundaryQuadratureDict = {}
    if isinstance(elementBoundaryQuadrature, dict):
        for I in self.coefficients.elementBoundaryIntegralKeys:
            if (I in elementBoundaryQuadrature):
                elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature[I]
            else:
                elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature['default']
    else:
        for I in self.coefficients.elementBoundaryIntegralKeys:
            elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature
    # Merge the per-key rules into unified point/weight arrays.
    (self.elementQuadraturePoints, self.elementQuadratureWeights, self.elementQuadratureRuleIndeces) = Quadrature.buildUnion(elementQuadratureDict)
    self.nQuadraturePoints_element = self.elementQuadraturePoints.shape[0]
    self.nQuadraturePoints_global = (self.nQuadraturePoints_element * self.mesh.nElements_global)
    (self.elementBoundaryQuadraturePoints, self.elementBoundaryQuadratureWeights, self.elementBoundaryQuadratureRuleIndeces) = Quadrature.buildUnion(elementBoundaryQuadratureDict)
    self.nElementBoundaryQuadraturePoints_elementBoundary = self.elementBoundaryQuadraturePoints.shape[0]
    self.nElementBoundaryQuadraturePoints_global = ((self.mesh.nElements_global * self.mesh.nElementBoundaries_element) * self.nElementBoundaryQuadraturePoints_elementBoundary)
    # --- allocate quadrature-point storage dictionaries --------------------
    self.q = {}
    self.ebq = {}
    self.ebq_global = {}
    self.ebqe = {}
    self.phi_ip = {}
    self.q[('u', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
    self.q[('grad(u)', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, self.nSpace_global), 'd')
    self.q[('r', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
    self.ebqe[('u', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
    self.ebqe[('grad(u)', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary, self.nSpace_global), 'd')
    self.points_elementBoundaryQuadrature = set()
    self.scalars_elementBoundaryQuadrature = set([('u', ci) for ci in range(self.nc)])
    self.vectors_elementBoundaryQuadrature = set()
    self.tensors_elementBoundaryQuadrature = set()
    logEvent(memory('element and element boundary Jacobians', 'OneLevelTransport'), level=4)
    # --- inflow BC scaffolding (allocated but filled elsewhere) ------------
    self.inflowBoundaryBC = {}
    self.inflowBoundaryBC_values = {}
    self.inflowFlux = {}
    for cj in range(self.nc):
        self.inflowBoundaryBC[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global,), 'i')
        self.inflowBoundaryBC_values[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nDOF_trial_element[cj]), 'd')
        self.inflowFlux[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
    # --- identify interior nodes (not touching an exterior boundary face) --
    self.internalNodes = set(range(self.mesh.nNodes_global))
    for ebNE in range(self.mesh.nExteriorElementBoundaries_global):
        ebN = self.mesh.exteriorElementBoundariesArray[ebNE]
        eN_global = self.mesh.elementBoundaryElementsArray[(ebN, 0)]
        ebN_element = self.mesh.elementBoundaryLocalElementBoundariesArray[(ebN, 0)]
        for i in range(self.mesh.nNodes_element):
            if (i != ebN_element):
                I = self.mesh.elementNodesArray[(eN_global, i)]
                self.internalNodes -= set([I])
    self.nNodes_internal = len(self.internalNodes)
    self.internalNodesArray = np.zeros((self.nNodes_internal,), 'i')
    for (nI, n) in enumerate(self.internalNodes):
        self.internalNodesArray[nI] = n
    del self.internalNodes
    self.internalNodes = None
    logEvent('Updating local to global mappings', 2)
    self.updateLocal2Global()
    logEvent('Building time integration object', 2)
    logEvent(memory('inflowBC, internalNodes,updateLocal2Global', 'OneLevelTransport'), level=4)
    # --- time integration ---------------------------------------------------
    if (self.stabilization and self.stabilization.usesGradientStabilization):
        self.timeIntegration = TimeIntegrationClass(self, integrateInterpolationPoints=True)
    else:
        self.timeIntegration = TimeIntegrationClass(self)
    if (options is not None):
        self.timeIntegration.setFromOptions(options)
    logEvent(memory('TimeIntegration', 'OneLevelTransport'), level=4)
    logEvent('Calculating numerical quadrature formulas', 2)
    self.calculateQuadrature()
    self.setupFieldStrides()
    # --- lazy mass-matrix / mass-correction work arrays (built on demand) ---
    self.MassMatrix = None
    self.LumpedMassMatrix = None
    self.rhs_mass_correction = None
    self.MassMatrix_sparseFactor = None
    self.Jacobian_sparseFactor = None
    self.lumped_L2p_vof_mass_correction = None
    self.limited_L2p_vof_mass_correction = None
    self.L2p_vof_mass_correction = None
    comm = Comm.get()
    self.comm = comm
    if (comm.size() > 1):
        # Parallel runs require weak (numerical-flux) Dirichlet conditions.
        assert ((numericalFluxType is not None) and numericalFluxType.useWeakDirichletConditions), 'You must use a numerical flux to apply weak boundary conditions for parallel runs'
    logEvent(memory('stride+offset', 'OneLevelTransport'), level=4)
    # --- numerical flux and interior/exterior penalty coefficients ----------
    if (numericalFluxType is not None):
        if ((options is None) or (options.periodicDirichletConditions is None)):
            self.numericalFlux = numericalFluxType(self, dofBoundaryConditionsSetterDict, advectiveFluxBoundaryConditionsSetterDict, diffusiveFluxBoundaryConditionsSetterDictDict)
        else:
            self.numericalFlux = numericalFluxType(self, dofBoundaryConditionsSetterDict, advectiveFluxBoundaryConditionsSetterDict, diffusiveFluxBoundaryConditionsSetterDictDict, options.periodicDirichletConditions)
    else:
        self.numericalFlux = None
    if ('penalty' in self.ebq_global):
        for ebN in range(self.mesh.nElementBoundaries_global):
            for k in range(self.nElementBoundaryQuadraturePoints_elementBoundary):
                self.ebq_global['penalty'][(ebN, k)] = old_div(self.numericalFlux.penalty_constant, (self.mesh.elementBoundaryDiametersArray[ebN] ** self.numericalFlux.penalty_power))
    if ('penalty' in self.ebqe):
        for ebNE in range(self.mesh.nExteriorElementBoundaries_global):
            ebN = self.mesh.exteriorElementBoundariesArray[ebNE]
            for k in range(self.nElementBoundaryQuadraturePoints_elementBoundary):
                self.ebqe['penalty'][(ebNE, k)] = old_div(self.numericalFlux.penalty_constant, (self.mesh.elementBoundaryDiametersArray[ebN] ** self.numericalFlux.penalty_power))
    logEvent(memory('numericalFlux', 'OneLevelTransport'), level=4)
    self.elementEffectiveDiametersArray = self.mesh.elementInnerDiametersArray
    # --- post-processing, archiving, and the compiled kernel ----------------
    from proteus import PostProcessingTools
    self.velocityPostProcessor = PostProcessingTools.VelocityPostProcessingChooser(self)
    logEvent(memory('velocity postprocessor', 'OneLevelTransport'), level=4)
    from proteus import Archiver
    self.elementQuadratureDictionaryWriter = Archiver.XdmfWriter()
    self.elementBoundaryQuadratureDictionaryWriter = Archiver.XdmfWriter()
    self.exteriorElementBoundaryQuadratureDictionaryWriter = Archiver.XdmfWriter()
    self.globalResidualDummy = None
    compKernelFlag = 0
    if self.coefficients.useConstantH:
        # Use a single max diameter everywhere when requested.
        self.elementDiameter = self.mesh.elementDiametersArray.copy()
        self.elementDiameter[:] = max(self.mesh.elementDiametersArray)
    else:
        self.elementDiameter = self.mesh.elementDiametersArray
    self.mcorr = cMCorr_base(self.nSpace_global, self.nQuadraturePoints_element, self.u[0].femSpace.elementMaps.localFunctionSpace.dim, self.u[0].femSpace.referenceFiniteElement.localFunctionSpace.dim, self.testSpace[0].referenceFiniteElement.localFunctionSpace.dim, self.nElementBoundaryQuadraturePoints_elementBoundary, compKernelFlag)
def FCTStep(self):
    """Apply one flux-corrected-transport limiting step to the L2-projected
    VOF mass correction, producing the limited solution in
    ``self.limited_L2p_vof_mass_correction`` via the C++ kernel."""
    (rowptr, colind, MassMatrix) = self.MassMatrix.getCSRrepresentation()
    # Allocate the output vector lazily, sized like the lumped mass matrix.
    if (self.limited_L2p_vof_mass_correction is None):
        self.limited_L2p_vof_mass_correction = np.zeros(self.LumpedMassMatrix.size, 'd')
    argsDict = cArgumentsDict.ArgumentsDict()
    argsDict['NNZ'] = self.nnz
    argsDict['numDOFs'] = (len(rowptr) - 1)
    argsDict['lumped_mass_matrix'] = self.LumpedMassMatrix
    argsDict['solH'] = self.L2p_vof_mass_correction
    argsDict['solL'] = self.lumped_L2p_vof_mass_correction
    argsDict['limited_solution'] = self.limited_L2p_vof_mass_correction
    argsDict['csrRowIndeces_DofLoops'] = rowptr
    argsDict['csrColumnOffsets_DofLoops'] = colind
    argsDict['matrix'] = MassMatrix
    self.mcorr.FCTStep(argsDict)
def calculateCoefficients(self):
    # Coefficients are evaluated inside the compiled residual kernel,
    # so there is nothing to do here.
    pass
def calculateElementResidual(self):
    """Recompute the global residual into the dummy vector, if one has
    already been allocated by a previous getResidual call."""
    if self.globalResidualDummy is None:
        return
    self.getResidual(self.u[0].dof, self.globalResidualDummy)
def getResidual(self, u, r):
    """Evaluate the global mass-correction residual for unknowns ``u``
    into ``r`` via the compiled ``calculateResidual`` kernel.

    Also maintains the active-DOF/element masks (for cut cells), zeroes
    inactive residual entries and DOFs, and records the global mass
    conservation error.

    Removed leftover debug imports (`pdb`, `copy`) -- both unused.
    """
    r.fill(0.0)
    # Reset (or lazily allocate) the activity masks used by the cut-cell kernel.
    try:
        self.isActiveR[:] = 0.0
        self.isActiveDOF[:] = 0.0
        self.isActiveElement[:] = 0
    except AttributeError:
        self.isActiveR = np.zeros_like(r)
        self.isActiveDOF = np.zeros_like(self.u[0].dof)
        self.isActiveElement = np.zeros((self.mesh.nElements_global,), 'i')
    # Load the current unknowns into self.u[*].dof.
    self.setUnknowns(u)
    argsDict = cArgumentsDict.ArgumentsDict()
    # Reference-element data and mesh geometry.
    argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
    argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
    argsDict['mesh_dof'] = self.mesh.nodeArray
    argsDict['mesh_l2g'] = self.mesh.elementNodesArray
    argsDict['x_ref'] = self.elementQuadraturePoints
    argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
    argsDict['u_trial_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
    argsDict['u_test_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
    argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
    argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
    argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
    argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
    argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
    # Model parameters and regularization factors.
    argsDict['nElements_global'] = self.mesh.nElements_global
    argsDict['useMetrics'] = self.coefficients.useMetrics
    argsDict['epsFactHeaviside'] = self.coefficients.epsFactHeaviside
    argsDict['epsFactDirac'] = self.coefficients.epsFactDirac
    argsDict['epsFactDiffusion'] = self.coefficients.epsFactDiffusion
    argsDict['u_l2g'] = self.u[0].femSpace.dofMap.l2g
    argsDict['r_l2g'] = self.l2g[0]['freeGlobal']
    argsDict['elementDiameter'] = self.elementDiameter
    argsDict['elementBoundaryDiameter'] = self.mesh.elementBoundaryDiametersArray
    argsDict['nodeDiametersArray'] = self.mesh.nodeDiametersArray
    argsDict['u_dof'] = self.u[0].dof
    # Level-set and VOF fields from the coupled models.
    argsDict['phi_dof'] = self.coefficients.lsModel.u[0].dof
    argsDict['q_phi'] = self.coefficients.q_u_ls
    argsDict['q_normal_phi'] = self.coefficients.q_n_ls
    argsDict['ebqe_phi'] = self.coefficients.ebqe_u_ls
    argsDict['ebqe_normal_phi'] = self.coefficients.ebqe_n_ls
    argsDict['q_H'] = self.coefficients.q_H_vof
    argsDict['q_u'] = self.q[('u', 0)]
    argsDict['q_n'] = self.q[('grad(u)', 0)]
    argsDict['ebqe_u'] = self.ebqe[('u', 0)]
    argsDict['ebqe_n'] = self.ebqe[('grad(u)', 0)]
    argsDict['q_r'] = self.q[('r', 0)]
    argsDict['q_porosity'] = self.coefficients.q_porosity
    argsDict['offset_u'] = self.offset[0]
    argsDict['stride_u'] = self.stride[0]
    argsDict['globalResidual'] = r
    # Exterior-boundary connectivity for the trace integrals.
    argsDict['nExteriorElementBoundaries_global'] = self.mesh.nExteriorElementBoundaries_global
    argsDict['exteriorElementBoundariesArray'] = self.mesh.exteriorElementBoundariesArray
    argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
    argsDict['elementBoundariesArray'] = self.mesh.elementBoundariesArray
    argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
    # Cut-cell / immersed-solid data from the flow model.
    argsDict['ghost_penalty_constant'] = self.coefficients.flowCoefficients.ghost_penalty_constant
    argsDict['phi_solid_nodes'] = self.coefficients.flowCoefficients.phi_s
    argsDict['useExact_s'] = int(self.coefficients.flowCoefficients.useExact)
    argsDict['isActiveR'] = self.isActiveR
    argsDict['isActiveDOF'] = self.isActiveDOF
    argsDict['isActiveElement'] = self.isActiveElement
    argsDict['ebqe_phi_s'] = self.coefficients.flowCoefficients.ebqe_phi_s
    argsDict['phi_solid'] = self.coefficients.flowCoefficients.q_phi_solid
    self.mcorr.calculateResidual(argsDict, self.coefficients.useExact)
    # Zero out residual entries and DOFs that the kernel marked inactive.
    r *= self.isActiveR
    self.u[0].dof[:] = np.where((self.isActiveDOF == 1.0), self.u[0].dof, 0.0)
    logEvent('Global residual', level=9, data=r)
    # Track the global (parallel-summed) mass conservation error.
    self.coefficients.massConservationError = fabs(globalSum(r[:self.mesh.nNodes_owned].sum()))
    logEvent('   Mass Conservation Error: ', level=3, data=self.coefficients.massConservationError)
    self.nonlinear_function_evaluations += 1
    if (self.globalResidualDummy is None):
        self.globalResidualDummy = np.zeros(r.shape, 'd')
def getMassMatrix(self):
    """Assemble the consistent and lumped mass matrices for the correction equation.

    On the first call, allocates ``self.MassMatrix`` as a CSR matrix reusing the
    Jacobian's sparsity pattern, plus the ``self.LumpedMassMatrix`` vector; on
    subsequent calls the lumped vector is re-zeroed.  The CSR entries are cleared
    and the compiled kernel ``self.mcorr.calculateMassMatrix`` fills both.
    """
    if (self.MassMatrix is None):
        # First call: borrow the Jacobian's CSR structure (rowptr/colind) and
        # allocate a separate value array for the mass matrix.
        (rowptr, colind, nzval) = self.jacobian.getCSRrepresentation()
        self.MassMatrix_a = nzval.copy()
        nnz = nzval.shape[(- 1)]
        self.MassMatrix = LinearAlgebraTools.SparseMat(self.nFreeDOF_global[0], self.nFreeDOF_global[0], nnz, self.MassMatrix_a, colind, rowptr)
        self.LumpedMassMatrix = np.zeros((rowptr.size - 1), 'd')
    else:
        self.LumpedMassMatrix.fill(0.0)
    cfemIntegrals.zeroJacobian_CSR(self.nNonzerosInJacobian, self.MassMatrix)
    # Marshal mesh, reference-element, quadrature, and coefficient arrays for
    # the compiled kernel.  The string keys are the kernel's argument names.
    argsDict = cArgumentsDict.ArgumentsDict()
    argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
    argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
    argsDict['mesh_dof'] = self.mesh.nodeArray
    argsDict['mesh_l2g'] = self.mesh.elementNodesArray
    argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
    argsDict['u_trial_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
    argsDict['u_test_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
    argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
    argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
    argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
    argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
    argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
    argsDict['nElements_global'] = self.mesh.nElements_global
    argsDict['useMetrics'] = self.coefficients.useMetrics
    argsDict['epsFactHeaviside'] = self.coefficients.epsFactHeaviside
    argsDict['epsFactDirac'] = self.coefficients.epsFactDirac
    argsDict['epsFactDiffusion'] = self.coefficients.epsFactDiffusion
    argsDict['u_l2g'] = self.u[0].femSpace.dofMap.l2g
    argsDict['elementDiameter'] = self.elementDiameter
    argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
    argsDict['elementBoundariesArray'] = self.mesh.elementBoundariesArray
    argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
    argsDict['nodeDiametersArray'] = self.mesh.nodeDiametersArray
    argsDict['u_dof'] = self.u[0].dof
    argsDict['q_phi'] = self.coefficients.q_u_ls
    argsDict['q_normal_phi'] = self.coefficients.q_n_ls
    argsDict['q_H'] = self.coefficients.q_H_vof
    argsDict['q_porosity'] = self.coefficients.q_porosity
    argsDict['csrRowIndeces_u_u'] = self.csrRowIndeces[(0, 0)]
    argsDict['csrColumnOffsets_u_u'] = self.csrColumnOffsets[(0, 0)]
    # Output arrays: the CSR value array of MassMatrix and the lumped vector.
    argsDict['globalMassMatrix'] = self.MassMatrix.getCSRrepresentation()[2]
    argsDict['globalLumpedMassMatrix'] = self.LumpedMassMatrix
    self.mcorr.calculateMassMatrix(argsDict)
def getJacobian(self, jacobian):
    """Assemble the global Jacobian of the mass-correction residual.

    Zeroes the CSR value array, marshals the argument arrays for the compiled
    kernel ``self.mcorr.calculateJacobian``, then overwrites the rows of
    inactive residual DOFs with identity rows so those unknowns stay fixed in
    the linear solve.

    :param jacobian: CSR matrix to assemble into (modified in place).
    :return: the assembled ``jacobian``.
    """
    cfemIntegrals.zeroJacobian_CSR(self.nNonzerosInJacobian, jacobian)
    # Marshal mesh, reference-element, quadrature, and coefficient arrays for
    # the compiled kernel.  The string keys are the kernel's argument names.
    argsDict = cArgumentsDict.ArgumentsDict()
    argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
    argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
    argsDict['mesh_dof'] = self.mesh.nodeArray
    argsDict['mesh_l2g'] = self.mesh.elementNodesArray
    argsDict['x_ref'] = self.elementQuadraturePoints
    argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
    argsDict['u_trial_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
    argsDict['u_test_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
    argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
    argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
    argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
    argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
    argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
    argsDict['nElements_global'] = self.mesh.nElements_global
    argsDict['useMetrics'] = self.coefficients.useMetrics
    argsDict['epsFactHeaviside'] = self.coefficients.epsFactHeaviside
    argsDict['epsFactDirac'] = self.coefficients.epsFactDirac
    argsDict['epsFactDiffusion'] = self.coefficients.epsFactDiffusion
    argsDict['u_l2g'] = self.u[0].femSpace.dofMap.l2g
    argsDict['r_l2g'] = self.l2g[0]['freeGlobal']
    argsDict['elementDiameter'] = self.elementDiameter
    argsDict['elementBoundaryDiameter'] = self.mesh.elementBoundaryDiametersArray
    argsDict['nodeDiametersArray'] = self.mesh.nodeDiametersArray
    argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
    argsDict['elementBoundariesArray'] = self.mesh.elementBoundariesArray
    argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
    argsDict['u_dof'] = self.u[0].dof
    argsDict['phi_dof'] = self.coefficients.lsModel.u[0].dof
    argsDict['q_phi'] = self.coefficients.q_u_ls
    argsDict['q_normal_phi'] = self.coefficients.q_n_ls
    argsDict['q_H'] = self.coefficients.q_H_vof
    argsDict['q_porosity'] = self.coefficients.q_porosity
    argsDict['csrRowIndeces_u_u'] = self.csrRowIndeces[(0, 0)]
    argsDict['csrColumnOffsets_u_u'] = self.csrColumnOffsets[(0, 0)]
    # Output: the CSR value array of the Jacobian.
    argsDict['globalJacobian'] = jacobian.getCSRrepresentation()[2]
    argsDict['csrColumnOffsets_eb_u_u'] = self.csrColumnOffsets_eb[(0, 0)]
    argsDict['ghost_penalty_constant'] = self.coefficients.flowCoefficients.ghost_penalty_constant
    argsDict['phi_solid_nodes'] = self.coefficients.flowCoefficients.phi_s
    argsDict['useExact_s'] = int(self.coefficients.flowCoefficients.useExact)
    argsDict['isActiveR'] = self.isActiveR
    argsDict['isActiveDOF'] = self.isActiveDOF
    argsDict['isActiveElement'] = self.isActiveElement
    argsDict['ebqe_phi_s'] = self.coefficients.flowCoefficients.ebqe_phi_s
    argsDict['phi_solid'] = self.coefficients.flowCoefficients.q_phi_solid
    self.mcorr.calculateJacobian(argsDict, self.coefficients.useExact)
    # Replace each inactive DOF's row with an identity row (1.0 on the
    # diagonal, 0.0 elsewhere); assumes self.rowptr/colind/nzval alias the
    # same CSR storage as `jacobian`.
    for global_dofN_a in np.argwhere((self.isActiveR == 0.0)):
        global_dofN = global_dofN_a[0]
        for i in range(self.rowptr[global_dofN], self.rowptr[(global_dofN + 1)]):
            if (self.colind[i] == global_dofN):
                self.nzval[i] = 1.0
            else:
                self.nzval[i] = 0.0
    logEvent('Jacobian ', level=10, data=jacobian)
    self.nonlinear_function_jacobian_evaluations += 1
    return jacobian
def elementSolve(self, u, r):
    """Solve the mass-correction problem element-by-element.

    Zeroes the residual ``r``, pushes the unknowns ``u`` into the model,
    marshals the argument arrays, and delegates the per-element local solves
    to the compiled kernel ``self.mcorr.elementSolve`` with iteration limit
    ``self.maxIts`` and tolerance ``self.atol``.

    :param u: global unknown vector (read via ``setUnknowns``).
    :param r: global residual vector (zeroed, then written by the kernel).
    """
    # (Removed unused debugging imports `pdb` and `copy` left over from development.)
    r.fill(0.0)
    self.setUnknowns(u)
    # Marshal mesh, reference-element, quadrature, and coefficient arrays for
    # the compiled kernel.  The string keys are the kernel's argument names.
    argsDict = cArgumentsDict.ArgumentsDict()
    argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
    argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
    argsDict['mesh_dof'] = self.mesh.nodeArray
    argsDict['mesh_l2g'] = self.mesh.elementNodesArray
    argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
    argsDict['u_trial_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
    argsDict['u_test_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
    argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
    argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
    argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
    argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
    argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
    argsDict['nElements_global'] = self.mesh.nElements_global
    argsDict['useMetrics'] = self.coefficients.useMetrics
    argsDict['epsFactHeaviside'] = self.coefficients.epsFactHeaviside
    argsDict['epsFactDirac'] = self.coefficients.epsFactDirac
    argsDict['epsFactDiffusion'] = self.coefficients.epsFactDiffusion
    argsDict['u_l2g'] = self.u[0].femSpace.dofMap.l2g
    argsDict['elementDiameter'] = self.elementDiameter
    argsDict['nodeDiametersArray'] = self.mesh.nodeDiametersArray
    argsDict['u_dof'] = self.u[0].dof
    argsDict['q_phi'] = self.coefficients.q_u_ls
    argsDict['q_normal_phi'] = self.coefficients.q_n_ls
    argsDict['ebqe_phi'] = self.coefficients.ebqe_u_ls
    argsDict['ebqe_normal_phi'] = self.coefficients.ebqe_n_ls
    argsDict['q_H'] = self.coefficients.q_H_vof
    argsDict['q_u'] = self.q[('u', 0)]
    argsDict['q_n'] = self.q[('grad(u)', 0)]
    argsDict['ebqe_u'] = self.ebqe[('u', 0)]
    argsDict['ebqe_n'] = self.ebqe[('grad(u)', 0)]
    argsDict['q_r'] = self.q[('r', 0)]
    argsDict['q_porosity'] = self.coefficients.q_porosity
    argsDict['offset_u'] = self.offset[0]
    argsDict['stride_u'] = self.stride[0]
    argsDict['globalResidual'] = r
    argsDict['nExteriorElementBoundaries_global'] = self.mesh.nExteriorElementBoundaries_global
    argsDict['exteriorElementBoundariesArray'] = self.mesh.exteriorElementBoundariesArray
    argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
    argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
    argsDict['maxIts'] = self.maxIts
    argsDict['atol'] = self.atol
    self.mcorr.elementSolve(argsDict)
def elementConstantSolve(self, u, r):
    """Solve for an element-wise constant mass correction.

    Same setup as :meth:`elementSolve`, but delegates to the compiled kernel
    ``self.mcorr.elementConstantSolve``, which solves for a single constant
    per element.

    :param u: global unknown vector (read via ``setUnknowns``).
    :param r: global residual vector (zeroed, then written by the kernel).
    """
    # (Removed unused debugging imports `pdb` and `copy` left over from development.)
    r.fill(0.0)
    self.setUnknowns(u)
    # Marshal mesh, reference-element, quadrature, and coefficient arrays for
    # the compiled kernel.  The string keys are the kernel's argument names.
    argsDict = cArgumentsDict.ArgumentsDict()
    argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
    argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
    argsDict['mesh_dof'] = self.mesh.nodeArray
    argsDict['mesh_l2g'] = self.mesh.elementNodesArray
    argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
    argsDict['u_trial_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
    argsDict['u_test_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
    argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
    argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
    argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
    argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
    argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
    argsDict['nElements_global'] = self.mesh.nElements_global
    argsDict['useMetrics'] = self.coefficients.useMetrics
    argsDict['epsFactHeaviside'] = self.coefficients.epsFactHeaviside
    argsDict['epsFactDirac'] = self.coefficients.epsFactDirac
    argsDict['epsFactDiffusion'] = self.coefficients.epsFactDiffusion
    argsDict['u_l2g'] = self.u[0].femSpace.dofMap.l2g
    argsDict['elementDiameter'] = self.elementDiameter
    argsDict['nodeDiametersArray'] = self.mesh.nodeDiametersArray
    argsDict['u_dof'] = self.u[0].dof
    argsDict['q_phi'] = self.coefficients.q_u_ls
    argsDict['q_normal_phi'] = self.coefficients.q_n_ls
    argsDict['ebqe_phi'] = self.coefficients.ebqe_u_ls
    argsDict['ebqe_normal_phi'] = self.coefficients.ebqe_n_ls
    argsDict['q_H'] = self.coefficients.q_H_vof
    argsDict['q_u'] = self.q[('u', 0)]
    argsDict['q_n'] = self.q[('grad(u)', 0)]
    argsDict['ebqe_u'] = self.ebqe[('u', 0)]
    argsDict['ebqe_n'] = self.ebqe[('grad(u)', 0)]
    argsDict['q_r'] = self.q[('r', 0)]
    argsDict['q_porosity'] = self.coefficients.q_porosity
    argsDict['offset_u'] = self.offset[0]
    argsDict['stride_u'] = self.stride[0]
    argsDict['globalResidual'] = r
    argsDict['nExteriorElementBoundaries_global'] = self.mesh.nExteriorElementBoundaries_global
    argsDict['exteriorElementBoundariesArray'] = self.mesh.exteriorElementBoundariesArray
    argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
    argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
    argsDict['maxIts'] = self.maxIts
    argsDict['atol'] = self.atol
    self.mcorr.elementConstantSolve(argsDict)
def globalConstantRJ(self, u, r, U):
    """Evaluate residual R and Jacobian J for a single global constant correction U.

    Zeroes ``r``, pushes the unknowns ``u`` into the model, and calls the
    compiled kernel ``self.mcorr.globalConstantRJ``; the per-processor
    (R, J) contributions are then summed across processors.

    BUG FIX: the four assignments for ``q_porosity`` / ``offset_u`` /
    ``stride_u`` / ``globalResidual`` were scrambled (each key received the
    value belonging to the next one); they now match every sibling method
    (e.g. :meth:`elementSolve`).

    :param u: global unknown vector (read via ``setUnknowns``).
    :param r: global residual vector (zeroed, passed to the kernel).
    :param U: current value of the global constant correction.
    :return: tuple ``(R, J)`` of globally summed residual and Jacobian.
    """
    # (Removed unused debugging imports `pdb` and `copy` left over from development.)
    r.fill(0.0)
    self.setUnknowns(u)
    # Marshal mesh, reference-element, quadrature, and coefficient arrays for
    # the compiled kernel.  The string keys are the kernel's argument names.
    argsDict = cArgumentsDict.ArgumentsDict()
    argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
    argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
    argsDict['mesh_dof'] = self.mesh.nodeArray
    argsDict['mesh_l2g'] = self.mesh.elementNodesArray
    argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
    argsDict['u_trial_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
    argsDict['u_test_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
    argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
    argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
    argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
    argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
    argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
    # Only locally owned elements contribute, so the global sums below do not
    # double-count overlap.
    argsDict['nElements_owned'] = self.mesh.nElements_owned
    argsDict['useMetrics'] = self.coefficients.useMetrics
    argsDict['epsFactHeaviside'] = self.coefficients.epsFactHeaviside
    argsDict['epsFactDirac'] = self.coefficients.epsFactDirac
    argsDict['epsFactDiffusion'] = self.coefficients.epsFactDiffusion
    argsDict['u_l2g'] = self.u[0].femSpace.dofMap.l2g
    argsDict['elementDiameter'] = self.elementDiameter
    argsDict['nodeDiametersArray'] = self.mesh.nodeDiametersArray
    argsDict['u_dof'] = self.u[0].dof
    argsDict['q_phi'] = self.coefficients.q_u_ls
    argsDict['q_normal_phi'] = self.coefficients.q_n_ls
    argsDict['ebqe_phi'] = self.coefficients.ebqe_u_ls
    argsDict['ebqe_normal_phi'] = self.coefficients.ebqe_n_ls
    argsDict['q_H'] = self.coefficients.q_H_vof
    argsDict['q_u'] = self.q[('u', 0)]
    argsDict['q_n'] = self.q[('grad(u)', 0)]
    argsDict['ebqe_u'] = self.ebqe[('u', 0)]
    argsDict['ebqe_n'] = self.ebqe[('grad(u)', 0)]
    argsDict['q_r'] = self.q[('r', 0)]
    argsDict['q_porosity'] = self.coefficients.q_porosity
    argsDict['offset_u'] = self.offset[0]
    argsDict['stride_u'] = self.stride[0]
    argsDict['globalResidual'] = r
    argsDict['nExteriorElementBoundaries_global'] = self.mesh.nExteriorElementBoundaries_global
    argsDict['exteriorElementBoundariesArray'] = self.mesh.exteriorElementBoundariesArray
    argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
    argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
    argsDict['maxIts'] = self.maxIts
    argsDict['atol'] = self.atol
    argsDict['constant_u'] = U
    (R, J) = self.mcorr.globalConstantRJ(argsDict)
    # Sum per-processor contributions into global residual/Jacobian scalars.
    R = globalSum(R)
    J = globalSum(J)
    self.coefficients.massConservationError = fabs(R)
    return (R, J)
def globalConstantSolve(self, u, r):
    """Find a single global constant correction by damped Newton iteration.

    Repeatedly updates the constant ``U`` with a regularized Newton step
    (``J + 1e-8`` guards against a vanishing Jacobian) and backtracks with
    halving steps when the residual fails to shrink below 99% of the initial
    norm.  The converged constant is written into every DOF of ``self.u[0]``.
    """
    U = 0.0
    (R, J) = self.globalConstantRJ(u, r, U)
    num_its = 0
    logEvent(' Mass Conservation Residual 0 ', level=3, data=R)
    RNORM_OLD = fabs(R)
    # Do-while shape: the original loop condition forces at least one pass.
    while True:
        # Regularized Newton step.
        U -= old_div(R, (J + 1e-08))
        (R, J) = self.globalConstantRJ(u, r, U)
        # Backtracking line search: halve the step until the residual drops
        # below 99% of the initial norm or the line-search budget runs out.
        ls_its = 0
        while ((fabs(R) > (0.99 * RNORM_OLD)) and (ls_its < self.maxLSits)):
            ls_its += 1
            U += ((0.5 ** ls_its) * old_div(R, (J + 1e-08)))
            (R, J) = self.globalConstantRJ(u, r, U)
        num_its += 1
        logEvent(((' Mass Conservation Residual ' + repr(num_its)) + ' '), level=3, data=R)
        if not ((fabs(R) > self.atol) and (num_its < self.maxIts)):
            break
    # Broadcast the converged constant into every degree of freedom.
    self.u[0].dof.flat[:] = U
def calculateElementQuadrature(self):
    """Tabulate FE basis/gradient values at the element quadrature points.

    Also initializes the coefficients (and, when configured, the
    stabilization and shock-capturing objects) at those points.
    """
    quad_points = self.elementQuadraturePoints
    fem_space = self.u[0].femSpace
    fem_space.elementMaps.getBasisValuesRef(quad_points)
    fem_space.elementMaps.getBasisGradientValuesRef(quad_points)
    fem_space.getBasisValuesRef(quad_points)
    fem_space.getBasisGradientValuesRef(quad_points)
    self.coefficients.initializeElementQuadrature(self.timeIntegration.t, self.q)
    if self.stabilization is not None:
        self.stabilization.initializeElementQuadrature(self.mesh, self.timeIntegration.t, self.q)
        self.stabilization.initializeTimeIntegration(self.timeIntegration)
    if self.shockCapturing is not None:
        self.shockCapturing.initializeElementQuadrature(self.mesh, self.timeIntegration.t, self.q)
def calculateElementBoundaryQuadrature(self):
    """No interior element-boundary quadrature is needed for this model."""
    pass
def calculateExteriorElementBoundaryQuadrature(self):
    """Tabulate FE basis/gradient trace values at the exterior-boundary quadrature points."""
    boundary_points = self.elementBoundaryQuadraturePoints
    fem_space = self.u[0].femSpace
    fem_space.elementMaps.getBasisValuesTraceRef(boundary_points)
    fem_space.elementMaps.getBasisGradientValuesTraceRef(boundary_points)
    fem_space.getBasisValuesTraceRef(boundary_points)
    fem_space.getBasisGradientValuesTraceRef(boundary_points)
def estimate_mt(self):
    """No time-derivative estimate is needed for this (steady) correction model."""
    pass
def calculateAuxiliaryQuantitiesAfterStep(self):
    """No post-step auxiliary quantities are computed for this model."""
    pass
def calculateMass(self, q_phi):
    """Compute the globally summed mass via the compiled kernel.

    Marshals the argument arrays, calls ``self.mcorr.calculateMass`` over the
    locally owned elements, and returns the sum across processors.

    :param q_phi: level-set values at quadrature points.
        NOTE(review): this parameter is never referenced below — the kernel is
        handed ``self.coefficients.q_u_ls`` instead; confirm whether that is
        intentional.
    :return: globally summed mass (scalar).
    """
    # Marshal mesh, reference-element, quadrature, and coefficient arrays for
    # the compiled kernel.  The string keys are the kernel's argument names.
    argsDict = cArgumentsDict.ArgumentsDict()
    argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
    argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
    argsDict['mesh_dof'] = self.mesh.nodeArray
    argsDict['mesh_l2g'] = self.mesh.elementNodesArray
    argsDict['x_ref'] = self.elementQuadraturePoints
    argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
    argsDict['u_trial_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
    argsDict['u_test_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
    argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
    argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
    argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
    argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
    argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
    # Only locally owned elements contribute, so globalSum does not double-count.
    argsDict['nElements_owned'] = self.mesh.nElements_owned
    argsDict['useMetrics'] = self.coefficients.useMetrics
    argsDict['epsFactHeaviside'] = self.coefficients.epsFactHeaviside
    argsDict['epsFactDirac'] = self.coefficients.epsFactDirac
    argsDict['epsFactDiffusion'] = self.coefficients.epsFactDiffusion
    argsDict['u_l2g'] = self.u[0].femSpace.dofMap.l2g
    argsDict['elementDiameter'] = self.elementDiameter
    argsDict['nodeDiametersArray'] = self.mesh.nodeDiametersArray
    argsDict['u_dof'] = self.u[0].dof
    argsDict['phi_dof'] = self.coefficients.lsModel.u[0].dof
    argsDict['q_phi'] = self.coefficients.q_u_ls
    argsDict['q_normal_phi'] = self.coefficients.q_n_ls
    argsDict['ebqe_phi'] = self.coefficients.ebqe_u_ls
    argsDict['ebqe_normal_phi'] = self.coefficients.ebqe_n_ls
    argsDict['q_H'] = self.coefficients.q_H_vof
    argsDict['q_u'] = self.q[('u', 0)]
    argsDict['q_n'] = self.q[('grad(u)', 0)]
    argsDict['ebqe_u'] = self.ebqe[('u', 0)]
    argsDict['ebqe_n'] = self.ebqe[('grad(u)', 0)]
    argsDict['q_r'] = self.q[('r', 0)]
    argsDict['q_porosity'] = self.coefficients.q_porosity
    argsDict['offset_u'] = self.offset[0]
    argsDict['stride_u'] = self.stride[0]
    # NOTE(review): 'globalResidual' is handed the DOF array here, not a
    # residual vector, mirroring the kernel's expected slot — confirm upstream.
    argsDict['globalResidual'] = self.u[0].dof
    argsDict['nExteriorElementBoundaries_global'] = self.mesh.nExteriorElementBoundaries_global
    argsDict['exteriorElementBoundariesArray'] = self.mesh.exteriorElementBoundariesArray
    argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
    argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
    argsDict['phi_solid_nodes'] = self.coefficients.flowCoefficients.phi_s
    argsDict['useExact_s'] = int(self.coefficients.flowCoefficients.useExact)
    argsDict['phi_solid'] = self.coefficients.flowCoefficients.q_phi_solid
    return globalSum(self.mcorr.calculateMass(argsDict, self.coefficients.useExact))
def setMassQuadratureEdgeBasedStabilizationMethods(self):
    """Compute the VOF mass-correction right-hand side and lumped L2 projection.

    Lazily allocates (or re-zeroes) the correction work arrays sized like the
    VOF model's DOF vector, then delegates to the compiled kernel
    ``self.mcorr.setMassQuadratureEdgeBasedStabilizationMethods``.
    NOTE(review): reads ``self.LumpedMassMatrix`` — presumably
    :meth:`getMassMatrix` must have run first; confirm against callers.
    """
    if (self.rhs_mass_correction is None):
        # First call: allocate work arrays shaped like the VOF DOF vector.
        self.rhs_mass_correction = np.zeros(self.coefficients.vofModel.u[0].dof.shape, 'd')
        self.lumped_L2p_vof_mass_correction = np.zeros(self.coefficients.vofModel.u[0].dof.shape, 'd')
        self.L2p_vof_mass_correction = np.zeros(self.coefficients.vofModel.u[0].dof.shape, 'd')
    else:
        self.rhs_mass_correction.fill(0.0)
    # Marshal mesh, reference-element, quadrature, and coefficient arrays for
    # the compiled kernel.  The string keys are the kernel's argument names.
    argsDict = cArgumentsDict.ArgumentsDict()
    argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
    argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
    argsDict['mesh_dof'] = self.mesh.nodeArray
    argsDict['mesh_l2g'] = self.mesh.elementNodesArray
    argsDict['x_ref'] = self.elementQuadraturePoints
    argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
    argsDict['u_trial_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
    argsDict['u_test_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
    argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
    argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
    argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
    argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
    argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
    argsDict['nElements_global'] = self.mesh.nElements_global
    argsDict['useMetrics'] = self.coefficients.useMetrics
    argsDict['epsFactHeaviside'] = self.coefficients.epsFactHeaviside
    argsDict['epsFactDirac'] = self.coefficients.epsFactDirac
    argsDict['epsFactDiffusion'] = self.coefficients.epsFactDiffusion
    argsDict['phi_l2g'] = self.coefficients.lsModel.u[0].femSpace.dofMap.l2g
    argsDict['elementDiameter'] = self.elementDiameter
    argsDict['nodeDiametersArray'] = self.mesh.nodeDiametersArray
    argsDict['phi_dof'] = self.coefficients.lsModel.u[0].dof
    argsDict['q_phi'] = self.coefficients.q_u_ls
    argsDict['q_normal_phi'] = self.coefficients.q_n_ls
    argsDict['ebqe_phi'] = self.coefficients.ebqe_u_ls
    argsDict['ebqe_normal_phi'] = self.coefficients.ebqe_n_ls
    argsDict['q_H'] = self.coefficients.q_H_vof
    argsDict['q_u'] = self.q[('u', 0)]
    argsDict['q_n'] = self.q[('grad(u)', 0)]
    argsDict['ebqe_u'] = self.ebqe[('u', 0)]
    argsDict['ebqe_n'] = self.ebqe[('grad(u)', 0)]
    argsDict['q_r'] = self.q[('r', 0)]
    argsDict['q_porosity'] = self.coefficients.q_porosity
    argsDict['offset_u'] = self.offset[0]
    argsDict['stride_u'] = self.stride[0]
    argsDict['globalResidual'] = self.u[0].dof
    argsDict['nExteriorElementBoundaries_global'] = self.mesh.nExteriorElementBoundaries_global
    argsDict['exteriorElementBoundariesArray'] = self.mesh.exteriorElementBoundariesArray
    argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
    argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
    # Output arrays written by the kernel.
    argsDict['rhs_mass_correction'] = self.rhs_mass_correction
    argsDict['lumped_L2p_vof_mass_correction'] = self.lumped_L2p_vof_mass_correction
    argsDict['lumped_mass_matrix'] = self.LumpedMassMatrix
    argsDict['numDOFs'] = self.lumped_L2p_vof_mass_correction.size
    self.mcorr.setMassQuadratureEdgeBasedStabilizationMethods(argsDict, self.coefficients.useExact)
def setMassQuadrature(self):
    """Set the mass (Heaviside) quadrature values, writing into the VOF model's DOFs.

    Marshals the argument arrays and delegates to the compiled kernel
    ``self.mcorr.setMassQuadrature``, which writes the VOF model's
    ``H_dof`` array.
    """
    # Marshal mesh, reference-element, quadrature, and coefficient arrays for
    # the compiled kernel.  The string keys are the kernel's argument names.
    argsDict = cArgumentsDict.ArgumentsDict()
    argsDict['mesh_trial_ref'] = self.u[0].femSpace.elementMaps.psi
    argsDict['mesh_grad_trial_ref'] = self.u[0].femSpace.elementMaps.grad_psi
    argsDict['mesh_dof'] = self.mesh.nodeArray
    argsDict['mesh_l2g'] = self.mesh.elementNodesArray
    argsDict['x_ref'] = self.elementQuadraturePoints
    argsDict['dV_ref'] = self.elementQuadratureWeights[('u', 0)]
    argsDict['u_trial_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_trial_ref'] = self.u[0].femSpace.grad_psi
    argsDict['u_test_ref'] = self.u[0].femSpace.psi
    argsDict['u_grad_test_ref'] = self.u[0].femSpace.grad_psi
    argsDict['mesh_trial_trace_ref'] = self.u[0].femSpace.elementMaps.psi_trace
    argsDict['mesh_grad_trial_trace_ref'] = self.u[0].femSpace.elementMaps.grad_psi_trace
    argsDict['dS_ref'] = self.elementBoundaryQuadratureWeights[('u', 0)]
    argsDict['u_trial_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_trial_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['u_test_trace_ref'] = self.u[0].femSpace.psi_trace
    argsDict['u_grad_test_trace_ref'] = self.u[0].femSpace.grad_psi_trace
    argsDict['normal_ref'] = self.u[0].femSpace.elementMaps.boundaryNormals
    argsDict['boundaryJac_ref'] = self.u[0].femSpace.elementMaps.boundaryJacobians
    argsDict['nElements_global'] = self.mesh.nElements_global
    argsDict['useMetrics'] = self.coefficients.useMetrics
    argsDict['epsFactHeaviside'] = self.coefficients.epsFactHeaviside
    argsDict['epsFactDirac'] = self.coefficients.epsFactDirac
    argsDict['epsFactDiffusion'] = self.coefficients.epsFactDiffusion
    argsDict['phi_l2g'] = self.coefficients.lsModel.u[0].femSpace.dofMap.l2g
    argsDict['elementDiameter'] = self.elementDiameter
    argsDict['nodeDiametersArray'] = self.mesh.nodeDiametersArray
    argsDict['phi_dof'] = self.coefficients.lsModel.u[0].dof
    argsDict['q_phi'] = self.coefficients.q_u_ls
    argsDict['q_normal_phi'] = self.coefficients.q_n_ls
    argsDict['ebqe_phi'] = self.coefficients.ebqe_u_ls
    argsDict['ebqe_normal_phi'] = self.coefficients.ebqe_n_ls
    argsDict['q_H'] = self.coefficients.q_H_vof
    argsDict['q_u'] = self.q[('u', 0)]
    argsDict['q_n'] = self.q[('grad(u)', 0)]
    argsDict['ebqe_u'] = self.ebqe[('u', 0)]
    argsDict['ebqe_n'] = self.ebqe[('grad(u)', 0)]
    argsDict['q_r'] = self.q[('r', 0)]
    argsDict['q_porosity'] = self.coefficients.q_porosity
    argsDict['offset_u'] = self.offset[0]
    argsDict['stride_u'] = self.stride[0]
    argsDict['globalResidual'] = self.u[0].dof
    argsDict['nExteriorElementBoundaries_global'] = self.mesh.nExteriorElementBoundaries_global
    argsDict['exteriorElementBoundariesArray'] = self.mesh.exteriorElementBoundariesArray
    argsDict['elementBoundaryElementsArray'] = self.mesh.elementBoundaryElementsArray
    argsDict['elementBoundaryLocalElementBoundariesArray'] = self.mesh.elementBoundaryLocalElementBoundariesArray
    # Output: the VOF model's Heaviside DOF array, written by the kernel.
    argsDict['H_dof'] = self.coefficients.vofModel.u[0].dof
    self.mcorr.setMassQuadrature(argsDict, self.coefficients.useExact)
def calculateSolutionAtQuadrature(self):
    """Quadrature-point solution values are already maintained by the residual kernel."""
    pass
def updateAfterMeshMotion(self):
    """No model state needs updating after mesh motion."""
    pass
def test_value():
    """EnumValue parses ints and hex strings, rejects non-numeric strings,
    and validate() raises on a negative value."""
    # Construction: both a plain int and a hex string yield the same value.
    for raw, expected in ((5, 5), ('0x5', 5)):
        enum = EnumValue(value=raw)
        assert enum.value == expected
    # Assignment after construction behaves the same way.
    for raw, expected in ((3, 3), ('0x6', 6)):
        enum.value = raw
        assert enum.value == expected
    # A non-numeric string is rejected at assignment time.
    with pytest.raises(ValueError):
        enum.value = 'zz'
    # A negative value is accepted at assignment but fails validation.
    enum.value = -5
    with pytest.raises(AssertionError):
        enum.validate()
def _save_item_locally(ctx: Context, item_type: str, item_id: PublicId) -> None:
    """Copy a local (or, failing that, vendored) package into the local registry.

    :param ctx: CLI context providing the working dir and registry path.
    :param item_type: singular item type name (e.g. 'skill', 'connection').
    :param item_id: public id of the item to save.
    :raises click.ClickException: if the registry path cannot be resolved.
    """
    plural = f'{item_type}s'
    # Prefer a package in the working directory; fall back to the vendor tree.
    try:
        source_path = try_get_item_source_path(ctx.cwd, None, plural, item_id.name)
    except ClickException:
        vendor_root = os.path.join(ctx.cwd, 'vendor')
        source_path = try_get_item_source_path(vendor_root, item_id.author, plural, item_id.name)
    check_package_public_id(source_path, item_type, item_id)
    try:
        registry_path = ctx.registry_path
    except ValueError as e:
        raise click.ClickException(str(e))
    target_path = try_get_item_target_path(registry_path, item_id.author, plural, item_id.name)
    copytree(source_path, target_path)
    click.echo(f'{item_type.title()} "{item_id}" successfully saved in packages folder.')
class KMSRuleBook(bre.BaseRuleBook):
    """Rule book holding KMS crypto-key rules, indexed by resource.

    Rules are parsed from a rule-definition dict and stored in
    ``resource_rules_map``, keyed by the GCP resource each rule applies to.
    """
    # Only organization-level resources are accepted in rule definitions.
    supported_resource_types = frozenset(['organization'])
    def __init__(self, rule_defs=None):
        """Initialize the rule book, parsing ``rule_defs`` if given.

        :param rule_defs: dict with a 'rules' list, or None for an empty book.
        """
        super(KMSRuleBook, self).__init__()
        self._lock = threading.Lock()
        self.resource_rules_map = {}
        if (not rule_defs):
            self.rule_defs = {}
        else:
            self.rule_defs = rule_defs
            self.add_rules(rule_defs)
    def __eq__(self, other):
        """Equal iff the other rule book has the same resource→rules mapping."""
        if (not isinstance(other, type(self))):
            return NotImplemented
        return (self.resource_rules_map == other.resource_rules_map)
    def __ne__(self, other):
        """Negation of __eq__."""
        return (not (self == other))
    def __repr__(self):
        return 'KMSRuleBook <{}>'.format(self.resource_rules_map)
    def add_rules(self, rule_defs):
        """Parse and add every rule in ``rule_defs['rules']``."""
        for (i, rule) in enumerate(rule_defs.get('rules', [])):
            self.add_rule(rule, i)
    def add_rule(self, rule_def, rule_index):
        """Validate one rule definition and register it for each listed resource.

        :param rule_def: dict with 'resource', 'mode', 'key' (and 'name').
        :param rule_index: position of the rule in the definitions list.
        :raises audit_errors.InvalidRulesSchemaError: on any malformed field.
        """
        resources = rule_def.get('resource')
        mode = rule_def.get('mode')
        key = rule_def.get('key')
        if ((not resources) or (key is None) or (mode not in RULE_MODES)):
            raise audit_errors.InvalidRulesSchemaError('Faulty rule {}'.format(rule_index))
        for resource in resources:
            resource_type = resource.get('type')
            resource_ids = resource.get('resource_ids')
            if (resource_type not in self.supported_resource_types):
                raise audit_errors.InvalidRulesSchemaError('Invalid resource type in rule {}'.format(rule_index))
            if ((not resource_ids) or (len(resource_ids) < 1)):
                raise audit_errors.InvalidRulesSchemaError('Missing resource ids in rule {}'.format(rule_index))
            for resource_id in resource_ids:
                gcp_resource = resource_util.create_resource(resource_id=resource_id, resource_type=resource_type)
                rule_def_resource = {'key': key, 'mode': mode}
                rule = Rule(rule_name=rule_def.get('name'), rule_index=rule_index, rule=rule_def_resource)
                # setdefault ensures a ResourceRules bucket exists for this resource.
                resource_rules = self.resource_rules_map.setdefault(gcp_resource, ResourceRules(resource=gcp_resource))
                # NOTE(review): this stores a Rule under an int key in a map
                # otherwise keyed by resources — looks like a defect; confirm
                # intended behavior before changing.
                if (not resource_rules):
                    self.resource_rules_map[rule_index] = rule
                if (rule not in resource_rules.rules):
                    resource_rules.rules.add(rule)
    def get_resource_rules(self, resource):
        """Return the ResourceRules registered for ``resource``, or None."""
        return self.resource_rules_map.get(resource)
    def find_violations(self, key):
        """Collect violations for a crypto key from rules on any of its ancestors.

        Walks the key's resource ancestry, checking both the exact resource and
        a one-time '*' wildcard per resource type.

        :param key: crypto key object with ``name`` and ``crypto_key_full_name``.
        :return: list of violations found.
        """
        LOGGER.debug('Looking for crypto key violations: %s', key.name)
        violations = []
        resource_ancestors = resource_util.get_ancestors_from_full_name(key.crypto_key_full_name)
        LOGGER.debug('Ancestors of resource: %r', resource_ancestors)
        checked_wildcards = set()
        for curr_resource in resource_ancestors:
            if (not curr_resource):
                # The leaf values in the iterator can be None; skip them.
                continue
            resource_rule = self.get_resource_rules(curr_resource)
            if resource_rule:
                violations.extend(resource_rule.find_violations(key))
            # Check the wildcard rule for this resource type at most once.
            wildcard_resource = resource_util.create_resource(resource_id='*', resource_type=curr_resource.type)
            if (wildcard_resource in checked_wildcards):
                continue
            checked_wildcards.add(wildcard_resource)
            resource_rule = self.get_resource_rules(wildcard_resource)
            if resource_rule:
                violations.extend(resource_rule.find_violations(key))
        LOGGER.debug('Returning violations: %r', violations)
        return violations
def cone_actor(center=(0, 0, 0), height=1.0, radius=0.5, direction=(1, 0, 0), resolution=100, color=colors.red, opacity=1.0):
    """Create a VTK actor that renders a cone.

    :param center: cone center in world coordinates.
    :param height: cone height.
    :param radius: base radius.
    :param direction: axis direction vector.
    :param resolution: number of facets around the cone.
    :param color: actor color.
    :param opacity: actor opacity in [0, 1].
    :return: a ``tvtk.Actor`` wrapping the cone geometry.
    """
    cone = tvtk.ConeSource(center=center, height=height, radius=radius, direction=direction, resolution=resolution)
    cone_mapper = tvtk.PolyDataMapper()
    configure_input_data(cone_mapper, cone.output)
    appearance = tvtk.Property(opacity=opacity, color=color)
    cone_visual = tvtk.Actor(mapper=cone_mapper, property=appearance)
    # Force the source pipeline to execute so the actor's data is current.
    cone.update()
    return cone_visual
class FaucetDelPortTest(FaucetConfigReloadTestBase):
    """Config-reload test: deleting a port removes its learned-host flows."""
    CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "untagged"\n 200:\n description: "untagged"\n'
    CONFIG = '\n interfaces:\n %(port_1)d:\n native_vlan: 100\n acl_in: allow\n %(port_2)d:\n native_vlan: 100\n %(port_3)d:\n native_vlan: 100\n %(port_4)d:\n native_vlan: 200\n'
    def test_port_down_flow_gone(self):
        """After removing port 4 from the config, its eth_dst flow must disappear."""
        last_host = self.hosts_name_ordered()[(- 1)]
        self.require_host_learned(last_host)
        second_host_dst_match = {'eth_dst': last_host.MAC()}
        self.wait_until_matching_flow(second_host_dst_match, table_id=self._ETH_DST_TABLE)
        # Delete port 4's config (None interface config) and restart faucet.
        self.change_port_config(self.port_map['port_4'], None, None, restart=True, cold_start=None)
        self.wait_until_no_matching_flow(second_host_dst_match, table_id=self._ETH_DST_TABLE)
def extractLalalylyWordpressCom(item):
    """Feed parser for lalaly.wordpress.com releases.

    Returns a release message for known titles, None for previews or
    items without a chapter/volume, and False when no title matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    lowered_title = item['title'].lower()
    if not (chp or vol) or 'preview' in lowered_title:
        return None
    titlemap = [
        ('[CEO] Chapter', 'CEO Above, Me Below', 'translated'),
    ]
    for titlecomponent, name, tl_type in titlemap:
        if titlecomponent.lower() in lowered_title:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class DashboardPanelCounter(DashboardPanel):
    """Dashboard panel that renders a single counter aggregated from stored points."""
    # Aggregation mode; CounterAgg.NONE renders static text without loading data.
    agg: CounterAgg
    # Metric value to aggregate; required for any agg other than NONE.
    value: Optional[PanelValue] = None
    # Static label shown inside the counter.
    text: Optional[str] = None
    # NOTE(review): the bare name below looks like the remnant of a stripped
    # decorator or a truncated field declaration (e.g. "_panel_id: str = ...");
    # as written it is a plain expression statement — confirm against the
    # original source.
    _panel_id
    def build(self, data_storage: 'DataStorage', project_id: ProjectID, timestamp_start: Optional[datetime.datetime], timestamp_end: Optional[datetime.datetime]):
        """Render the counter widget; loads points unless agg is NONE."""
        if (self.agg == CounterAgg.NONE):
            return counter(counters=[CounterData(self.title, (self.text or ''))], size=self.size)
        if (self.value is None):
            raise ValueError('Counters with agg should have value')
        points = data_storage.load_points(project_id, self.filter, [self.value], timestamp_start, timestamp_end)[0]
        value = self._get_counter_value(points)
        # Use 3-decimal float formatting only for non-integral values.
        if (int(value) != value):
            ct = CounterData.float((self.text or ''), value, 3)
        else:
            ct = CounterData.int((self.text or ''), int(value))
        return counter(title=self.title, counters=[ct], size=self.size)
    def _get_counter_value(self, points: Dict[(Metric, List[Tuple[(datetime.datetime, Any)]])]) -> float:
        """Aggregate the loaded (timestamp, value) points per self.agg (LAST or SUM)."""
        if (self.value is None):
            raise ValueError('Counters with agg should have value')
        if (self.agg == CounterAgg.LAST):
            if (len(points) == 0):
                return 0
            # Value with the latest timestamp across all metrics.
            return max(((ts, v) for vs in points.values() for (ts, v) in vs), key=(lambda x: x[0]))[1]
        if (self.agg == CounterAgg.SUM):
            # None values are counted as 0.
            return sum(((v or 0) for vs in points.values() for (ts, v) in vs))
        raise ValueError(f'Unknown agg type {self.agg}')
class BroadcastingContainer():
    """Shared state broadcast from a trainer to workers.

    Holds a cloudpickle-serialized policy state dict (so it can cross
    process boundaries), a monotonically increasing version counter, a
    stop flag, and optional auxiliary data.
    """

    def __init__(self):
        self._policy_version_counter = 0  # bumped on every set_policy_state_dict()
        self._pickled_policy_state_dict = None
        self._stop_flag = False
        self._aux_data = None

    def stop_flag(self) -> bool:
        """Return True once set_stop_flag() has been called."""
        return self._stop_flag

    def set_stop_flag(self) -> None:
        # FIX: was annotated -> NoReturn, which means "never returns normally"
        # (e.g. always raises). This method does return; the annotation is None.
        self._stop_flag = True

    def policy_version(self) -> int:
        """Return how many times the policy state dict has been updated."""
        return self._policy_version_counter

    def policy_state_dict(self) -> Dict:
        """Deserialize and return the most recently stored policy state dict."""
        return cloudpickle.loads(self._pickled_policy_state_dict)

    def set_policy_state_dict(self, state_dict: Dict, aux_data: Dict = None) -> None:
        """Store *state_dict* (pickled), bump the version, and stash *aux_data*.

        FIX: annotation changed from NoReturn to None (see set_stop_flag).
        """
        self._pickled_policy_state_dict = cloudpickle.dumps(state_dict)
        self._policy_version_counter += 1
        self._aux_data = aux_data

    def aux_data(self) -> Optional[Dict]:
        """Return the aux_data given to the latest set_policy_state_dict(), if any."""
        return self._aux_data
class CloudWatchLogService(LogService):
    """LogService implementation backed by AWS CloudWatch Logs, scoped to one log group."""

    def __init__(self, log_group: str, region: str='us-west-1', access_key_id: Optional[str]=None, access_key_data: Optional[str]=None, config: Optional[Dict[(str, Any)]]=None) -> None:
        self.cloudwatch_gateway = CloudWatchGateway(region, access_key_id, access_key_data, config)
        self.log_group = log_group

    def fetch(self, log_path: str, start_time: int=0) -> List[LogEvent]:
        """Return the log events recorded under *log_path* since *start_time*."""
        return self.cloudwatch_gateway.get_log_events(self.log_group, log_path, start_time)

    def get_log_path(self, container_instance: ContainerInstance) -> str:
        # Log group minus its first character, joined with the last
        # '/'-separated segment of the container instance id.
        instance_suffix = container_instance.instance_id.split('/')[-1]
        return f'{self.log_group[1:]}/{instance_suffix}'
# NOTE(review): the call below looks like the remnant of a stripped decorator
# registering this band type (e.g. "@OFPMeterBandHeader.register_meter_band_type(...)");
# confirm against the original source.
_meter_band_type(ofproto.OFPMBT_DSCP_REMARK, ofproto.OFP_METER_BAND_DSCP_REMARK_SIZE)
class OFPMeterBandDscpRemark(OFPMeterBandHeader):
    """OpenFlow DSCP-remark meter band (OFPMBT_DSCP_REMARK)."""
    def __init__(self, rate=0, burst_size=0, prec_level=0, type_=None, len_=None):
        # type_/len_ are accepted for parser symmetry but not stored here;
        # serialize() reads self.type/self.len, presumably set by the base
        # class — confirm in OFPMeterBandHeader.
        super(OFPMeterBandDscpRemark, self).__init__()
        self.rate = rate
        self.burst_size = burst_size
        self.prec_level = prec_level
    def serialize(self, buf, offset):
        """Pack this band into *buf* at *offset* using the OF pack string."""
        msg_pack_into(ofproto.OFP_METER_BAND_DSCP_REMARK_PACK_STR, buf, offset, self.type, self.len, self.rate, self.burst_size, self.prec_level)
    # NOTE(review): first parameter is `cls` — an @classmethod decorator was
    # probably lost in this copy; confirm against the original source.
    def parser(cls, buf, offset):
        """Unpack one DSCP-remark band from *buf* at *offset* and return an instance."""
        (type_, len_, rate, burst_size, prec_level) = struct.unpack_from(ofproto.OFP_METER_BAND_DSCP_REMARK_PACK_STR, buf, offset)
        assert (cls.cls_meter_band_type == type_)
        assert (cls.cls_meter_band_len == len_)
        return cls(rate, burst_size, prec_level)
class ChainedTransforms(TestCase, Common, Edges):
    """Tests a ChainedTransforms built from two 2-element PlainTransforms."""
    def setUp(self):
        """Build the chained sequence and the expected data used by Common/Edges checks."""
        super().setUp()
        # Two PlainTransforms chained; the sequence should enumerate their
        # items in order.
        self.seq = nutils.transformseq.ChainedTransforms((nutils.transformseq.PlainTransforms(((x1, i10), (x1, i11)), 1, 1), nutils.transformseq.PlainTransforms(((x1, i12), (x1, i13)), 1, 1)))
        self.check = ((x1, i10), (x1, i11), (x1, i12), (x1, i13))
        # Items the chained sequence must report as absent.
        self.checkmissing = ((l1, i10), (x1, i14), (r1, i10))
        self.checkrefs = References.uniform(line, 4)
        self.checktodims = 1
        self.checkfromdims = 1
class OptionPlotoptionsVennSonificationTracksMappingHighpassResonance(Options):
    """Generated option wrapper for venn sonification highpass-resonance mapping.

    NOTE(review): every option appears as two same-named methods (getter then
    setter); @property / @<name>.setter decorators were most likely stripped
    in this copy — as written each later def silently overwrites the earlier.
    Confirm against the original generated source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class Select(Block):
    """Scope block for a Fortran SELECT construct.

    From the visible checks: select_type 2 is a SELECT TYPE with a binding
    ("name => var"), 3 and 4 are type regions (3 carries a binding type).
    """
    def __init__(self, file_ast: FortranAST, line_number: int, name: str, select_info):
        super().__init__(file_ast, line_number, name)
        self.select_type = select_info.type
        self.binding_name = None
        self.bound_var = None
        self.binding_type = None
        if (self.select_type == 2):
            # "a => b": left side is the new binding name, right the target var;
            # a bare "b" binds the variable directly.
            binding_split = select_info.binding.split('=>')
            if (len(binding_split) == 1):
                self.bound_var = binding_split[0].strip()
            elif (len(binding_split) == 2):
                self.binding_name = binding_split[0].strip()
                self.bound_var = binding_split[1].strip()
        elif (self.select_type == 3):
            self.binding_type = select_info.binding
        # Close a still-open type-region SELECT scope before this one starts.
        if ((file_ast.current_scope is not None) and (file_ast.current_scope.get_type() == SELECT_TYPE_ID) and file_ast.current_scope.is_type_region()):
            file_ast.end_scope(line_number)
    def get_type(self, no_link=False):
        return SELECT_TYPE_ID
    def get_desc(self):
        return 'SELECT'
    def is_type_binding(self):
        """True when this SELECT binds a name/variable (select_type 2)."""
        return (self.select_type == 2)
    def is_type_region(self):
        return (self.select_type in [3, 4])
    def create_binding_variable(self, file_ast, line_number, var_desc, case_type):
        """Create the Variable introduced by a type-guard branch, or None.

        NOTE(review): self.parent is dereferenced on the next line before the
        "self.parent is not None" check below — if parent can be None this
        raises AttributeError first; confirm the intended ordering.
        """
        if (self.parent.get_type() != SELECT_TYPE_ID):
            return None
        binding_name = None
        bound_var = None
        if ((self.parent is not None) and self.parent.is_type_binding()):
            binding_name = self.parent.binding_name
            bound_var = self.parent.bound_var
        # Outside a CLASS DEFAULT-like case (case_type 4), a named binding
        # does not link back to the bound variable.
        if ((binding_name is not None) and (case_type != 4)):
            bound_var = None
        if (binding_name is not None):
            return Variable(file_ast, line_number, binding_name, var_desc, [], link_obj=bound_var)
        elif (bound_var is not None):
            return Variable(file_ast, line_number, bound_var, var_desc, [])
        return None
def get_snapshot(client, repository=None, snapshot=''):
    """Return snapshot information from *repository*.

    An empty *snapshot* means "all snapshots" (reported as '*' in errors).
    Raises MissingArgument when no repository is given and FailedExecution
    when the lookup fails.
    """
    if not repository:
        raise MissingArgument('No value for "repository" provided')
    snapname = snapshot if snapshot != '' else '*'
    try:
        return client.snapshot.get(repository=repository, snapshot=snapshot)
    except (es8exc.TransportError, es8exc.NotFoundError) as err:
        msg = f'Unable to get information about snapshot {snapname} from repository: {repository}. Error: {err}'
        raise FailedExecution(msg) from err
class OptionSeriesAreaMarkerStatesSelect(Options):
    """Generated option wrapper for area-series marker 'select' state styling.

    NOTE(review): each option appears as a same-named getter/setter pair;
    @property / @<name>.setter decorators were most likely stripped in this
    copy — confirm against the original generated source.
    """
    def enabled(self):
        return self._config_get(True)
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
    def fillColor(self):
        return self._config_get('#cccccc')
    def fillColor(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        return self._config_get('#000000')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        return self._config_get(2)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def radius(self):
        return self._config_get(None)
    def radius(self, num: float):
        self._config(num, js_type=False)
def test_extract_features_from_different_timezones():
    """Hour extraction with utc=True normalises mixed-tz datetimes; utc=False raises."""
    frame = pd.DataFrame()
    berlin = pd.Series(pd.date_range(start='2014-08-01 09:00', freq='H', periods=3, tz='Europe/Berlin'))
    central = pd.Series(pd.date_range(start='2014-08-01 09:00', freq='H', periods=3, tz='US/Central'))
    frame['time'] = pd.concat([berlin, central], axis=0)
    frame.reset_index(inplace=True, drop=True)
    # With utc=True all hours are reported in UTC.
    transformer = DatetimeFeatures(variables='time', features_to_extract=['hour'], utc=True)
    out = transformer.fit_transform(frame)
    pd.testing.assert_frame_equal(out, pd.DataFrame({'time_hour': [7, 8, 9, 14, 15, 16]}), check_dtype=False)
    # Without utc=True pandas refuses to merge the two timezones.
    exp_err_msg = 'Tz-aware datetime.datetime cannot be converted to datetime64 unless utc=True, at position 3'
    with pytest.raises(ValueError) as errinfo:
        assert DatetimeFeatures(variables='time', features_to_extract=['hour'], utc=False).fit_transform(frame)
    assert str(errinfo.value) == exp_err_msg
class OptionSeriesPackedbubbleSonificationContexttracks(Options):
    """Generated option wrapper for packedbubble sonification context tracks.

    NOTE(review): scalar options appear as same-named getter/setter pairs;
    @property / @<name>.setter decorators were most likely stripped in this
    copy — confirm against the original generated source. Sub-option
    accessors (activeWhen, mapping, pointGrouping) delegate to
    _config_sub_data and are single-defined.
    """
    def activeWhen(self) -> 'OptionSeriesPackedbubbleSonificationContexttracksActivewhen':
        return self._config_sub_data('activeWhen', OptionSeriesPackedbubbleSonificationContexttracksActivewhen)
    def instrument(self):
        return self._config_get('piano')
    def instrument(self, text: str):
        self._config(text, js_type=False)
    def mapping(self) -> 'OptionSeriesPackedbubbleSonificationContexttracksMapping':
        return self._config_sub_data('mapping', OptionSeriesPackedbubbleSonificationContexttracksMapping)
    def midiName(self):
        return self._config_get(None)
    def midiName(self, text: str):
        self._config(text, js_type=False)
    def pointGrouping(self) -> 'OptionSeriesPackedbubbleSonificationContexttracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionSeriesPackedbubbleSonificationContexttracksPointgrouping)
    def roundToMusicalNotes(self):
        return self._config_get(True)
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)
    def showPlayMarker(self):
        return self._config_get(True)
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)
    def timeInterval(self):
        return self._config_get(None)
    def timeInterval(self, num: float):
        self._config(num, js_type=False)
    def type(self):
        return self._config_get('instrument')
    def type(self, text: str):
        self._config(text, js_type=False)
    def valueInterval(self):
        return self._config_get(None)
    def valueInterval(self, num: float):
        self._config(num, js_type=False)
    def valueMapFunction(self):
        return self._config_get('linear')
    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)
    def valueProp(self):
        return self._config_get('"x"')
    def valueProp(self, text: str):
        self._config(text, js_type=False)
class OptionTitle(Options):
    """Option wrapper for a chart title (align/floating/text/offsetX/style).

    NOTE(review): scalar options appear as same-named getter/setter pairs;
    @property / @<name>.setter decorators were most likely stripped in this
    copy — confirm against the original source.
    """
    def align(self):
        return self._config_get()
    def align(self, position: str):
        self._config(position)
    def floating(self):
        return self._config_get()
    def floating(self, flag: bool):
        self._config(flag)
    def text(self):
        return self._config_get()
    def text(self, val: str):
        self._config(val)
    def offsetX(self):
        return self._config_get()
    def offsetX(self, num: int):
        self._config(num)
    def style(self) -> OptionStyle:
        # Nested style sub-options object.
        return self._config_sub_data('style', OptionStyle)
def prune(path, days, dry_run=False, stdout=False):
    """Remove srpm-builds subdirectories and .log files older than *days* days.

    Walks exactly three levels below *path* and only processes directories
    named 'srpm-builds'.  With dry_run=True nothing is removed (only
    reported); *stdout* is forwarded to print_remove_text().
    """
    path = os.path.normpath(path)
    too_old = (datetime.now() - timedelta(days=days))
    for (root, subdirs, files) in walk_limited(path, mindepth=3, maxdepth=3):
        parsed = os.path.normpath(root).split(os.sep)
        if (parsed[(- 1)] != 'srpm-builds'):
            continue
        for subdir in subdirs:
            subdir = os.path.join(root, subdir)
            modified = datetime.fromtimestamp(os.path.getmtime(subdir))
            if (modified >= too_old):
                continue
            print_remove_text(subdir, modified, stdout)
            if (not dry_run):
                shutil.rmtree(subdir)
        for srpm_log_file in files:
            srpm_log_file = os.path.join(root, srpm_log_file)
            if (not srpm_log_file.endswith('.log')):
                continue
            modified = datetime.fromtimestamp(os.path.getmtime(srpm_log_file))
            # BUG FIX: the original removed every .log file regardless of its
            # age; apply the same cutoff as the directory branch above.
            if (modified >= too_old):
                continue
            print_remove_text(srpm_log_file, modified, stdout)
            if (not dry_run):
                os.remove(srpm_log_file)
def get_options(option_type, from_options):
    """Collect option_type.keys out of *from_options*.

    For each key, an unprefixed entry wins over its prefixed form
    ('<prefix><key>'); keys present in neither form map to ''.
    """
    collected = dict()
    for key in option_type.keys:
        prefixed = '{prefix}{key}'.format(prefix=option_type.prefix, key=key)
        if key in from_options:
            collected[key] = from_options.get(key)
        elif prefixed in from_options:
            collected[key] = from_options.get(prefixed)
        else:
            collected[key] = ''
    return collected
def test_config_app_encryption_key_validation() -> None:
    """A valid-length (32 char) app encryption key passes config validation unchanged."""
    valid_key = 'atestencryptionkeythatisvalidlen'
    env = {**REQUIRED_ENV_VARS, 'FIDES__SECURITY__APP_ENCRYPTION_KEY': valid_key}
    with patch.dict(os.environ, env, clear=True):
        assert get_config().security.app_encryption_key == valid_key
class ConfigHelper(BaseModel):
    """Pydantic model aggregating helper data read from the app's config files."""
    conf: AppConfigModel
    # The four collections below are merged from the configured helper files
    # by load_helper_files().
    dependency_files: List[str] = []
    ignore_files: Dict[(str, List[str])] = {}
    language_names: Dict[(str, str)] = {}
    language_setup: Dict[(str, List[str])] = {}
    class Config():
        # Allow attribute reassignment after construction (pydantic v1-style flag).
        allow_mutation = True
    def __init__(self, conf: AppConfigModel, **data):
        super().__init__(conf=conf, **data)
        # Populate the helper collections immediately from the configured paths.
        self.load_helper_files()
    def load_helper_files(self):
        """Read each configured helper file and merge its sections into this model."""
        handler = FileHandler()
        conf_path_list = [self.conf.app.files.dependency_files, self.conf.app.files.ignore_files, self.conf.app.files.language_names, self.conf.app.files.language_setup]
        for path in conf_path_list:
            conf_dict = _get_config_dict(handler, path)
            # Any file may contribute any of the four sections.
            if ('dependency_files' in conf_dict):
                self.dependency_files.extend(conf_dict.get('dependency_files', []))
            if ('ignore_files' in conf_dict):
                self.ignore_files.update(conf_dict['ignore_files'])
            if ('language_names' in conf_dict):
                self.language_names.update(conf_dict['language_names'])
            if ('language_setup' in conf_dict):
                self.language_setup.update(conf_dict['language_setup'])
def task_msgfmt():
    """doit task: compile every .po translation under the package into a .mo file."""
    sources = glob.glob(f'./{PACKAGE}/**/*.po', recursive=True)
    # Each target keeps the source path, with the .po suffix swapped for .mo.
    dests = [src[:-3] + '.mo' for src in sources]
    actions = [['msgfmt', src, '-o', dst] for src, dst in zip(sources, dests)]
    return {'actions': actions, 'targets': dests, 'file_dep': sources, 'task_dep': ['crowdin', 'crowdin_pull']}
# NOTE(review): the bare "(scope='session')" below is the remnant of a
# stripped decorator (almost certainly "@pytest.fixture(scope='session')");
# as written it is not valid Python — confirm against the original source.
(scope='session')
def surveymonkey_secrets(saas_config) -> Dict[(str, Any)]:
    """Resolve SurveyMonkey connector secrets, preferring saas_config over the local secrets store."""
    return {'domain': (pydash.get(saas_config, 'surveymonkey.domain') or secrets['domain']), 'api_token': (pydash.get(saas_config, 'surveymonkey.api_token') or secrets['api_token']), 'survey_id': (pydash.get(saas_config, 'surveymonkey.survey_id') or secrets['survey_id']), 'collector_id': (pydash.get(saas_config, 'surveymonkey.collector_id') or secrets['collector_id']), 'admin_email': (pydash.get(saas_config, 'surveymonkey.admin_email') or secrets['admin_email']), 'page_id': (pydash.get(saas_config, 'surveymonkey.page_id') or secrets['page_id']), 'question_id': (pydash.get(saas_config, 'surveymonkey.question_id') or secrets['question_id']), 'choice_id': (pydash.get(saas_config, 'surveymonkey.choice_id') or secrets['choice_id'])}
def run_model(prompt: str, activation: str, graph_mode: bool, use_fp16_acc: bool, verify: bool, model_path='bert-base-uncased'):
    """Run the compiled BERT module on *prompt*, optionally verifying vs PyTorch.

    Tokenizes the prompt, compiles the module for the resulting shape, runs
    it on CUDA, prints the logits, and — when *verify* is True — checks the
    output against the eager PyTorch model.
    """
    inputs = prepare_data(prompt, model_path)
    inputs_pt = {name: data.cuda() for (name, data) in inputs.items()}
    (batch_size, seq_len) = inputs['input_ids'].size()
    pt_model = BertPt(model_path=model_path, pretrained=True)._model
    pt_model.eval()
    hidden_size = pt_model.config.hidden_size
    mod = compile_module(batch_size, seq_len, hidden_size, activation, use_fp16_acc, False, pt_model)
    # Pre-allocate the output buffer at the module's maximum output shape.
    outputs = [torch.empty(mod.get_output_maximum_shape(0)).half().cuda()]
    mod.run_with_tensors(inputs_pt, outputs, graph_mode=graph_mode)
    print(f'Logits: {outputs[0]}')
    if verify:
        pt_outputs = pt_model.bert(**inputs_pt)
        # BUG FIX: the original computed torch.allclose(...) and discarded the
        # result, so verification could never fail; fail loudly on mismatch.
        if not torch.allclose(outputs[0], pt_outputs.last_hidden_state, 0.1, 0.1):
            raise RuntimeError('Verification failed: compiled outputs do not match PyTorch reference')
        print('Verification done!')
class TensorFlow2ONNXConfig(DataClassJsonMixin):
    """Configuration holder for a TensorFlow-to-ONNX conversion.

    NOTE(review): fields are bare annotated attributes on a DataClassJsonMixin
    subclass — a @dataclass decorator was probably stripped from this copy;
    confirm against the original source. Field names appear to mirror
    tf2onnx conversion keyword arguments — verify against tf2onnx docs.
    """
    input_signature: Union[(tf.TensorSpec, np.ndarray)]
    custom_ops: Optional[Dict[(str, Any)]] = None
    target: Optional[List[Any]] = None
    custom_op_handlers: Optional[Dict[(Any, Tuple)]] = None
    custom_rewriter: Optional[List[Any]] = None
    opset: Optional[int] = None
    extra_opset: Optional[List[int]] = None
    shape_override: Optional[Dict[(str, List[Any])]] = None
    inputs_as_nchw: Optional[List[str]] = None
    large_model: bool = False
# NOTE(review): the three bare lines below are remnants of stripped pytest
# decorators (likely "@pytest.mark.scheduler", "@pytest.mark.integration_test"
# and "@pytest.mark.parametrize(...)"); confirm against the original source.
.scheduler
.integration_test
.parametrize('prior_mask,reals_rerun_option,should_resample', [pytest.param(range(5), '0-4', False, id='All realisations first, subset second run'), pytest.param([1, 2, 3, 4], '2-3', False, id='Subset of realisation first run, subs-subset second run'), pytest.param([0, 1, 2], '0-5', True, id='Subset of realisation first, superset in second run - must resample')])
def test_that_prior_is_not_overwritten_in_ensemble_experiment(prior_mask, reals_rerun_option, should_resample, tmpdir, source_root, try_queue_and_scheduler, monkeypatch):
    """Re-running an ensemble experiment must not overwrite an existing prior,
    unless the rerun covers realizations that were never sampled (resample case)."""
    shutil.copytree(os.path.join(source_root, 'test-data', 'poly_example'), os.path.join(str(tmpdir), 'poly_example'))
    with tmpdir.as_cwd():
        ert_config = ErtConfig.from_file('poly_example/poly.ert')
        num_realizations = ert_config.model_config.num_realizations
        # Sample a prior for the masked realizations and remember its values.
        storage = open_storage(ert_config.ens_path, mode='w')
        experiment_id = storage.create_experiment(ert_config.ensemble_config.parameter_configuration)
        ensemble = storage.create_ensemble(experiment_id, name='iter-0', ensemble_size=num_realizations)
        sample_prior(ensemble, prior_mask)
        prior_values = storage.get_ensemble(ensemble.id).load_parameters('COEFFS')['values']
        storage.close()
        # Run the CLI ensemble experiment against the same case.
        parser = ArgumentParser(prog='test_main')
        parsed = ert_parser(parser, [ENSEMBLE_EXPERIMENT_MODE, 'poly_example/poly.ert', '--current-case=iter-0', '--realizations', reals_rerun_option])
        FeatureToggling.update_from_args(parsed)
        run_cli(parsed)
        # The prior must be unchanged unless new realizations forced a resample.
        storage = open_storage(ert_config.ens_path, mode='w')
        parameter_values = storage.get_ensemble(ensemble.id).load_parameters('COEFFS')['values']
        if should_resample:
            with pytest.raises(AssertionError):
                np.testing.assert_array_equal(parameter_values, prior_values)
        else:
            np.testing.assert_array_equal(parameter_values, prior_values)
        storage.close()
def extractReLibrary(item):
    """Feed parser for 'Re:Library' releases.

    Maps item tags to series names/translation types.  Returns a release
    message on a tag match, None for excluded/preview/no-chapter items, and
    False when no tag matches.

    NOTE(review): several string literals in the tagmap below appear to
    contain raw line breaks (copy/extraction artifact) — as written this is
    not valid Python; confirm against the original source.
    """
    # This specific series is deliberately excluded.
    if (item['tags'] == ['rhapsody of mulan']):
        return None
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol or frag)) or ('preview' in item['title'].lower())):
        return None
    # (tag substring, canonical series name, 'translated' | 'oel')
    tagmap = [('Starting Anew as the New Me', 'Starting Anew as the New Me', 'translated'), ('Mysterious World Beast God', 'Mysterious World Beast God', 'translated'), ('Abyss Domination', 'Abyss Domination', 'translated'), ("The Demon King's Daughter", "The Demon King's Daughter", 'translated'), ('Very Pure & Ambiguous - The Prequel', 'Very Pure & Ambiguous - The Prequel', 'translated'), ('Barrier Master Reincarnation', 'Barrier Master Reincarnation', 'translated'), ('6-Year Old Sage', '6-Year Old Sage', 'translated'), ('World of Immortals', 'World of Immortals', 'translated'), ('Bu ni mi', 'Bu ni Mi wo Sasagete Hyaku to Yonen. Elf de Yarinaosu Musha Shugyou', 'translated'), ('Magic Language', "No matter how you look at it, this world's magic language is Japanese", 'translated'), ('High Comprehension Low Strength', 'High Comprehension Low Strength', 'translated'), ('Otherworld Nation Founding Chronicles', 'Otherworld Nation Founding Chronicles', 'translated'), ('Arifureta', 'Arifureta Shokugyou de Sekai Saikyou (WN)', 'translated'), ('Author Reincarnated', 'The Author Reincarnated?!', 'translated'), ('The Strongest System', 'The Strongest System', 'translated'), ('Outcast Magician', 'Outcast Magician and the Power of Heretics', 'translated'), ('Nine Yang Sword Saint', 'Nine Yang Sword Saint', 'translated'), ('i am succubus!', 'i am succubus!', 'oel'), ('God of Chaos', 'God of Chaos', 'oel'), ('Soft Spoken Brutality', 'Soft Spoken Brutality', 'oel'), ('Martial Void King', 'Martial Void King', 'oel'), ('the vampires templar', "The Vampire's Templar", 'oel'), ('loli demon king', 'iving as I Please, as a Loli Demon King', 'oel'), ('Aurora God', 'Aurora God', 'oel'), ("Silva's Diary", "Silva's Diary", 'oel'), ('Dragon Princess', "Even if I'm Reborn as a Cute Dragon Girl, I will still make a Harem", 'translated'), ('female knight & dark elf', 'The Life of a Female Knight and a Dark Elf', 'translated'), ('Shield Hero', 'Tate no Yuusha no Nariagari', 'translated'), ('succubus 
in another world', "Succubus-san's Life in a Another World", 'translated'), ('not sure, another world reincarnation', 'Not Sure, But It Looks Like I Got Reincarnated in Another World', 'translated'), ('the ancestor of our sect', 'The Ancestor of our Sect Isnt Acting like an Elder', 'translated'), ("hero's daughter", "Reborn as the Hero's Daughter! Time to Become the Hero Once More!", 'translated'), ('reborn as a transcendence', 'reborn as a transcendent', 'translated'), ('reborn as a transcendent', 'reborn as a transcendent', 'translated'), ('song of adolescence', 'song of adolescence', 'translated'), ('wild last boss', 'wild last boss', 'translated'), ('life with a tail', 'life with a tail', 'translated'), ('pupil of the wiseman', 'She Professed Herself The Pupil Of The Wiseman', 'translated'), ("hero's redo", 'Heros Redo ~ A Hero That Once Saved the World Reborn as a Girl ~ ', 'translated'), ('worlds unknown', 'Worlds Unknown: The Enlightened', 'oel'), ('loli elf', 'Loli Elf & Her Happy Life', 'translated'), ('my loli brother', 'my loli brother', 'translated'), ("devil's evolution catalog", "The Devil's Evolution Catalog", 'translated'), ('blue sky', 'blue sky', 'translated'), ('lazy overlord', 'lazy overlord', 'translated'), ('Levelmaker', 'Levelmaker', 'translated'), ('Demon Sword Maiden', 'Demon Sword Maiden', 'translated'), ('the saintess', 'How can the Saintess be a Boy!?', 'translated'), ('disappointing princesses', 'Two as One Disappointing Princesses Want to Live Free ', 'translated'), ('Vampire Princess', 'I Became a Vampire Princess after Reincarnation!? 
Building The Strongest Yuri Harem Using the Cheat Skill Demon Lord!', 'translated'), ('hero king', 'The Hero King, Reincarnate to Master the Art of War ~And Thus, I Became The Strongest Knight Disciple () in The World~', 'translated'), ('sword saint fox girl', "The Sword Saint's Second Life As a Fox Girl", 'oel'), ('wish of the cipher', 'wish of the cipher', 'oel'), ('Stained Red', 'Stained Red', 'oel'), ('Truth & Myth - The Awakening', 'Truth & Myth - The Awakening', 'oel')]
    for (tagname, name, tl_type) in tagmap:
        if (tagname in item['tags']):
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class AtomTests(TestCase):
    """Unit tests for atom/symbol equality and evaluation semantics.

    FIX: assertEquals is a deprecated alias removed in Python 3.12; replaced
    with assertEqual.  assertTrue(x == y) comparisons are rewritten as
    assertEqual/assertNotEqual for the same pass/fail behavior with better
    failure messages.
    """

    def test_truthiness(self):
        # Truth is represented by the symbol `t`.
        self.assertEqual(TRUE, Symbol('t'))

    def test_falsiness(self):
        # Falsity is represented by the empty list.
        self.assertEqual(FALSE, List())

    def test_atomness(self):
        foo = Atom('foo')
        another_foo = Atom('foo')
        bar = Atom('bar')
        baz = Atom('baz')
        # Atoms compare by value: same name => equal, different name => unequal.
        self.assertEqual(foo, foo)
        self.assertEqual(foo, another_foo)
        self.assertNotEqual(foo, bar)
        self.assertNotEqual(baz, bar)
        # Chained comparison: foo != bar AND bar != baz.
        self.assertTrue(foo != bar != baz)

    def test_symbolness(self):
        foo = Symbol('foo')
        another_foo = Symbol('foo')
        bar = Symbol('bar')
        e = Environment(None, {'foo': foo})
        self.assertNotEqual(foo, bar)
        # Equality is symmetric and hash-consistent for equal symbols.
        self.assertEqual(foo, another_foo)
        self.assertEqual(another_foo, foo)
        self.assertEqual(foo.__hash__(), another_foo.__hash__())
        # 'foo' is bound to itself in e, so evaluating yields an equal symbol.
        self.assertEqual(foo.eval(e), foo)
class TextSection(SectionWithData):
    """Section with raw data; can resolve an address word stored inside it."""

    def get_data_offset(self, addr):
        """Decode the address stored at *addr* within this section.

        Returns None when the full address word does not lie inside the
        section's bounds.
        """
        word = self.binary.config.ADDRESS_BYTE_SIZE
        # Both the first and last byte of the word must be inside the section.
        if not (self.is_in_sec(addr) and self.is_in_sec(addr + word)):
            return None
        start = addr - self.addr
        return utils.decode_address(self.data[start:(start + word)], self.binary)
def get_html(filename_rel, height):
    """Return an HTML snippet embedding *filename_rel* in a resizable iframe.

    *height* is the container height in pixels; a small 'open in new tab'
    link is floated to the right above the frame.
    """
    astyle = 'font-size:small; float:right;'
    dstyle = 'width: 500px; height: %ipx; align: center; resize:both; overflow: hidden; box-shadow: 5px 5px 5px #777; padding: 4px;'
    istyle = 'width: 100%; height: 100%; border: 2px solid #094;'
    parts = [
        "<a target='new' href='%s' style='%s'>open in new tab</a>" % (filename_rel, astyle),
        # Fill the %ipx height placeholder inside dstyle first, then embed it.
        "<div style='%s'>" % (dstyle % height),
        "<iframe src='%s' style='%s'>iframe not supported</iframe>" % (filename_rel, istyle),
        '</div>',
    ]
    return ''.join(parts)
class OozieMetaPlugin(object):
    """Aggregates several oozie plugins and merges their contributions.

    Plugins are ordered by descending priority, so cluster_config() prefers
    the highest-priority plugin that provides one.
    """

    def __init__(self, plugins, args):
        self.args = args
        ordered = sorted(plugins, key=lambda p: p.priority.value, reverse=True)
        self._plugin_containers = [
            OoziePluginContainer(name=p.name, priority=p.priority, plugin=p.oozie_plugin_cls(args))
            for p in ordered
        ]

    def action_builders(self):
        return util.merge_lists(pc.plugin.action_builders() for pc in self._plugin_containers)

    def plugin_config(self):
        # Drop plugins whose config is empty/falsy.
        merged = {pc.name: pc.plugin.plugin_config() for pc in self._plugin_containers}
        return {name: cfg for (name, cfg) in six.iteritems(merged) if cfg}

    def dag_args(self):
        return util.merge_dicts(pc.plugin.dag_args() for pc in self._plugin_containers)

    def default_task_args(self):
        return util.merge_dicts(pc.plugin.default_task_args() for pc in self._plugin_containers)

    def dag_imports(self):
        return util.merge_dicts(pc.plugin.dag_imports() for pc in self._plugin_containers)

    def cluster_config(self):
        """Return the cluster config of the highest-priority plugin providing one."""
        candidates = [pc for pc in self._plugin_containers if pc.plugin.cluster_config()]
        if not candidates:
            raise Exception('No cluster configurations found for oozie parser!')
        if len(candidates) > 1:
            logger.info('Multiple cluster configurations found. Choosing configuration from plugin `%s`, with priority `%s`', candidates[0].name, candidates[0].priority)
        return candidates[0].plugin.cluster_config()

    def upstream_operators(self):
        return util.merge_lists(pc.plugin.upstream_operators() for pc in self._plugin_containers)

    def jsp_macros(self):
        return util.merge_dicts(pc.plugin.jsp_macros() for pc in self._plugin_containers)
class BamBlock(nn.Module):
    """Attention block combining channel and spatial attention multiplicatively."""

    def __init__(self, in_planes, reduction=16):
        super(BamBlock, self).__init__()
        self.ca = ChannelAttention(in_planes, reduction)
        self.sa = SpatialAttention()

    def forward(self, x):
        channel_attn = self.ca(x)
        spatial_attn = self.sa(x)
        # Fuse both attention maps, then gate the input with the product.
        return channel_attn.mul(spatial_attn) * x
class OptCheckboxes(Options):
    """Option wrapper for a checkboxes component (icon, all_selected, tooltip).

    NOTE(review): option accessors appear as same-named getter/setter pairs,
    and the bare "_selected.setter" / "_options.setter" lines look like the
    remnants of stripped "@all_selected.setter" / "@tooltip_options.setter"
    decorators — confirm against the original source.
    """
    component_properties = ('icon', 'all_selected', 'tooltip')
    def icon(self):
        return self._config_get('fas fa-check')
    def icon(self, value: str):
        self._config(value)
    def all_selected(self):
        return self._config_get(False)
    _selected.setter
    def all_selected(self, flag: bool):
        self._config(flag)
    def tooltip(self):
        return self._config_get('')
    def tooltip(self, value: str):
        self._config(value)
    def tooltip_options(self):
        return self._config_get({})
    _options.setter
    def tooltip_options(self, values):
        self._config(values)
def test_builder_with_link_references(registry_package, dummy_ipfs_backend, monkeypatch):
    """Build a complete registry manifest (inlined sources plus contract types
    with link references) and compare it against the expected fixture."""
    (root, expected_manifest, compiler_output) = registry_package
    monkeypatch.chdir(root)
    inliner = source_inliner(compiler_output)
    # One inliner() call per source contract, then one contract_type() per
    # contract; interface-only contracts omit compiler metadata.
    manifest = build({}, package_name('solidity-registry'), manifest_version('ethpm/3'), version('2.0.0'), inliner('Authorized'), inliner('IndexedOrderedSetLib'), inliner('PackageDB'), inliner('PackageRegistry'), inliner('PackageRegistryInterface'), inliner('ReleaseDB'), inliner('ReleaseValidator'), contract_type('AuthorityInterface', compiler_output, abi=True, deployment_bytecode=True, runtime_bytecode=True, devdoc=True, source_id=True), contract_type('Authorized', compiler_output, abi=True, compiler=True, deployment_bytecode=True, runtime_bytecode=True, devdoc=True, source_id=True), contract_type('AuthorizedInterface', compiler_output, abi=True, deployment_bytecode=True, runtime_bytecode=True, devdoc=True, source_id=True), contract_type('WhitelistAuthority', compiler_output, abi=True, compiler=True, deployment_bytecode=True, runtime_bytecode=True, devdoc=True, source_id=True), contract_type('WhitelistAuthorityInterface', compiler_output, abi=True, deployment_bytecode=True, runtime_bytecode=True, devdoc=True, source_id=True), contract_type('IndexedOrderedSetLib', compiler_output, abi=True, compiler=True, deployment_bytecode=True, runtime_bytecode=True, devdoc=True, source_id=True), contract_type('PackageDB', compiler_output, abi=True, compiler=True, deployment_bytecode=True, runtime_bytecode=True, devdoc=True, source_id=True), contract_type('PackageRegistry', compiler_output, abi=True, compiler=True, deployment_bytecode=True, runtime_bytecode=True, devdoc=True, source_id=True), contract_type('PackageRegistryInterface', compiler_output, abi=True, deployment_bytecode=True, runtime_bytecode=True, devdoc=True, source_id=True), contract_type('ReleaseDB', compiler_output, abi=True, compiler=True, deployment_bytecode=True, runtime_bytecode=True, devdoc=True, source_id=True), contract_type('ReleaseValidator', compiler_output, abi=True, compiler=True, deployment_bytecode=True, runtime_bytecode=True, devdoc=True, source_id=True), validate())
    assert (manifest == expected_manifest)
class InterlacedDofOrderType(DofOrderInfo):
    """DOF ordering where components are interlaced: [p, u, v, ..., p, u, v, ...].

    Component 0 is treated as pressure; components 1..num_components-1 as
    velocity components.
    """
    def __init__(self, model_info='no model info set'):
        DofOrderInfo.__init__(self, 'interlaced', model_info=model_info)
    def create_DOF_lists(self, ownership_range, num_equations, num_components):
        """Split the locally owned equations into [velocityDOF, pressureDOF]."""
        # Pressure DOFs: every num_components-th index, starting at component 0.
        pressureDOF = numpy.arange(start=ownership_range[0], stop=(ownership_range[0] + num_equations), step=num_components, dtype='i')
        velocityDOF = []
        for start in range(1, num_components):
            velocityDOF.append(numpy.arange(start=(ownership_range[0] + start), stop=(ownership_range[0] + num_equations), step=num_components, dtype='i'))
        # Transpose+flatten restores interlaced order within the velocity block.
        velocityDOF = numpy.vstack(velocityDOF).transpose().flatten()
        return [velocityDOF, pressureDOF]
    def create_vel_DOF_IS(self, ownership_range, num_equations, num_components):
        """Build per-component index sets, in global and velocity-only numbering."""
        from . import Comm
        comm = Comm.get()
        vel_comp_DOF = []
        vel_comp_DOF_vel = []
        # Offset of this rank's velocity-only numbering (pressure removed).
        scaled_ownership_range = ((ownership_range[0] * (num_components - 1)) / num_components)
        for i in range(1, num_components):
            vel_comp_DOF.append(self.create_IS(numpy.arange(start=(ownership_range[0] + i), stop=(ownership_range[0] + num_equations), step=num_components, dtype='i')))
            vel_comp_DOF_vel.append(self.create_IS(numpy.arange(start=((scaled_ownership_range + i) - 1), stop=(scaled_ownership_range + int(((num_equations * (num_components - 1)) / num_components))), step=(num_components - 1), dtype='i')))
        return (vel_comp_DOF, vel_comp_DOF_vel)
    def create_no_dirichlet_bdy_nodes_is(self, ownership_range, num_equations, num_components, bdy_nodes):
        """Build an IS of velocity DOFs excluding the given Dirichlet boundary nodes."""
        (strong_DOF, local_vel_DOF) = self.create_vel_DOF_IS(ownership_range, num_equations, num_components)
        strong_DOF = [ele.array for ele in strong_DOF]
        local_vel_DOF = [ele.array for ele in local_vel_DOF]
        # Mask out the boundary nodes of each velocity component.
        mask = [numpy.ones(len(var), dtype=bool) for var in strong_DOF]
        for (i, bdy_node) in enumerate(bdy_nodes):
            mask[i][bdy_node] = False
        strong_DOF = [strong_DOF[i][mask[i]] for i in range(len(strong_DOF))]
        total_vars = int(0)
        for var in strong_DOF:
            total_vars += int(len(var))
        strong_DOF_idx = numpy.empty(total_vars, dtype='int32')
        # NOTE(review): the [i::2] stride interlaces exactly two component
        # arrays — this appears to assume two velocity components
        # (num_components == 3) and equal lengths after masking; confirm.
        for (i, var_dof) in enumerate(strong_DOF):
            strong_DOF_idx[i::2] = var_dof
        return self.create_IS(strong_DOF_idx)
class OptionSeriesParetoSonificationTracksMappingVolume(Options):
    """Generated option wrapper for pareto sonification volume mapping.

    NOTE(review): each option appears as a same-named getter/setter pair;
    @property / @<name>.setter decorators were most likely stripped in this
    copy — confirm against the original generated source.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): "_os(*metadata.platforms)" looks like the remnant of a
# stripped decorator (e.g. "@common.requires_os(*metadata.platforms)");
# confirm against the original source.
_os(*metadata.platforms)
def main():
    """Copy powershell.exe to a double-extension path (.exe.pdf), execute it
    briefly, then remove it — appears to emulate an extension-masquerading
    scenario for detection testing."""
    powershell = 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe'
    unusualext = 'C:\\Users\\Public\\powershell.exe.pdf'
    common.copy_file(powershell, unusualext)
    # Run for at most 1 second, then kill the process.
    common.execute([unusualext], timeout=1, kill=True)
    common.remove_file(unusualext)
def v2ToV1(v2Pdu, origV1Pdu=None):
    """Translate an SNMPv2c PDU into an equivalent SNMPv1 PDU.

    v2Pdu: the SNMPv2c PDU to convert.
    origV1Pdu: the original v1 request PDU, needed when converting responses
        (its var-binds are reused if an error status is set).
    Returns the converted v1 PDU.
    Raises error.ProtocolError for unsupported PDU types, malformed TRAPs,
    or a Counter64 value that cannot be represented in v1.
    """
    ((debug.logger & debug.FLAG_PRX) and debug.logger(('v2ToV1: v2Pdu %s' % v2Pdu.prettyPrint())))
    pduType = v2Pdu.tagSet
    if (pduType in V2_TO_V1_PDU_MAP):
        v1Pdu = V2_TO_V1_PDU_MAP[pduType].clone()
    else:
        raise error.ProtocolError('Unsupported PDU type')
    v2VarBinds = v2c.apiPDU.getVarBinds(v2Pdu)
    v1VarBinds = []
    zeroInt = v1.Integer(0)
    if (pduType in rfc3411.NOTIFICATION_CLASS_PDUS):
        # TRAP translation: v2c traps carry sysUpTime and snmpTrapOID as the
        # first two var-binds (RFC 3584 mapping rules).
        if (len(v2VarBinds) < 2):
            raise error.ProtocolError('SNMP v2c TRAP PDU requires at least two var-binds')
        (snmpTrapOID, snmpTrapOIDParam) = v2VarBinds[1]
        if (snmpTrapOID != v2c.apiTrapPDU.snmpTrapOID):
            raise error.ProtocolError('Second OID not snmpTrapOID')
        # Derive the v1 enterprise OID: prefer an explicit snmpTrapEnterprise
        # var-bind; for generic traps fall back to the standard snmpTraps
        # subtree; otherwise trim the trap OID per the mapping rules.
        if (snmpTrapOIDParam in V2_TO_V1_TRAP_MAP):
            for (oid, val) in v2VarBinds:
                if (oid == v2c.apiTrapPDU.snmpTrapEnterprise):
                    v1.apiTrapPDU.setEnterprise(v1Pdu, val)
                    break
            else:
                # Default to the snmpTraps OID when no enterprise is supplied.
                v1.apiTrapPDU.setEnterprise(v1Pdu, (1, 3, 6, 1, 6, 3, 1, 1, 5))
        elif (snmpTrapOIDParam[(- 2)] == 0):
            v1.apiTrapPDU.setEnterprise(v1Pdu, snmpTrapOIDParam[:(- 2)])
        else:
            v1.apiTrapPDU.setEnterprise(v1Pdu, snmpTrapOIDParam[:(- 1)])
        # Agent address: use the snmpTrapAddress var-bind if present, else 0.0.0.0.
        for (oid, val) in v2VarBinds:
            if (oid == v2c.apiTrapPDU.snmpTrapAddress):
                v1.apiTrapPDU.setAgentAddr(v1Pdu, v1.IpAddress(val))
                break
        else:
            v1.apiTrapPDU.setAgentAddr(v1Pdu, v1.IpAddress('0.0.0.0'))
        # Generic/specific trap numbers: mapped generic traps get specific 0;
        # everything else becomes enterpriseSpecific (6) with the trailing
        # sub-identifier as the specific trap number.
        if (snmpTrapOIDParam in V2_TO_V1_TRAP_MAP):
            v1.apiTrapPDU.setGenericTrap(v1Pdu, V2_TO_V1_TRAP_MAP[snmpTrapOIDParam])
        else:
            v1.apiTrapPDU.setGenericTrap(v1Pdu, 6)
        if (snmpTrapOIDParam in V2_TO_V1_TRAP_MAP):
            v1.apiTrapPDU.setSpecificTrap(v1Pdu, zeroInt)
        else:
            v1.apiTrapPDU.setSpecificTrap(v1Pdu, snmpTrapOIDParam[(- 1)])
        # sysUpTime (first var-bind) becomes the v1 timestamp.
        v1.apiTrapPDU.setTimeStamp(v1Pdu, v2VarBinds[0][1])
        # Drop var-binds already folded into v1 header fields.
        __v2VarBinds = []
        for (oid, val) in v2VarBinds[2:]:
            if ((oid in V2_TO_V1_TRAP_MAP) or (oid in (v2c.apiTrapPDU.sysUpTime, v2c.apiTrapPDU.snmpTrapAddress, v2c.apiTrapPDU.snmpTrapEnterprise))):
                continue
            __v2VarBinds.append((oid, val))
        v2VarBinds = __v2VarBinds
    else:
        v1.apiPDU.setErrorStatus(v1Pdu, zeroInt)
        v1.apiPDU.setErrorIndex(v1Pdu, zeroInt)
    if (pduType in rfc3411.RESPONSE_CLASS_PDUS):
        # Scan var-binds backwards for values v1 cannot express.
        idx = (len(v2VarBinds) - 1)
        while (idx >= 0):
            (oid, val) = v2VarBinds[idx]
            if (v2c.Counter64.tagSet == val.tagSet):
                # Counter64 has no v1 representation; a GET becomes noSuchName,
                # a GETNEXT has to be retried past the object.
                if (origV1Pdu.tagSet == v1.GetRequestPDU.tagSet):
                    v1.apiPDU.setErrorStatus(v1Pdu, 2)
                    v1.apiPDU.setErrorIndex(v1Pdu, (idx + 1))
                    break
                elif (origV1Pdu.tagSet == v1.GetNextRequestPDU.tagSet):
                    raise error.StatusInformation(idx=idx, pdu=v2Pdu)
                else:
                    raise error.ProtocolError('Counter64 on the way')
            # v2c exception values map to a plain noSuchName error in v1.
            if (val.tagSet in (v2c.NoSuchObject.tagSet, v2c.NoSuchInstance.tagSet, v2c.EndOfMibView.tagSet)):
                v1.apiPDU.setErrorStatus(v1Pdu, 2)
                v1.apiPDU.setErrorIndex(v1Pdu, (idx + 1))
            idx -= 1
        # Translate the v2 error status to its closest v1 equivalent
        # (genErr = 5 when no direct mapping exists).
        v2ErrorStatus = v2c.apiPDU.getErrorStatus(v2Pdu)
        if v2ErrorStatus:
            v1.apiPDU.setErrorStatus(v1Pdu, V2_TO_V1_ERROR_MAP.get(v2ErrorStatus, 5))
            v1.apiPDU.setErrorIndex(v1Pdu, v2c.apiPDU.getErrorIndex(v2Pdu, muteErrors=True))
    elif (pduType in rfc3411.CONFIRMED_CLASS_PDUS):
        v1.apiPDU.setErrorStatus(v1Pdu, 0)
        v1.apiPDU.setErrorIndex(v1Pdu, 0)
    # On an errored response v1 echoes the original request var-binds;
    # otherwise each v2 value is cloned into its v1 counterpart type.
    if ((pduType in rfc3411.RESPONSE_CLASS_PDUS) and v1.apiPDU.getErrorStatus(v1Pdu)):
        v1VarBinds = v1.apiPDU.getVarBinds(origV1Pdu)
    else:
        for (oid, v2Val) in v2VarBinds:
            v1VarBinds.append((oid, V2_TO_V1_VALUE_MAP[v2Val.tagSet].clone(v2Val)))
    if (pduType in rfc3411.NOTIFICATION_CLASS_PDUS):
        v1.apiTrapPDU.setVarBinds(v1Pdu, v1VarBinds)
    else:
        v1.apiPDU.setVarBinds(v1Pdu, v1VarBinds)
        v1.apiPDU.setRequestID(v1Pdu, v2c.apiPDU.getRequestID(v2Pdu))
    ((debug.logger & debug.FLAG_PRX) and debug.logger(('v2ToV1: v1Pdu %s' % v1Pdu.prettyPrint())))
    return v1Pdu
class SubLearner(object):
    """Estimator sub-learner for fitting on one fold of a cross-validated layer.

    Wraps a single estimator together with its fold indices and the parent
    learner's configuration, and exposes ``fit`` / ``predict`` / ``transform``
    jobs that read and write cached artifacts on disk.

    Fixes relative to the original:
    - the ``else`` branch of ``__init__`` assigned ``self.processing_index``
      (a typo) instead of ``self.preprocess_index``;
    - ``data`` is restored as a property, since ``fit`` passes ``self.data``
      as a plain value into ``IndexedEstimator(..., data=self.data)``.
    """

    def __init__(self, job, parent, estimator, in_index, out_index, in_array, targets, out_array, index):
        self.job = job                  # one of 'fit', 'predict', 'transform'
        self.estimator = estimator
        self.in_index = in_index        # training (in-fold) row indices
        self.out_index = out_index      # prediction (out-of-fold) row indices
        self.in_array = in_array
        self.targets = targets
        self.out_array = out_array
        self.index = tuple(index)
        # Configuration inherited from the parent learner.
        self.path = parent._path
        self.attr = parent.attr
        self.preprocess = parent.preprocess
        self.scorer = parent.scorer
        self.raise_on_exception = parent.raise_on_exception
        self.verbose = parent.verbose
        if (not parent.__no_output__):
            self.output_columns = parent.output_columns[index[0]]
        # Populated during fit/predict.
        self.score_ = None
        self.fit_time_ = None
        self.pred_time_ = None
        self.name = parent.cache_name
        self.name_index = '.'.join(([self.name] + [str(i) for i in index]))
        if (self.preprocess is not None):
            self.preprocess_index = '.'.join(([self.preprocess] + [str(i) for i in index]))
        else:
            # BUG FIX: was ``self.processing_index`` — a typo that left
            # ``preprocess_index`` undefined on instances without preprocessing.
            self.preprocess_index = ''

    def __call__(self):
        """Dispatch to the method named by ``self.job``."""
        return getattr(self, self.job)()

    def fit(self, path=None):
        """Fit the estimator on the in-fold data and cache the result."""
        if (path is None):
            path = self.path
        t0 = time()
        transformers = self._load_preprocess(path)
        self._fit(transformers)
        if (self.out_array is not None):
            self._predict(transformers, (self.scorer is not None))
        o = IndexedEstimator(estimator=self.estimator, name=self.name_index, index=self.index, in_index=self.in_index, out_index=self.out_index, data=self.data)
        save(path, self.name_index, o)
        if self.verbose:
            msg = '{:<30} {}'.format(self.name_index, 'done')
            f = ('stdout' if (self.verbose < (10 - 3)) else 'stderr')
            print_time(t0, msg, file=f)

    def predict(self, path=None):
        """Predict on the out-of-fold data using the cached preprocessing."""
        if (path is None):
            path = self.path
        t0 = time()
        transformers = self._load_preprocess(path)
        self._predict(transformers, False)
        if self.verbose:
            msg = '{:<30} {}'.format(self.name_index, 'done')
            f = ('stdout' if (self.verbose < (10 - 3)) else 'stderr')
            print_time(t0, msg, file=f)

    def transform(self, path=None):
        """Alias for :meth:`predict`."""
        return self.predict(path)

    def _fit(self, transformers):
        """Fit ``self.estimator`` on the (optionally transformed) in-fold slice."""
        (xtemp, ytemp) = slice_array(self.in_array, self.targets, self.in_index)
        t0 = time()
        if transformers:
            (xtemp, ytemp) = transformers.transform(xtemp, ytemp)
        self.estimator.fit(xtemp, ytemp)
        self.fit_time_ = (time() - t0)

    def _load_preprocess(self, path):
        """Load the cached preprocessing pipeline, or None when not configured."""
        if (self.preprocess is not None):
            obj = load(path, self.preprocess_index, self.raise_on_exception)
            return obj.estimator
        return

    def _predict(self, transformers, score_preds):
        """Write out-of-fold predictions and optionally score them."""
        n = self.in_array.shape[0]
        (xtemp, ytemp) = slice_array(self.in_array, self.targets, self.out_index)
        t0 = time()
        if transformers:
            (xtemp, ytemp) = transformers.transform(xtemp, ytemp)
        predictions = getattr(self.estimator, self.attr)(xtemp)
        self.pred_time_ = (time() - t0)
        assign_predictions(self.out_array, predictions, self.out_index, self.output_columns, n)
        if score_preds:
            self.score_ = score_predictions(ytemp, predictions, self.scorer, self.name_index, self.name)

    @property
    def data(self):
        """Fit diagnostics: score, fit time and prediction time."""
        return {'score': self.score_, 'ft': self.fit_time_, 'pt': self.pred_time_}
def _calculate_snapshots_size(ns, snap_model=None, dc=None):
    """Sum the sizes of a namespace's snapshots, in MB (0 when none counted).

    ns: object exposing a Django ``snapshot_set`` related manager.
    snap_model: snapshot model class providing PENDING/LOST status constants.
    dc: optional datacenter; when given, restrict to snapshots of VMs in it.
    """
    # NOTE(review): exclude() with two conditions removes only rows matching
    # BOTH (pending-or-lost AND size is NULL); if the intent was to drop
    # either condition independently, two chained exclude() calls would be
    # needed — TODO confirm against callers.
    qs = ns.snapshot_set.exclude(status__in=(snap_model.PENDING, snap_model.LOST), size__isnull=True)
    if dc:
        qs = qs.filter(vm__dc=dc)
    size = qs.aggregate(models.Sum('size')).get('size__sum')
    if size:
        return b_to_mb(size)
    else:
        return 0
.django_db
def test_program_activity_list_sort_by_gross_outlay_amount(client, agency_account_data, helpers):
    """Program-activity results honour ascending and descending gross_outlay_amount sorts."""
    fiscal_year = helpers.get_mocked_current_fiscal_year()
    page_metadata = {'hasNext': False, 'hasPrevious': False, 'limit': 10, 'next': None, 'page': 1, 'previous': None, 'total': 4}
    expected_results = {
        'asc': [
            {'gross_outlay_amount': 100000.0, 'name': 'NAME 3', 'obligated_amount': 100.0},
            {'gross_outlay_amount': 1000000.0, 'name': 'NAME 2', 'obligated_amount': 10.0},
            {'gross_outlay_amount': 1000000.0, 'name': 'NAME 5', 'obligated_amount': 10.0},
            {'gross_outlay_amount': 0.0, 'name': 'NAME 1', 'obligated_amount': 1.0},
        ],
        'desc': [
            {'gross_outlay_amount': 0.0, 'name': 'NAME 1', 'obligated_amount': 1.0},
            {'gross_outlay_amount': 1000000.0, 'name': 'NAME 2', 'obligated_amount': 10.0},
            {'gross_outlay_amount': 1000000.0, 'name': 'NAME 5', 'obligated_amount': 10.0},
            {'gross_outlay_amount': 100000.0, 'name': 'NAME 3', 'obligated_amount': 100.0},
        ],
    }
    for order, results in expected_results.items():
        query_params = f'?fiscal_year={fiscal_year}&order={order}&sort=gross_outlay_amount'
        resp = client.get(url.format(code='007', query_params=query_params))
        assert resp.status_code == status.HTTP_200_OK
        assert resp.json() == {
            'fiscal_year': fiscal_year,
            'toptier_code': '007',
            'messages': [],
            'page_metadata': page_metadata,
            'results': results,
        }
class OptionPlotoptionsScatterSonification(Options):
    """Highcharts ``plotOptions.scatter.sonification`` options.

    NOTE(review): the original chunk defined ``enabled`` twice (getter then
    setter with the same name), so the setter shadowed the getter; the
    ``@property`` / ``@enabled.setter`` decorators restore the accessor pair.
    """

    def contextTracks(self) -> 'OptionPlotoptionsScatterSonificationContexttracks':
        """Context tracks sub-options."""
        return self._config_sub_data('contextTracks', OptionPlotoptionsScatterSonificationContexttracks)

    def defaultInstrumentOptions(self) -> 'OptionPlotoptionsScatterSonificationDefaultinstrumentoptions':
        """Default instrument sub-options."""
        return self._config_sub_data('defaultInstrumentOptions', OptionPlotoptionsScatterSonificationDefaultinstrumentoptions)

    def defaultSpeechOptions(self) -> 'OptionPlotoptionsScatterSonificationDefaultspeechoptions':
        """Default speech sub-options."""
        return self._config_sub_data('defaultSpeechOptions', OptionPlotoptionsScatterSonificationDefaultspeechoptions)

    @property
    def enabled(self):
        """Whether sonification is enabled (defaults to True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def pointGrouping(self) -> 'OptionPlotoptionsScatterSonificationPointgrouping':
        """Point grouping sub-options."""
        return self._config_sub_data('pointGrouping', OptionPlotoptionsScatterSonificationPointgrouping)

    def tracks(self) -> 'OptionPlotoptionsScatterSonificationTracks':
        """Track sub-options."""
        return self._config_sub_data('tracks', OptionPlotoptionsScatterSonificationTracks)
class Good(Entity, UnitMixin):
    """A sellable good with cost, MSRP, unit and an optional owning client.

    SQLAlchemy entity mapped to the ``Goods`` table, joined-table inheritance
    from ``Entities``.
    """
    __auto_name__ = False
    __tablename__ = 'Goods'
    __mapper_args__ = {'polymorphic_identity': 'Good'}
    good_id = Column('id', Integer, ForeignKey('Entities.id'), primary_key=True)
    # PriceLists this good appears in (many-to-many through PriceList_Goods).
    price_lists = relationship('PriceList', secondary='PriceList_Goods', primaryjoin='Goods.c.id==PriceList_Goods.c.good_id', secondaryjoin='PriceList_Goods.c.price_list_id==PriceLists.c.id', back_populates='goods', doc='PriceLists that this good is related to.')
    cost = Column(Float, default=0.0)
    msrp = Column(Float, default=0.0)
    unit = Column(String(64))
    client_id = Column('client_id', Integer, ForeignKey('Clients.id'))
    client = relationship('Client', primaryjoin='Goods.c.client_id==Clients.c.id', back_populates='goods', uselist=False)

    def __init__(self, cost=0.0, msrp=0.0, unit='', client=None, **kwargs):
        # Extra kwargs are forwarded to the Entity base; UnitMixin handles unit.
        super(Good, self).__init__(**kwargs)
        UnitMixin.__init__(self, unit=unit)
        self.cost = cost
        self.msrp = msrp
        self.client = client
    # NOTE(review): this bare ('cost') expression looks like a stripped
    # SQLAlchemy ``@validates('cost')`` decorator — confirm and restore.
    ('cost')
    def _validate_cost(self, key, cost):
        # None is normalized to 0.0; otherwise require a non-negative number.
        if (cost is None):
            cost = 0.0
        if (not isinstance(cost, (float, int))):
            raise TypeError(('%s.cost should be a non-negative number, not %s' % (self.__class__.__name__, cost.__class__.__name__)))
        if (cost < 0.0):
            raise ValueError(('%s.cost should be a non-negative number' % self.__class__.__name__))
        return cost
    # NOTE(review): likely a stripped ``@validates('msrp')`` decorator — confirm.
    ('msrp')
    def _validate_msrp(self, key, msrp):
        # Same normalization/validation policy as cost.
        if (msrp is None):
            msrp = 0.0
        if (not isinstance(msrp, (float, int))):
            raise TypeError(('%s.msrp should be a non-negative number, not %s' % (self.__class__.__name__, msrp.__class__.__name__)))
        if (msrp < 0.0):
            raise ValueError(('%s.msrp should be a non-negative number' % self.__class__.__name__))
        return msrp
    # NOTE(review): likely a stripped ``@validates('client')`` decorator — confirm.
    ('client')
    def _validate_client(self, key, client):
        # Late import avoids a circular dependency with the client module.
        if (client is not None):
            from stalker import Client
            if (not isinstance(client, Client)):
                raise TypeError(('%s.client attribute should be a stalker.models.client.Client instance, not %s' % (self.__class__.__name__, client.__class__.__name__)))
        return client
def list_name_extract(list_type):
    """Derive the canonical list name from a ``list(...)`` type string.

    The inner type is taken between ``list(`` and the closing paren; an
    ``of_`` prefix and a ``_t`` suffix are stripped before prepending
    ``of_list_``. Returns ``(list_name, base_type)``.
    """
    base_type = list_type[5:(- 1)]
    name = base_type
    if name.startswith('of_'):
        name = name[3:]
    if name.endswith('_t'):
        name = name[:(- 2)]
    return ('of_list_' + name, base_type)
class OptionPlotoptionsNetworkgraphStatesHover(Options):
    """Highcharts ``plotOptions.networkgraph.states.hover`` options.

    NOTE(review): the original chunk defined ``enabled``, ``lineWidth`` and
    ``lineWidthPlus`` each twice (getter then setter with the same name), so
    the setters shadowed the getters; the ``@property`` / ``@*.setter``
    decorators restore the intended accessor pairs.
    """

    def animation(self) -> 'OptionPlotoptionsNetworkgraphStatesHoverAnimation':
        """Hover animation sub-options."""
        return self._config_sub_data('animation', OptionPlotoptionsNetworkgraphStatesHoverAnimation)

    @property
    def enabled(self):
        """Whether the hover state is enabled (defaults to True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def halo(self) -> 'OptionPlotoptionsNetworkgraphStatesHoverHalo':
        """Hover halo sub-options."""
        return self._config_sub_data('halo', OptionPlotoptionsNetworkgraphStatesHoverHalo)

    @property
    def lineWidth(self):
        """Absolute line width on hover (no default configured)."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Pixels added to the line width on hover (defaults to 1)."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    def marker(self) -> 'OptionPlotoptionsNetworkgraphStatesHoverMarker':
        """Hover marker sub-options."""
        return self._config_sub_data('marker', OptionPlotoptionsNetworkgraphStatesHoverMarker)
class ColumnValueListMetric(Metric[ColumnValueListMetricResult]):
    """Metric counting how many column values fall inside a given value list.

    When ``values`` is not supplied, the unique values of the reference data
    column are used as the list.
    """
    column_name: str
    values: Optional[list]

    def __init__(self, column_name: str, values: Optional[list]=None, options: AnyOptions=None) -> None:
        self.values = values
        self.column_name = column_name
        super().__init__(options=options)

    # BUG FIX: this helper has no ``self`` parameter but was called as
    # ``self._calculate_stats(...)``, which bound the instance as ``values``
    # and raised a TypeError; it must be a @staticmethod.
    @staticmethod
    def _calculate_stats(values: list, column: pd.Series) -> ValueListStat:
        """Count in-list / out-of-list occurrences for one column."""
        rows_count = get_rows_count(column)
        values_in_list = {}
        values_not_in_list = {}
        if (rows_count == 0):
            number_in_list = 0
            number_not_in_list = 0
            share_in_list = 0.0
            share_not_in_list = 0.0
        else:
            # Frequency of each non-null value, split by list membership.
            value_counts = dict(column.value_counts(dropna=True))
            for value in value_counts:
                if (value in values):
                    values_in_list[value] = value_counts[value]
                else:
                    values_not_in_list[value] = value_counts[value]
            number_in_list = sum(values_in_list.values())
            share_in_list = (number_in_list / rows_count)
            number_not_in_list = (rows_count - number_in_list)
            share_not_in_list = (number_not_in_list / rows_count)
            # Report zero counts for expected values that never occurred.
            for value in values:
                if (value not in values_in_list):
                    values_in_list[value] = 0
        return ValueListStat(number_in_list=number_in_list, number_not_in_list=number_not_in_list, share_in_list=share_in_list, share_not_in_list=share_not_in_list, values_in_list=[(k, v) for (k, v) in values_in_list.items()], values_not_in_list=[(k, v) for (k, v) in values_not_in_list.items()], rows_count=rows_count)

    def calculate(self, data: InputData) -> ColumnValueListMetricResult:
        """Compute current (and, when available, reference) value-list stats.

        Raises ValueError when the column is missing or no value list can be
        derived.
        """
        if ((data.reference_data is not None) and (self.column_name not in data.reference_data)):
            raise ValueError(f"Column '{self.column_name}' is not in reference data.")
        if (self.values is None):
            if (data.reference_data is None):
                raise ValueError('Reference or values list should be present.')
            values = list(data.reference_data[self.column_name].unique())
        else:
            values = self.values
        if (not values):
            raise ValueError('Values list should not be empty.')
        if (self.column_name not in data.current_data):
            raise ValueError(f"Column '{self.column_name}' is not in current data.")
        current_stats = self._calculate_stats(values, data.current_data[self.column_name])
        if (data.reference_data is not None):
            reference_stats: Optional[ValueListStat] = self._calculate_stats(values, data.reference_data[self.column_name])
        else:
            reference_stats = None
        return ColumnValueListMetricResult(column_name=self.column_name, values=list(values), current=current_stats, reference=reference_stats)
class JsClassList:
    """Python wrapper generating JavaScript ``classList`` calls for a DOM element."""

    def __init__(self, js_code: str, component: primitives.HtmlModel = None):
        self.varId = js_code          # JavaScript expression for the classList object
        self.component = component    # owning component, needed for select helpers

    def length(self):
        """Number of CSS classes currently on the element."""
        return JsNumber.JsNumber.get(f'{self.varId}.length')

    def style_select(self):
        """Return the component's configured selection CSS class."""
        if self.component is None:
            raise ValueError('Cannot use select if select_style not defined for the component')
        return self.component.options.style_select

    def _as_js_names(self, cls_names):
        """Normalise class names to the comma-separated argument list classList expects."""
        if hasattr(cls_names, 'toStr'):
            # Already a JS object: pass through untouched.
            return cls_names
        names = cls_names if isinstance(cls_names, list) else [cls_names]
        return ', '.join(str(JsUtils.jsConvertData(name, None)) for name in names)

    def add(self, cls_names: Union[(list, str)]):
        """Add one or several CSS classes to the element."""
        return JsObject.JsObject.get(f'{self.varId}.add({self._as_js_names(cls_names)})')

    def contains(self, cls_name: str):
        """True (JS boolean) when the class is present on the element."""
        converted = JsUtils.jsConvertData(cls_name, None)
        return JsBoolean.JsBoolean.get(f'{self.varId}.contains({converted})')

    def is_missing(self, cls_name: str):
        """True (JS boolean) when the class is absent from the element."""
        converted = JsUtils.jsConvertData(cls_name, None)
        return JsBoolean.JsBoolean.get(f'!{self.varId}.contains({converted})')

    def item(self, index: int):
        """Class name at the given position in the classList."""
        return JsNumber.JsNumber.get(f'{self.varId}.item({index})')

    def items(self):
        """The raw classList object itself."""
        return JsNumber.JsNumber.get(f'{self.varId}')

    def remove(self, cls_names: Union[(list, str)]):
        """Remove one or several CSS classes from the element."""
        return JsObject.JsObject.get(f'{self.varId}.remove({self._as_js_names(cls_names)})')

    def toggle(self, cls_name: types.JS_DATA_TYPES, flag: types.JS_DATA_TYPES = None):
        """Toggle a class, optionally forced on/off by ``flag``."""
        converted = JsUtils.jsConvertData(cls_name, None)
        if flag is None:
            return JsObject.JsObject.get(f'{self.varId}.toggle({converted})')
        converted_flag = JsUtils.jsConvertData(flag, None)
        return JsObject.JsObject.get(f'{self.varId}.toggle({converted}, {converted_flag})')

    def select(self, flag: bool = True):
        """Add (flag=True) or remove (flag=False) the component's selection class."""
        if self.component is None:
            raise ValueError('Cannot use select if select_style not defined for the component')
        selection_cls = self.component.options.style_select
        return self.add(selection_cls) if flag else self.remove(selection_cls)
.django_db
def test_category_recipient_subawards(recipient_test_data):
    """Subaward spending grouped by recipient matches the fixture data."""
    payload = {'category': 'recipient', 'subawards': True, 'page': 1, 'limit': 50}
    actual = RecipientViewSet().perform_search(payload, {})
    expected = {
        'category': 'recipient',
        'limit': 50,
        'page_metadata': {'page': 1, 'next': None, 'previous': None, 'hasNext': False, 'hasPrevious': False},
        'results': [
            {'amount': 10000, 'code': None, 'name': 'MULTIPLE RECIPIENTS', 'recipient_id': None},
            {'amount': 1100, 'code': '1234JD4321', 'recipient_id': '0b54895d-2393-ea12-48e3-deae990614d9-C', 'name': 'JOHN DOE'},
            {'amount': 11, 'code': '00UOP00', 'recipient_id': '2af2a5a5-3126-2c76-3681-dec2cf148f1a-P', 'name': 'UNIVERSITY OF PAWNEE'},
        ],
        'messages': [get_time_period_message()],
    }
    assert actual == expected
def create_run_path(run_context: RunContext, substitution_list: SubstitutionList, ert_config: ErtConfig) -> None:
    """Create and populate the runpath directory for every active realization.

    For each active realization: creates the runpath directory, renders the
    configured templates with per-realization substitutions, writes parameter
    files and a ``jobs.json`` forward-model description. Finally writes the
    runpath list file for the iteration.
    """
    t = time.perf_counter()
    # Copy so the case-name substitutions do not leak back to the caller.
    substitution_list = copy(substitution_list)
    substitution_list['<ERT-CASE>'] = run_context.sim_fs.name
    substitution_list['<ERTCASE>'] = run_context.sim_fs.name
    for (iens, run_arg) in enumerate(run_context):
        run_path = Path(run_arg.runpath)
        if run_context.is_active(iens):
            run_path.mkdir(parents=True, exist_ok=True)
            # Render each template with realization/iteration substitutions
            # applied to both the target file name and the file contents.
            for (source_file, target_file) in ert_config.ert_templates:
                target_file = substitution_list.substitute_real_iter(target_file, run_arg.iens, run_context.iteration)
                result = substitution_list.substitute_real_iter(Path(source_file).read_text('utf-8'), run_arg.iens, run_context.iteration)
                target = (run_path / target_file)
                if (not target.parent.exists()):
                    os.makedirs(target.parent, exist_ok=True)
                target.write_text(result)
            model_config = ert_config.model_config
            _generate_parameter_files(run_context.sim_fs.experiment.parameter_configuration.values(), model_config.gen_kw_export_name, run_path, run_arg.iens, run_context.sim_fs, run_context.iteration)
            # Preserve any pre-existing jobs.json before overwriting it.
            path = (run_path / 'jobs.json')
            _backup_if_existing(path)
            with open((run_path / 'jobs.json'), mode='w', encoding='utf-8') as fptr:
                forward_model_output = ert_config.forward_model_data_to_json(run_arg.run_id, run_arg.iens, run_context.iteration)
                json.dump(forward_model_output, fptr)
    run_context.runpaths.write_runpath_list([run_context.iteration], run_context.active_realizations)
    logger.debug(f'create_run_path() time_used {(time.perf_counter() - t):.4f}s')
class searchInfoNumero():
    """Look up location, operator and phone type for a French phone number.

    ``search`` populates ``self.location``, ``self.city``, ``self.operator``
    and ``self.phone_type`` by scraping an online reverse-lookup page.

    NOTE(review): the original chunk contained an unterminated string literal
    (``url = '``) — the lookup endpoint was lost in extraction and must be
    restored for the scrape to work.
    """

    def search(self, num):
        """Query the lookup service for ``num`` and store the parsed results."""
        def mob_fix(pfx):
            # Classify the two-digit prefix: 06/07 mobile, 08/09 VoIP/ISP,
            # anything else landline.
            if ((pfx == '06') or (pfx == '07')):
                return 'Portable'
            elif ((pfx == '08') or (pfx == '09')):
                return 'internet'
            else:
                return 'Fixe'
        location = {'01': 'Ile de France.', '02': 'Nord-Ouest de la France.', '03': 'Nord-Est de la France.', '04': 'Sud-Est de la France.', '05': 'Sud-Ouest de la France.'}
        # Normalize: strip spaces and convert the +33 country code to a leading 0.
        num = num.replace(' ', '').replace('+33', '0')
        pfx = num[0:2]
        # TODO(review): restore the real lookup URL — it was lost in extraction.
        url = ''
        page = requests.get((url + num), timeout=10).content.decode('utf-8')
        p = []
        soup = BeautifulSoup(page, 'html.parser')
        tags = soup('p')
        for n in tags:
            line = n.string
            p.append(line)
        # NOTE(review): assumes the page always has at least four <p> tags,
        # with operator third and city fourth — verify against the service.
        operator = p[2]
        ville = p[3]
        self.location = location.get(pfx)
        self.city = ville
        self.operator = operator
        if (mob_fix(pfx) == 'Portable'):
            self.phone_type = 'Portable'
        elif (mob_fix(pfx) == 'internet'):
            self.phone_type = 'Voip/FAI'
        else:
            self.phone_type = 'Fixe'
('rocm.groupnorm.func_call')
def groupnorm_gen_func_call(func_attrs, indent=' '):
    """Render the ROCm group-norm function-call source snippet for *func_attrs*."""
    outputs = func_attrs['outputs']
    inputs = func_attrs['inputs']
    assert (len(outputs) == 1)
    assert (len(inputs) == 3)
    # Inputs are (x, gamma, beta); render each as an fp16 call parameter.
    x_param = FUNC_CALL_FP16_PARAM_TEMPLATE.render(name=inputs[0]._attrs['name'])
    gamma_param = FUNC_CALL_FP16_PARAM_TEMPLATE.render(name=inputs[1]._attrs['name'])
    beta_param = FUNC_CALL_FP16_PARAM_TEMPLATE.render(name=inputs[2]._attrs['name'])
    y_param = FUNC_CALL_FP16_PARAM_TEMPLATE.render(name=outputs[0]._attrs['name'])
    shapes = inputs[0]._attrs['shape']
    assert (len(shapes) == 4), f'GroupNorm only supports input with rank == 4, current rank: {len(shapes)}'
    # NHWC layout: batch, height, width, channels; groups come from the op attrs.
    n_dim, h_dim, w_dim, c_dim = (dim._attrs['name'] for dim in shapes)
    return FUNC_CALL_TEMPLATE.render(
        func_name=func_attrs['name'],
        input=x_param,
        gamma=gamma_param,
        beta=beta_param,
        output=y_param,
        N=n_dim,
        H=h_dim,
        W=w_dim,
        G=func_attrs['num_groups'],
        C=c_dim,
        indent=indent,
    )
def mock_handler():
    """Yield an AsyncMock whose return value is a serialized SNMPv3 GetResponse message.

    The canned message carries USM security parameters and a scoped PDU with a
    single Integer var-bind, suitable for driving transport-layer tests.
    """
    data = bytes(Message(Integer(3), HeaderData(123, 65000, V3Flags(False, False, False), 3), bytes(USMSecurityParameters(b'engine-id', 1, 2, b'username', b'auth', b'priv')), ScopedPDU(OctetString(b'engine-id'), OctetString(b'context'), GetResponse(PDUContent(123, [VarBind(ObjectIdentifier(), Integer(10))])))))
    mock = AsyncMock(return_value=data)
    (yield mock)
def fetch_price(zone_key: str='NZ', session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> dict:
    """Fetch the latest average wholesale electricity price for New Zealand.

    Averages the price over the configured NZ_PRICE_REGIONS grid zones and
    returns a dict with datetime, price (NZD), currency, source and zoneKey.
    Raises NotImplementedError for historical requests and ValueError when
    the feed contains no matching regions.

    NOTE(review): the original chunk contained an unterminated string literal
    (``url = '``) — the em6 API endpoint was lost in extraction and must be
    restored.
    """
    if target_datetime:
        raise NotImplementedError('This parser is not able to retrieve data for past dates')
    r = (session or Session())
    # TODO(review): restore the real em6 API URL — it was lost in extraction.
    url = ''
    response = r.get(url, verify=False)
    obj = response.json()
    region_prices = []
    latest_timestamp = None
    for item in obj.get('items', []):
        if (item.get('grid_zone_id') in NZ_PRICE_REGIONS):
            latest_timestamp = item.get('timestamp')
            region_prices.append(float(item.get('price')))
    # Guard against an empty feed: the original raised ZeroDivisionError on
    # the average and NameError on the timestamp in that case.
    if (not region_prices):
        raise ValueError(f'No price data found for zone {zone_key}')
    avg_price = (sum(region_prices) / len(region_prices))
    # Named price_datetime to avoid shadowing the imported datetime class.
    price_datetime = arrow.get(latest_timestamp, tzinfo='UTC')
    return {'datetime': price_datetime.datetime, 'price': avg_price, 'currency': 'NZD', 'source': 'api.em6.co.nz', 'zoneKey': zone_key}
class TextClassificationInferenceOptions(InferenceConfig):
    """Inference configuration for text-classification models.

    Stores the candidate class labels, tokenization settings and optional
    result-field / top-k options, tagged with the ``text_classification``
    configuration type.
    """

    def __init__(self, *, classification_labels: t.Union[(t.List[str], t.Tuple[(str, ...)])], tokenization: NlpTokenizationConfig, results_field: t.Optional[str]=None, num_top_classes: t.Optional[int]=None):
        """All arguments are keyword-only.

        classification_labels: labels the model may predict.
        tokenization: NLP tokenization configuration for the model input.
        results_field: optional output field name for the prediction.
        num_top_classes: optional number of top classes to report.
        """
        super().__init__(configuration_type='text_classification')
        self.results_field = results_field
        self.num_top_classes = num_top_classes
        self.tokenization = tokenization
        self.classification_labels = classification_labels
('cuda.dynamic_slice.func_call')
def gen_function_call(func_attrs, indent=' '):
    """Generate the CUDA dynamic-slice function-call source for *func_attrs*.

    Thin wrapper delegating to the shared slice codegen with a CUDA backend
    spec; start/end indices are wrapped in single-element lists as required
    by the common helper.
    """
    return slice_common.gen_function_call(backend_spec=CUDASpec(), func_name=func_attrs['name'], inputs=func_attrs['inputs'], outputs=func_attrs['outputs'], start_indices=[func_attrs['start_indices']], end_indices=[func_attrs['end_indices']], dim=0, indent=indent)
def test_differential_all():
    """chicExportData in 'all' mode must reproduce the reference differential archive.

    Improvement over the original: both tar archives are opened with context
    managers so the file handles are not leaked when an assertion fails.
    """
    outfile = NamedTemporaryFile(suffix='.tar.gz', delete=False)
    outfile.close()
    args = '-f {} -o {} -om {}'.format((ROOT + 'chicDifferentialTest/differential.hdf5'), outfile.name, 'all').split()
    chicExportData.main(args)
    output_folder_new = mkdtemp(prefix='output_')
    output_folder_test_data = mkdtemp(prefix='output_')
    with tarfile.open(outfile.name, 'r') as archive_new:
        namelist = archive_new.getnames()
        assert (len(namelist) == 9)
        archive_new.extractall(output_folder_new)
    with tarfile.open((ROOT + 'chicExportData/differential_all.tar.gz'), 'r') as archive_expected:
        archive_expected.extractall(output_folder_test_data)
    # Compare the extracted files pairwise in sorted order, skipping the header line.
    files_new = sorted(os.listdir(output_folder_new))
    files_test_data = sorted(os.listdir(output_folder_test_data))
    for (file_new, file_test_data) in zip(files_new, files_test_data):
        assert are_files_equal(((output_folder_new + '/') + file_new), ((output_folder_test_data + '/') + file_test_data), skip=1)
class T():
    """Command-line driver: compiles a fractal formula and renders it to an image."""

    def __init__(self, userConfig):
        self.userConfig = userConfig
        self.compiler = fc.Compiler(userConfig)
        self.f = fractal.T(self.compiler)

    def run(self, options):
        """Execute one render according to the parsed command-line options."""
        for path in options.extra_paths:
            self.compiler.add_func_path(path)
        if (options.flags is not None):
            self.compiler.set_flags(options.flags)
        # Command-line values win; fall back to the user config for each setting.
        width = (options.width or self.userConfig.getint('display', 'width'))
        height = (options.height or self.userConfig.getint('display', 'height'))
        threads = (options.threads or self.userConfig.getint('general', 'threads'))
        if options.paramfile:
            self.load(options.paramfile)
        self.f.apply_options(options)
        self.f.antialias = (options.antialias or self.userConfig.getint('display', 'antialias'))
        outfile = self.compile(options)
        if (options.buildonly is not None):
            # NOTE(review): when --usebuilt is also set, compile() returns None
            # and buildonly() would copy None — confirm these options are
            # mutually exclusive at the CLI layer.
            self.buildonly(options, outfile)
            return
        if options.singlepoint:
            self.f.drawpoint()
        else:
            im = image.T(width, height)
            self.f.draw(im, threads)
        if options.save_filename:
            im.save(options.save_filename)

    def compile(self, options):
        """Compile the formula, or point at a prebuilt module (returning None)."""
        if (options.usebuilt is None):
            return self.f.compile()
        else:
            self.f.set_output_file(options.usebuilt)

    def buildonly(self, options, outfile):
        """Copy the compiled module (and its C source) to the requested path."""
        outdirname = os.path.dirname(options.buildonly)
        if outdirname:
            os.makedirs(outdirname, exist_ok=True)
        shutil.copy(outfile, options.buildonly)
        (base, ext) = os.path.splitext(outfile)
        cfile = (base + '.c')
        shutil.copy(cfile, (options.buildonly + '.c'))

    def load(self, filename):
        """Load fractal parameters from a .fct file."""
        with open(filename) as fh:
            self.f.loadFctFile(fh)
def _normalize_type_list(k, v):
    """Coerce a build-flag value to a list of normalized strings.

    Dicts are rejected with a warning/exception; scalars are wrapped in a
    single-element list; lists/tuples/sets pass through element-wise.
    """
    if isinstance(v, dict):
        message = _('{build_flag} must be list or string, found: {value}')
        _warn_or_exception(message.format(build_flag=k, value=v))
    elif (type(v) not in (list, tuple, set)):
        v = [v]
    return [_normalize_type_string(entry) for entry in v]
_custom_acc_mapper_fn(op_and_target=('call_function', torch.addcmul), arg_replacement_tuples=[('input', 'input'), ('tensor1', 'tensor1'), ('tensor2', 'tensor2'), ('value', 'value')])
def addcmul_mapper(node: torch.fx.Node, _: nn.Module) -> torch.fx.Node:
    """Decompose ``torch.addcmul`` into acc-ops ``mul`` and ``add`` nodes.

    Builds ``input + value * (tensor1 * tensor2)``, skipping the scalar
    multiply when ``value == 1``. Returns the final ``add`` node.
    """
    with node.graph.inserting_before(node):
        mul_kwargs = {'input': node.kwargs['tensor1'], 'other': node.kwargs['tensor2']}
        mul_node = node.graph.create_node('call_function', mul, kwargs=mul_kwargs, name=f'{node.name}_mul')
        mul_node.meta = node.meta.copy()
        input_node = mul_node
        if (node.kwargs['value'] != 1):
            value_mul_kwargs = {'input': input_node, 'other': node.kwargs['value']}
            # BUG FIX: the name was the literal string '{mul_node.name}_value_mul'
            # (missing f-prefix), so every such node requested the same name.
            new_input_node = node.graph.create_node('call_function', mul, kwargs=value_mul_kwargs, name=f'{mul_node.name}_value_mul')
            new_input_node.meta = input_node.meta.copy()
            input_node = new_input_node
        add_kwargs = {'input': node.kwargs['input'], 'other': input_node}
        add_node = node.graph.create_node('call_function', add, kwargs=add_kwargs, name=f'{node.name}_add')
        add_node.meta = node.meta.copy()
        return add_node
def _substitute_token(defines: Defines, token: FileContextToken, expand_env: bool=True) -> FileContextToken:
    """Expand environment variables and DEFINE-style substitutions in a token.

    defines: sequence of (key, value) replacement pairs.
    token: the token whose text is substituted.
    expand_env: when True, ``$NAME`` environment references are expanded first.
    Returns the substituted token; warns (without failing) if replacement
    does not reach a fixed point, which indicates a substitution loop.
    """
    current: FileContextToken = token
    if expand_env:
        for (key, val) in os.environ.items():
            current = current.replace_value(f'${key}', val)
    if (not defines):
        return current
    # Apply the defines repeatedly until a fixed point, capped at 100 rounds
    # to guard against cyclic definitions.
    prev = None
    n = 0
    while ((prev != current) and (n < 100)):
        n = (n + 1)
        for (key, val) in defines:
            prev = current
            current = current.replace_value(key, str(val))
    # Any define key still present means the loop gave up before converging.
    for (key, val) in defines:
        if (key in current):
            ConfigWarning.ert_context_warn(f'''Gave up replacing in {token}.
After replacing the value is now: {current}.
This still contains the replacement value: {key}, which would be replaced by {val}. Probably this causes a loop.''', token)
    return current
.parametrize('transformer', _estimators)
def test_transformers_in_pipeline_with_set_output_pandas(transformer):
    """A transformer must behave identically standalone and inside a pandas-output pipeline."""
    X = pd.DataFrame({'feature_1': [1, 2, 3, 4, 5], 'feature_2': [6, 7, 8, 9, 10]})
    y = pd.Series([0, 1, 0, 1, 0])
    pipeline = Pipeline([('trs', transformer)]).set_output(transform='pandas')
    transformed_direct = transformer.fit_transform(X)
    transformed_via_pipeline = pipeline.fit_transform(X, y)
    pd.testing.assert_frame_equal(transformed_direct, transformed_via_pipeline)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.