code stringlengths 281 23.7M |
|---|
class PrivacyNotice(PrivacyNoticeBase, Base):
    """A versioned privacy notice; create/update snapshot a PrivacyNoticeHistory row.

    NOTE(review): the `@property`/`@classmethod` decorators below were restored;
    they appear to have been stripped during extraction (the original methods
    read like accessors and `create` takes `cls`). Confirm against upstream.
    """

    # Optional link back to the template this notice originated from.
    origin = Column(String, ForeignKey(PrivacyNoticeTemplate.id_field_path), nullable=True)
    version = Column(Float, nullable=False, default=1.0)
    histories = relationship('PrivacyNoticeHistory', backref='privacy_notice', lazy='dynamic')

    @property
    def privacy_notice_history_id(self) -> Optional[str]:
        """Return the id of the history record matching the current version, if any."""
        history: PrivacyNoticeHistory = self.histories.filter_by(version=self.version).first()
        return history.id if history else None

    @property
    def default_preference(self) -> UserConsentPreference:
        """Default user preference implied by this notice's consent mechanism."""
        if self.consent_mechanism == ConsentMechanism.opt_in:
            return UserConsentPreference.opt_out
        if self.consent_mechanism == ConsentMechanism.opt_out:
            return UserConsentPreference.opt_in
        if self.consent_mechanism == ConsentMechanism.notice_only:
            return UserConsentPreference.acknowledge
        raise Exception('Invalid notice consent mechanism.')

    @property
    def cookies(self) -> List[Cookies]:
        """Cookies on privacy declarations whose data use matches one of this notice's data uses."""
        db = Session.object_session(self)
        return (
            db.query(Cookies)
            .join(PrivacyDeclaration, PrivacyDeclaration.id == Cookies.privacy_declaration_id)
            .filter(or_(*[PrivacyDeclaration.data_use.like(f'{notice_use}%') for notice_use in self.data_uses]))
            .all()
        )

    @property
    def systems_applicable(self) -> bool:
        """True when at least one System is covered by this notice."""
        db = Session.object_session(self)
        return any(self.applies_to_system(system) for system in db.query(System))

    @classmethod
    def create(cls: Type[PrivacyNotice], db: Session, *, data: dict[str, Any], check_name: bool = False) -> PrivacyNotice:
        """Create the notice and snapshot an initial PrivacyNoticeHistory record."""
        created = super().create(db=db, data=data, check_name=check_name)
        # The history row gets its own id; drop any id supplied for the notice itself.
        data.pop('id', None)
        history_data = {**data, 'privacy_notice_id': created.id}
        PrivacyNoticeHistory.create(db, data=history_data, check_name=False)
        return created

    def update(self, db: Session, *, data: dict[str, Any]) -> PrivacyNotice:
        """Update the notice and, if anything actually changed, snapshot a new history record."""
        resource, updated = update_if_modified(self, db=db, data=data)
        if updated:
            history_data = create_historical_data_from_record(resource)
            history_data['privacy_notice_id'] = resource.id
            PrivacyNoticeHistory.create(db, data=history_data, check_name=False)
        return resource
class OptionPlotoptionsDependencywheelSonificationDefaultspeechoptionsMappingTime(Options):
    """Generated Highcharts option wrapper for the dependency-wheel sonification
    `defaultSpeechOptions.mapping.time` group.

    Each option is a getter/setter property pair over the underlying config.
    The original bare `def` pairs shadowed each other (the setter replaced the
    getter); the `@property` / `@<name>.setter` decorators restore the intended
    descriptor behaviour.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def create_fee_validity(appointment):
    """Create and save a Fee Validity document for the given appointment.

    Returns None (creating nothing) when the patient already has a valid
    Fee Validity; otherwise returns the saved Fee Validity doc.
    """
    if patient_has_validity(appointment):
        return

    # Healthcare Settings defaults: at least one visit and one valid day.
    allowed_visits = frappe.db.get_single_value('Healthcare Settings', 'max_visits') or 1
    validity_days = frappe.db.get_single_value('Healthcare Settings', 'valid_days') or 1
    start_date = getdate(appointment.appointment_date)

    fee_validity = frappe.new_doc('Fee Validity')
    fee_validity.practitioner = appointment.practitioner
    fee_validity.patient = appointment.patient
    fee_validity.medical_department = appointment.department
    fee_validity.patient_appointment = appointment.name
    # Invoice item that billed this appointment, if it exists already.
    fee_validity.sales_invoice_ref = frappe.db.get_value(
        'Sales Invoice Item', {'reference_dn': appointment.name}, 'parent'
    )
    fee_validity.max_visits = allowed_visits
    fee_validity.visited = 0
    fee_validity.start_date = start_date
    fee_validity.valid_till = start_date + datetime.timedelta(days=int(validity_days))
    fee_validity.save(ignore_permissions=True)
    return fee_validity
class OptionSeriesArcdiagramLevelsDatalabels(Options):
    """Generated Highcharts option wrapper for the arcdiagram
    `series.levels.dataLabels` group.

    Each simple option is a getter/setter property pair over the underlying
    config (the getter's argument to ``_config_get`` is the Highcharts
    default); `animation`, `filter` and `textPath` return nested option
    objects. The original bare `def` pairs shadowed each other (the setter
    replaced the getter); the `@property` / `@<name>.setter` decorators
    restore the intended descriptor behaviour.
    """

    @property
    def align(self):
        return self._config_get('undefined')

    @align.setter
    def align(self, text: str):
        self._config(text, js_type=False)

    @property
    def allowOverlap(self):
        return self._config_get(False)

    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self) -> 'OptionSeriesArcdiagramLevelsDatalabelsAnimation':
        return self._config_sub_data('animation', OptionSeriesArcdiagramLevelsDatalabelsAnimation)

    @property
    def backgroundColor(self):
        return self._config_get('none')

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderColor(self):
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        return self._config_get(0)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def crop(self):
        return self._config_get(False)

    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def defer(self):
        return self._config_get(True)

    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def filter(self) -> 'OptionSeriesArcdiagramLevelsDatalabelsFilter':
        return self._config_sub_data('filter', OptionSeriesArcdiagramLevelsDatalabelsFilter)

    @property
    def format(self):
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        return self._config_get(None)

    @formatter.setter
    def formatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def inside(self):
        return self._config_get(True)

    @inside.setter
    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nodeFormat(self):
        return self._config_get('undefined')

    @nodeFormat.setter
    def nodeFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def nodeFormatter(self):
        return self._config_get(None)

    @nodeFormatter.setter
    def nodeFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def overflow(self):
        return self._config_get('justify')

    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)

    @property
    def padding(self):
        return self._config_get(5)

    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)

    @property
    def position(self):
        return self._config_get('center')

    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shape(self):
        return self._config_get('square')

    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)

    @property
    def style(self):
        return self._config_get(None)

    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)

    @property
    def textPath(self) -> 'OptionSeriesArcdiagramLevelsDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionSeriesArcdiagramLevelsDatalabelsTextpath)

    @property
    def useHTML(self):
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        return self._config_get('undefined')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get('undefined')

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(6)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
# NOTE(review): the three lines below appear to be click decorators whose
# leading `@<group>.command` / `@click.option` / `@click.pass` text was lost in
# extraction -- presumably `@...command()`, `@click.option(...)` and
# `@click.pass_context`. Confirm against the original module.
()
('--profiles-dir', '-p', type=click.Path(exists=True), default=None, help='Which directory to look in for the profiles.yml file. If not set, edr will look in the current working directory first, then HOME/.dbt/')
_context
def debug(ctx, profiles_dir):
    # CLI entry point: run the Debug check against the resolved profiles
    # directory, reporting anonymous start/end analytics around it.
    config = Config(profiles_dir=profiles_dir)
    anonymous_tracking = AnonymousCommandLineTracking(config)
    anonymous_tracking.track_cli_start(Command.DEBUG, None, ctx.command.name)
    success = Debug(config).run()
    if (not success):
        # Non-zero exit so callers/scripts can detect the failed check.
        sys.exit(1)
    anonymous_tracking.track_cli_end(Command.DEBUG, None, ctx.command.name)
def match_arp_target(self, of_ports, priority=None):
    """Build an ARP packet plus a flow match for it, install the flow, and
    return ``(packet, match)``. The flow-add targets ``of_ports[1]``."""
    arp_pkt = simple_arp_packet()
    flow_match = parse.packet_to_flow_match(arp_pkt)
    self.assertTrue(flow_match is not None, 'Could not generate flow match from pkt')
    # Wildcard every field except dl_type, nw_proto and the nw_dst mask bits.
    flow_match.wildcards = ofp.OFPFW_ALL ^ ofp.OFPFW_DL_TYPE ^ ofp.OFPFW_NW_PROTO ^ ofp.OFPFW_NW_DST_MASK
    match_send_flowadd(self, flow_match, priority, of_ports[1])
    return (arp_pkt, flow_match)
class FlattenConcatStateValueNet(FlattenConcatBaseNet):
    """Flatten-and-concatenate network with a scalar state-value head."""

    def __init__(self, obs_shapes: Dict[str, Sequence[int]], hidden_units: List[int], non_lin: nn.Module):
        """Build the shared latent trunk (via the base class) plus a value head."""
        super().__init__(obs_shapes, hidden_units, non_lin)
        # Linear head producing a single value from the shared latent features.
        value_head = LinearOutputBlock(
            in_keys='latent',
            out_keys='value',
            in_shapes=self.perception_dict['latent'].out_shapes(),
            output_units=1,
        )
        value_head.apply(make_module_init_normc(std=0.01))
        self.perception_dict['value'] = value_head
        self.net = InferenceBlock(
            in_keys=list(obs_shapes.keys()),
            out_keys='value',
            in_shapes=list(obs_shapes.values()),
            perception_blocks=self.perception_dict,
        )

    def forward(self, x):
        """Compute the state value for the observation dict ``x``."""
        return self.net(x)
def send_photo(token, chat_id, photo, caption=None, reply_to_message_id=None, reply_markup=None, parse_mode=None, disable_notification=None, timeout=None, caption_entities=None, allow_sending_without_reply=None, protect_content=None, message_thread_id=None, has_spoiler=None):
    """Invoke the Telegram Bot API ``sendPhoto`` method.

    ``photo`` may be a string (file_id or URL), a PIL image, or an open file
    object; file-like payloads are uploaded via multipart ``files``. Optional
    arguments are forwarded only when provided.
    """
    payload = {'chat_id': chat_id}
    files = None
    if util.is_string(photo):
        # file_id or URL -- sent inline in the request parameters.
        payload.update(photo=photo)
    elif util.is_pil_image(photo):
        files = {'photo': util.pil_image_to_file(photo)}
    else:
        files = {'photo': photo}
    if caption:
        payload.update(caption=caption)
    if reply_to_message_id:
        payload.update(reply_to_message_id=reply_to_message_id)
    if reply_markup:
        payload.update(reply_markup=_convert_markup(reply_markup))
    if parse_mode:
        payload.update(parse_mode=parse_mode)
    # Boolean flags use an explicit None check so that False is still sent.
    if disable_notification is not None:
        payload.update(disable_notification=disable_notification)
    if timeout:
        payload.update(timeout=timeout)
    if caption_entities:
        payload.update(caption_entities=json.dumps(types.MessageEntity.to_list_of_dicts(caption_entities)))
    if allow_sending_without_reply is not None:
        payload.update(allow_sending_without_reply=allow_sending_without_reply)
    if protect_content is not None:
        payload.update(protect_content=protect_content)
    if message_thread_id is not None:
        payload.update(message_thread_id=message_thread_id)
    if has_spoiler is not None:
        payload.update(has_spoiler=has_spoiler)
    return _make_request(token, 'sendPhoto', params=payload, files=files, method='post')
class TestDailyBarsData():
    """Tests for the daily-bars data loaders (file-based and Mongo-backed)."""

    # Loader classes are collected at class-definition time, based on what the
    # current environment provides (a local data path, Mongo credentials).
    data_classes = []
    if os.path.exists(config['daily_bars_data_path']):
        data_classes.append(DailyBarsData)
    if (secrets['mongodb_adminusername'] is not None):
        data_classes.append(DailyBarsDataMongo)

    # NOTE(review): the two lines below look like `@pytest.mark.parametrize`
    # decorators whose leading `@pytest.mark` text was lost in extraction;
    # confirm against the original module.
    .parametrize('data_loader_class', data_classes)
    .parametrize(['tickers', 'days_count'], [(['AAPL', 'ZRAN', 'TSLA', 'WORK'], 100), (['INTC', 'ZRAN', 'XRDC', 'XOM'], 50), (['INTC', 'ZRAN', 'XRDC', 'XOM'], None), (['NVDA'], 100)])
    def test_load(self, tickers, days_count, data_loader_class):
        # Load daily bars for the tickers and verify frame shape/ordering.
        data_loader = data_loader_class(days_count=days_count)
        daily_df = data_loader.load(tickers)
        assert (type(daily_df) == pd.DataFrame)
        assert ('ticker' in daily_df.columns)
        assert ('date' in daily_df.columns)
        # Sorting by date and by reversed original row order must agree,
        # i.e. rows come newest-first within each ticker.
        daily_df['date_'] = daily_df['date'].astype(np.datetime64)
        daily_df['def_order'] = range(len(daily_df))[::(- 1)]
        expected_dates_order = daily_df.sort_values(['ticker', 'date_'], ascending=False)['date'].values
        real_dates_order = daily_df.sort_values(['ticker', 'def_order'], ascending=False)['date'].values
        np.testing.assert_array_equal(expected_dates_order, real_dates_order)
        # Consecutive bars for a ticker should be at most 14 days apart.
        diffs = (daily_df.groupby('ticker')['date_'].shift(1) - daily_df['date_'])
        assert (diffs.dropna() <= np.timedelta64(14, 'D')).min()
        if (days_count is not None):
            # When a window was requested, no ticker may exceed it.
            for cnt in daily_df.groupby('ticker').size():
                assert (cnt <= days_count)
def get_unique_duplicated_dict(df, subset=None, only_dupl_entries=False):
    """Map each representative row index to the indices that duplicate it.

    Parameters:
        df: pandas DataFrame to scan for duplicated rows.
        subset: columns considered when testing duplication (all columns when None).
        only_dupl_entries: when False (default), rows with no duplicates are
            also included, each mapped to an empty list.

    Returns:
        dict of row index -> list of duplicate row indices, as produced by
        ``_get_unique_duplicated_dict`` for the duplicated subset.
    """
    is_dupl = df.duplicated(subset=subset, keep=False)
    uniq_dupl_dict = _get_unique_duplicated_dict(df[is_dupl], subset)
    if not only_dupl_entries:
        # Non-duplicated rows map to empty duplicate lists; dict.update
        # replaces the original element-by-element copy loop.
        uniq_dupl_dict.update((idx, []) for idx in df.index[~is_dupl])
    return uniq_dupl_dict
def test_event_graph_accumulated_time(mocker: Any) -> None:
    """Events chained at the start/end of a parent accumulate delay in the heap
    ordering: children added "at start" with -0.5 land at 0.5, the parent added
    "at end" lands at 1.0."""
    message = DeferredMessage(MyMessage, 'unittest_args', kwargs_field='unittest_kwargs')
    topic = Topic(MyMessage)

    root = Event(message, topic, 0.0, 1.0)
    graph = EventGraph(root)
    parent = Event(message, topic, 0.0, 1.0)
    first_child = Event(message, topic, -0.5, 1.0)
    second_child = Event(message, topic, -0.5, 1.0)
    graph.add_event_at_end(parent, root)
    graph.add_event_at_start(first_child, parent)
    graph.add_event_at_start(second_child, parent)

    # Drain the heap and check (time, sequence, event) ordering.
    drained = []
    while graph.heap:
        drained.append(graph.heap.pop())
    assert drained == [
        (0.0, 0, root),
        (0.5, 2, first_child),
        (0.5, 3, second_child),
        (1.0, 1, parent),
    ]
class OptionPlotoptionsWindbarbSonificationContexttracksMappingPan(Options):
    """Generated Highcharts option wrapper for the windbarb sonification
    `contextTracks.mapping.pan` group.

    Each option is a getter/setter property pair over the underlying config.
    The original bare `def` pairs shadowed each other (the setter replaced the
    getter); the `@property` / `@<name>.setter` decorators restore the intended
    descriptor behaviour.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def pytest_generate_tests(metafunc):
    """Parametrize the project's custom fixtures from command-line options."""
    fixtures = metafunc.fixturenames

    if 'fast_math' in fixtures:
        # --fast-math option selects which fast-math modes to run.
        choice = metafunc.config.option.fast_math
        values = {'both': [False, True], 'no': [False], 'yes': [True]}[choice]
        labels = ['fm' if value else 'nofm' for value in values]
        metafunc.parametrize('fast_math', values, ids=labels)

    if 'thr_and_double' in fixtures:
        parametrize_thread_tuple(metafunc, 'thr_and_double', pair_thread_with_doubles)

    if 'thr' in fixtures:
        threads, thread_ids = get_threads(metafunc.config)
        metafunc.parametrize('thr', threads, ids=thread_ids, indirect=True)

    if 'some_thr' in fixtures:
        # Only the first available thread is used for this fixture.
        threads, thread_ids = get_threads(metafunc.config)
        metafunc.parametrize('some_thr', [threads[0]], ids=[thread_ids[0]], indirect=True)

    if 'cluda_api' in fixtures:
        apis, api_ids = get_apis(metafunc.config)
        metafunc.parametrize('cluda_api', apis, ids=api_ids)
# NOTE(review): the line below looks like a `@pytest.mark.parametrize`
# decorator whose leading `@pytest.mark` text was lost in extraction;
# confirm against the original module.
.parametrize('vm_class', MAINNET_VMS[:13])
def test_selfdestruct_does_not_issue_deprecation_warning_pre_shanghai(vm_class):
    # `simplefilter('error')` escalates any warning to an exception, so the
    # computation below fails the test if SELFDESTRUCT emits a warning on a
    # pre-Shanghai VM.
    with warnings.catch_warnings():
        warnings.simplefilter('error')
        run_computation(setup_vm(vm_class), CANONICAL_ADDRESS_B, code=assemble(opcode_values.SELFDESTRUCT))
# NOTE(review): the five lines below appear to be pytest / hypothesis
# decorators whose leading `@pytest.mark` / `@given` / `@settings` text was
# lost in extraction; confirm against the original module.
.skipif((not has_torch), reason='needs PyTorch')
.skipif((not has_torch_cuda_gpu), reason='needs a GPU')
.skipif((not has_torch_amp), reason='requires PyTorch with mixed-precision support')
(X=one_of(tensors(), lists(tensors()), tuples(tensors())))
(deadline=None)
def test_scale_random_inputs(X):
    # An enabled PyTorchGradScaler must multiply every tensor by 2**16,
    # whether given a single tensor or a list/tuple of tensors.
    import torch
    device_id = torch.cuda.current_device()
    scaler = PyTorchGradScaler(enabled=True)
    scaler.to_(device_id)
    if is_torch_array(X):
        assert torch.allclose(scaler.scale(X), (X * (2.0 ** 16)))
    else:
        scaled1 = scaler.scale(X)
        scaled2 = [(t * (2.0 ** 16)) for t in X]
        for (t1, t2) in zip(scaled1, scaled2):
            assert torch.allclose(t1, t2)
class TestListOverrides(BodhiClientTestCase):
    """Tests for BodhiClient.list_overrides query-parameter handling."""

    def _assert_overrides_query(self, mocker, expected_params, **kwargs):
        # Shared scaffold: stub send_request, call list_overrides with the given
        # keyword arguments, and verify both the returned value and the exact
        # GET parameters that were sent.
        client = bindings.BodhiClient()
        client.send_request = mocker.MagicMock(return_value='response')
        response = client.list_overrides(**kwargs)
        assert response == 'response'
        client.send_request.assert_called_once_with('overrides/', verb='GET', params=expected_params)

    def test_with_user(self, mocker):
        self._assert_overrides_query(mocker, {'user': 'bowlofeggs'}, user='bowlofeggs')

    def test_without_parameters(self, mocker):
        self._assert_overrides_query(mocker, {})

    def test_with_package(self, mocker):
        self._assert_overrides_query(mocker, {'packages': 'bodhi'}, packages='bodhi')

    def test_with_expired(self, mocker):
        self._assert_overrides_query(mocker, {'expired': True}, expired=True)

    def test_with_active(self, mocker):
        self._assert_overrides_query(mocker, {'expired': False}, expired=False)

    def test_with_releases(self, mocker):
        self._assert_overrides_query(mocker, {'releases': 'F24'}, releases='F24')

    def test_list_overrides_with_builds(self, mocker):
        self._assert_overrides_query(mocker, {'builds': 'python-1.5.6-3.fc26'}, builds='python-1.5.6-3.fc26')

    def test_list_overrides_with_rows_per_page(self, mocker):
        self._assert_overrides_query(mocker, {'rows_per_page': 10}, rows_per_page=10)

    def test_list_overrides_with_page(self, mocker):
        self._assert_overrides_query(mocker, {'page': 5}, page=5)
# NOTE(review): the line below looks like a `@pytest.mark.parametrize`
# decorator whose leading `@pytest.mark` text was lost in extraction;
# confirm against the original module.
.parametrize('expiry_date, ok', [(TOMORROW, True), (TWO_DAYS_AGO, False)])
def test_expiry_date(rf: RequestFactory, expiry_date: dt.datetime, ok: bool) -> None:
    # A key with a future expiry authenticates (200); an expired key is
    # rejected (403).
    (_, key) = APIKey.objects.create_key(name='test', expiry_date=expiry_date)
    authorization = f'Api-Key {key}'
    request = rf.get('/test/', HTTP_AUTHORIZATION=authorization)
    response = view(request)
    status_code = (200 if ok else 403)
    assert (response.status_code == status_code)
def test_copy_with_replacing_subcontainer_providers():
    # Copied containers must resolve DependenciesContainer links against the
    # replacing subcontainer provider defined on the subclass.
    class X(containers.DeclarativeContainer):
        foo = providers.Dependency(instance_of=str)
    def build_x():
        return X(foo='1')
    class A(containers.DeclarativeContainer):
        x = providers.DependenciesContainer(**X.providers)
        y = x.foo
    # NOTE(review): the bare `(A)` below looks like a `@containers.copy(A)`
    # decorator whose leading text was lost in extraction; confirm upstream.
    (A)
    class B1(A):
        x = providers.Container(build_x)
    b1 = B1()
    assert (b1.y() == '1')
class RawSynchronousFlyteClient(object):
    """Thin synchronous gRPC wrapper over the Flyte Admin, Signal and DataProxy
    services.

    Every public call method forwards a single request message to the
    corresponding stub RPC, attaching the client's gRPC metadata.

    NOTE(review): the `@classmethod` on ``with_root_certificate`` and the
    `@property` on ``url`` were restored; they appear to have been stripped
    during extraction. Confirm against upstream.
    """

    _dataproxy_stub: DataProxyServiceStub

    def __init__(self, cfg: PlatformConfig, **kwargs):
        """Open a channel to ``cfg.endpoint`` and build the service stubs.

        The channel is wrapped for proxy authentication, user authentication
        and exception translation, innermost first.
        """
        self._cfg = cfg
        self._channel = wrap_exceptions_channel(cfg, upgrade_channel_to_authenticated(cfg, upgrade_channel_to_proxy_authenticated(cfg, get_channel(cfg))))
        self._stub = _admin_service.AdminServiceStub(self._channel)
        self._signal = signal_service.SignalServiceStub(self._channel)
        self._dataproxy_stub = dataproxy_service.DataProxyServiceStub(self._channel)
        logger.info(f"Flyte Client configured -> {self._cfg.endpoint} in {('insecure' if self._cfg.insecure else 'secure')} mode.")
        self._metadata = None

    @classmethod
    def with_root_certificate(cls, cfg: PlatformConfig, root_cert_file: str) -> RawSynchronousFlyteClient:
        """Build a client whose TLS channel trusts the given root certificate."""
        with open(root_cert_file, 'rb') as fp:
            b = fp.read()
        return cls(cfg, credentials=grpc.ssl_channel_credentials(root_certificates=b))

    @property
    def url(self) -> str:
        """The configured admin endpoint."""
        return self._cfg.endpoint

    # -- Task service ------------------------------------------------------

    def create_task(self, task_create_request):
        return self._stub.CreateTask(task_create_request, metadata=self._metadata)

    def list_task_ids_paginated(self, identifier_list_request):
        return self._stub.ListTaskIds(identifier_list_request, metadata=self._metadata)

    def list_tasks_paginated(self, resource_list_request):
        return self._stub.ListTasks(resource_list_request, metadata=self._metadata)

    def get_task(self, get_object_request):
        return self._stub.GetTask(get_object_request, metadata=self._metadata)

    # -- Signal service ----------------------------------------------------

    def set_signal(self, signal_set_request: SignalSetRequest) -> SignalSetResponse:
        return self._signal.SetSignal(signal_set_request, metadata=self._metadata)

    def list_signals(self, signal_list_request: SignalListRequest) -> SignalList:
        return self._signal.ListSignals(signal_list_request, metadata=self._metadata)

    # -- Workflow service --------------------------------------------------

    def create_workflow(self, workflow_create_request):
        return self._stub.CreateWorkflow(workflow_create_request, metadata=self._metadata)

    def list_workflow_ids_paginated(self, identifier_list_request):
        return self._stub.ListWorkflowIds(identifier_list_request, metadata=self._metadata)

    def list_workflows_paginated(self, resource_list_request):
        return self._stub.ListWorkflows(resource_list_request, metadata=self._metadata)

    def get_workflow(self, get_object_request):
        return self._stub.GetWorkflow(get_object_request, metadata=self._metadata)

    # -- Launch plans ------------------------------------------------------

    def create_launch_plan(self, launch_plan_create_request):
        return self._stub.CreateLaunchPlan(launch_plan_create_request, metadata=self._metadata)

    def get_launch_plan(self, object_get_request):
        return self._stub.GetLaunchPlan(object_get_request, metadata=self._metadata)

    def get_active_launch_plan(self, active_launch_plan_request):
        return self._stub.GetActiveLaunchPlan(active_launch_plan_request, metadata=self._metadata)

    def update_launch_plan(self, update_request):
        return self._stub.UpdateLaunchPlan(update_request, metadata=self._metadata)

    def list_launch_plan_ids_paginated(self, identifier_list_request):
        return self._stub.ListLaunchPlanIds(identifier_list_request, metadata=self._metadata)

    def list_launch_plans_paginated(self, resource_list_request):
        return self._stub.ListLaunchPlans(resource_list_request, metadata=self._metadata)

    def list_active_launch_plans_paginated(self, active_launch_plan_list_request):
        return self._stub.ListActiveLaunchPlans(active_launch_plan_list_request, metadata=self._metadata)

    # -- Named entities ----------------------------------------------------

    def update_named_entity(self, update_named_entity_request):
        return self._stub.UpdateNamedEntity(update_named_entity_request, metadata=self._metadata)

    # -- Workflow executions -----------------------------------------------

    def create_execution(self, create_execution_request):
        return self._stub.CreateExecution(create_execution_request, metadata=self._metadata)

    def recover_execution(self, recover_execution_request):
        return self._stub.RecoverExecution(recover_execution_request, metadata=self._metadata)

    def get_execution(self, get_object_request):
        return self._stub.GetExecution(get_object_request, metadata=self._metadata)

    def get_execution_data(self, get_execution_data_request):
        return self._stub.GetExecutionData(get_execution_data_request, metadata=self._metadata)

    def get_execution_metrics(self, get_execution_metrics_request):
        return self._stub.GetExecutionMetrics(get_execution_metrics_request, metadata=self._metadata)

    def list_executions_paginated(self, resource_list_request):
        return self._stub.ListExecutions(resource_list_request, metadata=self._metadata)

    def terminate_execution(self, terminate_execution_request):
        return self._stub.TerminateExecution(terminate_execution_request, metadata=self._metadata)

    def relaunch_execution(self, relaunch_execution_request):
        return self._stub.RelaunchExecution(relaunch_execution_request, metadata=self._metadata)

    # -- Node executions ---------------------------------------------------

    def get_node_execution(self, node_execution_request):
        return self._stub.GetNodeExecution(node_execution_request, metadata=self._metadata)

    def get_node_execution_data(self, get_node_execution_data_request):
        return self._stub.GetNodeExecutionData(get_node_execution_data_request, metadata=self._metadata)

    def list_node_executions_paginated(self, node_execution_list_request):
        return self._stub.ListNodeExecutions(node_execution_list_request, metadata=self._metadata)

    def list_node_executions_for_task_paginated(self, node_execution_for_task_list_request):
        return self._stub.ListNodeExecutionsForTask(node_execution_for_task_list_request, metadata=self._metadata)

    # -- Task executions ---------------------------------------------------

    def get_task_execution(self, task_execution_request):
        return self._stub.GetTaskExecution(task_execution_request, metadata=self._metadata)

    def get_task_execution_data(self, get_task_execution_data_request):
        return self._stub.GetTaskExecutionData(get_task_execution_data_request, metadata=self._metadata)

    def list_task_executions_paginated(self, task_execution_list_request):
        return self._stub.ListTaskExecutions(task_execution_list_request, metadata=self._metadata)

    # -- Projects and attributes -------------------------------------------

    def list_projects(self, project_list_request: typing.Optional[ProjectListRequest] = None):
        # Default to an empty request when none is supplied.
        if project_list_request is None:
            project_list_request = ProjectListRequest()
        return self._stub.ListProjects(project_list_request, metadata=self._metadata)

    def register_project(self, project_register_request):
        return self._stub.RegisterProject(project_register_request, metadata=self._metadata)

    def update_project(self, project):
        return self._stub.UpdateProject(project, metadata=self._metadata)

    def update_project_domain_attributes(self, project_domain_attributes_update_request):
        return self._stub.UpdateProjectDomainAttributes(project_domain_attributes_update_request, metadata=self._metadata)

    def update_workflow_attributes(self, workflow_attributes_update_request):
        return self._stub.UpdateWorkflowAttributes(workflow_attributes_update_request, metadata=self._metadata)

    def get_project_domain_attributes(self, project_domain_attributes_get_request):
        return self._stub.GetProjectDomainAttributes(project_domain_attributes_get_request, metadata=self._metadata)

    def get_workflow_attributes(self, workflow_attributes_get_request):
        return self._stub.GetWorkflowAttributes(workflow_attributes_get_request, metadata=self._metadata)

    def list_matchable_attributes(self, matchable_attributes_list_request):
        return self._stub.ListMatchableAttributes(matchable_attributes_list_request, metadata=self._metadata)

    # -- Data proxy --------------------------------------------------------

    def create_upload_location(self, create_upload_location_request: _dataproxy_pb2.CreateUploadLocationRequest) -> _dataproxy_pb2.CreateUploadLocationResponse:
        return self._dataproxy_stub.CreateUploadLocation(create_upload_location_request, metadata=self._metadata)

    def create_download_location(self, create_download_location_request: _dataproxy_pb2.CreateDownloadLocationRequest) -> _dataproxy_pb2.CreateDownloadLocationResponse:
        return self._dataproxy_stub.CreateDownloadLocation(create_download_location_request, metadata=self._metadata)

    def create_download_link(self, create_download_link_request: _dataproxy_pb2.CreateDownloadLinkRequest) -> _dataproxy_pb2.CreateDownloadLinkResponse:
        return self._dataproxy_stub.CreateDownloadLink(create_download_link_request, metadata=self._metadata)
def test_elasticsearch_download_cached(common_test_data):
    """The cached-download lookup must return, per download type, the newest
    job whose update_date is after the relevant ES external-data load date."""
    # Last-load timestamps: ES transactions loaded at 12:00, ES awards at
    # 16:00 on Jan 17 (fabs/fpds later, on Jan 30).
    external_load_dates = [{'external_data_type__external_data_type_id': EXTERNAL_DATA_TYPE_DICT['fabs'], 'last_load_date': datetime(2021, 1, 30, 12, 0, 0, 0, timezone.utc)}, {'external_data_type__external_data_type_id': EXTERNAL_DATA_TYPE_DICT['fpds'], 'last_load_date': datetime(2021, 1, 30, 12, 0, 0, 0, timezone.utc)}, {'external_data_type__external_data_type_id': EXTERNAL_DATA_TYPE_DICT['es_transactions'], 'last_load_date': datetime(2021, 1, 17, 12, 0, 0, 0, timezone.utc)}, {'external_data_type__external_data_type_id': EXTERNAL_DATA_TYPE_DICT['es_awards'], 'last_load_date': datetime(2021, 1, 17, 16, 0, 0, 0, timezone.utc)}]
    for load_date in external_load_dates:
        baker.make('broker.ExternalDataLoadDate', **load_date)
    es_transaction_request = {**JSON_REQUEST, 'download_types': ['elasticsearch_transactions', 'sub_awards']}
    es_award_request = {**JSON_REQUEST, 'download_types': ['elasticsearch_awards', 'sub_awards']}
    # Jobs created both before ("wrong") and after ("right") each load date.
    download_jobs = [{'download_job_id': 10, 'file_name': 'es_transaction_job_wrong.zip', 'job_status_id': 1, 'json_request': json.dumps(es_transaction_request), 'update_date': datetime(2021, 1, 17, 10, 0, 0, 0, timezone.utc)}, {'download_job_id': 11, 'file_name': 'es_transaction_job_right.zip', 'job_status_id': 1, 'json_request': json.dumps(es_transaction_request), 'update_date': datetime(2021, 1, 17, 12, 30, 0, 0, timezone.utc)}, {'download_job_id': 20, 'file_name': 'es_award_job_wrong.zip', 'job_status_id': 1, 'json_request': json.dumps(es_award_request), 'update_date': datetime(2021, 1, 17, 13, 0, 0, 0, timezone.utc)}, {'download_job_id': 21, 'file_name': 'es_award_job_right.zip', 'job_status_id': 1, 'json_request': json.dumps(es_award_request), 'update_date': datetime(2021, 1, 17, 17, 0, 0, 0, timezone.utc)}]
    for job in download_jobs:
        # Patch "now" so each job's auto-set update_date matches the fixture.
        with patch('django.utils.timezone.now') as mock_now:
            mock_now.return_value = job['update_date']
            baker.make('download.DownloadJob', **job)
    result = BaseDownloadViewSet._get_cached_download(json.dumps(es_transaction_request), es_transaction_request['download_types'])
    assert (result == {'download_job_id': 11, 'file_name': 'es_transaction_job_right.zip'})
    result = BaseDownloadViewSet._get_cached_download(json.dumps(es_award_request), es_award_request['download_types'])
    assert (result == {'download_job_id': 21, 'file_name': 'es_award_job_right.zip'})
class CssInline(Attrs):
    """Inline CSS attribute helper exposing common SVG styles as properties.

    NOTE(review): the `@property` / `@<name>.setter` decorators below were
    restored -- the original residue (`_dasharray.setter`, `_width.setter`,
    `_opacity.setter`) indicates they were stripped during extraction.
    Confirm against upstream.
    """

    # Optional CSS class name used by to_class()/toStr().
    classname: str = None

    def __init__(self, component: primitives.HtmlModel = None, page: primitives.PageModel = None):
        super(CssInline, self).__init__(component, page=page)

    @property
    def stroke_dasharray(self):
        return self.css('stroke-dasharray')

    @stroke_dasharray.setter
    def stroke_dasharray(self, val):
        self.css({'stroke-dasharray': val})

    @property
    def stroke_width(self):
        return self.css('stroke-width')

    @stroke_width.setter
    def stroke_width(self, val):
        self.css({'stroke-width': val})

    @property
    def fill(self):
        return self.css('fill')

    @fill.setter
    def fill(self, val):
        self.css({'fill': val})

    @property
    def fill_opacity(self):
        return self.css('fill-opacity')

    @fill_opacity.setter
    def fill_opacity(self, num):
        self.css({'fill-opacity': num})

    def to_dict(self, copy: bool = False):
        """Return the attributes mapping (a shallow copy when ``copy`` is True)."""
        if copy:
            return dict(self.attrs)
        return self.attrs

    def important(self, attrs: list = None):
        """Append ``!IMPORTANT`` to the given attributes (all of them when None)."""
        if attrs is None:
            # Fix: the original iterated `.items()` (key/value tuples) and then
            # indexed the dict with the tuple. Iterate a snapshot of the keys
            # instead, since values are reassigned during the loop.
            for k in list(self.attrs):
                self.attrs[k] = ('%s !IMPORTANT' % self.attrs[k])
        else:
            for k in attrs:
                setattr(self, k, ('%s !IMPORTANT' % getattr(self, k)))

    def to_class(self, class_name: str = None):
        """Build a virtual Style class carrying these attributes.

        Raises Exception when no class name is supplied or configured.
        """
        class_name = class_name or self.classname
        if class_name is None:
            raise Exception('Class Name must be defined to create virtual class')
        v_cls = type(class_name, (Style,), {'_attrs': self.attrs})
        return v_cls(None)

    def define_class(self, class_name: str, page):
        """Register these attributes as a custom class on the page's style."""
        v_cls = page.body.style.custom_class({'_attrs': self.attrs}, class_name)
        return v_cls

    def toStr(self) -> str:
        """Render a quoted CSS rule string for the configured class name."""
        if self.classname is None:
            raise Exception('Class Name must be defined to create virtual class')
        return ("'.%s {%s}'" % (self.classname, str(self)))
class TestHelloWorldSkill(AEATestCaseEmpty):
    """End-to-end check that an agent with the hello_world skill prints its greeting."""

    capture_log = True

    def test_hello_world(self):
        """Run the agent and verify the greeting shows up in its output."""
        self.generate_private_key()
        self.add_private_key()
        self.add_item('skill', 'fetchai/hello_world:0.1.5')

        agent_process = self.run_agent()
        assert self.is_running(agent_process), 'AEA not running within timeout!'

        expected_strings = ('Hello World!',)
        missing_strings = self.missing_from_output(agent_process, expected_strings)
        assert missing_strings == [], "Strings {} didn't appear in agent output.".format(missing_strings)
class OptionPlotoptionsArearangeMarkerStatesHover(Options):
    """Generated Highcharts option wrapper for the arearange
    `marker.states.hover` group.

    Each simple option is a getter/setter property pair over the underlying
    config; `animation` returns a nested option object. The original bare
    `def` pairs shadowed each other (the setter replaced the getter); the
    `@property` / `@<name>.setter` decorators restore the intended descriptor
    behaviour.
    """

    @property
    def animation(self) -> 'OptionPlotoptionsArearangeMarkerStatesHoverAnimation':
        return self._config_sub_data('animation', OptionPlotoptionsArearangeMarkerStatesHoverAnimation)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def fillColor(self):
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        return self._config_get(None)

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        return self._config_get(None)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def radiusPlus(self):
        return self._config_get(2)

    @radiusPlus.setter
    def radiusPlus(self, num: float):
        self._config(num, js_type=False)
class FirewallIPSet():
    """Tracks ipset objects for the firewall and applies them to backends."""

    def __init__(self, fw):
        self._fw = fw        # owning Firewall instance
        self._ipsets = {}    # name -> ipset configuration object

    def __repr__(self):
        return ('%s(%r)' % (self.__class__, self._ipsets))

    def cleanup(self):
        """Forget all tracked ipsets (in-memory only)."""
        self._ipsets.clear()

    def check_ipset(self, name):
        """Raise INVALID_IPSET if *name* is not a known ipset."""
        if (name not in self.get_ipsets()):
            raise FirewallError(errors.INVALID_IPSET, name)

    def query_ipset(self, name):
        """Return True if *name* is a known ipset."""
        return (name in self.get_ipsets())

    def get_ipsets(self):
        """Return all ipset names, sorted."""
        return sorted(self._ipsets.keys())

    def has_ipsets(self):
        return (len(self._ipsets) > 0)

    def get_ipset(self, name, applied=False):
        """Return the ipset object; with applied=True require it to be applied."""
        self.check_ipset(name)
        obj = self._ipsets[name]
        if applied:
            self.check_applied_obj(obj)
        return obj

    def omit_native_ipset(self):
        # With nftables enabled and no direct runtime rules, the native
        # ipset backend is redundant and can be skipped.
        if ((not self._fw.nftables_enabled) or self._fw.direct.has_runtime_configuration()):
            return False
        return True

    def backends(self):
        """Return the list of active set backends (nftables and/or ipset)."""
        backends = []
        if self._fw.nftables_enabled:
            backends.append(self._fw.nftables_backend)
        if (self._fw.ipset_enabled and (not self.omit_native_ipset())):
            backends.append(self._fw.ipset_backend)
        return backends

    def add_ipset(self, obj):
        """Register *obj*; its type must be supported by this firewall."""
        if (obj.type not in self._fw.ipset_supported_types):
            raise FirewallError(errors.INVALID_TYPE, ("'%s' is not supported by ipset." % obj.type))
        self._ipsets[obj.name] = obj

    def remove_ipset(self, name, keep=False):
        """Destroy the set in all backends (unless *keep*) and forget it."""
        obj = self._ipsets[name]
        if (obj.applied and (not keep)):
            try:
                for backend in self.backends():
                    backend.set_destroy(name)
            except Exception as msg:
                raise FirewallError(errors.COMMAND_FAILED, msg)
        else:
            log.debug1("Keeping ipset '%s' because of timeout option", name)
        del self._ipsets[name]

    def apply_ipset(self, name, backends=None):
        """Create/refresh the set named *name* in the given (or all) backends."""
        obj = self._ipsets[name]
        for backend in (backends if backends else self.backends()):
            if (backend.name == 'ipset'):
                # Destroy a pre-existing kernel set when it cannot be reused:
                # no usable timeout, or its type/options differ from ours.
                active = backend.set_get_active_terse()
                if ((name in active) and (('timeout' not in obj.options) or (obj.options['timeout'] == '0') or (obj.type != active[name][0]) or (rm_def_cr_opts(obj.options) != active[name][1]))):
                    try:
                        backend.set_destroy(name)
                    except Exception as msg:
                        raise FirewallError(errors.COMMAND_FAILED, msg)
            if self._fw._individual_calls:
                # One command per operation: create, optional flush, then adds.
                try:
                    backend.set_create(obj.name, obj.type, obj.options)
                except Exception as msg:
                    raise FirewallError(errors.COMMAND_FAILED, msg)
                else:
                    obj.applied = True
                    if (('timeout' in obj.options) and (obj.options['timeout'] != '0')):
                        # Entries of timeout sets expire on their own;
                        # do not reload them.
                        continue
                try:
                    backend.set_flush(obj.name)
                except Exception as msg:
                    raise FirewallError(errors.COMMAND_FAILED, msg)
                for entry in obj.entries:
                    try:
                        backend.set_add(obj.name, entry)
                    except Exception as msg:
                        raise FirewallError(errors.COMMAND_FAILED, msg)
            else:
                # Batched restore of type, options and entries in one call.
                try:
                    backend.set_restore(obj.name, obj.type, obj.entries, obj.options, None)
                except Exception as msg:
                    raise FirewallError(errors.COMMAND_FAILED, msg)
                else:
                    obj.applied = True

    def apply_ipsets(self, backends=None):
        """Apply every known ipset, resetting the applied flag first."""
        for name in self.get_ipsets():
            obj = self._ipsets[name]
            obj.applied = False
            log.debug1(("Applying ipset '%s'" % name))
            self.apply_ipset(name, backends)

    def flush(self):
        """Destroy all applied sets in the non-nftables backends."""
        for backend in self.backends():
            if (backend.name == 'nftables'):
                # nftables sets are removed together with the table.
                continue
            for ipset in self.get_ipsets():
                try:
                    self.check_applied(ipset)
                    backend.set_destroy(ipset)
                except FirewallError as msg:
                    # Sets that were never applied are simply skipped.
                    if (msg.code != errors.NOT_APPLIED):
                        raise msg

    def get_type(self, name, applied=True):
        """Return the ipset type string (e.g. 'hash:ip')."""
        return self.get_ipset(name, applied=applied).type

    def get_dimension(self, name):
        """Return the number of comma-separated parts of the set type."""
        return len(self.get_ipset(name, applied=True).type.split(','))

    def check_applied(self, name):
        obj = self.get_ipset(name)
        self.check_applied_obj(obj)

    def check_applied_obj(self, obj):
        """Raise NOT_APPLIED unless the set has been applied to a backend."""
        if (not obj.applied):
            raise FirewallError(errors.NOT_APPLIED, obj.name)

    def get_family(self, name, applied=True):
        """Return 'ipv6' when the set's family option is inet6, else 'ipv4'."""
        obj = self.get_ipset(name, applied=applied)
        if ('family' in obj.options):
            if (obj.options['family'] == 'inet6'):
                return 'ipv6'
        return 'ipv4'

    def add_entry(self, name, entry):
        """Validate and add a single entry to an applied set."""
        obj = self.get_ipset(name, applied=True)
        entry = normalize_ipset_entry(entry)
        IPSet.check_entry(entry, obj.options, obj.type)
        if (entry in obj.entries):
            raise FirewallError(errors.ALREADY_ENABLED, ("'%s' already is in '%s'" % (entry, name)))
        check_entry_overlaps_existing(entry, obj.entries)
        try:
            for backend in self.backends():
                backend.set_add(obj.name, entry)
        except Exception as msg:
            raise FirewallError(errors.COMMAND_FAILED, msg)
        else:
            # Entries of timeout sets expire in-kernel; only track permanent ones.
            if (('timeout' not in obj.options) or (obj.options['timeout'] == '0')):
                obj.entries.append(entry)

    def remove_entry(self, name, entry):
        """Remove a single entry from an applied set."""
        obj = self.get_ipset(name, applied=True)
        entry = normalize_ipset_entry(entry)
        if (entry not in obj.entries):
            raise FirewallError(errors.NOT_ENABLED, ("'%s' not in '%s'" % (entry, name)))
        try:
            for backend in self.backends():
                backend.set_delete(obj.name, entry)
        except Exception as msg:
            raise FirewallError(errors.COMMAND_FAILED, msg)
        else:
            if (('timeout' not in obj.options) or (obj.options['timeout'] == '0')):
                obj.entries.remove(entry)

    def query_entry(self, name, entry):
        """Return True if *entry* is tracked; timeout sets cannot be queried."""
        obj = self.get_ipset(name, applied=True)
        entry = normalize_ipset_entry(entry)
        if (('timeout' in obj.options) and (obj.options['timeout'] != '0')):
            # Timeout entries live only in the kernel, not in obj.entries.
            raise FirewallError(errors.IPSET_WITH_TIMEOUT, name)
        return (entry in obj.entries)

    def get_entries(self, name):
        """Return the tracked entry list of an applied set."""
        obj = self.get_ipset(name, applied=True)
        return obj.entries

    def set_entries(self, name, entries):
        """Replace all entries of an applied set after validation."""
        obj = self.get_ipset(name, applied=True)
        check_for_overlapping_entries(entries)
        for entry in entries:
            IPSet.check_entry(entry, obj.options, obj.type)
        if (('timeout' not in obj.options) or (obj.options['timeout'] == '0')):
            obj.entries = entries
        try:
            for backend in self.backends():
                backend.set_flush(obj.name)
        except Exception as msg:
            raise FirewallError(errors.COMMAND_FAILED, msg)
        else:
            obj.applied = True
        try:
            for backend in self.backends():
                if self._fw._individual_calls:
                    for entry in obj.entries:
                        backend.set_add(obj.name, entry)
                else:
                    backend.set_restore(obj.name, obj.type, obj.entries, obj.options, None)
        except Exception as msg:
            raise FirewallError(errors.COMMAND_FAILED, msg)
        else:
            obj.applied = True
        return
def parse_concessions_from_html(html, url=None):
    """Yield price-concession rows scraped from a concessions HTML page.

    Yields dicts with url, dates, drug, pack size, price in pence and an
    optional supplied VMPP id.  Pages listed in UNPARSEABLE_URLS are skipped
    (the generator produces nothing for them).
    """
    if (url in UNPARSEABLE_URLS):
        return
    doc = bs4.BeautifulSoup(html, 'html.parser')
    # Publication date and the applicable month are pulled from free text via
    # regexes; get_single_item presumably enforces exactly one match -- confirm.
    publish_date = get_single_item((parse_date(f"{match['day']} {match['month']} {match['year']}") for match in PUBLISH_DATE_RE.finditer(doc.text)))
    date = get_single_item((parse_date(f"1 {match['month']} {match['year']}") for match in MONTH_DATE_RE.finditer(doc.text)))
    # Locate the data table by finding the cell whose text is 'pack size'.
    table = get_single_item((td.find_parent('table') for td in doc.find_all('td') if ((td.text or '').strip().lower() == 'pack size')))
    rows = rows_from_table(table)
    rows = filter_rows(rows)
    headers = next(rows)
    assert ([s.lower() for s in headers][:3] == ['drug', 'pack size', 'price concession'])
    for row in rows:
        if (row == headers):
            # Presumably some pages repeat the header row mid-table; skip those.
            continue
        (yield {'url': url, 'date': date, 'publish_date': publish_date, 'drug': row[0], 'pack_size': row[1], 'price_pence': parse_price(row[2]), 'supplied_vmpp_id': (int(row[3]) if ((len(row) == 4) and row[3]) else None)})
class nvidia_arch(nn.Module):
    """Multi-input CNN: three single-channel image branches whose flattened
    features are concatenated and fed to an MLP head with 6 outputs.
    """

    def __str__(self):
        return 'this is nvidia architecture'

    def __init__(self):
        super().__init__()
        # Main branch: 24->36->48->64->64 channels, flattened at the end.
        self.conv1 = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=24, kernel_size=(5, 5), stride=(2, 2)), nn.ReLU(), nn.Conv2d(in_channels=24, out_channels=36, kernel_size=(5, 5), stride=(2, 2)), nn.ReLU(), nn.Conv2d(in_channels=36, out_channels=48, kernel_size=(5, 5), stride=(2, 2)), nn.ReLU(), nn.Conv2d(in_channels=48, out_channels=64, kernel_size=(3, 3), stride=(1, 1), padding=1), nn.ReLU(), nn.Conv2d(in_channels=64, out_channels=64, kernel_size=(3, 3), stride=(1, 1)), nn.Flatten())
        # Two smaller auxiliary branches.
        self.conv2 = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=6, kernel_size=(5, 5), stride=(2, 2)), nn.ReLU(), nn.Conv2d(in_channels=6, out_channels=12, kernel_size=(5, 5), stride=(2, 2)), nn.ReLU(), nn.Conv2d(in_channels=12, out_channels=18, kernel_size=(5, 5), stride=(2, 2)), nn.Flatten())
        self.conv3 = nn.Sequential(nn.Conv2d(in_channels=1, out_channels=6, kernel_size=(5, 5), stride=(2, 2)), nn.ReLU(), nn.Conv2d(in_channels=6, out_channels=12, kernel_size=(5, 5), stride=(2, 2)), nn.Flatten())
        # 1454 = combined flattened feature size of the three branches;
        # assumes fixed input resolutions -- TODO confirm against callers.
        self.linear = nn.Sequential(nn.Linear(in_features=1454, out_features=100, bias=False), nn.ReLU(), nn.Linear(in_features=100, out_features=50, bias=False), nn.ReLU(), nn.Linear(in_features=50, out_features=10, bias=False), nn.ReLU(), nn.Linear(in_features=10, out_features=6, bias=False))

    def forward(self, x1, x2, x3):
        """Run each input through its branch, concatenate, and classify."""
        x1 = self.conv1(x1)
        x2 = self.conv2(x2)
        x3 = self.conv3(x3)
        x = torch.concat((x1, x2, x3), dim=1)
        x = self.linear(x)
        return x
def gen_struct_typedefs(out):
    """Write LOCI typedef lines (one per standard class) plus boilerplate.

    :param out: writable file-like object receiving generated C source.
    """
    out.write('\n/* LOCI object typedefs */\n')
    for cls in of_g.standard_class_order:
        template = 'typedef of_object_t %(cls)s_t;\n'
        out.write((template % dict(cls=cls)))
    # NOTE(review): the block-comment delimiters in this literal look garbled
    # ('/' where '/*' and '*/' would be expected); verify against the
    # upstream generator before relying on the emitted C.
    out.write('\n/\n *\n * Additional of_object defines\n * These are needed for some static inline ops, so we put them here.\n *\n /\n\n/* Delete an OpenFlow object without reference to its type */\nextern void of_object_delete(of_object_t *obj);\n\n')
class OptionSeriesNetworkgraphSonificationTracksMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options for networkgraph sonification tracks.

    NOTE(review): duplicate plain getter/setter methods indicated stripped
    decorators; ``@property``/``@<name>.setter`` pairs have been restored.
    """

    @property
    def mapFunction(self):
        """Mapping function for the audio parameter (default None)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Point property mapped to the audio parameter (default None)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Maximum value for the audio parameter (default None)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Minimum value for the audio parameter (default None)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Range the mapping is computed within (default None)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class Dataset(audioset.Dataset):
    """WSJ speech dataset wrapper with optional frequency/time masking."""

    splits = {'train': ['train_si284'], 'validation': ['dev_93'], 'test': ['eval_92']}
    sample_rate = 16000

    def __init__(self, data_path, preprocessor, split, augment=False):
        # When augmenting, apply two frequency masks and two time masks.
        transforms = []
        if augment:
            transforms = [
                torchaudio.transforms.FrequencyMasking(27, iid_masks=True),
                torchaudio.transforms.FrequencyMasking(27, iid_masks=True),
                torchaudio.transforms.TimeMasking(100, iid_masks=True),
                torchaudio.transforms.TimeMasking(100, iid_masks=True),
            ]
        super().__init__(data_path, preprocessor, split, self.splits, augmentation=transforms, sample_rate=self.sample_rate)
def test_flush_mid_execution(accounts, tester):
    """Flushing mid-block clears any queued multicall requests."""

    def pending_count():
        # Total calls queued across all pending batches.
        return len([c for batch in brownie.multicall._pending_calls.values() for c in batch])

    addr = accounts[1]
    value = ['blahblah', addr, ['yesyesyes', '0x1234']]
    tester.setTuple(value)
    with brownie.multicall:
        tester.getTuple(addr)
        assert pending_count() == 1
        brownie.multicall.flush()
        assert pending_count() == 0
def check_dependencies(ctx):
    """Audit the exported requirements with safety, via pipx when needed."""
    allow_failure = False
    safety_cmd = which('safety')
    if not safety_cmd:
        pipx_cmd = which('pipx')
        if pipx_cmd:
            safety_cmd = f'{pipx_cmd} run safety'
        else:
            # Neither safety nor pipx is installed: still try the bare
            # command, but tolerate failure.
            safety_cmd = 'safety'
            allow_failure = True
    ctx.run(f'poetry export -f requirements.txt --without-hashes | {safety_cmd} check --stdin --full-report', title='Checking dependencies', pty=PTY, nofail=allow_failure)
def create_chunks(ffrom, fto, suffix_array_algorithm, use_mmap):
    """Build chunks via mmap when requested, falling back to the heap path."""
    if use_mmap:
        try:
            return create_chunks_mmap(ffrom, fto, suffix_array_algorithm)
        except (io.UnsupportedOperation, ValueError):
            # Presumably raised for file objects that cannot be mmapped;
            # fall back to the in-memory implementation.
            pass
    return create_chunks_heap(ffrom, fto, suffix_array_algorithm)
def export_cache_of_all_cacheable_nodes(start_frame=None, end_frame=None, handles=0, step=1, isolate=True, unload_refs=True, cache_format=ALEMBIC):
    """Export caches for every cacheable node in the scene.

    Thin wrapper: collects nodes via get_cacheable_nodes() and forwards all
    options unchanged to export_cache_of_nodes().
    """
    cacheable_nodes = get_cacheable_nodes()
    return export_cache_of_nodes(cacheable_nodes=cacheable_nodes, start_frame=start_frame, end_frame=end_frame, handles=handles, step=step, isolate=isolate, unload_refs=unload_refs, cache_format=cache_format)
def _generate_security_groups(config_key):
    """Build the per-environment security-group mapping for *config_key*."""
    raw_value = validate_key_values(CONFIG, 'base', config_key, default='')
    defaults = _convert_string_to_native(raw_value)
    LOG.debug('Default security group for %s is %s', config_key, defaults)
    entries = {env: [] for env in ENVS}
    if isinstance(defaults, list):
        # A flat list applies the same groups to every environment.
        cleaned = _remove_empty_entries(defaults)
        for env in entries:
            entries[env] = cleaned
    elif isinstance(defaults, dict):
        # A dict already maps environments to their own group lists.
        entries.update(defaults)
    LOG.debug('Generated security group: %s', entries)
    return entries
def extractLilacbluetranslationBlogspotCom(item):
    """Parse a feed item title into a release message, or reject it.

    Returns None for previews or unnumbered items, a release message when a
    known tag matches, and False otherwise.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    if (not (chp or vol)) or ('preview' in title.lower()):
        return None
    tag_rules = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tag_rules:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _icmp_types_fragments(protocol, type, code=None):
fragments = [{'match': {'left': {'payload': {'protocol': protocol, 'field': 'type'}}, 'op': '==', 'right': type}}]
if (code is not None):
fragments.append({'match': {'left': {'payload': {'protocol': protocol, 'field': 'code'}}, 'op': '==', 'right': code}})
return fragments |
class NonlinearVariationalSolver(OptionsManager, NonlinearVariationalSolverMixin):
    """Solves a nonlinear variational problem with a PETSc SNES.

    NOTE(review): the decorator lines on __init__ and solve were garbled in
    the extracted source ('.EventDecorator()' / '_ad_annotate_init'); they
    are restored here following the surrounding code's conventions --
    confirm against upstream.
    """

    DEFAULT_SNES_PARAMETERS = {'snes_type': 'newtonls', 'snes_linesearch_type': 'basic'}
    # Copy the shared defaults so mutating ours does not affect other users.
    DEFAULT_KSP_PARAMETERS = solving_utils.DEFAULT_KSP_PARAMETERS.copy()
    DEFAULT_KSP_PARAMETERS['ksp_rtol'] = 1e-05

    @PETSc.Log.EventDecorator()
    @NonlinearVariationalSolverMixin._ad_annotate_init
    def __init__(self, problem, *, solver_parameters=None, options_prefix=None, nullspace=None, transpose_nullspace=None, near_nullspace=None, appctx=None, pre_jacobian_callback=None, post_jacobian_callback=None, pre_function_callback=None, post_function_callback=None):
        """Build the SNES and solver context for *problem*.

        :param problem: a NonlinearVariationalProblem.
        :param solver_parameters: optional (possibly nested) PETSc options.
        :param nullspace/transpose_nullspace/near_nullspace: optional
            nullspace information attached to the operators.
        :param appctx: user context forwarded to the solver context.
        :param pre_/post_jacobian_callback, pre_/post_function_callback:
            hooks invoked around Jacobian/residual assembly.
        """
        assert isinstance(problem, NonlinearVariationalProblem)
        solver_parameters = flatten_parameters((solver_parameters or {}))
        solver_parameters = solving_utils.set_defaults(solver_parameters, problem.J.arguments(), ksp_defaults=self.DEFAULT_KSP_PARAMETERS, snes_defaults=self.DEFAULT_SNES_PARAMETERS)
        super().__init__(solver_parameters, options_prefix)
        mat_type = self.parameters.get('mat_type')
        pmat_type = self.parameters.get('pmat_type')
        ctx = solving_utils._SNESContext(problem, mat_type=mat_type, pmat_type=pmat_type, appctx=appctx, pre_jacobian_callback=pre_jacobian_callback, pre_function_callback=pre_function_callback, post_jacobian_callback=post_jacobian_callback, post_function_callback=post_function_callback, options_prefix=self.options_prefix)
        self.snes = PETSc.SNES().create(comm=problem.dm.comm)
        self._problem = problem
        self._ctx = ctx
        self._work = problem.u.dof_dset.layout_vec.duplicate()
        self.snes.setDM(problem.dm)
        ctx.set_function(self.snes)
        ctx.set_jacobian(self.snes)
        # Attach (transpose/near) nullspaces to the appropriate index sets.
        ctx.set_nullspace(nullspace, problem.J.arguments()[0].function_space()._ises, transpose=False, near=False)
        ctx.set_nullspace(transpose_nullspace, problem.J.arguments()[1].function_space()._ises, transpose=True, near=False)
        ctx.set_nullspace(near_nullspace, problem.J.arguments()[0].function_space()._ises, transpose=False, near=True)
        ctx._nullspace = nullspace
        ctx._nullspace_T = transpose_nullspace
        ctx._near_nullspace = near_nullspace
        dm = self.snes.getDM()
        with dmhooks.add_hooks(dm, self, appctx=self._ctx, save=False):
            self.set_from_options(self.snes)
        self._transfer_operators = ()
        self._setup = False

    def set_transfer_manager(self, manager):
        """Install the transfer manager used by the solver context."""
        self._ctx.transfer_manager = manager

    @PETSc.Log.EventDecorator()
    @NonlinearVariationalSolverMixin._ad_annotate_solve
    def solve(self, bounds=None):
        """Solve the problem, optionally with variable *bounds* (lower, upper)."""
        # Refresh the residual/Jacobian callbacks on the SNES.
        self._ctx.set_function(self.snes)
        self._ctx.set_jacobian(self.snes)
        problem = self._problem
        forms = (problem.F, problem.J, problem.Jp)
        coefficients = utils.unique(chain.from_iterable((form.coefficients() for form in forms if (form is not None))))
        solution_dm = self.snes.getDM()
        problem_dms = [V.dm for V in utils.unique((c.function_space() for c in coefficients)) if (V.dm != solution_dm)]
        problem_dms.append(solution_dm)
        # Enforce strong (Dirichlet) boundary conditions on the iterate.
        for dbc in problem.dirichlet_bcs():
            dbc.apply(problem.u)
        if (bounds is not None):
            (lower, upper) = bounds
            with lower.dat.vec_ro as lb, upper.dat.vec_ro as ub:
                self.snes.setVariableBounds(lb, ub)
        work = self._work
        with problem.u.dat.vec as u:
            u.copy(work)
            with ExitStack() as stack:
                # Push options and DM hooks for the duration of the solve.
                for ctx in chain([self.inserted_options()], [dmhooks.add_hooks(dm, self, appctx=self._ctx) for dm in problem_dms], self._transfer_operators):
                    stack.enter_context(ctx)
                self.snes.solve(None, work)
            work.copy(u)
        self._setup = True
        solving_utils.check_snes_convergence(self.snes)
        # Clean up PETSc objects on this mesh's communicator.
        comm = self._problem.u.function_space().mesh()._comm
        PETSc.garbage_cleanup(comm=comm)
class QtNodeState(lg.State):
    """Shared mutable state for the Qt GUI node."""

    # The running QApplication, if one has been created.
    app: typing.Optional[QtWidgets.QApplication] = None
    # Maps key codes to handler callables.
    keypress_callbacks: typing.Optional[typing.Dict[(int, typing.Callable[(..., typing.Any)])]] = None
    # Top-level window handle.
    view: typing.Optional[MainWindow] = None
    # Trial currently in progress, if any.
    current_trial: typing.Optional[Trial] = None
    current_trial_started: bool = False
    # Set to True to request application shutdown.
    shutdown: bool = False
@plac.annotations(
    cfg_file_path=plac.Annotation('path to the config file', 'option', 'c'),
    resume=plac.Annotation('resume crawling from last process', 'flag'),
    reset_elasticsearch=plac.Annotation('reset Elasticsearch indexes', 'flag'),
    reset_json=plac.Annotation('reset JSON files', 'flag'),
    reset_mysql=plac.Annotation('reset MySQL database', 'flag'),
    reset_postgresql=plac.Annotation('reset Postgresql database', 'flag'),
    reset_all=plac.Annotation('combines all reset options', 'flag'),
    no_confirm=plac.Annotation('skip confirm dialogs', 'flag'))
def cli(cfg_file_path, resume, reset_elasticsearch, reset_mysql, reset_postgresql, reset_json, reset_all, no_confirm):
    """Command line entry point: normalise flags/path and launch the crawler.

    NOTE(review): the decorator name was garbled in the extracted source
    (a bare keyword tuple, which is a SyntaxError); restored as
    plac.annotations per the plac convention.
    """
    if reset_all:
        # --reset-all implies every individual reset flag.
        reset_elasticsearch = True
        reset_json = True
        reset_mysql = True
        reset_postgresql = True
    if cfg_file_path and (not cfg_file_path.endswith(os.path.sep)):
        # The launcher expects a directory path ending with a separator.
        cfg_file_path += os.path.sep
    NewsPleaseLauncher(cfg_file_path, resume, reset_elasticsearch, reset_json, reset_mysql, reset_postgresql, no_confirm)
class flow_add(flow_mod):
    """OpenFlow version 6 FLOW_MOD message with command ADD (type 14).

    Generated-style message class: holds header fields, a match and an
    instruction list, and knows how to pack/unpack itself.
    """

    version = 6
    type = 14
    _command = 0

    def __init__(self, xid=None, cookie=None, cookie_mask=None, table_id=None, idle_timeout=None, hard_timeout=None, priority=None, buffer_id=None, out_port=None, out_group=None, flags=None, importance=None, match=None, instructions=None):
        """Initialise fields; unspecified values fall back to zero/defaults."""
        self.xid = xid
        self.cookie = cookie if cookie is not None else 0
        self.cookie_mask = cookie_mask if cookie_mask is not None else 0
        self.table_id = table_id if table_id is not None else 0
        self.idle_timeout = idle_timeout if idle_timeout is not None else 0
        self.hard_timeout = hard_timeout if hard_timeout is not None else 0
        self.priority = priority if priority is not None else 0
        self.buffer_id = buffer_id if buffer_id is not None else 0
        self.out_port = out_port if out_port is not None else 0
        self.out_group = out_group if out_group is not None else 0
        self.flags = flags if flags is not None else 0
        self.importance = importance if importance is not None else 0
        self.match = match if match is not None else ofp.match()
        self.instructions = instructions if instructions is not None else []

    def pack(self):
        """Serialise to the wire format; the length field is backpatched."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # placeholder for total length
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!Q', self.cookie))
        packed.append(struct.pack('!Q', self.cookie_mask))
        packed.append(struct.pack('!B', self.table_id))
        packed.append(util.pack_fm_cmd(self._command))
        packed.append(struct.pack('!H', self.idle_timeout))
        packed.append(struct.pack('!H', self.hard_timeout))
        packed.append(struct.pack('!H', self.priority))
        packed.append(struct.pack('!L', self.buffer_id))
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack('!L', self.out_group))
        packed.append(struct.pack('!H', self.flags))
        packed.append(struct.pack('!H', self.importance))
        packed.append(self.match.pack())
        packed.append(loxi.generic_util.pack_list(self.instructions))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialise a flow_add; asserts version/type/command invariants.

        NOTE(review): @staticmethod restored -- ``unpack(reader)`` takes no
        self and the decorator was lost in extraction.
        """
        obj = flow_add()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 14)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict further reads to this message's byte range.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.cookie = reader.read('!Q')[0]
        obj.cookie_mask = reader.read('!Q')[0]
        obj.table_id = reader.read('!B')[0]
        __command = util.unpack_fm_cmd(reader)
        assert (__command == 0)
        obj.idle_timeout = reader.read('!H')[0]
        obj.hard_timeout = reader.read('!H')[0]
        obj.priority = reader.read('!H')[0]
        obj.buffer_id = reader.read('!L')[0]
        obj.out_port = util.unpack_port_no(reader)
        obj.out_group = reader.read('!L')[0]
        obj.flags = reader.read('!H')[0]
        obj.importance = reader.read('!H')[0]
        obj.match = ofp.match.unpack(reader)
        obj.instructions = loxi.generic_util.unpack_list(reader, ofp.instruction.instruction.unpack)
        return obj

    def __eq__(self, other):
        """Field-by-field equality with another flow_add."""
        if type(self) != type(other):
            return False
        fields = ('xid', 'cookie', 'cookie_mask', 'table_id', 'idle_timeout',
                  'hard_timeout', 'priority', 'buffer_id', 'out_port',
                  'out_group', 'flags', 'importance', 'match', 'instructions')
        return all(getattr(self, f) == getattr(other, f) for f in fields)

    def pretty_print(self, q):
        """Pretty-print all fields to the formatter *q*."""
        q.text('flow_add {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('cookie = ')
                q.text(('%#x' % self.cookie))
                q.text(',')
                q.breakable()
                q.text('cookie_mask = ')
                q.text(('%#x' % self.cookie_mask))
                q.text(',')
                q.breakable()
                q.text('table_id = ')
                q.text(('%#x' % self.table_id))
                q.text(',')
                q.breakable()
                q.text('idle_timeout = ')
                q.text(('%#x' % self.idle_timeout))
                q.text(',')
                q.breakable()
                q.text('hard_timeout = ')
                q.text(('%#x' % self.hard_timeout))
                q.text(',')
                q.breakable()
                q.text('priority = ')
                q.text(('%#x' % self.priority))
                q.text(',')
                q.breakable()
                q.text('buffer_id = ')
                q.text(('%#x' % self.buffer_id))
                q.text(',')
                q.breakable()
                q.text('out_port = ')
                q.text(util.pretty_port(self.out_port))
                q.text(',')
                q.breakable()
                q.text('out_group = ')
                q.text(('%#x' % self.out_group))
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPFF_SEND_FLOW_REM', 2: 'OFPFF_CHECK_OVERLAP', 4: 'OFPFF_RESET_COUNTS', 8: 'OFPFF_NO_PKT_COUNTS', 16: 'OFPFF_NO_BYT_COUNTS', 128: 'OFPFF_BSN_SEND_IDLE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('importance = ')
                q.text(('%#x' % self.importance))
                q.text(',')
                q.breakable()
                q.text('match = ')
                q.pp(self.match)
                q.text(',')
                q.breakable()
                q.text('instructions = ')
                q.pp(self.instructions)
                q.breakable()
        q.text('}')
class FPNRoiAlign(Module):
    """Module wrapper around the multi-level ROI-align op for FPN features."""

    def __init__(self, num_rois, pooled_size, sampling_ratio, spatial_scale, position_sensitive, continuous_coordinate, im_shape):
        super().__init__()
        # Build the underlying op once with all static parameters bound.
        self.op = multi_level_roi_align(num_rois, pooled_size, sampling_ratio, spatial_scale, position_sensitive, continuous_coordinate, im_shape)

    def forward(self, *args):
        """Apply ROI align; expects at least (features, rois) positionally."""
        assert len(args) >= 2
        features, rois = args[0], args[1]
        return self.op(features, rois)
class OptionSeriesScatterCluster(Options):
    """Marker-clustering options for scatter series (generated wrapper).

    NOTE(review): duplicate plain getter/setter methods indicated stripped
    decorators; ``@property``/``@<name>.setter`` pairs have been restored.
    """

    @property
    def allowOverlap(self):
        """Whether cluster markers may overlap (default True)."""
        return self._config_get(True)

    @allowOverlap.setter
    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self):
        """Cluster animation options (default {'duration': 500})."""
        return self._config_get({'duration': 500})

    @animation.setter
    def animation(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def dataLabels(self) -> 'OptionSeriesScatterClusterDatalabels':
        """Sub-configuration for cluster data labels."""
        return self._config_sub_data('dataLabels', OptionSeriesScatterClusterDatalabels)

    @property
    def drillToCluster(self):
        """Whether clicking a cluster zooms into it (default True)."""
        return self._config_get(True)

    @drillToCluster.setter
    def drillToCluster(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def enabled(self):
        """Whether clustering is enabled (default False)."""
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def events(self) -> 'OptionSeriesScatterClusterEvents':
        """Sub-configuration for cluster events."""
        return self._config_sub_data('events', OptionSeriesScatterClusterEvents)

    @property
    def layoutAlgorithm(self) -> 'OptionSeriesScatterClusterLayoutalgorithm':
        """Sub-configuration for the clustering layout algorithm."""
        return self._config_sub_data('layoutAlgorithm', OptionSeriesScatterClusterLayoutalgorithm)

    @property
    def marker(self) -> 'OptionSeriesScatterClusterMarker':
        """Sub-configuration for the cluster marker."""
        return self._config_sub_data('marker', OptionSeriesScatterClusterMarker)

    @property
    def minimumClusterSize(self):
        """Minimum points to form a cluster (default 2)."""
        return self._config_get(2)

    @minimumClusterSize.setter
    def minimumClusterSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def states(self) -> 'OptionSeriesScatterClusterStates':
        """Sub-configuration for cluster states."""
        return self._config_sub_data('states', OptionSeriesScatterClusterStates)

    @property
    def zones(self) -> 'OptionSeriesScatterClusterZones':
        """Sub-configuration for cluster zones."""
        return self._config_sub_data('zones', OptionSeriesScatterClusterZones)
@pytest.mark.usefixtures('use_tmpdir')
def test_that_workflows_with_errors_are_not_loaded():
    """A workflow whose job file has errors is warned about and not loaded.

    NOTE(review): decorator restored -- the extracted source had a bare
    ``.usefixtures(...)`` line, which is not valid syntax.
    """
    test_config_file_name = 'test.ert'
    Path('WFJOB').write_text('EXECUTABLE echo\n', encoding='utf-8')
    # 'WFJAB' is a deliberate typo so workflow loading fails.
    Path('wf').write_text('WFJAB hello world\n', encoding='utf-8')
    test_config_contents = dedent('\n NUM_REALIZATIONS 1\n JOBNAME JOOOOOB\n LOAD_WORKFLOW_JOB WFJOB\n LOAD_WORKFLOW wf\n ')
    with open(test_config_file_name, 'w', encoding='utf-8') as fh:
        fh.write(test_config_contents)
    with pytest.warns(ConfigWarning, match="Encountered the following error\\(s\\) while reading workflow 'wf'. It will not be loaded: .*WFJAB is not recognized"):
        ert_config = ErtConfig.from_file(test_config_file_name)
    assert ('wf' not in ert_config.workflows)
def test_create_speaker_required_fields_complete(db, client, jwt):
speaker = get_simple_custom_form_speaker(db)
data = json.dumps({'data': {'type': 'speaker', 'attributes': {'name': 'Areeb', 'mobile': '', 'speaking-experience': 'Speaking since birth', 'complex-field-values': {'m.night': 'shyamalan'}}, 'relationships': {'event': {'data': {'id': str(speaker.event_id), 'type': 'event'}}}}})
response = client.post('/v1/speakers', content_type='application/vnd.api+json', headers=jwt, data=data)
assert (response.status_code == 201)
speaker = Speaker.query.get(json.loads(response.data)['data']['id'])
assert (speaker.name == 'Areeb')
assert (speaker.mobile == '')
assert (speaker.complex_field_values is None) |
class TestNumericTraitOperators(BaseEvenniaTestCase):
    """Arithmetic and comparison operator support on numeric Traits."""

    def setUp(self):
        # Two simple numeric traits: Strength 8 and Attack 4.
        self.st = traits.Trait({'name': 'Strength', 'trait_type': 'trait', 'value': 8})
        self.at = traits.Trait({'name': 'Attack', 'trait_type': 'trait', 'value': 4})

    def tearDown(self):
        (self.st, self.at) = (None, None)

    def test_pos_shortcut(self):
        """Unary plus yields the trait's numeric value."""
        self.assertIn(type((+ self.st)), (float, int))
        self.assertEqual((+ self.st), self.st.value)
        self.assertEqual((+ self.st), 8)

    def test_add_traits(self):
        """Addition works trait+trait and trait+number in both orders."""
        self.assertEqual((self.st + self.at), 12)
        self.assertEqual((self.st + 1), 9)
        self.assertEqual((1 + self.st), 9)

    def test_sub_traits(self):
        """Subtraction works in both operand orders."""
        self.assertEqual((self.st - self.at), 4)
        self.assertEqual((self.st - 1), 7)
        self.assertEqual((10 - self.st), 2)

    def test_mul_traits(self):
        """Multiplication works in both operand orders."""
        self.assertEqual((self.st * self.at), 32)
        self.assertEqual((self.at * 4), 16)
        self.assertEqual((4 * self.at), 16)

    def test_floordiv(self):
        """Floor division works in both operand orders."""
        self.assertEqual((self.st // self.at), 2)
        self.assertEqual((self.st // 2), 4)
        self.assertEqual((18 // self.st), 2)

    def test_comparisons_traits(self):
        """Rich comparisons between two traits."""
        self.assertNotEqual(self.st, self.at)
        self.assertLess(self.at, self.st)
        self.assertLessEqual(self.at, self.st)
        self.assertGreater(self.st, self.at)
        self.assertGreaterEqual(self.st, self.at)

    def test_comparisons_numeric(self):
        """Rich comparisons between a trait and plain numbers, both orders."""
        self.assertEqual(self.st, 8)
        self.assertEqual(8, self.st)
        self.assertNotEqual(self.st, 0)
        self.assertNotEqual(0, self.st)
        self.assertLess(self.st, 10)
        self.assertLess(0, self.st)
        self.assertLessEqual(self.st, 8)
        self.assertLessEqual(8, self.st)
        self.assertLessEqual(self.st, 10)
        self.assertLessEqual(0, self.st)
        self.assertGreater(self.st, 0)
        self.assertGreater(10, self.st)
        self.assertGreaterEqual(self.st, 8)
        self.assertGreaterEqual(8, self.st)
        self.assertGreaterEqual(self.st, 0)
        self.assertGreaterEqual(10, self.st)
class OptionSeriesAreasplineDataEvents(Options):
    """Per-point event callbacks for areaspline series data.

    NOTE(review): duplicate plain getter/setter methods indicated stripped
    decorators; ``@property``/``@<name>.setter`` pairs have been restored.
    All callbacks default to None.
    """

    @property
    def click(self):
        return self._config_get(None)

    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drag(self):
        return self._config_get(None)

    @drag.setter
    def drag(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dragStart(self):
        return self._config_get(None)

    @dragStart.setter
    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drop(self):
        return self._config_get(None)

    @drop.setter
    def drop(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOut(self):
        return self._config_get(None)

    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOver(self):
        return self._config_get(None)

    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    @property
    def remove(self):
        return self._config_get(None)

    @remove.setter
    def remove(self, value: Any):
        self._config(value, js_type=False)

    @property
    def select(self):
        return self._config_get(None)

    @select.setter
    def select(self, value: Any):
        self._config(value, js_type=False)

    @property
    def unselect(self):
        return self._config_get(None)

    @unselect.setter
    def unselect(self, value: Any):
        self._config(value, js_type=False)

    @property
    def update(self):
        return self._config_get(None)

    @update.setter
    def update(self, value: Any):
        self._config(value, js_type=False)
class CausalButterFilterNodeConfig(lg.Config):
    """Configuration for a causal Butterworth filter node."""

    # Sampling rate; a negative value presumably means "unspecified,
    # estimate from the input" (see __post_init__) -- confirm with the node.
    sample_rate: float = (- 1.0)
    # Buffer length used when the sample rate is unspecified; must be > 0.
    initial_buffer_len: int = 10
    highpass_cutoff: Optional[float] = 0.01
    highpass_order: Optional[int] = 5
    lowpass_cutoffs: List = field(default_factory=list)
    invert_input: bool = True
    # If True the node forwards samples without filtering.
    pass_through: bool = False

    def __post_init__(self):
        """Validate that buffering is possible when the rate is unknown."""
        super().__post_init__()
        if ((self.sample_rate < 0) and (self.initial_buffer_len <= 0)):
            raise ValueError('initial_buffer_len needs to be greater than 0 when sample rate unspecified')
class OptionPlotoptionsSunburstSonificationDefaultinstrumentoptionsMappingHighpass(Options):
    """Highpass-filter mapping options for sunburst sonification (generated wrapper)."""

    def frequency(self) -> 'OptionPlotoptionsSunburstSonificationDefaultinstrumentoptionsMappingHighpassFrequency':
        # Sub-configuration for the highpass cutoff frequency mapping.
        # NOTE(review): sibling wrapper classes expose accessors as
        # properties; a stripped @property decorator here is plausible --
        # confirm against the upstream generator.
        return self._config_sub_data('frequency', OptionPlotoptionsSunburstSonificationDefaultinstrumentoptionsMappingHighpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsSunburstSonificationDefaultinstrumentoptionsMappingHighpassResonance':
        # Sub-configuration for the highpass resonance mapping.
        return self._config_sub_data('resonance', OptionPlotoptionsSunburstSonificationDefaultinstrumentoptionsMappingHighpassResonance)
@pytest.mark.usefixtures('mock_roxutils', 'polygon_set_in_roxvalues')
def test_load_polygons_from_roxar():
    """Polygons loaded from a Roxar project match the expected dataframe.

    NOTE(review): decorator restored -- the extracted source had a bare
    ``.usefixtures(...)`` line, which is not valid syntax.
    """
    pol = xtgeo.polygons_from_roxar('project', 'Name', 'Category')
    assert_frame_equal(pol.dataframe, pd.DataFrame([[1.0, 2.0, 44.0, 0], [1.1, 2.1, 45.0, 0], [1.2, 2.2, 46.0, 0], [1.3, 2.3, 47.0, 0], [1.4, 2.4, 48.0, 0], [5.0, 8.0, 64.0, 1], [5.1, 8.1, 65.0, 1], [5.2, 8.2, 66.0, 1], [5.3, 8.3, 67.0, 1], [5.4, 8.4, 68.0, 1]], columns=['X_UTME', 'Y_UTMN', 'Z_TVDSS', 'POLY_ID']))
class EmailVerify(commands.Cog):
__version__ = '0.1.1'
__author__ = 'flare'
def format_help_for_context(self, ctx):
    """Append the cog's version and author to the base help text."""
    pre_processed = super().format_help_for_context(ctx)
    return f'''{pre_processed}
Cog Version: {self.__version__}
Cog Author: {self.__author__}'''
def __init__(self, bot):
    self.bot = bot
    # NOTE(review): the Config identifier literal was lost in extraction
    # (``identifier=`` has no value); restore the original numeric ID
    # before this can run.
    self.config = Config.get_conf(self, identifier=, force_registration=True)
    # Per-guild settings: mail credentials, verified addresses, logging,
    # role to grant and an optional required email domain.
    self.config.register_guild(email=None, password=None, verified_emails=[], log_channel=None, role=None, domain=None)
    # Per-member state: pending code and verification status.
    self.config.register_member(code=None, verified=False, email=None)
# NOTE(review): the three lines below were decorators whose names were lost
# in extraction (presumably @commands.command()-style and a guild-only
# check); they cannot be reconstructed from this view -- confirm upstream.
()
()
_only()
async def unverify(self, ctx, *, user: discord.Member):
    """Admin command: clear a member's verified status and stored email."""
    data = (await self.config.member(user).all())
    if (not data['verified']):
        return (await ctx.send("This user isn't verified."))
    # Remove the e-mail from the guild-wide verified list so it can be reused.
    async with self.config.guild(ctx.guild).verified_emails() as emails:
        if (data['email'] in emails):
            emails.remove(data['email'])
    (await self.config.member(user).code.set(None))
    (await self.config.member(user).verified.set(False))
    (await self.config.member(user).email.set(None))
    (await ctx.send('User has been un-verified.'))
()
_only()
async def verify(self, ctx):
(name='email')
async def verify_email(self, ctx, email: str):
with contextlib.suppress():
(await ctx.message.delete())
if ((await self.config.guild(ctx.guild).role()) is None):
(await ctx.send('The server owner must setup a role for this verification method to work.'))
return
if (not (await self.config.guild(ctx.guild).email())):
(await ctx.send('The server owner must setup an email for this verification method to work.'))
return
domain = (await self.config.guild(ctx.guild).domain())
if ((domain is not None) and (not email.endswith(domain))):
(await ctx.send('Your email does not match the domain this server is setup to use.'))
return
if (await self.config.member(ctx.author).verified()):
(await ctx.send('You have already been verified.'))
log_channel = (await self.config.guild(ctx.guild).log_channel())
if (log_channel is not None):
log_channel_obj = ctx.guild.get_channel(log_channel)
if (log_channel_obj is not None):
(await log_channel_obj.send(f'{ctx.author} with the email {email} has tried to verify with an email that has already been verified.'))
return
emails = (await self.config.guild(ctx.guild).verified_emails())
if (email in emails):
(await ctx.send('This email has already been verified.'))
return
code = secrets.token_hex(4)
try:
(await self.send_email(ctx, email, code))
except Exception as e:
(await ctx.send('There was an error sending the email.'))
log.error('Error in email sending.', exc_info=e)
return
(await self.config.member(ctx.author).code.set(code))
(await self.config.member(ctx.author).email.set(email))
(await ctx.send(f'''You will recieve an email shortly. Once it arrives you may complete your verification process by typing:
`{ctx.clean_prefix}verify code <code from email>`'''))
(name='code')
async def verify_code(self, ctx, code):
usercode = (await self.config.member(ctx.author).code())
verified = (await self.config.member(ctx.author).verified())
if verified:
(await ctx.send('You are already verified.'))
return
if (usercode is None):
(await ctx.send("You haven't started the verification process yet. Get started by invoking the .verify email command."))
return
if (code == usercode):
verified = (await self.config.member(ctx.author).verified.set(True))
email = (await self.config.member(ctx.author).email())
async with self.config.guild(ctx.guild).verified_emails() as emails:
emails.append(email)
role = (await self.config.guild(ctx.guild).role())
if (role is not None):
role_obj = ctx.guild.get_role(role)
if (role_obj is not None):
roles = [role_obj]
(await ctx.author.add_roles(*roles, reason=f'Automatically verified - Email: {email}'))
(await ctx.send('Your account has been verified!'))
log_channel = (await self.config.guild(ctx.guild).log_channel())
if (log_channel is not None):
log_channel_obj = ctx.guild.get_channel(log_channel)
if (log_channel_obj is not None):
(await log_channel_obj.send(f'{ctx.author} with the email {email} has verified their account.'))
return
(await ctx.send('This server has not set up a role for verification.'))
else:
(await ctx.send("That code doesn't match the one sent via the email. Try again or request a new code."))
_owner()
()
_only()
async def verifyset(self, ctx):
(name='instructions')
async def verifyset_instructions(self, ctx):
(await ctx.send(f'''To get started, visit on the email you wish to use.
Click on the 'Security' tab and then click on App Passwords and generate one.
Copy the generated code and paste it into the command below.
`{ctx.clean_prefix}verifyset email <email> <app password>`
NOTE: This leaves your account secure as it bypasses 2FA. Use at your own risk'''))
(name='logchannel')
async def verifyset_logchannel(self, ctx, channel: discord.TextChannel):
(await self.config.guild(ctx.guild).log_channel.set(channel.id))
(await ctx.tick())
(name='role')
async def verifyset_role(self, ctx, role: discord.Role):
(await self.config.guild(ctx.guild).role.set(role.id))
(await ctx.tick())
(name='email')
async def verifyset_email(self, ctx, email: str, password: str):
(await self.config.guild(ctx.guild).email.set(email))
(await self.config.guild(ctx.guild).password.set(password))
(await ctx.tick())
(name='domain')
async def verifyset_domain(self, ctx, *, domain: str=None):
(await self.config.guild(ctx.guild).domain.set(domain))
(await ctx.tick())
async def send_email(self, ctx, email, code):
message = EmailMessage()
message['From'] = (await self.config.guild(ctx.guild).email())
message['To'] = email
message['Subject'] = f'Discord Verification for {ctx.guild}'
contents = f'Verification Code for {ctx.guild}: {code}'
message.set_content(contents)
(await aiosmtplib.send(message, recipients=[email], hostname='smtp.gmail.com', port=465, username=(await self.config.guild(ctx.guild).email()), password=(await self.config.guild(ctx.guild).password()), use_tls=True))
()
()
async def profile(self, ctx, user: discord.Member):
embed = discord.Embed(color=user.color, title=f'Profile for {user}')
verif = (await self.config.member(user).verified())
email = (await self.config.member(user).email())
embed.add_field(name='Verified', value=str(verif))
if (not verif):
(await ctx.send(embed=embed))
return
emaill = (email if (email is not None) else 'None')
embed.add_field(name='Email', value=emaill)
(await ctx.send(embed=embed)) |
(frozen=True)
class Entry():
name: str
folder: Optional[str] = ''
username: Optional[str] = ''
_property
def hashed(self) -> str:
m = sha1()
m.update(self.name.encode())
if self.folder:
m.update(self.folder.encode())
if self.username:
m.update(self.username.encode())
return m.hexdigest() |
class BaseMessage(ABC):
    """Base class for protobuf-style messages with wire-format (de)serialization.

    Subclasses declare fields via annotations; ``__init_subclass__`` turns the
    annotations into field descriptors indexed by number and by name.

    NOTE(review): ``read_from``, ``loads`` and ``_init_embedded_descriptor``
    take ``cls`` but carry no visible ``@classmethod`` decorator -- the
    decorators appear to have been stripped from this copy.
    """
    __slots__ = ()
    # Field descriptors keyed by wire field number -> (attribute name, descriptor).
    __PROTOBUF_FIELDS_BY_NUMBER__: ClassVar[Dict[(int, Tuple[(str, _FieldDescriptor)])]]
    # Field descriptors keyed by attribute name.
    __PROTOBUF_FIELDS_BY_NAME__: ClassVar[Dict[(str, _FieldDescriptor)]]
    # How to skip an unknown field's payload, per wire type.
    __PROTOBUF_SKIP__: ClassVar[Dict[(WireType, Skip)]] = {WireType.VARINT: skip_varint, WireType.I64: skip_fixed_64, WireType.LEN: skip_bytes, WireType.I32: skip_fixed_32, WireType.SGROUP: skip_no_operation, WireType.EGROUP: skip_no_operation}

    def __init_subclass__(cls) -> None:
        """Collect field descriptors from the subclass's type annotations."""
        cls.__PROTOBUF_FIELDS_BY_NUMBER__ = {}
        cls.__PROTOBUF_FIELDS_BY_NAME__ = {}
        type_hints: Dict[(str, Any)] = get_annotations(cls, eval_str=True)
        for (name, hint) in type_hints.items():
            descriptor = _FieldDescriptor.from_attribute(cls, hint)
            if (descriptor is not None):
                cls.__PROTOBUF_FIELDS_BY_NUMBER__[descriptor.number] = (name, descriptor)
                cls.__PROTOBUF_FIELDS_BY_NAME__[name] = descriptor
                one_of = descriptor.one_of
                if (one_of is not None):
                    # Register this field with its oneof group.
                    one_of._add_field(descriptor.number, name)

    def read_from(cls, io: IO[bytes]) -> Self:
        """Parse one message from a binary stream, reading until EOF."""
        values: Dict[(str, Any)] = {}
        while True:
            try:
                tag = Tag.read_from(io)
            except EOFError:
                break
            try:
                (name, descriptor) = cls.__PROTOBUF_FIELDS_BY_NUMBER__[tag.field_number]
            except KeyError:
                # Unknown field number: skip its payload per wire type.
                cls.__PROTOBUF_SKIP__[tag.wire_type](io)
            else:
                # Repeated/merged fields accumulate onto any prior value.
                values[name] = descriptor.accumulate(values.get(name), descriptor.read(io, tag.wire_type))
                one_of = descriptor.one_of
                if (one_of is not None):
                    # Last-seen member of a oneof wins; drop the others.
                    one_of._keep_values(values, descriptor.number)
        return cls(**values)

    def loads(cls, buffer: bytes) -> Self:
        """Parse a message from an in-memory bytes buffer."""
        return cls.read_from(BytesIO(buffer))

    def write_to(self, io: IO[bytes]) -> None:
        """Serialize all declared fields to a binary stream."""
        for (_, (name, descriptor)) in self.__PROTOBUF_FIELDS_BY_NUMBER__.items():
            descriptor.write(getattr(self, name), io)

    def __bytes__(self) -> bytes:
        return to_bytes(BaseMessage.write_to, self)

    def dumps(self) -> bytes:
        """Serialize the message to bytes."""
        return bytes(self)

    def __setattr__(self, name: str, value: Any) -> None:
        super().__setattr__(name, value)
        descriptor = self.__PROTOBUF_FIELDS_BY_NAME__[name]
        one_of = descriptor.one_of
        if (one_of is not None):
            # Setting a oneof member clears the group's other members.
            one_of._keep_attribute(self, descriptor.number)

    def _init_embedded_descriptor(cls) -> RecordDescriptor[Self]:
        """Build the descriptor used when this message is embedded in another."""
        accumulate = AccumulateMessages(cls)
        return RecordDescriptor(wire_type=WireType.LEN, write=WriteLengthDelimited(cls.write_to), read=ReadStrictlyTyped(ReadLengthDelimited(ReadCallback(cls.read_from)), WireType.LEN), accumulate=accumulate, merge=MergeMessages(accumulate))
class SignalR():
    """Tiny blocking SignalR client: connect, wait briefly, return the payload."""

    def __init__(self, url):
        self.url = url

    def update_res(self, msg):
        """Hub callback: remember the latest non-empty message."""
        if msg != {}:
            self.res = msg

    def get_value(self, hub, method):
        """Subscribe to ``method`` on ``hub``, wait 3s, return what arrived."""
        self.res = {}
        with Session() as http_session:
            conn = Connection(self.url, http_session)
            hub_proxy = conn.register_hub(hub)
            hub_proxy.client.on(method, self.update_res)
            conn.start()
            conn.wait(3)
            conn.close()
        return self.res
class TLSinTLSStream(NetworkStream):
    """Sync network stream layering a second TLS session inside an existing one.

    Uses in-memory BIOs so the inner TLS records are carried as payload of the
    outer TLS socket instead of wrapping the socket directly.
    """
    # Maximum plaintext payload of a single TLS record.
    TLS_RECORD_SIZE = 16384

    def __init__(self, sock: socket.socket, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str]=None, timeout: typing.Optional[float]=None):
        self._sock = sock
        # Memory BIOs shuttle ciphertext between the SSL object and the socket.
        self._incoming = ssl.MemoryBIO()
        self._outgoing = ssl.MemoryBIO()
        self.ssl_obj = ssl_context.wrap_bio(incoming=self._incoming, outgoing=self._outgoing, server_hostname=server_hostname)
        self._sock.settimeout(timeout)
        # Complete the inner handshake before the stream is usable.
        self._perform_io(self.ssl_obj.do_handshake)

    def _perform_io(self, func: typing.Callable[(..., typing.Any)]) -> typing.Any:
        """Run one SSL operation, pumping BIO ciphertext over the socket.

        Repeats ``func`` until it completes without SSLWantRead/WriteError,
        flushing outgoing ciphertext after every attempt and feeding received
        bytes (or EOF) back in when the SSL layer wants more input.
        """
        ret = None
        while True:
            errno = None
            try:
                ret = func()
            except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e:
                errno = e.errno
            # Always flush whatever ciphertext the SSL object produced.
            self._sock.sendall(self._outgoing.read())
            if (errno == ssl.SSL_ERROR_WANT_READ):
                buf = self._sock.recv(self.TLS_RECORD_SIZE)
                if buf:
                    self._incoming.write(buf)
                else:
                    # Peer closed: signal EOF to the SSL layer.
                    self._incoming.write_eof()
            if (errno is None):
                return ret

    def read(self, max_bytes: int, timeout: typing.Optional[float]=None) -> bytes:
        """Read up to ``max_bytes`` of plaintext from the inner TLS session."""
        exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError}
        with map_exceptions(exc_map):
            self._sock.settimeout(timeout)
            return typing.cast(bytes, self._perform_io(partial(self.ssl_obj.read, max_bytes)))

    def write(self, buffer: bytes, timeout: typing.Optional[float]=None) -> None:
        """Write all of ``buffer`` through the inner TLS session."""
        exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError}
        with map_exceptions(exc_map):
            self._sock.settimeout(timeout)
            while buffer:
                # SSL writes may be partial; loop until everything is sent.
                nsent = self._perform_io(partial(self.ssl_obj.write, buffer))
                buffer = buffer[nsent:]

    def close(self) -> None:
        self._sock.close()

    def start_tls(self, ssl_context: ssl.SSLContext, server_hostname: typing.Optional[str]=None, timeout: typing.Optional[float]=None) -> 'NetworkStream':
        # A third nested TLS layer is not supported.
        raise NotImplementedError()

    def get_extra_info(self, info: str) -> typing.Any:
        """Expose connection details by name; returns None for unknown keys."""
        if (info == 'ssl_object'):
            return self.ssl_obj
        if (info == 'client_addr'):
            return self._sock.getsockname()
        if (info == 'server_addr'):
            return self._sock.getpeername()
        if (info == 'socket'):
            return self._sock
        if (info == 'is_readable'):
            return is_socket_readable(self._sock)
        return None
def run_async_emulated(simulations: Dict[(str, td.Simulation)], **kwargs) -> BatchData:
    """Emulate an async batch run: run each simulation locally and package
    the results into a ``BatchDataTest`` with synthetic task ids/paths.

    Extra keyword arguments are accepted for signature compatibility and
    ignored.
    """
    ids = {}
    paths = {}
    data = {}
    for index, (task_name, sim) in enumerate(simulations.items()):
        ids[task_name] = f'task_id={index}'
        paths[task_name] = 'NONE'
        data[task_name] = run_emulated(sim)
    return BatchDataTest(task_paths=paths, task_ids=ids, sim_data=data)
class SettingsWindow(Gtk.Window):
    """Modal settings dialog: threads, GTK theme, dark mode, output pattern.

    Widgets come from a Glade file; every handler writes straight into the
    ``config`` object, and the config is saved when the window is destroyed.
    """

    def __init__(self, config, parent_window=None):
        Gtk.Window.__init__(self, title=('%s - %s' % (_('Settings'), APPLICATION_NAME)), icon=GdkPixbuf.Pixbuf.new_from_file(data_helpers.find_data_path('images/icon_64.png')), default_width=450, modal=True)
        self.set_transient_for(parent_window)
        self.set_position(Gtk.WindowPosition.CENTER_ON_PARENT)
        self._config = config
        self._builder = Gtk.Builder()
        self._builder.set_translation_domain(APPLICATION_ID)
        self._builder.add_from_file(data_helpers.find_data_path('ui/settings-window.glade'))
        # Connect glade-declared signals to this instance's _on_* methods.
        self._builder.connect_signals(self)
        content = self._builder.get_object('settings_window_content')
        self.add(content)
        self._prepare_theme_combobox()
        self.update_interface()
        # Persist the configuration whenever the window is closed.
        self.connect('destroy', self._on_settings_windows_destroyed)
        if (os.name == 'nt'):
            # Glade translations need a manual pass on Windows.
            gtk_builder_translation_hack(self._builder)

    def destroy(self, *args):
        Gtk.Window.destroy(self)

    def update_interface(self):
        """Sync all widgets from the current configuration values."""
        threads_adjustment = self._builder.get_object('threads_adjustment')
        threads_adjustment.set_value(self._config.getint('optimization', 'threads'))
        # Only select the current theme if it is actually installed.
        if (gtk_themes_helpers.get_gtk_theme_name() in gtk_themes_helpers.list_gtk_themes()):
            theme_combobox = self._builder.get_object('theme_combobox')
            theme_combobox.set_active(gtk_themes_helpers.list_gtk_themes().index(gtk_themes_helpers.get_gtk_theme_name()))
        prefer_dark_theme_switch = self._builder.get_object('prefer_dark_theme_switch')
        prefer_dark_theme_switch.set_state(self._config.getboolean('interface', 'gtk-application-prefer-dark-theme'))
        output_pattern_radiobuttons = {'next-to-file': self._builder.get_object('output_pattern_next_to_file_radiobutton'), 'subfolder': self._builder.get_object('output_pattern_subfolder_radiobutton'), 'custom': self._builder.get_object('output_pattern_custom_radiobutton')}
        output_pattern_radiobuttons[self._config.get('output', 'active-pattern')].set_active(True)
        output_pattern_custom_entry = self._builder.get_object('output_pattern_custom_entry')
        output_pattern_custom_entry.set_text(self._config.get('output', 'custom-pattern'))
        # The custom-pattern entry is editable only when "custom" is active.
        output_pattern_custom_entry.set_sensitive((self._config.get('output', 'active-pattern') == 'custom'))

    def _prepare_theme_combobox(self):
        # Populate the combobox with every installed GTK theme.
        theme_combobox = self._builder.get_object('theme_combobox')
        for theme in gtk_themes_helpers.list_gtk_themes():
            theme_combobox.append_text(theme)

    def _on_threads_adjustment_value_changed(self, adjustment):
        self._config.set('optimization', 'threads', str(int(adjustment.get_value())))

    def _on_theme_combobox_changed(self, widget):
        # Combobox rows were appended in list_gtk_themes() order, so the
        # active index maps straight back to a theme name.
        gtk_theme = gtk_themes_helpers.list_gtk_themes()[widget.get_active()]
        self._config.set('interface', 'gtk-theme-name', gtk_theme)
        gtk_themes_helpers.set_gtk_theme_name(gtk_theme)

    def _on_prefer_dark_theme_switch_state_setted(self, widget, state):
        self._config.set('interface', 'gtk-application-prefer-dark-theme', str(state))
        gtk_themes_helpers.set_gtk_application_prefer_dark_theme(state)

    def _on_output_pattern_next_to_file_radiobutton_toggled(self, widget):
        # "toggled" fires for both activation and deactivation; only react
        # when this button becomes active.
        if (not widget.get_active()):
            return
        self._config.set('output', 'active-pattern', 'next-to-file')
        output_pattern_custom_entry = self._builder.get_object('output_pattern_custom_entry')
        output_pattern_custom_entry.set_sensitive(False)

    def _on_output_pattern_subfolder_radiobutton_toggled(self, widget):
        if (not widget.get_active()):
            return
        self._config.set('output', 'active-pattern', 'subfolder')
        output_pattern_custom_entry = self._builder.get_object('output_pattern_custom_entry')
        output_pattern_custom_entry.set_sensitive(False)

    def _on_output_pattern_custom_radiobutton_toggled(self, widget):
        if (not widget.get_active()):
            return
        self._config.set('output', 'active-pattern', 'custom')
        output_pattern_custom_entry = self._builder.get_object('output_pattern_custom_entry')
        output_pattern_custom_entry.set_sensitive(True)

    def _on_output_pattern_custom_entry_changed(self, widget):
        self._config.set('output', 'custom-pattern', widget.get_text())

    def _on_settings_windows_destroyed(self, widget):
        save_config(self._config)
def run_aea_subprocess(*args, cwd: str='.') -> Tuple[(subprocess.Popen, str, str)]:
    """Run the ``aea`` CLI as a subprocess and capture its output.

    :param args: CLI arguments passed after ``python -m aea.cli``.
    :param cwd: working directory for the child process.
    :return: tuple of (finished Popen object, decoded stdout, decoded stderr).
    """
    result = subprocess.Popen(
        [sys.executable, '-m', 'aea.cli', *args],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        cwd=cwd,
    )
    # communicate() both drains the pipes and waits for the process to exit.
    # Calling wait() first (as the previous version did) can deadlock when the
    # child fills a pipe buffer -- see the warning in the subprocess docs.
    (stdout, stderr) = result.communicate()
    return (result, stdout.decode('utf-8'), stderr.decode('utf-8'))
def test_param_jitter():
    """Jittered draws differ from the base params; zero width pins a value."""
    params = [1, 1]
    jitterer = param_jitter(params, [0.5, 0.5])
    # Take three draws: every jittered value must differ from its parameter.
    for _, jits in zip(range(3), jitterer):
        assert all(jit != base for jit, base in zip(jits, params))
    jitterer = param_jitter(params, [0, 0.5])
    # Zero jitter width keeps the first value fixed.
    # NOTE(review): compares against params[1]; params[0] may have been
    # intended (both are 1 here, so the assertion is equivalent).
    for _, jits in zip(range(3), jitterer):
        assert jits[0] == params[1]
class DeltaRLastRounds():
    """Factory for an attack selection function targeting the last rounds.

    Instantiating this class does NOT return a ``DeltaRLastRounds`` object:
    ``__new__`` returns the wrapped selection function directly.
    """
    def __new__(cls, guesses=_np.arange(64, dtype='uint8'), words=None, ciphertext_tag='ciphertext', key_tag='key'):
        # NOTE(review): the `guesses` default is a mutable numpy array created
        # once at class-definition time; safe only if callers never mutate it.
        return _decorated_selection_function(_AttackSelectionFunctionWrapped, _delta_last_rounds, expected_key_function=_last_key, words=words, guesses=guesses, target_tag=ciphertext_tag, key_tag=key_tag)
def _NetworkInterfacesAddrs(withMask=False):
    """Yield ``(interface_name, IPAddr)`` pairs for all local interfaces.

    Pure-ctypes binding of libc ``getifaddrs``.  On first call it builds the
    real implementation (or an error-raising stub if ctypes/libc support is
    missing) and installs it on ``DNSUtils``, so the setup runs only once.
    """
    try:
        from ctypes import Structure, Union, POINTER, pointer, get_errno, cast, c_ushort, c_byte, c_void_p, c_char_p, c_uint, c_int, c_uint16, c_uint32
        import ctypes.util
        import ctypes

        # ctypes mirrors of the C sockaddr family:
        class struct_sockaddr(Structure):
            _fields_ = [('sa_family', c_ushort), ('sa_data', (c_byte * 14))]

        class struct_sockaddr_in(Structure):
            _fields_ = [('sin_family', c_ushort), ('sin_port', c_uint16), ('sin_addr', (c_byte * 4))]

        class struct_sockaddr_in6(Structure):
            _fields_ = [('sin6_family', c_ushort), ('sin6_port', c_uint16), ('sin6_flowinfo', c_uint32), ('sin6_addr', (c_byte * 16)), ('sin6_scope_id', c_uint32)]

        class union_ifa_ifu(Union):
            _fields_ = [('ifu_broadaddr', POINTER(struct_sockaddr)), ('ifu_dstaddr', POINTER(struct_sockaddr))]

        class struct_ifaddrs(Structure):
            # Self-referential (ifa_next), so fields are assigned after the
            # class object exists.
            pass
        struct_ifaddrs._fields_ = [('ifa_next', POINTER(struct_ifaddrs)), ('ifa_name', c_char_p), ('ifa_flags', c_uint), ('ifa_addr', POINTER(struct_sockaddr)), ('ifa_netmask', POINTER(struct_sockaddr)), ('ifa_ifu', union_ifa_ifu), ('ifa_data', c_void_p)]

        libc = ctypes.CDLL((ctypes.util.find_library('c') or ''))
        if (not libc.getifaddrs):
            raise NotImplementedError('libc.getifaddrs is not available')

        def ifap_iter(ifap):
            # Walk the linked list returned by getifaddrs().
            ifa = ifap.contents
            while True:
                (yield ifa)
                if (not ifa.ifa_next):
                    break
                ifa = ifa.ifa_next.contents

        def getfamaddr(ifa, withMask=False):
            # Convert one ifaddrs entry to an IPAddr (optionally "addr/mask");
            # returns None for non-INET address families.
            sa = ifa.ifa_addr.contents
            fam = sa.sa_family
            if (fam == socket.AF_INET):
                sa = cast(pointer(sa), POINTER(struct_sockaddr_in)).contents
                addr = socket.inet_ntop(fam, sa.sin_addr)
                if withMask:
                    nm = ifa.ifa_netmask.contents
                    if ((nm is not None) and (nm.sa_family == socket.AF_INET)):
                        nm = cast(pointer(nm), POINTER(struct_sockaddr_in)).contents
                        addr += ('/' + socket.inet_ntop(fam, nm.sin_addr))
                return IPAddr(addr)
            elif (fam == socket.AF_INET6):
                sa = cast(pointer(sa), POINTER(struct_sockaddr_in6)).contents
                addr = socket.inet_ntop(fam, sa.sin6_addr)
                if withMask:
                    nm = ifa.ifa_netmask.contents
                    if ((nm is not None) and (nm.sa_family == socket.AF_INET6)):
                        nm = cast(pointer(nm), POINTER(struct_sockaddr_in6)).contents
                        addr += ('/' + socket.inet_ntop(fam, nm.sin6_addr))
                return IPAddr(addr)
            return None

        def _NetworkInterfacesAddrs(withMask=False):
            # Real implementation, installed below in place of this bootstrap.
            ifap = POINTER(struct_ifaddrs)()
            result = libc.getifaddrs(pointer(ifap))
            if (result != 0):
                raise OSError(get_errno())
            del result
            try:
                for ifa in ifap_iter(ifap):
                    name = ifa.ifa_name.decode('UTF-8')
                    addr = getfamaddr(ifa, withMask)
                    if addr:
                        (yield (name, addr))
            finally:
                # Always release the list allocated by getifaddrs().
                libc.freeifaddrs(ifap)
    except Exception as e:
        # ctypes/libc unavailable: install a stub that keeps raising.
        # NOTE(review): the stub takes no arguments, so the call with
        # `withMask` below raises TypeError rather than the stored error.
        _init_error = NotImplementedError(e)

        def _NetworkInterfacesAddrs():
            raise _init_error
    # Replace this bootstrap with the resolved implementation.
    DNSUtils._NetworkInterfacesAddrs = staticmethod(_NetworkInterfacesAddrs)
    return _NetworkInterfacesAddrs(withMask)
class WallCaliper(_WallMountedBox):
    """Wall-mounted holder for a caliper (two side profiles plus a front wall)."""

    def __init__(self) -> None:
        super().__init__()
        self.buildArgParser(h=100)
        self.argparser.add_argument('--width', action='store', type=float, default=18.0, help='width of the long end')
        self.argparser.add_argument('--height', action='store', type=float, default=6.0, help='height of the body')

    def side(self, move=None):
        """Draw one side profile; the ``b`` edge attaches to the wall mount."""
        t = self.thickness
        h = self.h
        hc = self.height
        # Total part width: wall-edge spacing + body height + margins.
        tw = ((self.edges['b'].spacing() + hc) + (8 * t))
        if self.move(tw, h, move, True):
            return
        self.moveTo(self.edges['b'].startwidth())
        # Profile outline as (length, (angle, radius), ...) segments.
        self.polyline(((5 * t) + hc), (90, (2 * t)), ((h / 2) - (2 * t)), (180, (1.5 * t)), (0.25 * h), (- 90), hc, (- 90), ((0.75 * h) - (2 * t)), (90, (2 * t)), (2 * t), 90)
        self.edges['b'](h)
        self.move(tw, h, move)

    def render(self):
        """Generate all parts: two sides and a flanged wall with mount holes."""
        self.generateWallEdges()
        t = self.thickness
        h = self.h
        self.side(move='right')
        self.side(move='right')
        w = self.width
        self.flangedWall(w, h, flanges=[0, (2 * t), 0, (2 * t)], edges='eeee', r=(2 * t), callback=[(lambda : (self.wallHolesAt((1.5 * t), 0, h, 90), self.wallHolesAt((w + (2.5 * t)), 0, h, 90)))])
def _filename_to_module_name(name: str) -> str:
    """Convert the path of an existing ``.py``/``.pyc`` file to a dotted
    module name (path separators become dots; the suffix is stripped).

    Raises ValueError when ``name`` is not an existing .py/.pyc file.
    """
    lowered = name.lower()
    if not (os.path.isfile(name) and (lowered.endswith('.py') or lowered.endswith('.pyc'))):
        raise ValueError('Expected a .py file, got {}'.format(name))
    # Make absolute paths relative to the current directory first.
    if os.path.isabs(name):
        name = os.path.relpath(name, os.getcwd())
    suffix_len = 4 if lowered.endswith('.pyc') else 3
    return name[:-suffix_len].replace(os.path.sep, '.')
def get_git_tarball(repo_url: str, commit: str) -> str:
    """Build a tarball download URL for ``commit`` of a hosted git repository.

    Supports github.com, gitlab.* and bitbucket.org hosts; raises ValueError
    for anything else.

    NOTE(review): the three ``return f'`` lines below are truncated -- the
    host-specific archive URL templates (built from ``owner``, ``repo`` and
    ``commit``) have been lost from this copy and must be restored before
    this function can run.
    """
    url = canonical_url(repo_url)
    # Expect exactly "/owner/repo" in the URL path.
    path = url.path.split('/')[1:]
    assert (len(path) == 2)
    owner = path[0]
    if path[1].endswith('.git'):
        repo = path[1].replace('.git', '')
    else:
        repo = path[1]
    if (url.hostname == 'github.com'):
        return f'
    elif (url.hostname.split('.')[0] == 'gitlab'):
        return f'
    elif (url.hostname == 'bitbucket.org'):
        return f'
    else:
        raise ValueError(f"Don't know how to get tarball for {repo_url}")
def load_osci_general_reports_to_bq(date: datetime.datetime):
    """Load the OSCI general ranking report for ``date`` into BigQuery.

    Reads the ranking, keeps only the public-schema columns, converts the
    zero-based position to one-based, renames columns to the BigQuery
    schema, stamps the report date, and uploads the frame.
    """
    report = OSCIGeneralRanking(date=date)
    table = BigQueryOSCIGeneralRankingReport
    log.debug(f'Load {report.name} for {date:%Y-%m-%d} to {table.table_id}')
    frame = report.read()[PublicSchemas.osci_general_report.required]
    # Positions are stored zero-based; the published table is one-based.
    frame[table.Columns.position] += 1
    frame = frame.rename(columns=table.mapping)
    frame[table.Columns.date] = date
    return DataLake().big_query.load_dataframe(df=frame, table_id=table.table_id, schema=table.schema)
class getStatus_result():
    """Thrift result struct for ``getStatus`` (generated-style code).

    Holds the i32 ``success`` return value at field id 0.
    """
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    # NOTE(review): generated thrift modules assign __init__ later; it is
    # deliberately None at class-definition time.
    __init__ = None

    def isUnion():
        # Plain struct, not a thrift union.  (No `self`: generated code
        # calls this through the class.)
        return False

    def read(self, iprot):
        """Deserialize this struct from ``iprot``.

        Prefers the C ``fastproto`` decoder when the protocol and transport
        support it; otherwise falls back to the generic field loop.
        """
        if ((isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocol) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            self.checkRequired()
            return
        if ((isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocol) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            self.checkRequired()
            return
        # Generic slow path: walk fields until STOP, skipping unknown ones.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if (ftype == TType.STOP):
                break
            if (fid == 0):
                if (ftype == TType.I32):
                    self.success = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.checkRequired()

    def checkRequired(self):
        # No required fields on a result struct.
        return

    def write(self, oprot):
        """Serialize this struct to ``oprot`` (fastproto when available)."""
        if ((isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocol) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if ((isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocol) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        oprot.writeStructBegin('getStatus_result')
        # Only write `success` when set (thrift optional-field convention).
        if (self.success != None):
            oprot.writeFieldBegin('success', TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        L = []
        padding = (' ' * 4)
        value = pprint.pformat(self.success, indent=0)
        value = padding.join(value.splitlines(True))
        L.append((' success=%s' % value))
        return ('%s(\n%s)' % (self.__class__.__name__, ',\n'.join(L)))

    def __eq__(self, other):
        if (not isinstance(other, self.__class__)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return (not (self == other))

    # Defining __eq__ removes the default hash on Python 3; restore
    # identity hashing there.
    if (not six.PY2):
        __hash__ = object.__hash__
class TestChoiceFieldWithGroupedChoices(FieldValues):
    """Fixture values for a ChoiceField whose choices mix a named group
    ('Category') with a flat top-level pair ('good')."""
    # Accepted inputs mapped to their internal values.
    valid_inputs = {'poor': 'poor', 'medium': 'medium', 'good': 'good'}
    # Rejected inputs mapped to the expected error messages.
    invalid_inputs = {'awful': ['"awful" is not a valid choice.']}
    # Internal values mapped to their serialized output.
    outputs = {'good': 'good'}
    field = serializers.ChoiceField(choices=[('Category', (('poor', 'Poor quality'), ('medium', 'Medium quality'))), ('good', 'Good quality')])
class TestSpotifyChapter():
    """Integration tests for chapter endpoints (market-dependent behavior).

    NOTE(review): the bare ``.xfail(...)`` lines appear to be stripped
    ``@pytest.mark.xfail(...)`` decorators, and ``chapter_id`` /
    ``chapter_ids`` are module-level fixtures defined elsewhere in the file.
    """

    .xfail(reason='API inconsistencies.')
    def test_chapter_no_market_not_found(self, app_client):
        # Without a market parameter the chapter lookup is expected to 404.
        with pytest.raises(NotFound):
            app_client.chapter(chapter_id)

    def test_chapter_with_US_market_found(self, app_client):
        chapter = app_client.chapter(chapter_id, market='US')
        assert (chapter.id == chapter_id)
        # The API reports chapters with an "episode" URI type.
        assert (from_uri(chapter.uri)[0] == 'episode')

    def test_chapter_with_non_US_market_found(self, app_client):
        # Chapters are not available outside the US market.
        with pytest.raises(NotFound):
            app_client.chapter(chapter_id, market='FI')

    .xfail(reason='API inconsistencies.')
    def test_chapters_no_market_returns_none(self, app_client):
        chapters = app_client.chapters(chapter_ids)
        assert all(((c is None) for c in chapters))

    def test_chapters_US_market_found(self, app_client):
        chapters = app_client.chapters(chapter_ids, market='US')
        # Order of results matches the requested id order.
        assert (chapter_ids == [c.id for c in chapters])

    def test_chapters_non_US_market_returns_none(self, app_client):
        chapters = app_client.chapters(chapter_ids, market='FI')
        assert all(((c is None) for c in chapters))
class TransferManager(object):
class Cache(object):
def __init__(self, element):
self.embedding_element = get_embedding_dg_element(element)
self._dat_versions = {}
self._V_DG_mass = {}
self._DG_inv_mass = {}
self._V_approx_inv_mass = {}
self._V_inv_mass_ksp = {}
self._DG_work = {}
self._work_vec = {}
self._V_dof_weights = {}
def __init__(self, *, native_transfers=None, use_averaging=True):
self.native_transfers = (native_transfers or {})
self.use_averaging = use_averaging
self.caches = {}
def is_native(self, element):
if (element in self.native_transfers.keys()):
return True
if (isinstance(element.cell, ufl.TensorProductCell) and (len(element.sub_elements) > 0)):
return reduce(and_, map(self.is_native, element.sub_elements))
return (element.family() in native)
def _native_transfer(self, element, op):
try:
return self.native_transfers[element][op]
except KeyError:
if self.is_native(element):
ops = (firedrake.prolong, firedrake.restrict, firedrake.inject)
return self.native_transfers.setdefault(element, ops)[op]
return None
def cache(self, element):
try:
return self.caches[element]
except KeyError:
return self.caches.setdefault(element, TransferManager.Cache(element))
def V_dof_weights(self, V):
cache = self.cache(V.ufl_element())
key = V.dim()
try:
return cache._V_dof_weights[key]
except KeyError:
f = firedrake.Function(V)
firedrake.par_loop((('{[i, j]: 0 <= i < A.dofs and 0 <= j < %d}' % V.value_size), 'A[i, j] = A[i, j] + 1'), firedrake.dx, {'A': (f, firedrake.INC)})
with f.dat.vec_ro as fv:
return cache._V_dof_weights.setdefault(key, fv.copy())
def V_DG_mass(self, V, DG):
cache = self.cache(V.ufl_element())
key = V.dim()
try:
return cache._V_DG_mass[key]
except KeyError:
M = firedrake.assemble((firedrake.inner(firedrake.TrialFunction(V), firedrake.TestFunction(DG)) * firedrake.dx))
return cache._V_DG_mass.setdefault(key, M.petscmat)
def DG_inv_mass(self, DG):
cache = self.caches[DG.ufl_element()]
key = DG.dim()
try:
return cache._DG_inv_mass[key]
except KeyError:
M = firedrake.assemble(firedrake.Tensor((firedrake.inner(firedrake.TrialFunction(DG), firedrake.TestFunction(DG)) * firedrake.dx)).inv)
return cache._DG_inv_mass.setdefault(key, M.petscmat)
def V_approx_inv_mass(self, V, DG):
cache = self.cache(V.ufl_element())
key = V.dim()
try:
return cache._V_approx_inv_mass[key]
except KeyError:
a = firedrake.Tensor((firedrake.inner(firedrake.TrialFunction(V), firedrake.TestFunction(V)) * firedrake.dx))
b = firedrake.Tensor((firedrake.inner(firedrake.TrialFunction(DG), firedrake.TestFunction(V)) * firedrake.dx))
M = firedrake.assemble((a.inv * b))
return cache._V_approx_inv_mass.setdefault(key, M.petscmat)
def V_inv_mass_ksp(self, V):
cache = self.cache(V.ufl_element())
key = V.dim()
try:
return cache._V_inv_mass_ksp[key]
except KeyError:
M = firedrake.assemble((firedrake.inner(firedrake.TrialFunction(V), firedrake.TestFunction(V)) * firedrake.dx))
ksp = PETSc.KSP().create(comm=V._comm)
ksp.setOperators(M.petscmat)
ksp.setOptionsPrefix('{}_prolongation_mass_'.format(V.ufl_element()._short_name))
ksp.setType('preonly')
ksp.pc.setType('cholesky')
ksp.setFromOptions()
ksp.setUp()
return cache._V_inv_mass_ksp.setdefault(key, ksp)
def DG_work(self, V):
needs_dual = ufl.duals.is_dual(V)
cache = self.cache(V.ufl_element())
key = (V.dim(), needs_dual)
try:
return cache._DG_work[key]
except KeyError:
if needs_dual:
primal = self.DG_work(V.dual())
dual = primal.riesz_representation(riesz_map='l2')
return cache._DG_work.setdefault(key, dual)
DG = firedrake.FunctionSpace(V.mesh(), cache.embedding_element)
return cache._DG_work.setdefault(key, firedrake.Function(DG))
def work_vec(self, V):
cache = self.cache(V.ufl_element())
key = V.dim()
try:
return cache._work_vec[key]
except KeyError:
return cache._work_vec.setdefault(key, V.dof_dset.layout_vec.duplicate())
def requires_transfer(self, element, transfer_op, source, target):
key = (transfer_op, weakref.ref(source.dat), weakref.ref(target.dat))
dat_versions = (source.dat.dat_version, target.dat.dat_version)
try:
return (self.cache(element)._dat_versions[key] != dat_versions)
except KeyError:
return True
def cache_dat_versions(self, element, transfer_op, source, target):
key = (transfer_op, weakref.ref(source.dat), weakref.ref(target.dat))
dat_versions = (source.dat.dat_version, target.dat.dat_version)
self.cache(element)._dat_versions[key] = dat_versions
.EventDecorator()
def op(self, source, target, transfer_op):
Vs = source.function_space()
Vt = target.function_space()
source_element = Vs.ufl_element()
target_element = Vt.ufl_element()
if (not self.requires_transfer(source_element, transfer_op, source, target)):
return
if (self.is_native(source_element) and self.is_native(target_element)):
self._native_transfer(source_element, transfer_op)(source, target)
elif (type(source_element) is finat.ufl.MixedElement):
assert (type(target_element) is finat.ufl.MixedElement)
for (source_, target_) in zip(source.subfunctions, target.subfunctions):
self.op(source_, target_, transfer_op=transfer_op)
else:
dgsource = self.DG_work(Vs)
dgtarget = self.DG_work(Vt)
VDGs = dgsource.function_space()
VDGt = dgtarget.function_space()
dgwork = self.work_vec(VDGs)
with source.dat.vec_ro as sv, dgsource.dat.vec_wo as dgv:
self.V_DG_mass(Vs, VDGs).mult(sv, dgwork)
self.DG_inv_mass(VDGs).mult(dgwork, dgv)
self.op(dgsource, dgtarget, transfer_op)
with dgtarget.dat.vec_ro as dgv, target.dat.vec_wo as t:
if self.use_averaging:
self.V_approx_inv_mass(Vt, VDGt).mult(dgv, t)
t.pointwiseDivide(t, self.V_dof_weights(Vt))
else:
work = self.work_vec(Vt)
self.V_DG_mass(Vt, VDGt).multTranspose(dgv, work)
self.V_inv_mass_ksp(Vt).solve(work, t)
self.cache_dat_versions(source_element, transfer_op, source, target)
def prolong(self, uc, uf):
self.op(uc, uf, transfer_op=Op.PROLONG)
def inject(self, uf, uc):
self.op(uf, uc, transfer_op=Op.INJECT)
def restrict(self, source, target):
    """Restriction transfer from ``source`` into ``target``.

    Skips the transfer entirely when cached dat versions show nothing changed
    since the last identical restriction.
    """
    Vs_star = source.function_space()
    Vt_star = target.function_space()
    source_element = Vs_star.ufl_element()
    target_element = Vt_star.ufl_element()
    if (not self.requires_transfer(source_element, Op.RESTRICT, source, target)):
        return
    if (self.is_native(source_element) and self.is_native(target_element)):
        # Native elements: delegate to the dedicated restriction kernel.
        self._native_transfer(source_element, Op.RESTRICT)(source, target)
    elif (type(source_element) is finat.ufl.MixedElement):
        assert (type(target_element) is finat.ufl.MixedElement)
        # Mixed spaces restrict componentwise.
        for (source_, target_) in zip(source.subfunctions, target.subfunctions):
            self.restrict(source_, target_)
    else:
        # Work via DG intermediates; this mirrors the transpose of the
        # prolongation path in ``op`` (note multTranspose vs mult below).
        Vs = Vs_star.dual()
        Vt = Vt_star.dual()
        dgsource = self.DG_work(Vs_star)
        dgtarget = self.DG_work(Vt_star)
        VDGs = dgsource.function_space().dual()
        VDGt = dgtarget.function_space().dual()
        work = self.work_vec(Vs)
        dgwork = self.work_vec(VDGt)
        with source.dat.vec_ro as sv, dgsource.dat.vec_wo as dgv:
            if self.use_averaging:
                work.pointwiseDivide(sv, self.V_dof_weights(Vs))
                self.V_approx_inv_mass(Vs, VDGs).multTranspose(work, dgv)
            else:
                self.V_inv_mass_ksp(Vs).solve(sv, work)
                self.V_DG_mass(Vs, VDGs).mult(work, dgv)
        self.restrict(dgsource, dgtarget)
        with dgtarget.dat.vec_ro as dgv, target.dat.vec_wo as t:
            self.DG_inv_mass(VDGt).mult(dgv, dgwork)
            self.V_DG_mass(Vt, VDGt).multTranspose(dgwork, t)
    self.cache_dat_versions(source_element, Op.RESTRICT, source, target)
class SQLAlchemyTask(PythonCustomizedContainerTask[SQLAlchemyConfig], SQLTask[SQLAlchemyConfig]):
    """SQL task that executes a client-side SQLAlchemy query in a customized container."""

    _SQLALCHEMY_TASK_TYPE = 'sqlalchemy'

    def __init__(self, name: str, query_template: str, task_config: SQLAlchemyConfig, inputs: typing.Optional[typing.Dict[(str, typing.Type)]]=None, output_schema_type: typing.Optional[typing.Type[FlyteSchema]]=FlyteSchema, container_image: str=SQLAlchemyDefaultImages.default_image(), **kwargs):
        # Only declare a 'results' output when a schema type was requested.
        outputs = kwtypes(results=output_schema_type) if output_schema_type else None
        super().__init__(
            name=name,
            task_config=task_config,
            executor_type=SQLAlchemyTaskExecutor,
            task_type=self._SQLALCHEMY_TASK_TYPE,
            query_template=query_template,
            container_image=container_image,
            inputs=inputs,
            outputs=outputs,
            **kwargs,
        )

    def output_columns(self) -> typing.Optional[typing.List[str]]:
        """Column names of the 'results' schema, or None when no columns are declared."""
        columns = self.python_interface.outputs['results'].column_names()
        return columns or None

    def get_custom(self, settings: SerializationSettings) -> typing.Dict[(str, typing.Any)]:
        """Serialize the task's query and connection settings for the backend."""
        custom = {
            'query_template': self.query_template,
            'uri': self.task_config.uri,
            'connect_args': self.task_config.connect_args or {},
            'secret_connect_args': self.task_config.secret_connect_args_to_dicts(),
        }
        return custom
class OptionSeriesTimelineSonificationContexttracksMappingGapbetweennotes(Options):
    """Generated accessors for Highcharts sonification context-track mapping options.

    Each option is exposed as a getter/setter pair: the getter returns the
    configured value (None when unset) and the setter stores it in the config.

    NOTE(review): each getter/setter pair shares a name; the original most
    likely carried ``@property`` / ``@<name>.setter`` decorators that appear
    stripped from this source — confirm against the code generator output.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_sqs_send_batch(instrument, elasticapm_client, sqs_client_and_queue):
    """A send_message_batch call creates an SQS span and injects traceparent into the message attributes."""
    sqs, queue_url = sqs_client_and_queue
    elasticapm_client.begin_transaction('test')
    entries = [{'Id': 'foo', 'MessageBody': 'foo', 'MessageAttributes': {'string': {'StringValue': 'foo', 'DataType': 'String'}}}]
    sqs.send_message_batch(QueueUrl=queue_url, Entries=entries)
    transaction = elasticapm_client.end_transaction('test', 'test')
    span = elasticapm_client.events[constants.SPAN][0]
    assert span['name'] == 'SQS SEND_BATCH to myqueue'
    assert span['type'] == 'messaging'
    assert span['subtype'] == 'sqs'
    assert span['action'] == 'send_batch'
    destination = span['context']['destination']
    assert destination['cloud']['region'] == 'us-east-1'
    assert destination['service']['name'] == 'sqs'
    assert destination['service']['resource'] == 'sqs/myqueue'
    assert destination['service']['type'] == 'messaging'
    # The traceparent must be propagated on the message itself.
    received = sqs.receive_message(QueueUrl=queue_url, AttributeNames=['All'], MessageAttributeNames=['All'])
    first_message = received['Messages'][0]
    assert 'traceparent' in first_message['MessageAttributes']
    traceparent = first_message['MessageAttributes']['traceparent']['StringValue']
    assert transaction.trace_parent.trace_id in traceparent
    assert span['id'] in traceparent
def segment_none(cnarr):
    """Return a single segment spanning the whole of ``cnarr``."""
    columns = ['chromosome', 'start', 'end', 'log2', 'gene', 'probes']
    whole_span = (cnarr.chromosome.iat[0], cnarr.start.iat[0], cnarr.end.iat[-1], segment_mean(cnarr), '-', len(cnarr))
    frame = pd.DataFrame.from_records([whole_span], columns=columns)
    segarr = cnarr.as_dataframe(frame)
    segarr.sort_columns()
    return segarr
def test_headers_present_in_paginated_request(response_with_body_link):
    """Custom request headers must carry over onto the paginated follow-up request."""
    pagination_config = LinkPaginationConfiguration(source='body', path='links.next')
    original_request: SaaSRequestParams = SaaSRequestParams(method=HTTPMethod.GET, headers={'x-custom-header': 'abc'}, path='/customers', query_params={'page': 'abc'})
    strategy = LinkPaginationStrategy(pagination_config)
    next_request: Optional[SaaSRequestParams] = strategy.get_next_request(original_request, {}, response_with_body_link, 'customers')
    assert next_request.headers == original_request.headers
class PortModPacketIn(base_tests.SimpleDataPlane):
    """Toggle the OFPPC_NO_PACKET_IN bit via OFPT_PORT_MOD, verify it took, then restore it."""

    def runTest(self):
        logging.info('Running PortModPacketIn Test')
        # BUG FIX: dict.keys() returns a view with no .sort() on Python 3;
        # sorted() works on both Python 2 and 3.
        of_ports = sorted(config['port_map'].keys())
        logging.info('Sends Features Request and retrieve Port Configuration from reply')
        (hw_addr, port_config, advert) = port_config_get(self.controller, of_ports[0])
        self.assertTrue((port_config is not None), 'Did not get port config')
        logging.debug(((('No flood bit port ' + str(of_ports[0])) + ' is now ') + str((port_config & ofp.OFPPC_NO_PACKET_IN))))
        logging.info('Modify Port Configuration using Port Modification Message:OFPT_PORT_MOD')
        rv = port_config_set(self.controller, of_ports[0], (port_config ^ ofp.OFPPC_NO_PACKET_IN), ofp.OFPPC_NO_PACKET_IN)
        self.assertTrue((rv != (- 1)), 'Error sending port mod')
        do_barrier(self.controller)
        logging.info('Verify the change and then set it back')
        (hw_addr, port_config2, advert) = port_config_get(self.controller, of_ports[0])
        # BUG FIX: assert non-None before using port_config2 (original used it first).
        self.assertTrue((port_config2 is not None), 'Did not get port config2')
        logging.debug(((('No flood bit port ' + str(of_ports[0])) + ' is now ') + str((port_config2 & ofp.OFPPC_NO_PACKET_IN))))
        self.assertTrue(((port_config2 & ofp.OFPPC_NO_PACKET_IN) != (port_config & ofp.OFPPC_NO_PACKET_IN)), 'Bit change did not take')
        # Restore the original port configuration.
        rv = port_config_set(self.controller, of_ports[0], port_config, ofp.OFPPC_NO_PACKET_IN)
        self.assertTrue((rv != (- 1)), 'Error sending port mod')
        do_barrier(self.controller)
def add_evaluator_args(parser: argparse.ArgumentParser):
    """Register evaluator-related command-line options on ``parser``."""
    parser.add_argument('--quality-metrics', nargs='+', default=['BLEU'], help='Quality metrics')
    parser.add_argument('--latency-metrics', nargs='+', default=['LAAL', 'AL', 'AP', 'DAL', 'ATD'], help='Latency metrics')
    parser.add_argument('--continue-unfinished', action='store_true', default=False, help='Continue the experiments in output dir.')
    parser.add_argument('--computation-aware', action='store_true', default=False, help='Include computational latency.')
    # BUG FIX: help text was a copy-paste of --computation-aware's description.
    parser.add_argument('--no-use-ref-len', action='store_true', default=False, help='Do not use reference length in latency calculation.')
    parser.add_argument('--eval-latency-unit', type=str, default='word', choices=['word', 'char', 'spm'], help='Basic unit used for latency calculation, choose from words (detokenized) and characters.')
    parser.add_argument('--eval-latency-spm-model', type=str, default=None, help='Pass the spm model path if the eval_latency_unit is spm.')
    parser.add_argument('--remote-address', default='localhost', help='Address to client backend')
    # BUG FIX: parse the port as int so CLI-supplied values match the int default.
    parser.add_argument('--remote-port', type=int, default=12321, help='Port to client backend')
    parser.add_argument('--no-progress-bar', action='store_true', default=False, help='Do not use progress bar')
    parser.add_argument('--start-index', type=int, default=0, help='Start index for evaluation.')
    parser.add_argument('--end-index', type=int, default=(- 1), help='The last index for evaluation.')
    parser.add_argument('--output', type=str, default=None, help='Output directory. Required if using iterable dataloader.')
class TestArraySource(unittest.TestCase):
    """Tests for the ArraySource data source with 2D/3D scalar and vector arrays."""

    def setUp(self):
        """Create a fresh ArraySource for each test."""
        d = ArraySource()
        self.data = d

    def make_2d_data(self):
        """Return (scalar, vector) 2D test arrays, transposed into source layout."""
        s = numpy.array([[0, 1], [2, 3]], 'd')
        v = numpy.array([[[1, 1, 1], [1, 0, 0]], [[0, 1, 0], [0, 0, 1]]], 'd')
        tps = numpy.transpose
        (s, v) = (tps(s), tps(v, (1, 0, 2)))
        return (s, v)

    def make_3d_data(self):
        """Return (scalar, vector) 3D test arrays, transposed into source layout."""
        s = numpy.array([[[0, 1], [2, 3]], [[4, 5], [6, 7]]], 'd')
        v = numpy.array([[[[0, 0, 0], [1, 0, 0]], [[0, 1, 0], [1, 1, 0]]], [[[0, 0, 1], [1, 0, 1]], [[0, 1, 1], [1, 1, 1]]]], 'd')
        tps = numpy.transpose
        (s, v) = (tps(s), tps(v, (2, 1, 0, 3)))
        return (s, v)

    def test_input_validation(self):
        """Valid shapes are accepted; wrong shapes/dtypes/mismatched dimensions raise TraitError."""
        obj = self.data
        # 2D and 3D scalar data, and clearing to None, are all acceptable.
        obj.scalar_data = numpy.zeros((2, 2), 'd')
        obj.scalar_data = numpy.zeros((2, 2, 2), 'd')
        obj.scalar_data = None
        # Vector data must carry a trailing component axis of length 3.
        obj.vector_data = numpy.zeros((2, 2, 3), 'd')
        obj.vector_data = numpy.zeros((2, 2, 2, 3), 'd')
        obj.vector_data = None
        # Non-array and wrong-rank scalar inputs are rejected.
        self.assertRaises(TraitError, setattr, obj, 'scalar_data', [1, 2, 3])
        self.assertRaises(TraitError, setattr, obj, 'scalar_data', numpy.zeros((2, 2, 2, 3), 'd'))
        obj.scalar_data = None
        # Vector inputs with wrong structure or component count are rejected.
        self.assertRaises(TraitError, setattr, obj, 'vector_data', [[1, 2, 3]])
        self.assertRaises(TraitError, setattr, obj, 'vector_data', numpy.zeros((2, 2, 2, 1), 'd'))
        obj.vector_data = None
        # Once scalar data is set, vector data must match its grid shape (and vice versa).
        obj.scalar_data = numpy.zeros((2, 2), 'd')
        self.assertRaises(TraitError, setattr, obj, 'vector_data', numpy.zeros((4, 4, 3), 'd'))
        obj.vector_data = numpy.zeros((2, 2, 3), 'd')
        self.assertRaises(TraitError, setattr, obj, 'scalar_data', numpy.zeros((4, 3), 'i'))
        self.assertRaises(TraitError, setattr, obj, 'scalar_data', numpy.zeros((2, 2, 2), 'i'))
        # A matching-shape float array is still fine.
        obj.scalar_data = numpy.zeros((2, 2), 'f')
        obj.scalar_data = obj.vector_data = None

    def test_2d_data(self):
        """2D data flows through the pipeline with correct bounds, scalars and vectors."""
        d = self.data
        (sc, vec) = self.make_2d_data()
        d.origin = ((- 1), (- 1), 0)
        d.scalar_data = sc
        d.vector_data = vec
        d.start()
        o = Outline()
        d.add_child(o)
        o.start()
        self.assertEqual(tuple(o.actor.actor.bounds), ((- 1.0), 0.0, (- 1.0), 0.0, 0.0, 0.0))
        surf = Surface()
        d.add_child(surf)
        self.assertEqual(surf.running, True)
        # The mapper input should hold the transposed arrays.
        tps = numpy.transpose
        expect = [tps(sc), tps(vec, (1, 0, 2))]
        sc1 = surf.actor.mapper.input.point_data.scalars.to_array()
        self.assertEqual(numpy.allclose(sc1.flatten(), expect[0].flatten()), True)
        vec1 = surf.actor.mapper.input.point_data.vectors.to_array()
        self.assertEqual(numpy.allclose(vec1.flatten(), expect[1].flatten()), True)

    def test_3d_data(self):
        """3D data flows through the pipeline with correct bounds, scalars and vectors."""
        d = self.data
        (sc, vec) = self.make_3d_data()
        d.scalar_data = sc
        d.vector_data = vec
        d.start()
        o = Outline()
        d.add_child(o)
        o.start()
        self.assertEqual(tuple(o.actor.actor.bounds), (0, 1.0, 0.0, 1.0, 0.0, 1.0))
        surf = Surface()
        d.add_child(surf)
        self.assertEqual(surf.running, True)
        tps = numpy.transpose
        expect = [tps(sc), tps(vec, (2, 1, 0, 3))]
        sc2 = surf.actor.mapper.input.point_data.scalars.to_array()
        self.assertEqual(numpy.allclose(sc2.flatten(), expect[0].flatten()), True)
        vec2 = surf.actor.mapper.input.point_data.vectors.to_array()
        self.assertEqual(numpy.allclose(vec2.flatten(), expect[1].flatten()), True)

    def test_pickle(self):
        """A pickled/unpickled source restores spacing, origin, children and data."""
        d = self.data
        (sc, vec) = self.make_3d_data()
        d.scalar_data = sc
        d.vector_data = vec
        d.spacing = [1, 2, 3]
        d.origin = [4, 5, 6]
        d.start()
        o = Outline()
        d.add_child(o)
        o.start()
        surf = Surface()
        d.add_child(surf)
        data = pickle.dumps(d)
        del d, surf, o
        d = pickle.loads(data)
        d.start()
        # Children are restored under the first module manager.
        mm = d.children[0]
        (o, surf) = mm.children
        # Bounds reflect the non-default spacing and origin.
        self.assertEqual(tuple(o.actor.actor.bounds), (4.0, 5.0, 5.0, 7.0, 6.0, 9.0))
        self.assertEqual(surf.running, True)
        self.assertEqual(o.running, True)
        self.assertEqual(d.running, True)
        self.assertEqual(numpy.allclose(d.spacing, [1, 2, 3]), True)
        self.assertEqual(numpy.allclose(d.origin, [4, 5, 6]), True)
        tps = numpy.transpose
        expect = [tps(sc), tps(vec, (2, 1, 0, 3))]
        sc2 = surf.actor.mapper.input.point_data.scalars.to_array()
        self.assertEqual(numpy.allclose(sc2.flatten(), expect[0].flatten()), True)
        vec2 = surf.actor.mapper.input.point_data.vectors.to_array()
        self.assertEqual(numpy.allclose(vec2.flatten(), expect[1].flatten()), True)
def parse_arguments(args: list[str]) -> argparse.Namespace:
    """Parse Map Machine command-line arguments and return the populated namespace."""
    # Argparse would treat a value starting with "-" (e.g. negative
    # coordinates) as an option; prefix it with a space so it parses as a value.
    for flag in ('-c', '--coordinates', '-b', '--boundary-box'):
        if flag not in args:
            continue
        value_index: int = args.index(flag) + 1
        if args[value_index].startswith('-'):
            args[value_index] = ' ' + args[value_index]
        break
    parser: argparse.ArgumentParser = argparse.ArgumentParser(description='Map Machine. OpenStreetMap renderer with custom icon set')
    parser.add_argument('-v', '--version', action='version', version=f'Map Machine {__version__}')
    commands = parser.add_subparsers(dest='command')
    render = commands.add_parser('render', description='Render SVG map. Use --boundary-box to specify geo boundaries, --input to specify OSM XML or JSON input file, or --coordinates and --size to specify central point and resulting image size.', help='draw SVG map')
    add_render_arguments(render)
    add_map_arguments(render)
    tile = commands.add_parser('tile', description='Generate SVG and PNG 256 256 px tiles for slippy maps. You can use server command to run server in order to display generated tiles as a map (e.g. with Leaflet).', help='generate SVG and PNG tiles for slippy maps')
    add_tile_arguments(tile)
    add_map_arguments(tile)
    server = commands.add_parser('server', description='Run in order to display generated tiles as a map (e.g. with Leaflet).', help='run tile server')
    add_server_arguments(server)
    draw = commands.add_parser('draw', description='Draw map element separately.', help='draw OSM element: node, way, relation')
    add_draw_arguments(draw)
    mapcss = commands.add_parser('mapcss', description='Write directory with MapCSS file and generated Rontgen icons.', help='write MapCSS file')
    add_mapcss_arguments(mapcss)
    commands.add_parser('icons', description='Generate Rontgen icons as a grid and as separate SVG icons', help='draw Rontgen icons')
    commands.add_parser('taginfo', description='Generate JSON file for Taginfo project.', help='write Taginfo JSON file')
    return parser.parse_args(args[1:])
def calculate_masking_accuracy(ground_truth, generated_mask):
    """Return pixel accuracy of ``generated_mask`` against ``ground_truth`` (both image paths)."""
    gt_img = cv2.imread(ground_truth, cv2.IMREAD_GRAYSCALE)
    mask_img = cv2.imread(generated_mask, cv2.IMREAD_GRAYSCALE)
    print('INFO: Imported images...')
    # Signed int16 difference of the binarised images; any nonzero pixel is a
    # false positive or false negative.
    difference = binarise(gt_img).astype(np.int16) - binarise(mask_img).astype(np.int16)
    incorrect_px = np.sum(np.abs(difference))
    total_px = gt_img.shape[0] * gt_img.shape[1]
    print('INFO: Total number of pixels:', total_px)
    print('INFO: Number of incorrect pixels:', incorrect_px)
    return (total_px - incorrect_px) / total_px
def delete_disabled_builds(apps, apkcache, repodirs):
    """Remove APKs, signatures, source tarballs and icons belonging to disabled builds."""
    for appid, app in apps.items():
        for build in app.get('Builds', []):
            if not build.disable:
                continue
            apkfilename = common.get_release_filename(app, build)
            iconfilename = '%s.%s.png' % (appid, build.versionCode)
            for repodir in repodirs:
                candidates = [
                    os.path.join(repodir, apkfilename),
                    os.path.join(repodir, apkfilename + '.asc'),
                    os.path.join(repodir, apkfilename[:-4] + '_src.tar.gz'),
                ]
                # Icons exist once per screen density.
                candidates.extend(os.path.join(get_icon_dir(repodir, density), iconfilename) for density in all_screen_densities)
                for path in candidates:
                    if os.path.exists(path):
                        logging.info('Deleting disabled build output ' + path)
                        os.remove(path)
            # Drop the stale cache entry as well.
            if apkfilename in apkcache:
                del apkcache[apkfilename]
class UpdateThread(Thread):
    """Background thread that periodically prints bucket-checking progress until THREAD_EVENT is set."""

    def __init__(self, q, *args, **kwargs):
        self.q = q
        # Count seen at the previous tick, used to compute a per-second rate.
        self.checked_buckets_since_last_update = 0
        super().__init__(*args, **kwargs)

    def run(self):
        global THREAD_EVENT
        while not THREAD_EVENT.is_set():
            total_checked = len(self.q.checked_buckets)
            if total_checked > 1:
                rate = (total_checked - self.checked_buckets_since_last_update) / UPDATE_INTERVAL
                cprint('{0} buckets checked ({1:.0f}b/s), {2} buckets found'.format(total_checked, rate, FOUND_COUNT), 'cyan')
                self.checked_buckets_since_last_update = total_checked
            THREAD_EVENT.wait(UPDATE_INTERVAL)
def orgb_to_srgb(lcc: Vector) -> Vector:
    """Convert an L, C1, C2 vector back to sRGB by undoing the hue-angle compression."""
    orgb_theta = math.atan2(lcc[2], lcc[1])
    magnitude = abs(orgb_theta)
    if magnitude < math.pi / 2:
        rgb_theta = (2 / 3) * orgb_theta
    elif math.pi / 2 <= magnitude <= math.pi:
        rgb_theta = math.copysign(math.pi / 3 + (4 / 3) * (magnitude - math.pi / 2), orgb_theta)
    else:
        # Outside the handled range: leave the angle untouched.
        rgb_theta = orgb_theta
    return alg.dot(LC1C2_TO_RGB, rotate(lcc, rgb_theta - orgb_theta))
def _ask_user(lines: List[str], line: str, idx: int, old_string: str, type_: str, lines_num: int) -> str:
    """Show ``lines_num`` lines of context around ``idx``, highlight ``old_string`` in red, and prompt the user."""
    print('=' * 50)
    context_before = lines[idx - lines_num:idx]
    context_after = lines[idx + 1:idx + lines_num]
    print(''.join(context_before))
    # ANSI red around the matched string.
    highlighted = line.rstrip().replace(old_string, '\x1b[91m' + old_string + '\x1b[0m')
    print(highlighted)
    print(''.join(context_after))
    return input(f'Replace for component ({type_}, {old_string})? [y/N]: ')
def _serialize_header(oxx, mod, n, buf, offset):
    """Pack a 4-byte match-field header (plus any experimenter header) into ``buf``.

    ``oxx`` selects the field-descriptor lookup (``_<oxx>_field_desc``) on
    ``mod``; ``n`` is the field identifier.  Returns the number of bytes
    written for the fixed header portion.
    """
    try:
        get_desc = getattr(mod, (('_' + oxx) + '_field_desc'))
        desc = get_desc(n)
        value_len = desc.type.size
    except KeyError:
        # Unknown field: it contributes no payload length.
        value_len = 0
    (n, exp_hdr) = _make_exp_hdr(oxx, mod, n)
    exp_hdr_len = len(exp_hdr)
    pack_str = ('!I%ds' % (exp_hdr_len,))
    # Header layout: field id in the high bits (n << 9), the bit at position 8
    # (mask flag) deliberately cleared, total payload length in the low byte.
    msg_pack_into(pack_str, buf, offset, (((n << 9) | (0 << 8)) | (exp_hdr_len + value_len)), bytes(exp_hdr))
    return struct.calcsize(pack_str)
def _desc_ranges(attr, die):
    """Format a ranges attribute as a list of ``[start, end)`` address intervals."""
    di = die.cu.dwarfinfo
    # BUG FIX: the guard previously checked '_rnglists' while the cache is
    # stored under '_rangelists', so the range lists were re-parsed on every call.
    if (not hasattr(di, '_rangelists')):
        di._rangelists = di.range_lists()
    rangelist = di._rangelists.get_range_list_at_offset(attr.value, die.cu)
    base_ip = _get_cu_base(die.cu)
    lines = []
    addr_str_len = (die.cu.header.address_size * 2)
    for entry in rangelist:
        if isinstance(entry, RangeEntry):
            # Offsets are relative to the CU base unless the entry is absolute.
            lines.append((' [0x%0*x, 0x%0*x)' % (addr_str_len, ((0 if entry.is_absolute else base_ip) + entry.begin_offset), addr_str_len, ((0 if entry.is_absolute else base_ip) + entry.end_offset))))
        elif isinstance(entry, elftools.dwarf.ranges.BaseAddressEntry):
            # A base-address entry rebases all subsequent range entries.
            base_ip = entry.base_address
        else:
            raise NotImplementedError('Unknown object in a range list')
    prefix = (('indexed (0x%x) rangelist = ' % attr.raw_value) if (attr.form == 'DW_FORM_rnglistx') else '')
    return (('%s0x%08x\n' % (prefix, attr.value)) + '\n'.join(lines))
def set_resolution(width: int, height: int, dpi_scale: int, refresh_rate: int=None):
    """Change the display resolution, and optionally refresh rate and DPI scale (Windows only)."""
    if (platform.system() == 'Windows'):
        devmode = pywintypes.DEVMODEType()
        devmode.PelsWidth = width
        devmode.PelsHeight = height
        # Only fields flagged in .Fields are honoured by ChangeDisplaySettings.
        devmode.Fields = (win32con.DM_PELSWIDTH | win32con.DM_PELSHEIGHT)
        if refresh_rate:
            devmode.DisplayFrequency = refresh_rate
            devmode.Fields |= win32con.DM_DISPLAYFREQUENCY
        win32api.ChangeDisplaySettings(devmode, 0)
        if CHANGE_DPI_SCALE:
            # Best effort: ignore unknown dpi_scale values or missing entries.
            with suppress(KeyError, IndexError):
                ref_idx = get_recommended_dpi_idx()
                # DPI scale is expressed as an index relative to the recommended one.
                rel_idx = (dpi_vals_map[dpi_scale] - ref_idx)
                # NOTE(review): SystemParametersInfoA action 159 with a relative
                # index appears to apply the DPI scaling override — confirm
                # against the Win32 SystemParametersInfo documentation.
                ctypes.windll.user32.SystemParametersInfoA(159, rel_idx, 0, 1)
def main():
    """Ansible module entry point for the FortiOS ``certificate_local`` resource."""
    module_spec = schema_to_module_spec(versioned_schema)
    # The primary key attribute for this resource.
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'certificate_local': {'required': False, 'type': 'dict', 'default': None, 'no_log': True, 'options': {}}}
    # Mirror every schema option into the certificate_local suboptions and
    # mark the primary key as required.
    for attribute_name in module_spec['options']:
        fields['certificate_local']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['certificate_local']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            # Default to no logging when the option is absent.
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'certificate_local')
        (is_error, has_changed, result, diff) = fortios_certificate(module.params, fos, module.check_mode)
    else:
        # No persistent connection available: cannot talk to the device.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
class TestQueryWithSearchAfterScroll:
    """Paginated search using search_after, with and without a point-in-time."""

    # NOTE(review): both decorators were garbled in the source
    # ("('elasticsearch.Elasticsearch')" and ".asyncio"); restored to the
    # conventional mock.patch / pytest marks — confirm against project history.
    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_search_after_with_pit(self, es):
        es.options.return_value = es
        pit_op = 'open-point-in-time1'
        pit_id = 'abcdef'
        params = {'name': 'search-with-pit', 'index': 'test-index', 'operation-type': 'paginated-search', 'with-point-in-time-from': pit_op, 'pages': 'all', 'results-per-page': 2, 'body': {'sort': [{'timestamp': 'asc', 'tie_breaker_id': 'asc'}], 'query': {'match_all': {}}}}
        # NOTE(review): timestamp/sort numeric literals were missing from the
        # source; 1609780186 is a consistent placeholder — confirm originals.
        page_1 = {'pit_id': 'fedcba', 'took': 10, 'timed_out': False, 'hits': {'total': {'value': 3, 'relation': 'eq'}, 'hits': [{'_id': '1', 'timestamp': 1609780186, 'sort': [1609780186, '1']}, {'_id': '2', 'timestamp': 1609780186, 'sort': [1609780186, '2']}]}}
        page_2 = {'pit_id': 'fedcba', 'took': 10, 'timed_out': False, 'hits': {'total': {'value': '3', 'relation': 'eq'}, 'hits': [{'_id': '3', 'timestamp': 1609780186, 'sort': [1609780186, '3']}]}}
        es.perform_request = mock.AsyncMock(side_effect=[io.BytesIO(json.dumps(page_1).encode()), io.BytesIO(json.dumps(page_2).encode())])
        r = runner.Query()
        async with runner.CompositeContext():
            runner.CompositeContext.put(pit_op, pit_id)
            (await r(es, params))
            # The runner must track the renewed PIT id returned by the server.
            assert (runner.CompositeContext.get(pit_op) == 'fedcba')
        es.perform_request.assert_has_awaits([mock.call(method='GET', path='/_search', params={}, body={'query': {'match_all': {}}, 'sort': [{'timestamp': 'asc', 'tie_breaker_id': 'asc'}], 'size': 2, 'pit': {'id': 'abcdef', 'keep_alive': '1m'}}, headers=None), mock.call(method='GET', path='/_search', params={}, body={'query': {'match_all': {}}, 'sort': [{'timestamp': 'asc', 'tie_breaker_id': 'asc'}], 'size': 2, 'pit': {'id': 'fedcba', 'keep_alive': '1m'}, 'search_after': [1609780186, '2']}, headers=None)])

    @mock.patch('elasticsearch.Elasticsearch')
    @pytest.mark.asyncio
    async def test_search_after_without_pit(self, es):
        es.options.return_value = es
        params = {'name': 'search-with-pit', 'operation-type': 'paginated-search', 'index': 'test-index-1', 'pages': 'all', 'results-per-page': 2, 'body': {'sort': [{'timestamp': 'asc', 'tie_breaker_id': 'asc'}], 'query': {'match_all': {}}}}
        page_1 = {'took': 10, 'timed_out': False, 'hits': {'total': {'value': 3, 'relation': 'eq'}, 'hits': [{'_id': '1', 'timestamp': 1609780186, 'sort': [1609780186, '1']}, {'_id': '2', 'timestamp': 1609780186, 'sort': [1609780186, '2']}]}}
        page_2 = {'took': 10, 'timed_out': False, 'hits': {'total': {'value': 3, 'relation': 'eq'}, 'hits': [{'_id': '3', 'timestamp': 1609780186, 'sort': [1609780186, '3']}]}}
        es.perform_request = mock.AsyncMock(side_effect=[io.BytesIO(json.dumps(page_1).encode()), io.BytesIO(json.dumps(page_2).encode())])
        r = runner.Query()
        (await r(es, params))
        es.perform_request.assert_has_awaits([mock.call(method='GET', path='/test-index-1/_search', params={}, body={'query': {'match_all': {}}, 'sort': [{'timestamp': 'asc', 'tie_breaker_id': 'asc'}], 'size': 2}, headers=None), mock.call(method='GET', path='/test-index-1/_search', params={}, body={'query': {'match_all': {}}, 'sort': [{'timestamp': 'asc', 'tie_breaker_id': 'asc'}], 'size': 2, 'search_after': [1609780186, '2']}, headers=None)])
def _HKDF_mod_r(*, IKM: bytes, key_info: bytes=b'') -> int:
    """Derive a nonzero secret key from ``IKM`` by HKDF, reduced mod the BLS curve order.

    The salt is re-hashed and the derivation repeated until the reduction
    yields a nonzero value.
    """
    okm_length = 48
    salt = b'BLS-SIG-KEYGEN-SALT-'
    secret = 0
    while secret == 0:
        salt = SHA256(salt)
        okm = HKDF(salt=salt, IKM=IKM + b'\x00', L=okm_length, info=key_info + okm_length.to_bytes(2, 'big'))
        secret = int.from_bytes(okm, byteorder='big') % bls_curve_order
    return secret
class StrokeMode(BaseMode):
    """Mode in which the player strokes the cue stick using mouse movement."""
    name = Mode.stroke
    # Initial key state for this mode: stroking active, fine control off.
    keymap = {Action.fine_control: False, Action.stroke: True}

    def enter(self):
        """Switch to relative mouse input, show cue nodes, and register tasks/keys."""
        mouse.mode(MouseMode.RELATIVE)
        # Remember where we came from so exit can return there.
        Global.mode_mgr.mode_stroked_from = Global.mode_mgr.last_mode
        visual.cue.track_stroke()
        visual.cue.show_nodes(ignore=('cue_cseg',))
        self.register_keymap_event('f', Action.fine_control, True)
        self.register_keymap_event('f-up', Action.fine_control, False)
        self.register_keymap_event('s', Action.stroke, True)
        self.register_keymap_event('s-up', Action.stroke, False)
        tasks.add(self.stroke_task, 'stroke_task')
        tasks.add(self.shared_task, 'shared_task')

    def exit(self):
        """Tear down stroke tasks and persist the camera state for this mode."""
        tasks.remove('stroke_task')
        tasks.remove('shared_task')
        cam.store_state(Mode.stroke, overwrite=True)

    def stroke_task(self, task):
        """Per-frame task: advance the stroke while held; strike or bail when released."""
        if self.keymap[Action.stroke]:
            if (not Global.game.shot_constraints.can_shoot()):
                return task.cont
            if self.stroke_cue_stick():
                # Contact was made: commit the state and run the shot.
                visual.cue.set_object_state_as_render_state()
                multisystem.active.strike()
                Global.mode_mgr.change_mode(Mode.calculate)
                return
        else:
            # Stroke key released without contact: reset and return to last mode.
            visual.cue.get_node('cue_stick').setX(0)
            visual.cue.hide_nodes(ignore=('cue_cseg',))
            Global.mode_mgr.change_mode(Global.mode_mgr.last_mode)
            return
        return task.cont

    def stroke_cue_stick(self):
        """Move the cue stick along mouse dy; return True when it strikes the ball."""
        # Clamp mouse speed so the stroke cannot exceed the max shot speed.
        max_speed_mouse = (ani.max_stroke_speed / ani.stroke_sensitivity)
        max_backstroke = (multisystem.active.cue.specs.length * ani.backstroke_fraction)
        with mouse:
            dt = mouse.get_dt()
            dx = mouse.get_dy()
            speed_mouse = (dx / dt)
            if (speed_mouse > max_speed_mouse):
                dx *= (max_speed_mouse / speed_mouse)
            cue_stick_node = visual.cue.get_node('cue_stick')
            newX = min(max_backstroke, (cue_stick_node.getX() - (dx * ani.stroke_sensitivity)))
            if (newX < 0):
                # Moved past the ball: treat as a potential strike.
                newX = 0
                collision = (True if visual.cue.is_shot() else False)
            else:
                collision = False
            cue_stick_node.setX(newX)
        visual.cue.append_stroke_data()
        return (True if collision else False)
@pytest.mark.django_db  # NOTE(review): '.django_db' residue in source; restored pytest mark — confirm
def test_bad_params(client, create_gtas_data):
    """fiscal_period without fiscal_year -> 400; fiscal_period=1 -> 422."""
    resp = client.get('/api/v2/references/total_budgetary_resources/?fiscal_period=3')
    assert (resp.status_code == status.HTTP_400_BAD_REQUEST)
    resp = client.get('/api/v2/references/total_budgetary_resources/?fiscal_year=2015&fiscal_period=1')
    assert (resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY)
# NOTE(review): both decorator lines were garbled ('.skipif', '.parametrize');
# restored the conventional pytest marks — confirm against project history.
@pytest.mark.skipif(pyvista is None, reason='requires pyvista')
@pytest.mark.parametrize('drop_null_prisms', (False, True))
def test_to_pyvista_drop_null_prisms(dummy_layer, drop_null_prisms):
    """Null prisms (zero thickness or NaN boundaries) are dropped only when requested."""
    ((easting, northing), surface, reference, density) = dummy_layer
    reference -= 1
    properties = {'density': density}
    layer = prism_layer((easting, northing), surface, reference, properties=properties)
    # Make two prisms null by zero thickness and two by NaN boundaries.
    layer.top.values[(0, 0)] = layer.bottom.values[(0, 0)]
    layer.top.values[(2, 1)] = layer.bottom.values[(2, 1)]
    layer.top.values[(3, 2)] = np.nan
    layer.bottom.values[(3, 3)] = np.nan
    pv_grid = layer.prism_layer.to_pyvista(drop_null_prisms=drop_null_prisms)
    expected_n_prisms = 20
    if drop_null_prisms:
        expected_n_prisms -= 4
    assert (pv_grid.n_cells == expected_n_prisms)
    assert (pv_grid.n_points == (expected_n_prisms * 8))
    assert (pv_grid.n_arrays == 1)
    assert (pv_grid.array_names == ['density'])
    assert (pv_grid.get_array('density').ndim == 1)
    assert (pv_grid.get_array('density').size == expected_n_prisms)
class _NodeSeeker(dunodes.GenericNodeVisitor, object):
    """Docutils visitor that collects nodes of the ``include`` types, pruning ``exclude`` subtrees."""

    def __init__(self, node, *args, **kwargs):
        include = kwargs.pop('include')
        # system_message subtrees are skipped by default.
        exclude = kwargs.pop('exclude', (dunodes.system_message,))
        super(_NodeSeeker, self).__init__(*args, **kwargs)
        self.node = node
        self.include = include
        self.exclude = exclude
        self.result = []

    def __iter__(self):
        """Iterate over the nodes collected so far."""
        return iter(self.result)

    def default_visit(self, node):
        """Prune excluded subtrees (other than the root node); record included nodes."""
        should_prune = isinstance(node, self.exclude) and node != self.node
        if should_prune:
            raise dunodes.SkipChildren
        if isinstance(node, self.include):
            self.result.append(node)
def main(page: Page):
    """Demo page with several TextField variants; the submit button echoes all values."""
    output = Text()
    standard = TextField(label='Standard')
    disabled = TextField(label='Disabled', disabled=True, value='First name')
    read_only = TextField(label='Read-only', read_only=True, value='Last name')
    placeholder = TextField(label='With placeholder', hint_text='Please enter text here')
    with_icon = TextField(label='With an icon', icon=icons.EMOJI_EMOTIONS)

    def button_clicked(e):
        # Report the current contents of all five text boxes.
        output.value = f"Textboxes values are: '{standard.value}', '{disabled.value}', '{read_only.value}', '{placeholder.value}', '{with_icon.value}'."
        page.update()

    submit = ElevatedButton(text='Submit', on_click=button_clicked)
    page.add(standard, disabled, read_only, placeholder, with_icon, submit, output)
@pytest.mark.django_db  # NOTE(review): '.django_db' residue in source; restored pytest mark — confirm
def test_show_collaborators_tab_when_can_register_installations_should_return_true(user1, event1, mocker):
    """The collaborators tab shows when only can_register_installations is True."""
    mock_can_register_as_collaborator = mocker.patch('manager.templatetags.filters.can_register_as_collaborator')
    mock_can_register_as_collaborator.return_value = False
    mock_can_register_as_installer = mocker.patch('manager.templatetags.filters.can_register_as_installer')
    mock_can_register_as_installer.return_value = False
    mock_can_register_installations = mocker.patch('manager.templatetags.filters.can_register_installations')
    mock_can_register_installations.return_value = True
    assert filters.show_collaborators_tab(user1, event1)
    # Each permission check is consulted exactly once with the user and event.
    mock_can_register_as_collaborator.assert_called_once_with(user1, event1)
    mock_can_register_as_installer.assert_called_once_with(user1, event1)
    mock_can_register_installations.assert_called_once_with(user1, event1)
class OptionPlotoptionsTreemapDatalabels(Options):
def align(self):
return self._config_get('center')
def align(self, text: str):
self._config(text, js_type=False)
def allowOverlap(self):
return self._config_get(False)
def allowOverlap(self, flag: bool):
self._config(flag, js_type=False)
def animation(self) -> 'OptionPlotoptionsTreemapDatalabelsAnimation':
return self._config_sub_data('animation', OptionPlotoptionsTreemapDatalabelsAnimation)
def backgroundColor(self):
return self._config_get(None)
def backgroundColor(self, text: str):
self._config(text, js_type=False)
def borderColor(self):
return self._config_get(None)
def borderColor(self, text: str):
self._config(text, js_type=False)
def borderRadius(self):
return self._config_get(0)
def borderRadius(self, num: float):
self._config(num, js_type=False)
def borderWidth(self):
return self._config_get(0)
def borderWidth(self, num: float):
self._config(num, js_type=False)
def className(self):
return self._config_get(None)
def className(self, text: str):
self._config(text, js_type=False)
def color(self):
return self._config_get(None)
def color(self, text: str):
self._config(text, js_type=False)
def crop(self):
return self._config_get(True)
def crop(self, flag: bool):
self._config(flag, js_type=False)
def defer(self):
return self._config_get(False)
def defer(self, flag: bool):
self._config(flag, js_type=False)
def enabled(self):
return self._config_get(True)
def enabled(self, flag: bool):
self._config(flag, js_type=False)
def filter(self) -> 'OptionPlotoptionsTreemapDatalabelsFilter':
return self._config_sub_data('filter', OptionPlotoptionsTreemapDatalabelsFilter)
def format(self):
return self._config_get('point.value')
def format(self, text: str):
self._config(text, js_type=False)
def formatter(self):
return self._config_get(None)
def formatter(self, value: Any):
self._config(value, js_type=False)
@property
def inside(self):
    """Whether to render the label inside the point shape (defaults to True)."""
    return self._config_get(True)

@inside.setter
def inside(self, flag: bool):
    self._config(flag, js_type=False)

@property
def nullFormat(self):
    """Format to use when the point value is null (defaults to None)."""
    return self._config_get(None)

@nullFormat.setter
def nullFormat(self, flag: bool):
    self._config(flag, js_type=False)

@property
def nullFormatter(self):
    """Callback to format labels for null points (defaults to None)."""
    return self._config_get(None)

@nullFormatter.setter
def nullFormatter(self, value: Any):
    self._config(value, js_type=False)

@property
def overflow(self):
    """How to handle labels that overflow the plot area (defaults to 'justify')."""
    return self._config_get('justify')

@overflow.setter
def overflow(self, text: str):
    self._config(text, js_type=False)

@property
def padding(self):
    """Padding around the label text in pixels (defaults to 5)."""
    return self._config_get(5)

@padding.setter
def padding(self, num: float):
    self._config(num, js_type=False)

@property
def position(self):
    """Alignment of the label relative to the point (defaults to 'center')."""
    return self._config_get('center')

@position.setter
def position(self, text: str):
    self._config(text, js_type=False)

@property
def rotation(self):
    """Text rotation of the label in degrees (defaults to 0)."""
    return self._config_get(0)

@rotation.setter
def rotation(self, num: float):
    self._config(num, js_type=False)
@property
def shadow(self):
    """Shadow setting for the data label (defaults to False)."""
    return self._config_get(False)

@shadow.setter
def shadow(self, flag: bool):
    self._config(flag, js_type=False)

@property
def shape(self):
    """Name of the symbol drawn behind the label (defaults to 'square')."""
    return self._config_get('square')

@shape.setter
def shape(self, text: str):
    self._config(text, js_type=False)

@property
def style(self):
    """CSS style object applied to the label (defaults to None)."""
    return self._config_get(None)

@style.setter
def style(self, value: Any):
    self._config(value, js_type=False)

@property
def textPath(self) -> 'OptionPlotoptionsTreemapDatalabelsTextpath':
    """Sub-options object for rendering the label along a text path."""
    return self._config_sub_data('textPath', OptionPlotoptionsTreemapDatalabelsTextpath)

@property
def useHTML(self):
    """Whether to render the label with HTML (defaults to False)."""
    return self._config_get(False)

@useHTML.setter
def useHTML(self, flag: bool):
    self._config(flag, js_type=False)

@property
def verticalAlign(self):
    """Vertical alignment of the label (defaults to 'middle')."""
    return self._config_get('middle')

@verticalAlign.setter
def verticalAlign(self, text: str):
    self._config(text, js_type=False)

@property
def x(self):
    """Horizontal pixel offset of the label (defaults to 0)."""
    return self._config_get(0)

@x.setter
def x(self, num: float):
    self._config(num, js_type=False)

@property
def y(self):
    """Vertical pixel offset of the label (defaults to 0)."""
    return self._config_get(0)

@y.setter
def y(self, num: float):
    self._config(num, js_type=False)

@property
def zIndex(self):
    """Z-index of the data label (defaults to 6)."""
    return self._config_get(6)

@zIndex.setter
def zIndex(self, num: float):
    self._config(num, js_type=False)
# NOTE(review): the bare `.order(...)` / `.parametrize(...)` lines were
# corrupted decorators; restored as pytest marks (assumes `pytest` is
# imported at the top of this file -- confirm).
@pytest.mark.order(-1)  # presumably must run after the data-creating tests -- TODO confirm
@pytest.mark.parametrize(('user_id', 'status'), (('1', 200), ('20', 404), ('', 200)))
def test_get_users_posts(user_id, status):
    """GET /posts, optionally filtered by user_id, returns the expected status.

    An empty user_id means no filter (all posts, 200); an unknown id 404s.
    """
    if user_id:
        response = client.get(('/posts?user_id=' + str(user_id)))
    else:
        response = client.get('/posts')
    assert (status == response.status_code)
def test_fiscal_year_period_selection(setup_test_data, client):
    """Filtering by fiscal_year=2018 and fiscal_period=6 returns exactly the
    three agencies from the fixture: one with populated reporting figures and
    two whose metric fields are all None.

    NOTE(review): `url`, `status` and `assurance_statement_1` are module-level
    names defined elsewhere in this file.
    """
    resp = client.get((url + '?fiscal_year=2018&fiscal_period=6'))
    assert (resp.status_code == status.HTTP_200_OK)
    response = resp.json()
    assert (len(response['results']) == 3)
    # Full expected payload, in the order the endpoint returns it.
    expected_results = [{'agency_name': 'Test Agency', 'abbreviation': 'ABC', 'toptier_code': '123', 'agency_id': 1, 'current_total_budget_authority_amount': .97, 'recent_publication_date': '2018-07-03T00:00:00Z', 'recent_publication_date_certified': False, 'tas_account_discrepancies_totals': {'gtas_obligation_total': 1788370.03, 'tas_accounts_total': 100.0, 'tas_obligation_not_in_gtas_total': 11.0, 'missing_tas_accounts_count': 2}, 'obligation_difference': 84931.95, 'unlinked_contract_award_count': 4, 'unlinked_assistance_award_count': 6, 'assurance_statement_url': assurance_statement_1}, {'agency_name': 'Test Agency 2', 'abbreviation': 'XYZ', 'toptier_code': '987', 'agency_id': 2, 'current_total_budget_authority_amount': None, 'recent_publication_date': None, 'recent_publication_date_certified': False, 'tas_account_discrepancies_totals': {'gtas_obligation_total': None, 'tas_accounts_total': None, 'tas_obligation_not_in_gtas_total': None, 'missing_tas_accounts_count': None}, 'obligation_difference': None, 'unlinked_contract_award_count': None, 'unlinked_assistance_award_count': None, 'assurance_statement_url': None}, {'agency_name': 'Test Agency 3', 'abbreviation': 'AAA', 'toptier_code': '001', 'agency_id': 3, 'current_total_budget_authority_amount': None, 'recent_publication_date': None, 'recent_publication_date_certified': False, 'tas_account_discrepancies_totals': {'gtas_obligation_total': None, 'tas_accounts_total': None, 'tas_obligation_not_in_gtas_total': None, 'missing_tas_accounts_count': None}, 'obligation_difference': None, 'unlinked_contract_award_count': None, 'unlinked_assistance_award_count': None, 'assurance_statement_url': None}]
    assert (response['results'] == expected_results)
def extractWwwEvilationsCom(item):
    """Parse a www.evilations.com feed item into a release message.

    Returns None when the title has no volume/chapter info or is a preview,
    False when no known tag matches, otherwise the built release message.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    if not (vol or chp) or 'preview' in title.lower():
        return None
    # (tag to look for, series name, translation type)
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def preprocess(data_name=None):
    """Balance recorded training data evenly across the six output classes.

    Loads ``{data_name}X.npy`` / ``{data_name}Y.npy`` from
    ``training_data/raw/{data_name}``, truncates every class to the size of
    the rarest one, shuffles the result, and writes the balanced arrays to
    ``training_data/balanced/{data_name}``.

    :param data_name: name of the raw data set; prompted for interactively
        when omitted (keeps the original zero-argument call working).
    """
    base_dir = os.getcwd()
    # Use os.path.join instead of hand-built backslash strings: the original
    # f-strings contained unescaped '\t' / '\b' sequences that were read as
    # control characters and produced invalid paths.
    balanced_root = os.path.join(base_dir, 'training_data', 'balanced')
    os.makedirs(balanced_root, exist_ok=True)  # plain mkdir raises if it exists
    if data_name is None:
        data_name = input('which data do you want to preprocess: ')
    raw_dir = os.path.join(base_dir, 'training_data', 'raw', data_name)
    if not os.path.exists(raw_dir):
        print('data doesnt exist!')
        return
    print('loading data!')
    training_dataX = list(np.load(os.path.join(raw_dir, f'{data_name}X.npy'), allow_pickle=True))
    training_dataY = list(np.load(os.path.join(raw_dir, f'{data_name}Y.npy'), allow_pickle=True))
    # Output goes under balanced/<data_name>; the original created an unused
    # processed/<data_name> directory and then tried to save into the missing
    # balanced/<data_name> folder.
    out_dir = os.path.join(balanced_root, data_name)
    os.makedirs(out_dir, exist_ok=True)
    # One bucket per one-hot class, in label-index order:
    # forward, left, right, forward_left, forward_right, do_nothing.
    print('separating data!')
    buckets = [[] for _ in range(6)]
    for frame, label in zip(training_dataX, training_dataY):
        for cls in range(6):
            if label[cls] == 1:
                buckets[cls].append(frame)
                break
    for bucket in buckets:
        random.shuffle(bucket)
    minimum_length = min(len(bucket) for bucket in buckets)
    # Rebuild X and Y together so labels stay aligned with frames. The
    # original appended fresh labels onto the *loaded* Y list, so the saved
    # labels no longer matched the balanced, shuffled frames.
    balanced_X = []
    balanced_Y = []
    for cls, bucket in enumerate(buckets):
        balanced_X.extend(bucket[:minimum_length])
        one_hot = [0] * 6
        one_hot[cls] = 1
        balanced_Y.extend([one_hot[:] for _ in range(minimum_length)])
    # Shuffle X and Y with the same permutation to keep pairs intact.
    permutation = np.arange(len(balanced_X))
    np.random.shuffle(permutation)
    dataX_shuffled = np.array(balanced_X)[permutation]
    dataY_shuffled = np.array(balanced_Y)[permutation]
    np.save(os.path.join(out_dir, f'{data_name}X.npy'), dataX_shuffled)
    np.save(os.path.join(out_dir, f'{data_name}Y.npy'), dataY_shuffled)
    print('balanced data saved!')
class OptionPlotoptionsScatterClusterZones(Options):
    """Options for `plotOptions.scatter.cluster.zones` entries.

    NOTE(review): the stray `_.setter` line in the original was a corrupted
    `@from_.setter` decorator; the full `@property` / `@<name>.setter` pairs
    are restored here so getters are not shadowed by their setters.
    """

    @property
    def className(self):
        """CSS class name applied to the zone (defaults to None)."""
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def from_(self):
        """Lower bound of the zone (maps to the JavaScript `from` option)."""
        return self._config_get(None)

    @from_.setter
    def from_(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionPlotoptionsScatterClusterZonesMarker':
        """Sub-options object for the marker used inside this zone."""
        return self._config_sub_data('marker', OptionPlotoptionsScatterClusterZonesMarker)

    @property
    def to(self):
        """Upper bound of the zone (defaults to None)."""
        return self._config_get(None)

    @to.setter
    def to(self, num: float):
        self._config(num, js_type=False)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.