code stringlengths 281 23.7M |
|---|
class GPInvitation(Document):
    """Invitation record for onboarding a user into Gameplan.

    Lifecycle: created as Pending -> invitation email sent -> accepted
    (which creates/updates the User and any guest-access rows).
    """

    def before_insert(self):
        """Validate the invite and stamp key/inviter/status before saving."""
        frappe.utils.validate_email_address(self.email, True)
        if self.role == 'Gameplan Guest' and not (self.teams or self.projects):
            frappe.throw('Project is required to invite as Guest')
        if self.role != 'Gameplan Guest':
            # Teams/projects only apply to guests; clear them for other roles.
            self.teams = None
            self.projects = None
        self.key = frappe.generate_hash(length=12)
        self.invited_by = frappe.session.user
        self.status = 'Pending'

    def after_insert(self):
        self.invite_via_email()

    def invite_via_email(self):
        """Send the invitation email containing the accept link."""
        invite_link = frappe.utils.get_url(f'/api/method/gameplan.api.accept_invitation?key={self.key}')
        if frappe.local.dev_server:
            # Convenience for local development: surface the link on stdout.
            print(f'Invite link for {self.email}: {invite_link}')
        title = 'Gameplan'
        template = 'gameplan_invitation'
        frappe.sendmail(recipients=self.email, subject=f'You have been invited to join {title}', template=template, args={'title': title, 'invite_link': invite_link}, now=True)
        self.db_set('email_sent_at', frappe.utils.now())
        # FIX: removed a stray no-op `()` statement that followed db_set.

    def accept_invitation(self):
        frappe.only_for('System Manager')
        self.accept()

    def accept(self):
        """Mark the invitation accepted and provision the invited user."""
        if self.status == 'Expired':
            frappe.throw('Invalid or expired key')
        user = self.create_user_if_not_exists()
        user.append_roles(self.role)
        user.save(ignore_permissions=True)
        self.create_guest_access(user)
        self.status = 'Accepted'
        self.accepted_at = frappe.utils.now()
        self.save(ignore_permissions=True)

    def create_guest_access(self, user):
        """Create GP Guest Access rows for each invited team/project (guests only)."""
        if self.role == 'Gameplan Guest':
            teams = frappe.parse_json(self.teams) if self.teams else []
            for team in teams:
                guest_access = frappe.get_doc(doctype='GP Guest Access')
                guest_access.user = user.name
                guest_access.team = team
                guest_access.save(ignore_permissions=True)
            projects = frappe.parse_json(self.projects) if self.projects else []
            for project in projects:
                guest_access = frappe.get_doc(doctype='GP Guest Access')
                guest_access.user = user.name
                guest_access.project = project
                guest_access.save(ignore_permissions=True)

    def create_user_if_not_exists(self):
        """Return the User for this email, creating a Website User if needed."""
        if not frappe.db.exists('User', self.email):
            # FIX: str.split('') raises ValueError (empty separator is not
            # allowed); the local part before '@' is the intended
            # first-name source.
            first_name = self.email.split('@')[0].title()
            user = frappe.get_doc(doctype='User', user_type='Website User', email=self.email, send_welcome_email=0, first_name=first_name).insert(ignore_permissions=True)
        else:
            user = frappe.get_doc('User', self.email)
        return user
class Wei(int):
    """An ``int`` subclass whose comparisons and arithmetic coerce the other
    operand through ``_to_wei`` before delegating to the plain integer
    implementation."""

    def __new__(cls, value: Any) -> Any:
        return int.__new__(cls, _to_wei(value))

    def __hash__(self) -> int:
        # Hash exactly like the underlying integer value.
        return int.__hash__(self)

    def __lt__(self, other: Any) -> bool:
        return int.__lt__(self, _to_wei(other))

    def __le__(self, other: Any) -> bool:
        return int.__le__(self, _to_wei(other))

    def __eq__(self, other: Any) -> bool:
        # Operands that cannot be converted to wei are simply unequal.
        try:
            return int.__eq__(self, _to_wei(other))
        except TypeError:
            return False

    def __ne__(self, other: Any) -> bool:
        try:
            return int.__ne__(self, _to_wei(other))
        except TypeError:
            return True

    def __ge__(self, other: Any) -> bool:
        return int.__ge__(self, _to_wei(other))

    def __gt__(self, other: Any) -> bool:
        return int.__gt__(self, _to_wei(other))

    def __add__(self, other: Any) -> 'Wei':
        return Wei(int.__add__(self, _to_wei(other)))

    def __sub__(self, other: Any) -> 'Wei':
        return Wei(int.__sub__(self, _to_wei(other)))

    def to(self, unit: str) -> 'Fixed':
        """Convert this wei amount to *unit*, returning a ``Fixed`` value."""
        try:
            scale = Fixed(10) ** (-UNITS[unit])
        except KeyError:
            raise TypeError(f'Cannot convert wei to unknown unit: "{unit}". ') from None
        return Fixed(self * scale)
def zero_crossings(y_axis, window_len=11, window_f='hanning', offset_corrected=False):
    """Return the indices where the smoothed *y_axis* crosses zero.

    Parameters
    ----------
    y_axis : sequence of float
        Signal to analyse.
    window_len : int
        Smoothing window length passed to ``_smooth``.
    window_f : str
        Smoothing window name passed to ``_smooth``.
    offset_corrected : bool
        Internal flag; True once a DC-offset correction retry has been done,
        preventing infinite recursion.

    Raises
    ------
    ValueError
        If no crossings are found, or the crossing spacing is too irregular
        (suggesting a smoothing-window problem or an unhandled offset).
    """
    length = len(y_axis)
    y_axis = _smooth(y_axis, window_len, window_f)[:length]
    indices = np.where(np.diff(np.sign(y_axis)))[0]
    # FIX: bail out on "no crossings" before computing interval statistics;
    # the original took mean/std of an empty diff first, producing NaN plus
    # RuntimeWarnings and only then raising.
    if len(indices) < 1:
        raise ValueError('No zero crossings found')
    diff = np.diff(indices)
    # A large relative spread of crossing intervals indicates false
    # crossings. Guard diff.size so a single crossing (empty diff) passes.
    if diff.size and (diff.std() / diff.mean()) > 0.1:
        # If odd and even intervals are each regular on their own, the
        # signal likely rides on a DC offset: remove it and retry once.
        if ((diff[::2].std() / diff[::2].mean()) < 0.1) and ((diff[1::2].std() / diff[1::2].mean()) < 0.1) and (not offset_corrected):
            offset = np.mean([y_axis.max(), y_axis.min()])
            return zero_crossings(y_axis - offset, window_len, window_f, True)
        print(diff.std() / diff.mean())
        print(np.diff(indices))
        raise ValueError('False zero-crossings found, indicates problem {0!s} or {1!s}'.format('with smoothing window', 'unhandled problem with offset'))
    # Compensate for the phase shift introduced by the smoothing window.
    return indices - ((window_len // 2) - 1)
class ReqInventedBy(ReqTagGeneric):
    """Handles the mandatory 'Invented by' requirement tag."""

    def __init__(self, config):
        applicable_types = set([InputModuleTypes.ctstag, InputModuleTypes.reqtag, InputModuleTypes.testcase])
        ReqTagGeneric.__init__(self, config, 'Invented by', applicable_types)

    def rewrite(self, rid, req):
        """Validate the inventor against the configured list and consume the tag."""
        self.check_mandatory_tag(rid, req, 5)
        inventor = req[self.get_tag()].get_content()
        known_inventors = self.get_config().get_value('requirements.inventors')
        # 'flonatel' is always accepted, in addition to configured inventors.
        if inventor != 'flonatel' and inventor not in known_inventors:
            raise RMTException(6, "Invalid invented by '%s'. Must be one of the inventors '%s'" % (inventor, known_inventors), rid)
        del req[self.get_tag()]
        return (self.get_tag(), inventor)
class SimpleEditor(SimpleTextEditor):
    """File-name text editor with a browse button (Qt implementation).

    Combines a QLineEdit for direct entry with an icon button that opens a
    file dialog; pushes the chosen/typed path into ``self.value``.
    """

    # (signal, handler) pairs to disconnect when the editor is disposed.
    _connections_to_remove = List(Tuple(Any(), Callable()))
    # File-dialog wildcard filter trait.
    filter = filter_trait

    def init(self, parent):
        """Create the line edit and browse button, and wire up signals."""
        self.control = QtGui.QWidget()
        layout = QtGui.QHBoxLayout(self.control)
        layout.setContentsMargins(0, 0, 0, 0)
        self._file_name = control = QtGui.QLineEdit()
        layout.addWidget(control)
        if self.factory.auto_set:
            # auto_set: push the value on every keystroke.
            control.textEdited.connect(self.update_object)
            self._connections_to_remove.append((control.textEdited, self.update_object))
        else:
            # Otherwise push only when editing finishes (return/focus-out).
            control.editingFinished.connect(self.update_object)
            self._connections_to_remove.append((control.editingFinished, self.update_object))
        button = IconButton(QtGui.QStyle.StandardPixmap.SP_DirIcon, self.show_file_dialog)
        layout.addWidget(button)
        self.set_tooltip(control)
        self.filter = self.factory.filter
        self.sync_value(self.factory.filter_name, 'filter', 'from', is_list=True)

    def dispose(self):
        """Disconnect all recorded signal connections, then do base disposal."""
        while self._connections_to_remove:
            (signal, handler) = self._connections_to_remove.pop()
            signal.disconnect(handler)
        # NOTE(review): calls Editor.dispose rather than super(); presumably
        # intentional to skip SimpleTextEditor's dispose -- confirm.
        Editor.dispose(self)

    def update_object(self):
        """Push the widget's current text into the trait value."""
        if (self.control is not None):
            file_name = str(self._file_name.text())
            try:
                if self.factory.truncate_ext:
                    # Drop the extension before storing, if configured.
                    file_name = splitext(file_name)[0]
                self.value = file_name
            except TraitError as excp:
                # Invalid value: restore the widget text from the trait.
                self._file_name.setText(self.value)

    def update_editor(self):
        """Refresh the widget text from the trait value."""
        self._file_name.setText(self.str_value)

    def show_file_dialog(self):
        """Open the file dialog and store the chosen path on OK."""
        dlg = self._create_file_dialog()
        dlg.open()
        if (dlg.return_code == OK):
            if self.factory.truncate_ext:
                self.value = splitext(dlg.path)[0]
            else:
                self.value = dlg.path
            self.update_editor()

    def get_error_control(self):
        """Return the widget used to display error state."""
        return self._file_name

    def _create_file_dialog(self):
        """Build a FileDialog seeded with the current path and wildcard."""
        wildcard = ' '.join(self.factory.filter)
        dlg = FileDialog(parent=self.get_control_widget(), default_path=self._file_name.text(), action=('save as' if (self.factory.dialog_style == 'save') else 'open'), wildcard=wildcard)
        return dlg
class GradientValueClipping(Callback['Trainer[BaseConfig, Any]']):
    """Callback that clips gradient values element-wise after each backward pass."""

    def on_backward_end(self, trainer: 'Trainer[BaseConfig, Any]') -> None:
        """Clip the trainer's gradients when a clip value is configured."""
        value = trainer.config.training.clip_grad_value
        if value is None:
            # Clipping disabled in the training config.
            return
        clip_gradient_value(parameters=trainer.learnable_parameters, clip_value=value)
class TestImports(unittest.TestCase):
    """Smoke tests that the loxi package exposes the expected OpenFlow API."""

    def test_toplevel(self):
        """Top-level package exports and protocol lookup for wire version 5."""
        import loxi
        self.assertTrue(hasattr(loxi, 'ProtocolError'))
        # FIX: assertEquals is a deprecated alias that was removed in
        # Python 3.12; use assertEqual throughout.
        self.assertEqual(loxi.version_names[5], '1.4')
        ofp = loxi.protocol(5)
        self.assertEqual(ofp.OFP_VERSION, 5)
        self.assertTrue(hasattr(ofp, 'action'))
        self.assertTrue(hasattr(ofp, 'common'))
        self.assertTrue(hasattr(ofp, 'const'))
        self.assertTrue(hasattr(ofp, 'message'))
        self.assertTrue(hasattr(ofp, 'oxm'))

    def test_version(self):
        """The of14 module corresponds to OpenFlow 1.4 (wire version 5)."""
        import loxi
        self.assertTrue(hasattr(loxi.of14, 'ProtocolError'))
        self.assertTrue(hasattr(loxi.of14, 'OFP_VERSION'))
        self.assertEqual(loxi.of14.OFP_VERSION, 5)
        self.assertTrue(hasattr(loxi.of14, 'action'))
        self.assertTrue(hasattr(loxi.of14, 'common'))
        self.assertTrue(hasattr(loxi.of14, 'const'))
        self.assertTrue(hasattr(loxi.of14, 'message'))
        self.assertTrue(hasattr(loxi.of14, 'oxm'))
class CacheFlowDispatcher(CacheDispatcher):
    """Cache dispatcher that brackets data retrieval with open/close flows."""

    __slots__ = []

    async def dispatch(self, reqargs, response):
        """Run the open flow, fetch data, run the close flow, build the response.

        The close flow also runs when ``get_data`` raises an ``Exception``
        (deliberately not BaseException), before the error is re-raised.
        """
        await self._parallel_flow(self.flow_open)
        try:
            content = await self.get_data(reqargs, response)
        except Exception:
            # Still close the flow on failure, then propagate the error.
            await self._parallel_flow(self.flow_close)
            raise
        await self._parallel_flow(self.flow_close)
        return self.response_builder(content, response)
()
('--model-name', type=str, default='vit_base_patch16_224')
('--use-fp16-acc', type=bool, default=True, help='Whether to use FP16 for accumulation (similar to TensorRT)')
('--use-graph', type=bool, default=True, help='Whether to use CUDA graph')
('--batch-size', type=int, default=0, help='Batch size')
def main(model_name='vit_base_patch16_224', use_fp16_acc=True, use_graph=True, batch_size=0):
    """Compile and benchmark a ViT model, sweeping batch sizes when none is given."""
    if detect_target().name() == 'rocm':
        # CUDA graph mode is not supported on ROCm targets.
        use_graph = False
    if batch_size >= 1:
        benchmark(model_name, batch_size, graph_mode=use_graph)
        return
    # No explicit batch size: compile and benchmark the usual powers of two.
    for bs in (1, 2, 4, 8, 16, 32, 64, 128, 256):
        compile_vit(model_name, bs, use_fp16_acc=use_fp16_acc)
        benchmark(model_name, bs, graph_mode=use_graph)
class UserTasksByStatusWidget(QtWidgets.QWidget, UserPropertyMixin, ProjectPropertyMixin):
    """Tabbed view of a user's tasks in a project, one tab per task status.

    NOTE(review): the bare ``.setter`` lines below look like stripped
    ``@user.setter`` / ``@project.setter`` property decorators from the
    original source -- confirm before running this file as-is.
    """

    # Display order of the status tabs.
    status_order = ['WFD', 'RTS', 'WIP', 'PREV', 'HREV', 'DREV', 'CMPL', 'OH', 'STOP']
    # Per-status tab background/foreground colors (QColor ints).
    status_colors = {'WFD': {'bg': QtGui.QColor(), 'fg': QtGui.QColor(0)}, 'RTS': {'bg': QtGui.QColor(), 'fg': QtGui.QColor()}, 'WIP': {'bg': QtGui.QColor(), 'fg': QtGui.QColor()}, 'PREV': {'bg': QtGui.QColor(7320544), 'fg': QtGui.QColor()}, 'HREV': {'bg': QtGui.QColor(7290052), 'fg': QtGui.QColor()}, 'DREV': {'bg': QtGui.QColor(7290052), 'fg': QtGui.QColor()}, 'CMPL': {'bg': QtGui.QColor(8564591), 'fg': QtGui.QColor()}, 'OH': {'bg': QtGui.QColor(), 'fg': QtGui.QColor()}, 'STOP': {'bg': QtGui.QColor(5134690), 'fg': QtGui.QColor()}}

    def __init__(self, *args, **kwargs):
        super(UserTasksByStatusWidget, self).__init__(*args, **kwargs)
        # Mixins are initialized explicitly (they are not in the super chain).
        UserPropertyMixin.__init__(self)
        ProjectPropertyMixin.__init__(self)
        self.tabs = []
        self._setup()

    def _setup(self):
        """Build the project selector row and the (initially empty) tab widget."""
        self.main_layout = QtWidgets.QVBoxLayout()
        self.main_layout.setMargin(0)
        self.setLayout(self.main_layout)
        project_combo_box_layout = QtWidgets.QHBoxLayout()
        project_combo_box_layout.setMargin(0)
        self.main_layout.addLayout(project_combo_box_layout)
        self.project_combo_box = ProjectComboBox(self)
        self.project_combo_box.show_active_projects = True
        self.project_combo_box.currentIndexChanged.connect(partial(self.project_combo_box_changed))
        project_combo_box_layout.addWidget(self.project_combo_box)
        # Spacer keeps the combo box left-aligned.
        project_combo_box_layout.addSpacerItem(QtWidgets.QSpacerItem((- 1), (- 1), QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum))
        self.main_tab_widget = QtWidgets.QTabWidget(self)
        self.main_layout.addWidget(self.main_tab_widget)

    def project_combo_box_changed(self, index):
        """Sync the selected project into the ``project`` property."""
        self.project = self.project_combo_box.get_current_project()

    # NOTE(review): stripped decorator fragment; presumably "@user.setter".
    .setter
    def user(self, user):
        UserPropertyMixin.user.fset(self, user)
        self.update()

    # NOTE(review): stripped decorator fragment; presumably "@project.setter".
    .setter
    def project(self, project):
        ProjectPropertyMixin.project.fset(self, project)
        self.update()

    def update(self):
        """Rebuild the status tabs for the current user/project selection."""
        self.main_tab_widget.clear()
        if ((not self.user) or (not self.project)):
            return
        # Count the user's tasks in this project, grouped by status code.
        status_codes_and_counts = DBSession.query(distinct(Status.code), count(Status.code)).join(Task.status).filter(Task.resources.contains(self.user)).filter((Task.project == self.project)).group_by(Status.code).all()
        status_codes_and_counts = dict(status_codes_and_counts)
        for status_code in self.status_order:
            if (status_code in status_codes_and_counts):
                status = Status.query.filter((Status.code == status_code)).first()
                status_tab = QtWidgets.QWidget(self)
                status_tab.setAutoFillBackground(True)
                # Color the tab page according to the status.
                palette = status_tab.palette()
                palette.setColor(status_tab.backgroundRole(), self.status_colors[status_code]['bg'])
                palette.setColor(status_tab.foregroundRole(), self.status_colors[status_code]['fg'])
                status_tab.setPalette(palette)
                self.main_tab_widget.addTab(status_tab, get_cached_icon(status_code), '{} ({})'.format(status_code, status_codes_and_counts[status_code]))
                status_tab_layout = QtWidgets.QVBoxLayout()
                status_tab_layout.setMargin(0)
                status_tab.setLayout(status_tab_layout)
                # Each tab hosts a task table listing the matching tasks.
                task_table = TaskTableView(self)
                task_table_model = TaskTableModel(self)
                task_table.setModel(task_table_model)
                status_tab_layout.addWidget(task_table)
                tasks = Task.query.filter((Task.project == self.project)).filter(Task.resources.contains(self.user)).filter((Task.status == status)).all()
                task_table_model.populate_table(tasks)
                task_table.resizeColumnsToContents()
class ByteString(FancyValidator):
    """Validator coercing values to character/byte strings, with optional
    length limits and encoding.

    NOTE(review): this reads like a Python 2 -> 3 port; several branches
    (e.g. catching UnicodeEncodeError from ``bytes()``/``str()``) look
    vestigial under Python 3 -- confirm against upstream FormEncode.
    """

    # Minimum / maximum accepted length (None disables the check).
    min = None
    max = None
    # Defaults to True when a minimum length is set (see __initargs__).
    not_empty = None
    # When set, str results are encoded to bytes with this codec.
    encoding = None
    # Separator used to flatten list/tuple values in _convert_from_python.
    list_joiner = ', '
    messages = dict(tooLong=_('Enter a value not more than %(max)i characters long'), tooShort=_('Enter a value %(min)i characters long or more'))

    def __initargs__(self, new_attrs):
        # A truthy min implies the value may not be empty.
        if ((self.not_empty is None) and self.min):
            self.not_empty = True

    def _convert_to_python(self, value, state):
        """Coerce an incoming *value* to a string (encoded if configured)."""
        if (value is None):
            value = ''
        elif (not isinstance(value, str)):
            try:
                value = bytes(value)
            except UnicodeEncodeError:
                value = str(value)
        if ((self.encoding is not None) and isinstance(value, str)):
            value = value.encode(self.encoding)
        return value

    def _convert_from_python(self, value, state):
        """Render a Python value back to a (possibly encoded, stripped) string."""
        if (value is None):
            value = ''
        elif (not isinstance(value, str)):
            if isinstance(value, (list, tuple)):
                # Flatten sequences by converting each element recursively.
                value = self.list_joiner.join((self._convert_from_python(v, state) for v in value))
            try:
                value = str(value)
            except UnicodeEncodeError:
                value = str(value)
        if ((self.encoding is not None) and isinstance(value, str)):
            value = value.encode(self.encoding)
        if self.strip:
            # NOTE(review): self.strip is presumably defined on FancyValidator.
            value = value.strip()
        return value

    def _validate_other(self, value, state):
        """Enforce the configured min/max length, if any."""
        if ((self.max is None) and (self.min is None)):
            return
        if (value is None):
            value = ''
        elif (not isinstance(value, str)):
            value = str(value)
        if ((self.max is not None) and (len(value) > self.max)):
            raise Invalid(self.message('tooLong', state, max=self.max), value, state)
        if ((self.min is not None) and (len(value) < self.min)):
            raise Invalid(self.message('tooShort', state, min=self.min), value, state)

    def empty_value(self, value):
        # Canonical empty value for this validator.
        return ''
def test_render_command_form(cli, loaded_script_module):
    """Form generation for a command taking one option plus one argument."""
    command_path = 'cli/command-with-option-and-argument'
    click_web._register(loaded_script_module, cli)
    ctx_and_commands = click_web.resources.cmd_form._get_commands_by_path(command_path)
    form_data = _generate_form_data(ctx_and_commands)
    # Two command levels: the group contributes one field, the leaf two.
    assert len(form_data) == 2
    assert len(form_data[0]['fields']) == 1
    assert len(form_data[1]['fields']) == 2
    pprint.pprint(form_data)
def find_common_bits_for_tag_groups(segbits, tag_groups):
    """For each tag group, collect the union of asserted ('one') bits.

    Only segbits entries whose value is a ``set`` participate; a bit tuple
    counts as asserted when its third element is truthy.
    Returns one set of bits per tag group, in order.
    """
    bit_groups = []
    for tag_group in tag_groups:
        asserted = set()
        for tag, bits in segbits.items():
            if tag in tag_group and isinstance(bits, set):
                asserted.update(bit for bit in bits if bit[2])
        bit_groups.append(asserted)
    return bit_groups
def scrape(url: str='', output_format='json_string'):
    """Fetch *url*, summarize long content, and append it to the collection file.

    Parameters:
        url: page to fetch (whitespace is stripped/removed first).
        output_format: 'json_string' returns a JSON dump of the result list;
            anything else returns the list itself.

    Returns:
        [{'href': url, 'body': text}] (or its JSON string); an empty
        list / '[]' when the fetch fails.
    """
    url = url.strip().replace(' ', '')
    print(f'[scrape] url: {url}')
    work_dir = os.getenv('AN_CURRENT_WORKDIR', './')
    filename = os.getenv('AN_COLLECTION_FILENAME', '')
    # FIX: the collection filename from the environment was loaded but never
    # used -- the path was hard-coded. Build the path from it instead.
    filepath = f'{work_dir}/{filename}'
    print(f'[scrape] collection file path: {filepath}')
    content = ''
    try:
        content = utils.load_web(url)
    except Exception as e:
        print(f'[ERROR] Exception occurred: url: {url}, error: {e}')
        return '[]' if output_format == 'json_string' else []
    print(f'[scrape] content length: {len(content)}, first 20 chars: {content[:20]} ...')
    output = content
    if len(content) > 2000:
        # Long pages are summarized by the LLM agent before being stored.
        print(f'[scrape] content length: {len(content)}, summarize it...')
        llm_agent = LLMAgentSummary()
        llm_agent.init_prompt(translation_enabled=False)
        llm_agent.init_llm()
        SUMMARY_MAX_LENGTH = int(os.getenv('SUMMARY_MAX_LENGTH', 20000))
        print(f'Summary max length: {SUMMARY_MAX_LENGTH}')
        content = content[:SUMMARY_MAX_LENGTH]
        output = llm_agent.run(content)
    res = [{'href': url, 'body': output}]
    with open(filepath, 'a+') as f:
        # One record per scrape, separated by a blank line.
        f.write(f'URL: {url}\n')
        f.write(f'Body: {output}\n')
        f.write('\n')
    if output_format == 'json_string':
        return json.dumps(res, ensure_ascii=False, indent=4)
    return res
class DNS(dict):
    """Dict-backed record of DNS lookup results for one domain.

    Attribute access is routed to the dict: writes go straight to item
    assignment (``__setattr__ = dict.__setitem__``) and reads of absent
    keys resolve to None instead of raising. Identity, equality and
    ordering are all based on the domain name alone.
    """

    def __getattr__(self, item):
        # Only called when normal attribute lookup fails; missing keys
        # deliberately yield None rather than AttributeError.
        if (item in self):
            return self[item]
        return None

    __setattr__ = dict.__setitem__

    def __init__(self, domain=''):
        # FIX: the original `super(dict, self).__init__()` skipped `dict`
        # in the MRO and invoked object.__init__; zero-argument super()
        # correctly targets the next class after DNS (i.e. dict).
        super().__init__()
        self.domain = domain
        self.servfail = False
        self.refused = False
        self.a = []        # A records
        self.cname = []    # CNAME records
        self.ns = []       # NS records
        self._ptr = []     # PTR records

    def __hash__(self):
        return hash(self.domain)

    def __eq__(self, other):
        return (self.domain == other.domain)

    def __lt__(self, other):
        return (self.domain < other.domain)
def test_early_conditionally_return_with_nothing():
    """Positive input yields an LMQLResult; negative input returns 'Something'."""
    result = early_conditionally_return_with_nothing(1)
    assert type(result) is lmql.LMQLResult, f"Expected return value to be 'LMQLResult' but got '{type(result)}'"
    result = early_conditionally_return_with_nothing(-1)
    assert result == 'Something', f"Expected return value to be 'Something' but got '{result}'"
.django_db
def test_two_distinct_recipients(client, monkeypatch, double_fpds_awards_with_distinct_recipients, helpers, elasticsearch_award_index):
    """Two awards with distinct recipients should yield a count of 2."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    # Pin "now" so date-window filtering is deterministic.
    helpers.patch_datetime_now(monkeypatch, 2022, 12, 31)
    response = _default_post(client, helpers)
    assert response.data['count'] == 2
class OptionPlotoptionsWindbarbSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
    """Config accessors for Highcharts windbarb sonification note duration.

    NOTE(review): each pair of same-named methods below reads like a
    @property getter plus its setter whose decorators were stripped during
    extraction; as written, the later (setter-shaped) definition shadows
    the earlier one. Confirm against the original generated source.
    """

    def mapFunction(self):
        # Getter: current mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store the mapping function as a non-JS value.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: property/path the duration is mapped to.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter for the mapping target.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped note duration.
        return self._config_get(None)

    def max(self, num: float):
        # Setter for the upper bound.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped note duration.
        return self._config_get(None)

    def min(self, num: float):
        # Setter for the lower bound.
        self._config(num, js_type=False)

    def within(self):
        # Getter: range constraint for the mapping.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter for the range constraint.
        self._config(value, js_type=False)
def _match_list_pattern(patterns: List[Pattern], test: Any) -> MatchResult:
    """Match *test* element-wise against *patterns*; lengths must agree.

    Returns Fail for non-lists or length mismatches; otherwise Success/Fail
    carrying the per-index submatch results (keyed by stringified index).
    """
    if not isinstance(test, list) or len(test) != len(patterns):
        return Fail(test)
    submatches = {}
    for index, pattern in enumerate(patterns):
        submatches[str(index)] = match(pattern, test[index])
    for result in submatches.values():
        if result.is_fail():
            return Fail(test, submatches)
    return Success(test, submatches)
class PacSponsorCandidatePerCycle(db.Model):
    """Read-only model over the PAC sponsor-candidate-per-cycle view."""

    # Backing database view.
    __tablename__ = 'ofec_pac_sponsor_candidate_per_cycle_vw'
    # Surrogate primary key of the view.
    idx = db.Column(db.Integer, primary_key=True)
    committee_id = db.Column(db.String, doc=docs.COMMITTEE_ID)
    # Two-year election cycle.
    cycle = db.Column(db.Integer)
    sponsor_candidate_id = db.Column(db.String, doc=docs.CANDIDATE_ID)
    sponsor_candidate_name = db.Column(db.String(100), doc=docs.CANDIDATE_NAME)
.benchmark
('pyscf')
.parametrize('fn, geom, charge, mult, ref_energy', BakerTSBm.geom_iter)
def test_baker_ts_dimer(fn, geom, charge, mult, ref_energy, results_bag, this_dir):
    """Optimize a Baker transition state with the dimer method and check the energy.

    Loads precomputed initial dimer orientations, runs a preconditioned
    L-BFGS optimization with a PySCF 3-21G calculator, records statistics
    in *results_bag*, and asserts convergence plus energy agreement.
    """
    init_orients = 'Ns'
    # Pickled dict of initial dimer orientation vectors, keyed by the
    # two-character id prefix of the geometry file name.
    with open((this_dir / init_orients), 'rb') as handle:
        N_INITS = pickle.load(handle)
    id_ = fn[:2]
    calc_kwargs = {'charge': charge, 'mult': mult, 'base_name': Path(fn).stem}
    calc = PySCF('321g', pal=2, verbose=0, **calc_kwargs)
    dimer_kwargs = {'rotation_method': 'fourier', 'calculator': calc, 'N_raw': N_INITS[id_], 'length': 0.0189, 'rotation_tol': 5, 'rotation_disable_pos_curv': True, 'trans_force_f_perp': True}
    dimer = Dimer(**dimer_kwargs)
    geom.set_calculator(dimer)
    opt_kwargs = {'thresh': 'baker', 'precon': True, 'max_step_element': 0.25, 'max_cycles': 50, 'c_stab': 0.103, 'dump': True}
    opt = PreconLBFGS(geom, **opt_kwargs)
    opt.run()
    # Keep the optimized geometry under a per-test-case name.
    shutil.copy('final_geometry.xyz', f'{id_}_final_geometry.xyz')
    energies_match = (geom.energy == pytest.approx(ref_energy))
    # Statistics for cross-test reporting.
    results_bag.cycles = (opt.cur_cycle + 1)
    results_bag.is_converged = opt.is_converged
    results_bag.energies_match = energies_match
    results_bag.dimer_force_evals = dimer.force_evals
    assert opt.is_converged
    assert energies_match
    opt_cycs = (opt.cur_cycle + 1)
    print(f'{fn} converged: {opt_cycs} optimization cycles, {dimer.force_evals} force evaluations.')
class SpanTask(BuiltinTaskWithLabels, abc.ABC):
    """Abstract base for span-annotation LLM tasks (NER-style labeling)."""

    def __init__(self, parse_responses: TaskResponseParser[Self], prompt_example_type: Type[FewshotExample], labels: List[str], template: str, label_definitions: Optional[Dict[(str, str)]], prompt_examples: Optional[List[FewshotExample]], description: Optional[str], normalizer: Optional[Callable[([str], str)]], alignment_mode: Literal[('strict', 'contract', 'expand')], case_sensitive_matching: bool, allow_overlap: bool, single_match: bool, check_label_consistency: SpanTaskLabelCheck):
        super().__init__(parse_responses=parse_responses, prompt_example_type=prompt_example_type, template=template, prompt_examples=prompt_examples, labels=labels, label_definitions=label_definitions, normalizer=normalizer)
        self._prompt_example_type = typing.cast(Type[SpanExample], self._prompt_example_type)
        self._validate_alignment(alignment_mode)
        self._alignment_mode = alignment_mode
        self._case_sensitive_matching = case_sensitive_matching
        self._allow_overlap = allow_overlap
        self._single_match = single_match
        self._check_label_consistency = check_label_consistency
        self._description = description
        if self._prompt_examples:
            # Filter/normalize few-shot examples against the label set.
            self._prompt_examples = list(self._check_label_consistency(self))

    def generate_prompts(self, docs: Iterable[Doc], **kwargs) -> Iterable[str]:
        """Render prompts for *docs*, injecting labels, examples and description."""
        return super().generate_prompts(docs=docs, description=self._description, labels=list(self._label_dict.values()), label_definitions=self._label_definitions, examples=self._prompt_examples, allow_overlap=self._allow_overlap, **kwargs)

    @staticmethod
    def _validate_alignment(alignment_mode: str):
        """Raise ValueError for an unsupported alignment mode.

        FIX: this is invoked as ``self._validate_alignment(alignment_mode)``
        yet takes a single parameter; without @staticmethod the bound call
        would pass ``self`` too and raise TypeError.
        """
        alignment_modes = ('strict', 'contract', 'expand')
        if (alignment_mode not in alignment_modes):
            raise ValueError(f"Unsupported alignment mode '{alignment_mode}'. Supported modes: {', '.join(alignment_modes)}")

    def assign_spans(self, doc: Doc, spans: List[Span]) -> None:
        """Attach parsed spans to *doc*; concrete subclasses must implement."""
        raise NotImplementedError()

    def parse_responses(self, docs: Iterable[Doc], responses: Iterable[str]) -> Iterable[Doc]:
        """Parse LLM responses into spans and yield the annotated docs."""
        for (doc, spans) in zip(docs, self._parse_responses(self, docs, responses)):
            self.assign_spans(doc, spans)
            (yield doc)

    def _cfg_keys(self) -> List[str]:
        # Attribute names serialized as task configuration.
        return ['_label_dict', '_template', '_label_definitions', '_alignment_mode', '_case_sensitive_matching']

    # NOTE(review): the accessors below read like stripped @property
    # definitions; left as plain methods here so call sites that invoke
    # them as methods keep working -- confirm against the original source.
    def alignment_mode(self) -> Literal[('strict', 'contract', 'expand')]:
        return self._alignment_mode

    def case_sensitive_matching(self) -> bool:
        return self._case_sensitive_matching

    def allow_overlap(self) -> bool:
        return self._allow_overlap

    def prompt_examples(self) -> Optional[Iterable[FewshotExample]]:
        return self._prompt_examples

    def prompt_example_type(self) -> Union[(Type[SpanExample], Type[SpanCoTExample])]:
        return self._prompt_example_type

    def single_match(self) -> bool:
        return self._single_match
class DynamicEIT():
    """Base class wrapping an EIT reconstruction solver (JAC or BP)."""

    def __init__(self, mesh=None, el_pos=None, parser='fmmu', algo='jac', p=0.2, lamb=0.001):
        """Build and configure the requested solver backend."""
        if algo == 'jac':
            jac_solver = jac.JAC(mesh, el_pos, perm=1.0, parser=parser)
            jac_solver.setup(p=p, lamb=lamb, method='kotre')
            self.solver = jac_solver
        else:
            # Anything other than 'jac' falls back to back-projection.
            bp_solver = bp.BP(mesh, el_pos, parser='fmmu', step=1)
            bp_solver.setup(weight='simple')
            self.solver = bp_solver

    def normalize(self, v1, v0):
        """Subclasses must implement measurement normalization."""
        raise NotImplementedError

    def map(self, v):
        """Subclasses must implement the solver mapping."""
        raise NotImplementedError
class Ipars(object):
    """Parse an IPARS input deck and write a tetgen-style ``.poly`` mesh file.

    NOTE(review): coordinates appear to be in feet and are scaled by 0.3048
    to metres on output -- confirm against the IPARS deck conventions.
    """

    def __init__(self, iparsFile, layered=False):
        """Read *iparsFile*, collect its key/value records and emit
        ``<iparsFile>.poly`` describing the hexahedral grid boundary.

        Parameters:
            iparsFile: path of the IPARS input file ('$' marks comment lines).
            layered: when True, write one region seed per cell tagged with
                its k-layer index; otherwise write a single region.
        """
        f = open(iparsFile, 'r')
        lines = f.readlines()
        values = {}
        # --- Parse "KEY = value ..." records; values may carry "n*v" repeat
        # factors and may continue on following lines until the next record.
        for (ln, line) in enumerate(lines):
            if ('$' in line):
                continue
            if ('=' in line):
                words = line.split('=')
                if ('(' in words[0]):
                    # Strip any array-index suffix, e.g. "KEY(1,1,1)".
                    key = words[0].split('(')[0].strip()
                else:
                    key = words[0].strip()
                values[key] = []
                if (len(words) > 1):
                    if ('"' in words[1]):
                        # Quoted values are stored verbatim.
                        values[key].append(words[1])
                    else:
                        for val in words[1].split():
                            try:
                                if ('*' in val):
                                    # Repeat factor: "n*v" expands to n copies of v.
                                    (n, v) = val.split('*')
                                    for i in range(int(n)):
                                        values[key].append(eval(v))
                                else:
                                    values[key].append(eval(val))
                            except:
                                # Non-numeric token: keep the raw string.
                                values[key].append(val)
                    # Continuation lines belong to this key until the next
                    # record, a blank line, or a comment.
                    for vline in lines[(ln + 1):]:
                        if (('=' in vline) or ('\n' == vline) or ('$' in vline)):
                            break
                        if ('"' in vline):
                            values[key].append(vline)
                        else:
                            vals = vline.split()
                            for val in vals:
                                try:
                                    if ('*' in val):
                                        (n, v) = val.split('*')
                                        for i in range(int(n)):
                                            values[key].append(eval(v))
                                    else:
                                        values[key].append(eval(val))
                                except:
                                    values[key].append(val)
        for (key, value) in values.items():
            logEvent(((key + ':') + str(value)))
        pf = open((iparsFile + '.poly'), 'w')
        self.polyfile = iparsFile
        # Grid origin and per-direction cell-size lists; the leading 0.0
        # makes the coordinate loops below include the origin plane.
        x = values['XYZ111'][0]
        y = values['XYZ111'][1]
        z = values['XYZ111'][2]
        dxList = ([0.0] + values['DX'])
        dyList = ([0.0] + values['DY'])
        dzList = ([0.0] + values['DZ'])
        Lx = sum(dxList)
        Ly = sum(dyList)
        Lz = sum(dzList)
        nnx = len(dxList)
        nny = len(dyList)
        nnz = len(dzList)
        nVertices = ((nnx * nny) * nnz)
        pf.write('#vertices\n')
        pf.write(('%d 3 0 1\n' % nVertices))
        vN = 0
        vertices = []
        vertexNumber = {}
        self.boundaryFlags = {'left': 1, 'right': 2, 'front': 3, 'back': 4, 'bottom': 5, 'top': 6, 'interior': 0}
        boundaryFlags = self.boundaryFlags

        def getBoundaryFlag(i, j, k):
            # Map a lattice index triple to a boundary flag; the checks are
            # ordered so bottom/top win over front/back over left/right.
            if (k == 0):
                return boundaryFlags['bottom']
            if (k == (nnz - 1)):
                return boundaryFlags['top']
            if (j == 0):
                return boundaryFlags['front']
            if (j == (nny - 1)):
                return boundaryFlags['back']
            if (i == 0):
                return boundaryFlags['left']
            if (i == (nnx - 1)):
                return boundaryFlags['right']
            else:
                return boundaryFlags['interior']
        # --- Write vertices (coordinates scaled by 0.3048 on output).
        for (i, dx) in enumerate(dxList):
            x += dx
            y = 0.0
            z = 0.0
            for (j, dy) in enumerate(dyList):
                y += dy
                z = 0.0
                for (k, dz) in enumerate(dzList):
                    z += dz
                    vertexNumber[(i, j, k)] = vN
                    vertices.append((x, y, z))
                    pf.write(('%d %12.5e %12.5e %12.5e %d\n' % ((vN + 1), (x * 0.3048), (y * 0.3048), (z * 0.3048), getBoundaryFlag(i, j, k))))
                    vN += 1
        # --- Collect the six quad facets of every cell; the set dedupes
        # identical facet tuples shared between adjacent cells.
        pf.write('#facets\n')
        facets = set()
        for i in range((nnx - 1)):
            for j in range((nny - 1)):
                for k in range((nnz - 1)):
                    facets |= set([(vertexNumber[(i, j, k)], vertexNumber[(i, j, (k + 1))], vertexNumber[(i, (j + 1), (k + 1))], vertexNumber[(i, (j + 1), k)], getBoundaryFlag(i, (- 1), (- 1))), (vertexNumber[((i + 1), j, k)], vertexNumber[((i + 1), j, (k + 1))], vertexNumber[((i + 1), (j + 1), (k + 1))], vertexNumber[((i + 1), (j + 1), k)], getBoundaryFlag((i + 1), (- 1), (- 1))), (vertexNumber[(i, j, k)], vertexNumber[(i, j, (k + 1))], vertexNumber[((i + 1), j, (k + 1))], vertexNumber[((i + 1), j, k)], getBoundaryFlag((- 1), j, (- 1))), (vertexNumber[(i, (j + 1), k)], vertexNumber[(i, (j + 1), (k + 1))], vertexNumber[((i + 1), (j + 1), (k + 1))], vertexNumber[((i + 1), (j + 1), k)], getBoundaryFlag((- 1), (j + 1), (- 1))), (vertexNumber[(i, j, k)], vertexNumber[(i, (j + 1), k)], vertexNumber[((i + 1), (j + 1), k)], vertexNumber[((i + 1), j, k)], getBoundaryFlag((- 1), (- 1), k)), (vertexNumber[(i, j, (k + 1))], vertexNumber[(i, (j + 1), (k + 1))], vertexNumber[((i + 1), (j + 1), (k + 1))], vertexNumber[((i + 1), j, (k + 1))], getBoundaryFlag((- 1), (- 1), (k + 1)))])
        pf.write(('%d 1 \n' % (len(facets),)))
        for f in facets:
            # One facet record: header line, then the quad's 1-based vertices.
            pf.write(('1 0 %d\n' % f[(- 1)]))
            pf.write(('4 %d %d %d %d\n' % ((f[0] + 1), (f[1] + 1), (f[2] + 1), (f[3] + 1))))
        pf.write('#holes\n')
        pf.write('0 \n')
        pf.write('#regions\n')
        nCells = (((nnx - 1) * (nny - 1)) * (nnz - 1))
        if layered:
            # One region seed per cell (at its center), tagged with k layer.
            pf.write(('%d \n' % nCells))
            rN = 1
            for i in range((nnx - 1)):
                for j in range((nny - 1)):
                    for k in range((nnz - 1)):
                        p1 = vertices[vertexNumber[(i, j, k)]]
                        p2 = vertices[vertexNumber[((i + 1), (j + 1), (k + 1))]]
                        pi = ((0.5 * (p1[0] + p2[0])), (0.5 * (p1[1] + p2[1])), (0.5 * (p1[2] + p2[2])))
                        pf.write(('%d %12.5e %12.5e %12.5e %d\n' % (rN, (pi[0] * 0.3048), (pi[1] * 0.3048), (pi[2] * 0.3048), k)))
                        rN += 1
        else:
            # Single region covering the whole domain.
            pf.write('1 \n')
            pf.write(('1 %12.5e %12.5e %12.5e 0 0.0\n' % (1.0, 1.0, 1.0)))
        pf.close()
        self.values = values
        # Domain extents, scaled to output units.
        self.L = ((Lx * 0.3048), (Ly * 0.3048), (Lz * 0.3048))
def fortios_hardware(data, fos):
    """Dispatch the hardware/nic member operation and report its outcome.

    Returns (failed, changed, raw_response, {}): *failed* is True when the
    response status is unsuccessful; *changed* mirrors the response's
    'revision_changed' flag (defaulting to True when absent).
    """
    fos.do_member_operation('hardware', 'nic')
    if data['hardware_nic']:
        resp = hardware_nic(data, fos)
    else:
        # No task body supplied: abort via the module's failure path.
        fos._module.fail_json(msg=('missing task body: %s' % 'hardware_nic'))
    success = is_successful_status(resp)
    changed = success and (resp['revision_changed'] if 'revision_changed' in resp else True)
    return (not success, changed, resp, {})
def is_limb_pointing(upper, mid, lower):
    """True when the three joints form a straight, sufficiently extended limb.

    A limb "points" when the joint angle is within STRAIGHT_LIMB_MARGIN of
    180 degrees and the lower segment is long enough relative to the upper.
    """
    if is_missing([upper, mid, lower]):
        return False
    angle = get_angle(upper, mid, lower)
    if abs(180 - angle) >= STRAIGHT_LIMB_MARGIN:
        # Joints are bent beyond tolerance.
        return False
    upper_length = dist.euclidean([upper['x'], upper['y']], [mid['x'], mid['y']])
    lower_length = dist.euclidean([lower['x'], lower['y']], [mid['x'], mid['y']])
    return lower_length > (EXTENDED_LIMB_MARGIN * upper_length)
.requires_roxar
def test_rox_get_modify_set_get_grid_with_subzones(roxar_project, roxinstance):
    """Round-trip a grid with subzones through Roxar and compare subgrids."""
    grd = xtgeo.grid_from_roxar(roxar_project, GRIDNAME1)
    zonation = {'intva': 4, 'intvb': 7, 'intvc': 3}
    grd.set_subgrids(zonation)
    if roxinstance.version_required('1.6'):
        grd.to_roxar(roxar_project, 'NewGrid')
    else:
        # Older Roxar API versions warn about incomplete subgrid support.
        with pytest.warns(UserWarning, match='Implementation of subgrids is lacking'):
            grd.to_roxar(roxar_project, 'NewGrid')
    reloaded = xtgeo.grid_from_roxar(roxar_project, 'NewGrid')
    for zone in ('intva', 'intvb', 'intvc'):
        assert list(grd.subgrids[zone]) == list(reloaded.subgrids[zone])
class _VerifyEmail():
    """Compose and send account-verification emails with tokenized links."""

    def __init__(self):
        self.settings = GetFieldFromSettings()
        self.token_manager = TokenManager()

    def __send_email(self, msg, useremail):
        """Send *msg* (HTML) to *useremail* with the configured subject/alias."""
        subject = self.settings.get('subject')
        send_mail(subject, strip_tags(msg), from_email=self.settings.get('from_alias'), recipient_list=[useremail], html_message=msg)

    def send_verification_link(self, request, inactive_user=None, form=None):
        """Create (or take) an inactive user and email them a verification link.

        When *form* is given, the user is saved from it and deactivated
        first. On any failure the just-created user is deleted and the
        exception re-raised so no orphaned inactive accounts remain.
        """
        if form:
            inactive_user = form.save(commit=False)
            inactive_user.is_active = False
            inactive_user.save()
        try:
            useremail = (form.cleaned_data.get(self.settings.get('email_field_name')) if form else inactive_user.email)
            if (not useremail):
                raise KeyError('No key named "email" in your form. Your field should be named as email in form OR set a variable "EMAIL_FIELD_NAME" with the name of current field in settings.py if you want to use current name as email field.')
            verification_url = self.token_manager.generate_link(request, inactive_user, useremail)
            msg = render_to_string(self.settings.get('html_message_template', raise_exception=True), {'link': verification_url, 'inactive_user': inactive_user}, request=request)
            self.__send_email(msg, useremail)
            return inactive_user
        except Exception:
            # Roll the account back so a failed send can be retried cleanly.
            inactive_user.delete()
            raise

    def resend_verification_link(self, request, email, **kwargs):
        """Re-issue a verification link, decoding user/email tokens if encoded."""
        inactive_user = kwargs.get('user')
        user_encoded_token = kwargs.get('token')
        encoded = kwargs.get('encoded', True)
        if encoded:
            # Tokens arrive encoded by default; resolve them to a user.
            decoded_encrypted_user_token = self.token_manager.perform_decoding(user_encoded_token)
            email = self.token_manager.perform_decoding(email)
            inactive_user = self.token_manager.get_user_by_token(email, decoded_encrypted_user_token)
        if ((not inactive_user) or (not email)):
            raise InvalidTokenOrEmail(f'Either token or email is invalid. user: {inactive_user}, email: {email}')
        link = self.token_manager.request_new_link(request, inactive_user, email)
        msg = render_to_string(self.settings.get('html_message_template', raise_exception=True), {'link': link}, request=request)
        self.__send_email(msg, email)
        return True
def test_plan_properties(session):
    """invited/going/declined views are derived from the guests mapping."""
    guests = {'1234': GuestStatus.INVITED, '2345': GuestStatus.INVITED, '3456': GuestStatus.GOING, '4567': GuestStatus.DECLINED}
    plan = PlanData(session=session, id='', time=..., title=..., guests=guests)
    assert set(plan.invited) == {'1234', '2345'}
    assert plan.going == ['3456']
    assert plan.declined == ['4567']
(logger)
def find_bonds_bends_dihedrals(geom, bond_factor=BOND_FACTOR, min_deg=15, max_deg=175):
    """Detect bonds, bends and proper dihedrals for the given geometry."""
    log(logger, f'Detecting bonds, bends and dihedrals for {len(geom.atoms)} atoms.')
    bonds, bends = find_bonds_bends(geom, bond_factor=bond_factor, min_deg=min_deg, max_deg=max_deg)
    dihedrals = find_dihedrals(geom.coords3d, bonds, bends, max_deg)
    log(logger, f'Found {len(dihedrals)} proper dihedrals.')
    return (bonds, bends, dihedrals)
def get_query(request_parameters: ImmutableMultiDict) -> dict:
    """Extract and parse the 'query' request parameter as a JSON object.

    Returns {} when the parameter is absent/unreadable; raises ValueError
    when it is present but not a JSON object (document).
    """
    # A single try keeps the original exception routing: AttributeError /
    # KeyError from either step means "no usable query" -> empty dict.
    try:
        raw = request_parameters.get('query')
        query = json.loads(raw or '{}')
    except (AttributeError, KeyError):
        return {}
    except json.JSONDecodeError:
        raise ValueError('Query must be a json document')
    if not isinstance(query, dict):
        raise ValueError('Query must be a json document')
    return query or {}
_from_env
def add_htmllog(outrootdir: str='~/public_html', outrooturi: str='', scriptname: str='', outdir: str='', outuri: str=''):
    """Generator-based context that tees log output to an HTML log on disk.

    Resolves the output directory/URI from the arguments (falling back to
    the main script's name), opens a treelog HtmlLog with the Nutils logo
    as title/favicon, and yields while logging is redirected there.

    NOTE(review): several string literals below (svg xmlns attributes and
    the anchor href) look truncated by extraction -- confirm against the
    original source before relying on the rendered HTML.
    """
    import html, base64, bottombar
    # The script name is needed when no outdir is given, or to build a URI.
    if ((not scriptname) and ((not outdir) or (outrooturi and (not outuri)))):
        scriptname = name_of_main()
    if outdir:
        outdir = pathlib.Path(outdir).expanduser()
    else:
        outdir = (pathlib.Path(outrootdir).expanduser() / scriptname)
    if (not outuri):
        outuri = (((outrooturi.rstrip('/') + '/') + scriptname) if outrooturi else outdir.as_uri())
    nutils_logo = '<svg version="1.1" xmlns=" xmlns:svg=" style="vertical-align: middle;" width="24" height="24" viewBox="-12 -12 24 24"><path d="M -9 3 v -6 a 6 6 0 0 1 12 0 v 6 M 9 -3 v 6 a 6 6 0 0 1 -12 0 v -6" fill="none" stroke="currentColor" stroke-width="3" stroke-linecap="round"/></svg>'
    # Inline the logo as a base64 data-URI favicon.
    favicon = ('data:image/svg+xml;base64,' + base64.b64encode((b'<?xml version="1.0" encoding="UTF-8" standalone="no"?>' + nutils_logo.encode())).decode())
    htmltitle = '<a href=" {}'.format(nutils_logo, html.escape(scriptname))
    with treelog.HtmlLog(outdir, title=scriptname, htmltitle=htmltitle, favicon=favicon) as htmllog:
        loguri = ((outuri + '/') + htmllog.filename)
        try:
            # Redirect logging and show the log location in the bottom bar.
            with treelog.add(htmllog), bottombar.add(loguri, label='writing log to'):
                (yield)
        except Exception as e:
            # Record the failure in the HTML log before propagating.
            with treelog.set(htmllog):
                treelog.error(f'{e.__class__.__name__}: {e}')
            raise
        finally:
            treelog.info(f'log written to: {loguri}')
class Subpattern(PatternBase):
    """Pattern that matches a projection of the tested value.

    ``get_subtest`` extracts the sub-value to match against ``subpattern``;
    the sub-result is recorded under ``name`` in the returned result's
    submatch mapping.
    """
    name: str
    subpattern: Pattern
    get_subtest: Callable[([Any], Any)]

    def __init__(self, name: str, subpattern: Pattern, get_subtest: Callable[([Any], Any)]) -> None:
        self.name = name
        self.subpattern = subpattern
        self.get_subtest = get_subtest

    def match(self, test: Any) -> MatchResult:
        """Match the projected value; succeed iff the sub-match succeeds."""
        inner = match(self.subpattern, self.get_subtest(test))
        recorded = {self.name: inner}
        outcome = Success if inner.is_success() else Fail
        return outcome(test, recorded)

    def _to_str(self, test: str) -> str:
        # Render as attribute access on the tested expression.
        return to_pattern(self.subpattern)._to_str(f'{test}.{self.name}')
class SolidQCSample(QCSample):
    """QC reporting for a SOLiD sequencing sample.

    Extends QCSample with paired-end awareness: paired-end samples are
    expected to produce 4 boxplots, single-end samples 2, and both 3 screens.
    """

    def __init__(self, name, qc_dir, paired_end):
        """Store the sample name/QC dir and whether the run is paired-end."""
        QCSample.__init__(self, name, qc_dir)
        self.__paired_end = paired_end

    def report(self, html):
        """Append this sample's HTML report section to *html*."""
        html.add("<div class='sample'>")
        html.add(("<a name='%s'><h2>%s</h2></a>" % (self.name, self.name)))
        html.add('<table><tr>')
        html.add('<td>')
        self.report_boxplots(html, paired_end=self.__paired_end)
        html.add('</td>')
        html.add('<td>')
        self.report_screens(html)
        html.add('</td>')
        html.add('</tr>')
        html.add("<tr><td colspan='2'>")
        self.report_programs(html)
        # NOTE(review): '<td>' below looks like it should be '</td>' — left
        # unchanged to avoid altering emitted output; confirm with consumers.
        html.add('<td></tr></table>')
        html.add('</div>')

    def verify(self):
        """Check that the expected QC outputs exist.

        Logs a warning for each problem found and returns True only when all
        expected boxplots and screens are present.
        """
        status = True
        if (not self.boxplots()):
            logging.warning(('%s: no boxplots' % self.name))
            status = False
        elif self.__paired_end:
            if (len(self.boxplots()) != 4):
                logging.warning(('%s: wrong number of boxplots (expected 4, found %d)' % (self.name, len(self.boxplots()))))
                status = False
        elif (len(self.boxplots()) != 2):
            logging.warning(('%s: wrong number of boxplots (expected 2, found %d)' % (self.name, len(self.boxplots()))))
            status = False
        if (not self.screens()):
            logging.warning(('%s: no screens' % self.name))
            status = False
        elif (len(self.screens()) != 3):
            logging.warning(('%s: wrong number of screens' % self.name))
            # Bug fix: this branch previously logged a warning but did not
            # mark verification as failed, unlike every parallel check above.
            status = False
        return status
class GithubUpdater(Updater):
    """Updater backend that queries GitHub's commits API for xeHentai.

    NOTE(review): the request-URL string literals in get_latest_release below
    are truncated — the quoted URLs appear to have been stripped during
    extraction, leaving syntactically broken expressions ("(' % param)" and
    "(' % sha)"). Restore them from the original source before use.
    """

    def __init__(self, session):
        # session: HTTP client with a requests-style .get() whose response
        # exposes .json() (see get_latest_release).
        self.session = session

    def get_latest_release(self, dev=False):
        """Return UpdateInfo for the newest commit on 'dev' or 'master'."""
        param = ((dev and 'dev') or 'master')
        r = self.session.get((' % param))
        # First element of the commits listing is the most recent commit.
        commit = r.json()[0]
        sha = commit['sha']
        url = (' % sha)
        # Commit message is flattened to a single line for display.
        return UpdateInfo(sha, url, commit['commit']['author']['date'], commit['commit']['message'].replace('\r', ' ').replace('\n', ' '))

    def get_src_path_in_archive(self, info):
        """Path of the package source inside the downloaded archive."""
        return ('xeHentai-%s/xeHentai' % info.update_id)
def add_traceback_cli(parser, *, default=TRACEBACK):
    """Add --traceback/--tb and --no-traceback/--no-tb flags to *parser*.

    Returns ``process_args(args)`` which pops the parsed 'traceback' value
    from *args* and returns a generator intended to wrap the program body:
    when tracebacks are disabled it converts exceptions into clean
    ``sys.exit`` messages instead of letting them propagate.

    NOTE(review): ``traceback_cm`` is a plain generator but is returned from
    ``process_args`` as if it were a context manager — it very likely lost a
    ``@contextlib.contextmanager`` decorator during extraction; confirm
    against the original source.
    """
    parser.add_argument('--traceback', '--tb', action='store_true', default=default)
    parser.add_argument('--no-traceback', '--no-tb', dest='traceback', action='store_const', const=False)
    def process_args(args):
        ns = vars(args)
        # Remove 'traceback' from the namespace so downstream code never
        # sees it as a regular option.
        showtb = ns.pop('traceback')
        def traceback_cm():
            try:
                (yield)
            except BrokenPipeError:
                # e.g. output piped to `head`; exit quietly.
                pass
            except Exception as exc:
                if (not showtb):
                    sys.exit(f'ERROR: {exc}')
                raise
            except KeyboardInterrupt:
                if (not showtb):
                    sys.exit('\nINTERRUPTED')
                raise
            except BaseException as exc:
                if (not showtb):
                    sys.exit(f'{type(exc).__name__}: {exc}')
                raise
        return traceback_cm()
    return process_args
def load_motions(motion_files, skel, char_info, verbose):
    """Load reference motions given as file paths or preloaded motion objects.

    Parameters
    ----------
    motion_files
        Non-empty list of either file paths (str; .bvh/.bin/.gzip/.gz) or
        already-loaded ``MotionWithVelocity`` instances.
    skel
        Skeleton used when loading BVH files.
    char_info
        Character description providing ``v_up``/``v_face``/``v_up_env`` for
        BVH loading.
    verbose
        When True, print each file as it is loaded from disk.

    Returns
    -------
    tuple
        ``(motion_all, motion_file_names)`` — the motions and, for path
        input, the basename of each file.

    Raises
    ------
    Exception
        For unknown file extensions or unsupported input element types.
    """
    assert (motion_files is not None)
    motion_all = []
    motion_file_names = []
    if isinstance(motion_files[0], str):
        # Cache loaded files so duplicated paths are loaded only once.
        motion_dict = {}
        for file in motion_files:
            # Bug fix: basenames are now extracted only for path input; the
            # old unconditional os.path.split() loop crashed for the
            # MotionWithVelocity branch below.
            motion_file_names.append(os.path.split(file)[1])
            if (file in motion_dict):
                m = motion_dict[file]
            else:
                if file.endswith('bvh'):
                    m = bvh.load(motion=Motion(name=file, skel=skel), file=file, scale=1.0, load_skel=False, v_up_skel=char_info.v_up, v_face_skel=char_info.v_face, v_up_env=char_info.v_up_env)
                    m = MotionWithVelocity.from_motion(m)
                elif file.endswith('bin'):
                    m = pickle.load(open(file, 'rb'))
                elif file.endswith(('gzip', 'gz')):
                    with gzip.open(file, 'rb') as f:
                        m = pickle.load(f)
                else:
                    raise Exception('Unknown Motion File Type')
                # Bug fix: the cache dict was checked but never populated, so
                # duplicate paths were reloaded from disk every time.
                motion_dict[file] = m
                if verbose:
                    print(('Loaded: %s' % file))
            motion_all.append(m)
    elif isinstance(motion_files[0], MotionWithVelocity):
        # Already-loaded motions pass through unchanged; they have no path,
        # so report their 'name' attribute (empty when absent).
        motion_all = motion_files
        motion_file_names = [getattr(m, 'name', '') for m in motion_files]
    else:
        raise Exception('Unknown Type for Reference Motion')
    return (motion_all, motion_file_names)
def _list_forward(model: Model[(SeqT, SeqT)], Xs: List2d, is_train: bool) -> Tuple[(List2d, Callable)]:
    """Run the wrapped 2d layer over a list of sequences.

    The sequences are flattened (with padding) into one array, passed through
    the inner layer, and unflattened back to per-sequence arrays. The
    returned callback performs the mirror-image transformation for the
    gradients.
    """
    inner: Model[(Array2d, Array2d)] = model.layers[0]
    padding = model.attrs['pad']
    seq_lengths = NUMPY_OPS.asarray1i([len(seq) for seq in Xs])
    flat_X = inner.ops.flatten(Xs, pad=padding)
    (flat_Y, backprop_flat) = inner(flat_X, is_train)

    def backprop(dYs: List2d) -> List2d:
        # Flatten the incoming gradients, backprop through the inner layer,
        # then restore the original per-sequence shapes.
        flat_dY = inner.ops.flatten(dYs, pad=padding)
        flat_dX = backprop_flat(flat_dY)
        return inner.ops.unflatten(flat_dX, seq_lengths, pad=padding)

    Ys = inner.ops.unflatten(flat_Y, seq_lengths, pad=padding)
    return (Ys, backprop)
class SpendingViewSet(SpendingMixin, FabaOutlayMixin, ElasticsearchAccountDisasterBase, PaginationMixin):
    """Disaster spending grouped by treasury account (TAS), nested under its
    federal account.

    NOTE(review): the bare "_response()" line and the undecorated
    "total_queryset" method below look like residues of stripped decorators
    (plausibly a response/cache decorator and @property respectively) —
    confirm against the original source.
    """
    endpoint_doc = 'usaspending_api/api_contracts/contracts/v2/disaster/federal_account/spending.md'
    # Elasticsearch terms-aggregation key: bucket award financials by TAS id.
    agg_key = 'financial_accounts_by_award.treasury_account_id'
    # Nested money fields summed per bucket (see _build_child_json_result).
    nested_nonzero_fields = {'obligation': 'transaction_obligated_amount', 'outlay': 'gross_outlay_amount_by_award_cpe'}
    query_fields = ['federal_account_symbol', 'federal_account_symbol.contains', 'federal_account_title', 'federal_account_title.contains', 'treasury_account_symbol', 'treasury_account_symbol.contains', 'treasury_account_title', 'treasury_account_title.contains']
    top_hits_fields = ['financial_accounts_by_award.federal_account_symbol', 'financial_accounts_by_award.federal_account_title', 'financial_accounts_by_award.treasury_account_symbol', 'financial_accounts_by_award.treasury_account_title', 'financial_accounts_by_award.federal_account_id']
    _response()
    def post(self, request):
        """Serve 'award' spending from Elasticsearch; otherwise aggregate
        account spending from the ORM queryset and add grand totals."""
        if (self.spending_type == 'award'):
            self.has_children = True
            return self.perform_elasticsearch_search()
        else:
            results = list(self.total_queryset)
            extra_columns = ['total_budgetary_resources']
            response = construct_response(results, self.pagination)
            response['totals'] = self.accumulate_total_values(results, extra_columns)
            return Response(response)
    def build_elasticsearch_result(self, info_buckets: List[dict]) -> List[dict]:
        """Convert ES buckets (one per TAS) into federal-account parents with
        TAS children, merging children that share a parent id."""
        temp_results = {}
        child_results = []
        for bucket in info_buckets:
            child = self._build_child_json_result(bucket)
            child_results.append(child)
        for child in child_results:
            result = self._build_json_result(child)
            # 'parent_data' is only transport for grouping; drop it from output.
            child.pop('parent_data')
            if (result['id'] in temp_results.keys()):
                # Same federal account seen before: accumulate amounts and
                # append this TAS to its children.
                temp_results[result['id']] = {'id': int(result['id']), 'code': result['code'], 'description': result['description'], 'award_count': (temp_results[result['id']]['award_count'] + result['award_count']), 'obligation': (temp_results[result['id']]['obligation'] + result['obligation']), 'outlay': (temp_results[result['id']]['outlay'] + result['outlay']), 'total_budgetary_resources': None, 'children': (temp_results[result['id']]['children'] + result['children'])}
            else:
                temp_results[result['id']] = result
        results = [x for x in temp_results.values()]
        return results
    def _build_json_result(self, child):
        """Wrap a child (TAS) row in its federal-account parent skeleton."""
        return {'id': child['parent_data'][2], 'code': child['parent_data'][1], 'description': child['parent_data'][0], 'award_count': child['award_count'], 'obligation': child['obligation'], 'outlay': child['outlay'], 'total_budgetary_resources': None, 'children': [child]}
    def _build_child_json_result(self, bucket: dict):
        """Build one TAS row from an ES bucket; 'parent_data' carries the
        federal-account title/symbol/id used for grouping upstream."""
        return {'id': int(bucket['key']), 'code': bucket['dim_metadata']['hits']['hits'][0]['_source']['treasury_account_symbol'], 'description': bucket['dim_metadata']['hits']['hits'][0]['_source']['treasury_account_title'], 'award_count': int(bucket['count_awards_by_dim']['award_count']['value']), **{key: Decimal(bucket.get(f'sum_{val}', {'value': 0})['value']) for (key, val) in self.nested_nonzero_fields.items()}, 'total_budgetary_resources': None, 'parent_data': [bucket['dim_metadata']['hits']['hits'][0]['_source']['federal_account_title'], bucket['dim_metadata']['hits']['hits'][0]['_source']['federal_account_symbol'], bucket['dim_metadata']['hits']['hits'][0]['_source']['federal_account_id']]}
    def total_queryset(self):
        """ORM queryset of per-TAS obligations/outlays/total budgetary
        resources for the selected DEF codes (non-award spending path)."""
        filters = [self.is_in_provided_def_codes, self.is_non_zero_total_spending, self.all_closed_defc_submissions, Q(treasury_account__isnull=False), Q(treasury_account__federal_account__isnull=False)]
        annotations = {'fa_code': F('treasury_account__federal_account__federal_account_code'), 'description': F('treasury_account__account_title'), 'code': F('treasury_account__tas_rendering_label'), 'id': F('treasury_account__treasury_account_identifier'), 'award_count': Value(None, output_field=IntegerField()), 'fa_description': F('treasury_account__federal_account__account_title'), 'fa_id': F('treasury_account__federal_account_id'), 'obligation': Coalesce(Sum(Case(When(self.final_period_submission_query_filters, then=(F('obligations_incurred_by_program_object_class_cpe') + F('deobligations_recoveries_refund_pri_program_object_class_cpe'))), default=Value(0), output_field=DecimalField(max_digits=23, decimal_places=2))), 0, output_field=DecimalField(max_digits=23, decimal_places=2)), 'outlay': Coalesce(Sum(Case(When(self.final_period_submission_query_filters, then=((F('gross_outlay_amount_by_program_object_class_cpe') + F('ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe')) + F('ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe'))), default=Value(0), output_field=DecimalField(max_digits=23, decimal_places=2))), 0, output_field=DecimalField(max_digits=23, decimal_places=2)), 'total_budgetary_resources': Coalesce(Subquery(latest_gtas_of_each_year_queryset().filter(disaster_emergency_fund_id__in=self.def_codes, treasury_account_identifier=OuterRef('treasury_account')).annotate(amount=Func('total_budgetary_resources_cpe', function='Sum'), unobligated_balance=Func('budget_authority_unobligated_balance_brought_forward_cpe', function='Sum'), deobligation=Func('deobligations_or_recoveries_or_refunds_from_prior_year_cpe', function='Sum'), prior_year=Func('prior_year_paid_obligation_recoveries', function='Sum'), unobligated_adjustments=Func('adjustments_to_unobligated_balance_brought_forward_fyb', function='Sum')).annotate(total_budget_authority=((((F('amount') - F('unobligated_balance')) - F('deobligation')) - F('prior_year')) - 
F('unobligated_adjustments'))).values('total_budget_authority'), output_field=DecimalField(max_digits=23, decimal_places=2)), 0, output_field=DecimalField(max_digits=23, decimal_places=2))}
        return FinancialAccountsByProgramActivityObjectClass.objects.filter(*filters).values('treasury_account__federal_account__id', 'treasury_account__federal_account__federal_account_code', 'treasury_account__federal_account__account_title').annotate(**annotations).values(*annotations.keys())
class TlsBulkCertificateResponseAttributes(ModelComposed):
    """Auto-generated OpenAPI composed model (allOf: Timestamps +
    TlsBulkCertificateResponseAttributesAllOf).

    NOTE(review): the bare "_property" and "_js_args_to_python_args" lines
    below look like stripped decorators (e.g. @cached_property and
    @convert_js_args_to_python_args in the generator's usual output), and
    _from_openapi_data takes ``cls`` but appears to be missing its
    @classmethod decorator — confirm against the generated original.
    """
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not listed in openapi_types."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Map attribute name -> tuple of accepted types."""
        lazy_import()
        return {'created_at': (datetime, none_type), 'deleted_at': (datetime, none_type), 'updated_at': (datetime, none_type), 'not_after': (datetime,), 'not_before': (datetime,), 'replace': (bool,)}
    _property
    def discriminator():
        return None
    # JSON key names are identical to the Python attribute names here.
    attribute_map = {'created_at': 'created_at', 'deleted_at': 'deleted_at', 'updated_at': 'updated_at', 'not_after': 'not_after', 'not_before': 'not_before', 'replace': 'replace'}
    read_only_vars = {'created_at', 'deleted_at', 'updated_at', 'not_after', 'not_before', 'replace'}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from server data; unlike __init__, allows setting
        read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        # Distribute kwargs across the composed (allOf) schema instances.
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                # Discard unknown keys when the configuration says so.
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    _property
    def _composed_schemas():
        """The allOf/anyOf/oneOf composition this model participates in."""
        lazy_import()
        return {'anyOf': [], 'allOf': [Timestamps, TlsBulkCertificateResponseAttributesAllOf], 'oneOf': []}
class ACEScg(sRGB):
    """ACEScg color space definition.

    Channels r/g/b are bounded to the 0..65504 range (per CHANNELS below);
    conversions are routed through the 'xyz-d65' base space.
    """
    BASE = 'xyz-d65'
    NAME = 'acescg'
    SERIALIZE = ('--acescg',)
    # White point chromaticity coordinates (x, y).
    WHITE = (0.32168, 0.33767)
    CHANNELS = (Channel('r', 0.0, 65504.0, bound=True), Channel('g', 0.0, 65504.0, bound=True), Channel('b', 0.0, 65504.0, bound=True))
    def to_base(self, coords: Vector) -> Vector:
        """Convert ACEScg coordinates to the XYZ D65 base space."""
        return acescg_to_xyz(coords)
    def from_base(self, coords: Vector) -> Vector:
        """Convert XYZ D65 base-space coordinates to ACEScg."""
        return xyz_to_acescg(coords)
class Listener(object):
    """Per-connection listener state.

    Holds the owning event loop, an asyncio.Event used to wake a waiter,
    and per-channel bookkeeping (subscribed channels, buffered items,
    overflow flag and last error text).
    """

    def __init__(self):
        # Bind to the loop of the creating thread; wake_threadsafe relies
        # on it to hand the Event.set over from other threads.
        self.loop = asyncio.get_event_loop()
        self.aevent = asyncio.Event()
        # Channel bookkeeping, all empty/false initially.
        self.channels = set()
        self.channel_items = {}
        self.user_id = ''
        self.error = ''
        self.overflow = False

    def wake_threadsafe(self):
        """Set the wake event from any thread via the owning loop."""
        self.loop.call_soon_threadsafe(self.aevent.set)
class OptionSeriesDumbbellSonificationContexttracksMappingPitch(Options):
    """Auto-generated Highcharts options wrapper: sonification pitch mapping.

    NOTE(review): each name below is defined twice (getter- then
    setter-shaped body). In the upstream generated code these pairs carry
    @property / @<name>.setter decorators that appear to have been stripped
    here; as written, the second definition silently replaces the first —
    confirm against the original source.
    """
    def mapFunction(self):
        # getter; default: None
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # setter
        self._config(value, js_type=False)
    def mapTo(self):
        # getter; default: 'y'
        return self._config_get('y')
    def mapTo(self, text: str):
        # setter
        self._config(text, js_type=False)
    def max(self):
        # getter; default: 'c6'
        return self._config_get('c6')
    def max(self, text: str):
        # setter
        self._config(text, js_type=False)
    def min(self):
        # getter; default: 'c2'
        return self._config_get('c2')
    def min(self, text: str):
        # setter
        self._config(text, js_type=False)
    def scale(self):
        # getter; default: None
        return self._config_get(None)
    def scale(self, value: Any):
        # setter
        self._config(value, js_type=False)
    def within(self):
        # getter; default: 'yAxis'
        return self._config_get('yAxis')
    def within(self, text: str):
        # setter
        self._config(text, js_type=False)
class OptionSeriesPackedbubbleSonificationTracksMappingHighpassFrequency(Options):
    """Auto-generated Highcharts options wrapper: highpass frequency mapping.

    NOTE(review): each name below is defined twice (getter- then
    setter-shaped body); the upstream @property / @<name>.setter decorators
    appear to have been stripped here, so the second definition silently
    replaces the first — confirm against the original source.
    """
    def mapFunction(self):
        # getter; default: None
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # setter
        self._config(value, js_type=False)
    def mapTo(self):
        # getter; default: None
        return self._config_get(None)
    def mapTo(self, text: str):
        # setter
        self._config(text, js_type=False)
    def max(self):
        # getter; default: None
        return self._config_get(None)
    def max(self, num: float):
        # setter
        self._config(num, js_type=False)
    def min(self):
        # getter; default: None
        return self._config_get(None)
    def min(self, num: float):
        # setter
        self._config(num, js_type=False)
    def within(self):
        # getter; default: None
        return self._config_get(None)
    def within(self, value: Any):
        # setter
        self._config(value, js_type=False)
class TestPOSTAccessed(TestCase):
    """Regression tests for Django's request.POST being accessed (e.g. by
    middleware) before DRF parses the request."""

    def setUp(self):
        self.factory = APIRequestFactory()

    def test_post_accessed_in_post_method(self):
        """Form data stays readable even when POST was touched first."""
        underlying = self.factory.post('/', {'foo': 'bar'})
        wrapped = Request(underlying, parsers=[FormParser(), MultiPartParser()])
        underlying.POST  # simulate middleware accessing POST before DRF
        assert wrapped.POST == {'foo': ['bar']}
        assert wrapped.data == {'foo': ['bar']}

    def test_post_accessed_in_post_method_with_json_parser(self):
        """With only a JSON parser, form data yields empty POST/data."""
        underlying = self.factory.post('/', {'foo': 'bar'})
        wrapped = Request(underlying, parsers=[JSONParser()])
        underlying.POST  # simulate middleware accessing POST before DRF
        assert wrapped.POST == {}
        assert wrapped.data == {}

    def test_post_accessed_in_put_method(self):
        """PUT form data stays readable even when POST was touched first."""
        underlying = self.factory.put('/', {'foo': 'bar'})
        wrapped = Request(underlying, parsers=[FormParser(), MultiPartParser()])
        underlying.POST  # simulate middleware accessing POST before DRF
        assert wrapped.POST == {'foo': ['bar']}
        assert wrapped.data == {'foo': ['bar']}

    def test_request_read_before_parsing(self):
        """Consuming the raw body first makes POST access raise (twice, to
        check the error is repeatable) while .data remains accessible."""
        underlying = self.factory.put('/', {'foo': 'bar'})
        wrapped = Request(underlying, parsers=[FormParser(), MultiPartParser()])
        underlying.read()
        with pytest.raises(RawPostDataException):
            wrapped.POST
        with pytest.raises(RawPostDataException):
            wrapped.POST
        wrapped.data
class LoggingAzureblobResponse(ModelComposed):
    """Auto-generated OpenAPI composed model for an Azure Blob logging
    endpoint response (allOf of the logging/common/timestamp schemas listed
    in _composed_schemas).

    NOTE(review): the bare "_property" and "_js_args_to_python_args" lines
    below look like stripped decorators (e.g. @cached_property and
    @convert_js_args_to_python_args), and _from_openapi_data takes ``cls``
    but appears to be missing its @classmethod decorator — confirm against
    the generated original.
    """
    # Enumerated values accepted for select string attributes.
    allowed_values = {('placement',): {'None': None, 'NONE': 'none', 'WAF_DEBUG': 'waf_debug', 'NULL': 'null'}, ('format_version',): {'v1': '1', 'v2': '2'}, ('message_type',): {'CLASSIC': 'classic', 'LOGGLY': 'loggly', 'LOGPLEX': 'logplex', 'BLANK': 'blank'}, ('compression_codec',): {'ZSTD': 'zstd', 'SNAPPY': 'snappy', 'GZIP': 'gzip'}}
    validations = {('file_max_bytes',): {'inclusive_minimum': 1048576}}
    _property
    def additional_properties_type():
        """Types accepted for properties not listed in openapi_types."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Map attribute name -> tuple of accepted types."""
        lazy_import()
        return {'name': (str,), 'placement': (str, none_type), 'response_condition': (str, none_type), 'format': (str,), 'format_version': (str,), 'message_type': (str,), 'timestamp_format': (str, none_type), 'compression_codec': (str,), 'period': (str,), 'gzip_level': (str,), 'created_at': (datetime, none_type), 'deleted_at': (datetime, none_type), 'updated_at': (datetime, none_type), 'service_id': (str,), 'version': (str,), 'path': (str, none_type), 'account_name': (str,), 'container': (str,), 'sas_token': (str,), 'public_key': (str, none_type), 'file_max_bytes': (int,)}
    _property
    def discriminator():
        return None
    # JSON key names are identical to the Python attribute names here.
    attribute_map = {'name': 'name', 'placement': 'placement', 'response_condition': 'response_condition', 'format': 'format', 'format_version': 'format_version', 'message_type': 'message_type', 'timestamp_format': 'timestamp_format', 'compression_codec': 'compression_codec', 'period': 'period', 'gzip_level': 'gzip_level', 'created_at': 'created_at', 'deleted_at': 'deleted_at', 'updated_at': 'updated_at', 'service_id': 'service_id', 'version': 'version', 'path': 'path', 'account_name': 'account_name', 'container': 'container', 'sas_token': 'sas_token', 'public_key': 'public_key', 'file_max_bytes': 'file_max_bytes'}
    read_only_vars = {'timestamp_format', 'created_at', 'deleted_at', 'updated_at', 'service_id', 'version'}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate from server data; unlike __init__, allows setting
        read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        # Distribute kwargs across the composed (allOf) schema instances.
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                # Discard unknown keys when the configuration says so.
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor; rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        constant_args = {'_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes}
        composed_info = validate_get_composed_info(constant_args, kwargs, self)
        self._composed_instances = composed_info[0]
        self._var_name_to_model_instances = composed_info[1]
        self._additional_properties_model_instances = composed_info[2]
        discarded_args = composed_info[3]
        for (var_name, var_value) in kwargs.items():
            if ((var_name in discarded_args) and (self._configuration is not None) and self._configuration.discard_unknown_keys and self._additional_properties_model_instances):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
    _property
    def _composed_schemas():
        """The allOf/anyOf/oneOf composition this model participates in."""
        lazy_import()
        return {'anyOf': [], 'allOf': [LoggingAzureblobAdditional, LoggingCommonResponse, LoggingGenericCommonResponse, ServiceIdAndVersionString, Timestamps], 'oneOf': []}
class OptionPlotoptionsFunnelSonificationTracksMappingPlaydelay(Options):
    """Auto-generated Highcharts options wrapper: play-delay mapping.

    NOTE(review): each name below is defined twice (getter- then
    setter-shaped body); the upstream @property / @<name>.setter decorators
    appear to have been stripped here, so the second definition silently
    replaces the first — confirm against the original source.
    """
    def mapFunction(self):
        # getter; default: None
        return self._config_get(None)
    def mapFunction(self, value: Any):
        # setter
        self._config(value, js_type=False)
    def mapTo(self):
        # getter; default: None
        return self._config_get(None)
    def mapTo(self, text: str):
        # setter
        self._config(text, js_type=False)
    def max(self):
        # getter; default: None
        return self._config_get(None)
    def max(self, num: float):
        # setter
        self._config(num, js_type=False)
    def min(self):
        # getter; default: None
        return self._config_get(None)
    def min(self, num: float):
        # setter
        self._config(num, js_type=False)
    def within(self):
        # getter; default: None
        return self._config_get(None)
    def within(self, value: Any):
        # setter
        self._config(value, js_type=False)
class RegistryURIBackend(BaseURIBackend):
    """URI backend that translates EthPM registry URIs into manifest URIs.

    Registry URIs are translated (never resolved directly): the registry
    contract at the URI's address is queried for the release's manifest URI.
    """

    def __init__(self) -> None:
        # web3 is imported lazily so the backend has no import-time cost.
        from web3 import Web3, WebsocketProvider
        self.w3 = Web3(WebsocketProvider())

    def can_translate_uri(self, uri: str) -> bool:
        """True when *uri* is a well-formed registry URI."""
        return is_valid_registry_uri(uri)

    def can_resolve_uri(self, uri: str) -> bool:
        """Registry URIs are never resolved directly, only translated."""
        return False

    def fetch_uri_contents(self, uri: str) -> URI:
        """Return the manifest URI for the package release named by *uri*."""
        (address, chain_id, pkg_name, pkg_version, _, _) = parse_registry_uri(uri)
        if (chain_id != '1'):
            raise CannotHandleURI('Currently only mainnet registry uris are supported.')
        self.w3.enable_unstable_package_management_api()
        self.w3.pm.set_registry(address)
        (_, _, manifest_uri) = self.w3.pm.get_release_data(pkg_name, pkg_version)
        return URI(manifest_uri)
# NOTE(review): "_task(bind=True, track_started=True)" below looks like a
# stripped Celery task decorator (e.g. "@app.task(bind=True, ...)");
# bind=True is what supplies the ``self`` task instance used for
# update_state() — confirm against the original source.
_task(bind=True, track_started=True)
def task_tools_raw_json(self, device_ip, device_username, device_password):
    """Background task: fetch an IOS-XE device's running config as JSON.

    Reports progress via the task's update_state() and returns the
    (status, content) pair from the device's get_running_config_json().
    """
    self.update_state(state='Getting configuration data')
    new_device = new_iosxe_device(device_ip, device_username, device_password)
    (r_status, r_content) = new_device.get_running_config_json()
    self.update_state(state='COMPLETE')
    return (r_status, r_content)
def _get_temp_dir(pool_folder_name, temp_folder=None):
use_shared_mem = False
if (temp_folder is None):
temp_folder = os.environ.get('JOBLIB_TEMP_FOLDER', None)
if (temp_folder is None):
if os.path.exists(SYSTEM_SHARED_MEM_FS):
try:
temp_folder = SYSTEM_SHARED_MEM_FS
pool_folder = os.path.join(temp_folder, pool_folder_name)
if (not os.path.exists(pool_folder)):
os.makedirs(pool_folder)
use_shared_mem = True
except IOError:
temp_folder = None
if (temp_folder is None):
temp_folder = tempfile.gettempdir()
temp_folder = os.path.abspath(os.path.expanduser(temp_folder))
pool_folder = os.path.join(temp_folder, pool_folder_name)
return (pool_folder, use_shared_mem) |
def replaced_configure_traits(instance, filename=None, view=None, kind=None, edit=True, context=None, handler=None, id='', scrollable=None, **args):
    """Create the UI via UITester and immediately dispose of it.

    Apparently a non-blocking test replacement for a ``configure_traits``
    style call (presumably TraitsUI — confirm against the caller): the
    signature is kept for compatibility, but ``filename``, ``kind`` and
    ``edit`` are ignored — ``kind`` is forced to 'live' and ``parent`` to
    None.
    """
    ui_kwargs = dict(view=view, parent=None, kind='live', context=context, handler=handler, id=id, scrollable=scrollable, **args)
    # The context manager builds and tears the UI down; nothing to do inside.
    with UITester().create_ui(instance, ui_kwargs):
        pass
class MinLengthFilterTestCase(unittest.TestCase):
    """Tests for quality_filter.MinLengthFilter length thresholding."""

    def setUp(self):
        # Fixture: one 4-base record and one 5-base record.
        self.sequences = [SeqRecord(Seq('ACGT')), SeqRecord(Seq('ACTTT'))]

    def _filtered(self, min_length):
        """Run a MinLengthFilter over the fixture and collect survivors."""
        filt = quality_filter.MinLengthFilter(min_length)
        return list(filt.filter_records(self.sequences))

    def test_none_pass(self):
        self.assertEqual([], self._filtered(6))

    def test_all_pass(self):
        self.assertEqual(self.sequences, self._filtered(4))

    def test_some_pass(self):
        self.assertEqual(self.sequences[1:], self._filtered(5))
class RunDialog(QDialog):
simulation_done = Signal(bool, str)
on_run_model_event = Signal(object)
def __init__(self, config_file: str, run_model: BaseRunModel, notifier: ErtNotifier, parent=None):
QDialog.__init__(self, parent)
self.setAttribute(Qt.WA_DeleteOnClose)
self.setWindowFlags(Qt.Window)
self.setWindowFlags((self.windowFlags() & (~ Qt.WindowContextHelpButtonHint)))
self.setWindowTitle(f'Experiment - {config_file}')
self._snapshot_model = SnapshotModel(self)
self._run_model = run_model
self._notifier = notifier
self._isDetailedDialog = False
self._minimum_width = 1200
self._ticker = QTimer(self)
self._ticker.timeout.connect(self._on_ticker)
progress_proxy_model = ProgressProxyModel(self._snapshot_model, parent=self)
self._total_progress_label = QLabel(_TOTAL_PROGRESS_TEMPLATE.format(total_progress=0, phase_name=run_model.getPhaseName()), self)
self._total_progress_bar = QProgressBar(self)
self._total_progress_bar.setRange(0, 100)
self._total_progress_bar.setTextVisible(False)
self._iteration_progress_label = QLabel(self)
self._progress_view = ProgressView(self)
self._progress_view.setModel(progress_proxy_model)
self._progress_view.setIndeterminate(True)
legend_view = LegendView(self)
legend_view.setModel(progress_proxy_model)
self._tab_widget = QTabWidget(self)
self._tab_widget.currentChanged.connect(self._current_tab_changed)
self._snapshot_model.rowsInserted.connect(self.on_snapshot_new_iteration)
self._job_label = QLabel(self)
self._job_model = JobListProxyModel(self, 0, 0)
self._job_model.setSourceModel(self._snapshot_model)
self._job_view = QTableView(self)
self._job_view.setVerticalScrollMode(QAbstractItemView.ScrollPerItem)
self._job_view.setSelectionBehavior(QAbstractItemView.SelectRows)
self._job_view.setSelectionMode(QAbstractItemView.SingleSelection)
self._job_view.clicked.connect(self._job_clicked)
self._job_view.setModel(self._job_model)
self.running_time = QLabel('')
self.plot_tool = PlotTool(config_file, self.parent())
self.plot_button = QPushButton(self.plot_tool.getName())
self.plot_button.clicked.connect(self.plot_tool.trigger)
self.plot_button.setEnabled(True)
self.kill_button = QPushButton('Terminate experiment')
self.done_button = QPushButton('Done')
self.done_button.setHidden(True)
self.restart_button = QPushButton('Restart')
self.restart_button.setHidden(True)
self.show_details_button = QPushButton('Show details')
self.show_details_button.setCheckable(True)
size = 20
spin_movie = QMovie('img:loading.gif')
spin_movie.setSpeed(60)
spin_movie.setScaledSize(QSize(size, size))
spin_movie.start()
self.processing_animation = QLabel()
self.processing_animation.setMaximumSize(QSize(size, size))
self.processing_animation.setMinimumSize(QSize(size, size))
self.processing_animation.setMovie(spin_movie)
button_layout = QHBoxLayout()
button_layout.addWidget(self.processing_animation)
button_layout.addWidget(self.running_time)
button_layout.addStretch()
button_layout.addWidget(self.show_details_button)
button_layout.addWidget(self.plot_button)
button_layout.addWidget(self.kill_button)
button_layout.addWidget(self.done_button)
button_layout.addWidget(self.restart_button)
button_widget_container = QWidget()
button_widget_container.setLayout(button_layout)
layout = QVBoxLayout()
layout.addWidget(self._total_progress_label)
layout.addWidget(self._total_progress_bar)
layout.addWidget(self._iteration_progress_label)
layout.addWidget(self._progress_view)
layout.addWidget(legend_view)
layout.addWidget(self._tab_widget)
layout.addWidget(self._job_label)
layout.addWidget(self._job_view)
layout.addWidget(button_widget_container)
self.setLayout(layout)
self.kill_button.clicked.connect(self.killJobs)
self.done_button.clicked.connect(self.accept)
self.restart_button.clicked.connect(self.restart_failed_realizations)
self.show_details_button.clicked.connect(self.toggle_detailed_progress)
self.simulation_done.connect(self._on_simulation_done)
self.setMinimumWidth(self._minimum_width)
self._setSimpleDialog()
self.finished.connect(self._on_finished)
self._run_model.add_send_event_callback(self.on_run_model_event.emit)
self.on_run_model_event.connect(self._on_event)
def _current_tab_changed(self, index: int) -> None:
    """Clear the realization selection on every tab except the newly active one."""
    for tab_index in range(self._tab_widget.count()):
        if tab_index == index:
            continue
        tab = self._tab_widget.widget(tab_index)
        if isinstance(tab, RealizationWidget):
            tab.clearSelection()
def _setSimpleDialog(self) -> None:
    """Collapse the dialog to its compact form, hiding all detail panes."""
    self._isDetailedDialog = False
    for pane in (self._tab_widget, self._job_label, self._job_view):
        pane.setVisible(False)
    self.show_details_button.setText('Show details')
def _setDetailedDialog(self) -> None:
    """Expand the dialog, revealing the per-realization detail panes."""
    self._isDetailedDialog = True
    for pane in (self._tab_widget, self._job_label, self._job_view):
        pane.setVisible(True)
    self.show_details_button.setText('Hide details')
# NOTE(review): the bare tuple below is a no-op expression — it looks like the
# argument list of a stripped @Slot(QModelIndex, int, int) decorator; confirm
# against the original source.
(QModelIndex, int, int)
def on_snapshot_new_iteration(self, parent: QModelIndex, start: int, end: int) -> None:
    """Add a realizations tab when the snapshot model gains a new iteration.

    Connected to the model's rows-inserted signal; only top-level inserts
    (invalid parent) represent new iterations.
    """
    if (not parent.isValid()):
        index = self._snapshot_model.index(start, 0, parent)
        iter_row = start
        self._iteration_progress_label.setText(f'Progress for iteration {index.internalPointer().id}')
        widget = RealizationWidget(iter_row)
        widget.setSnapshotModel(self._snapshot_model)
        # Selecting a realization in the tab drives the job detail view.
        widget.currentChanged.connect(self._select_real)
        self._tab_widget.addTab(widget, f'Realizations for iteration {index.internalPointer().id}')
# NOTE(review): stray no-op tuple — presumably a stripped @Slot(QModelIndex) decorator.
(QModelIndex)
def _job_clicked(self, index):
    """Open (or re-raise) a FileDialog for the file behind the clicked job cell."""
    if (not index.isValid()):
        return
    selected_file = index.data(FileRole)
    # Reuse an already-open dialog for the same file instead of stacking duplicates.
    file_dialog = self.findChild(QDialog, name=selected_file)
    if (file_dialog and file_dialog.isVisible()):
        file_dialog.raise_()
    elif selected_file:
        job_name = index.siblingAtColumn(0).data()
        FileDialog(selected_file, job_name, index.row(), index.data(RealIens), index.data(IterNum), self)
# NOTE(review): stray no-op tuple — presumably a stripped @Slot(QModelIndex) decorator.
(QModelIndex)
def _select_real(self, index):
    """Point the job model and label at the realization selected in the tab."""
    real = index.row()
    iter_ = index.model().get_iter()
    self._job_model.set_real(iter_, real)
    self._job_label.setText(f'Realization id {index.data(RealIens)} in iteration {index.data(IterNum)}')
    self._job_view.horizontalHeader().setSectionResizeMode(QHeaderView.Stretch)
def closeEvent(self, QCloseEvent):
    """Allow closing only when the run is finished or the user confirms termination."""
    if self._run_model.isFinished():
        self.simulation_done.emit(self._run_model.hasRunFailed(), self._run_model.getFailMessage())
        self.accept()
    elif (self.killJobs() != QMessageBox.Yes):
        # User declined to terminate the running experiment: keep the dialog open.
        QCloseEvent.ignore()
def startSimulation(self):
    """Reset all run state and launch the experiment with its tracking machinery.

    Spawns a daemon thread that runs the model, a QThread-hosted worker that
    consumes evaluator events, and starts the 1 s runtime ticker.
    """
    self._run_model.reset()
    self._snapshot_model.reset()
    self._tab_widget.clear()
    port_range = None
    if (self._run_model.queue_system == QueueSystem.LOCAL):
        # Local queue: restrict the evaluator server to a fixed port window.
        port_range = range(49152, 51819)
    evaluator_server_config = EvaluatorServerConfig(custom_port_range=port_range)
    def run():
        self._run_model.startSimulations(evaluator_server_config=evaluator_server_config)
    simulation_thread = Thread(name='ert_gui_simulation_thread', target=run, daemon=True)
    simulation_thread.start()
    self._ticker.start(1000)
    self._tracker = EvaluatorTracker(self._run_model, ee_con_info=evaluator_server_config.get_connection_info())
    worker = TrackerWorker(self._tracker.track)
    worker_thread = QThread()
    worker.done.connect(worker_thread.quit)
    worker.consumed_event.connect(self._on_event)
    worker.moveToThread(worker_thread)
    self.simulation_done.connect(worker.stop)
    # Keep references alive for the duration of the run.
    self._worker = worker
    self._worker_thread = worker_thread
    worker_thread.started.connect(worker.consume_and_emit)
    self._worker_thread.finished.connect(self._show_done_button)
    self._worker_thread.start()
    self._notifier.set_is_simulation_running(True)
def killJobs(self):
    """Ask for confirmation and, if granted, terminate the running experiment.

    Returns the QMessageBox button the user chose so callers (e.g. closeEvent)
    can distinguish confirmation from cancellation.
    """
    msg = 'Are you sure you want to terminate the currently running experiment?'
    kill_job = QMessageBox.question(self, 'Terminate experiment', msg, (QMessageBox.Yes | QMessageBox.No))
    if (kill_job == QMessageBox.Yes):
        # Stop the tracker first, then wait for the worker thread to drain.
        self._tracker.request_termination()
        self._worker_thread.quit()
        self._worker_thread.wait()
        self._on_finished()
        self.finished.emit((- 1))
    return kill_job
# NOTE(review): stray no-op tuple — presumably a stripped @Slot(bool, str) decorator.
(bool, str)
def _on_simulation_done(self, failed, failed_msg):
    """Switch the dialog into its finished state and report failures, if any."""
    self.processing_animation.hide()
    self.kill_button.setHidden(True)
    # Restart is only offered when realizations failed and the model supports it.
    self.restart_button.setVisible(self._run_model.has_failed_realizations())
    self.restart_button.setEnabled(self._run_model.support_restart)
    self._total_progress_bar.setValue(100)
    self._total_progress_label.setText(_TOTAL_PROGRESS_TEMPLATE.format(total_progress=100, phase_name=self._run_model.getPhaseName()))
    self._notifier.set_is_simulation_running(False)
    if failed:
        self.fail_msg_box = ErtMessageBox('ERT experiment failed!', failed_msg, self)
        self.fail_msg_box.exec_()
def _show_done_button(self):
    """Reveal the 'Done' button once the tracking worker thread has finished."""
    self.done_button.setHidden(False)
# NOTE(review): stray no-op tuple — presumably a stripped @Slot() decorator.
()
def _on_ticker(self):
    """Refresh the running-time label (driven by the 1 s ticker)."""
    runtime = self._run_model.get_runtime()
    self.running_time.setText(format_running_time(runtime))
# NOTE(review): stray no-op tuple — presumably a stripped @Slot(object) decorator.
(object)
def _on_event(self, event: object):
    """Dispatch a tracker/run-model event to the appropriate UI update.

    Handles end-of-run, full and partial snapshots (progress + model data),
    and the update-step events that drive the per-iteration Update tabs.
    """
    if isinstance(event, EndEvent):
        self.simulation_done.emit(event.failed, event.failed_msg)
        self._worker.stop()
        self._ticker.stop()
    elif isinstance(event, FullSnapshotEvent):
        if (event.snapshot is not None):
            self._snapshot_model._add_snapshot(event.snapshot, event.iteration)
        self._progress_view.setIndeterminate(event.indeterminate)
        progress = int((event.progress * 100))
        # Out-of-range progress is logged, not clamped.
        self.validate_percentage_range(progress)
        self._total_progress_bar.setValue(progress)
        self._total_progress_label.setText(_TOTAL_PROGRESS_TEMPLATE.format(total_progress=progress, phase_name=event.phase_name))
    elif isinstance(event, SnapshotUpdateEvent):
        if (event.partial_snapshot is not None):
            self._snapshot_model._add_partial_snapshot(event.partial_snapshot, event.iteration)
        self._progress_view.setIndeterminate(event.indeterminate)
        progress = int((event.progress * 100))
        self.validate_percentage_range(progress)
        self._total_progress_bar.setValue(progress)
        self._total_progress_label.setText(_TOTAL_PROGRESS_TEMPLATE.format(total_progress=progress, phase_name=event.phase_name))
    elif isinstance(event, RunModelUpdateBeginEvent):
        iteration = event.iteration
        widget = UpdateWidget(iteration)
        self._tab_widget.addTab(widget, f'Update {iteration}')
    elif isinstance(event, RunModelUpdateEndEvent):
        if ((widget := self._get_update_widget(event.iteration)) is not None):
            widget.end(event)
    elif (isinstance(event, (RunModelStatusEvent, RunModelTimeEvent)) and ((widget := self._get_update_widget(event.iteration)) is not None)):
        widget.update_status(event)
def _get_update_widget(self, iteration: int) -> Optional[UpdateWidget]:
    """Return the UpdateWidget tab for *iteration*, or None if no such tab exists."""
    tabs = (self._tab_widget.widget(i) for i in range(self._tab_widget.count()))
    return next(
        (tab for tab in tabs if isinstance(tab, UpdateWidget) and tab.iteration == iteration),
        None,
    )
def validate_percentage_range(self, progress: int):
    """Log a warning when *progress* falls outside the valid 0-100 range."""
    if progress < 0 or progress > 100:
        logger = logging.getLogger(__name__)
        logger.warning(f'Total progress bar exceeds [0-100] range: {progress}')
def restart_failed_realizations(self):
    """Confirm with the user, then relaunch only the failed realizations."""
    msg = QMessageBox(self)
    msg.setIcon(QMessageBox.Information)
    msg.setText('Note that workflows will only be executed on the restarted realizations and that this might have unexpected consequences.')
    msg.setWindowTitle('Restart failed realizations')
    msg.setStandardButtons((QMessageBox.Ok | QMessageBox.Cancel))
    msg.setObjectName('restart_prompt')
    result = msg.exec_()
    if (result == QMessageBox.Ok):
        # Re-enter the running state before relaunching.
        self.restart_button.setVisible(False)
        self.kill_button.setVisible(True)
        self.done_button.setVisible(False)
        self._run_model.restart()
        self.startSimulation()
# NOTE(review): stray no-op tuple — presumably a stripped @Slot() decorator.
()
def toggle_detailed_progress(self):
    """Flip between the simple and detailed dialog layouts."""
    if self._isDetailedDialog:
        self._setSimpleDialog()
    else:
        self._setDetailedDialog()
    # Shrink or grow the dialog to fit the new layout.
    self.adjustSize()
def _on_finished(self):
    """Close every file-viewer dialog spawned from this run dialog."""
    for viewer in self.findChildren(FileDialog):
        viewer.close()
def keyPressEvent(self, q_key_event):
    """Route Escape through close() so closeEvent can veto; defer everything else."""
    if (q_key_event.key() == Qt.Key_Escape):
        self.close()
    else:
        QDialog.keyPressEvent(self, q_key_event)
def sart_projection_update(image: 'float64[:,:]', theta: float, projection: 'float64[:]', projection_shift: float=0.0):
    """Accumulate one SART update of *image* from a single projection at angle *theta*.

    Ray i is traced at position i + projection_shift and deposits projection[i]
    into a fresh update image via bilinear_ray_update; the accumulated update
    image is returned (the input image itself is not modified here).
    """
    update = np.zeros_like(image)
    for ray_index, ray_value in enumerate(projection):
        bilinear_ray_update(image, update, theta, ray_index + projection_shift, ray_value)
    return update
def causal_type_to_kernel_str(causal_type: CausalType) -> str:
    """Map a CausalType member onto its kernel-side C++ identifier string."""
    kernel_names = {
        CausalType.NO_CAUSAL: 'CausalType::NO_CAUSAL',
        CausalType.UPPER_RIGHT_EMPTY: 'CausalType::UPPER_RIGHT_EMPTY',
        CausalType.LOWER_LEFT_EMPTY: 'CausalType::LOWER_LEFT_EMPTY',
    }
    if causal_type in kernel_names:
        return kernel_names[causal_type]
    raise RuntimeError(f'Unsupported causal type causal_type={causal_type!r}')
def _find_symbol_table(root, qualname):
assert isinstance(root, symtable.SymbolTable)
curtable = root
(name, _, remainder) = qualname.partition('.')
while name:
if (name != '<locals>'):
for child in (curtable.get_children() or ()):
if (not isinstance(child, symtable.SymbolTable)):
continue
if (child.get_name() == name):
curtable = child
break
else:
raise NotImplementedError((curtable, qualname, name, remainder))
(name, _, remainder) = remainder.partition('.')
return curtable |
class TestConfig():
    """Unit tests for config.Config backed by the in-memory config store."""

    def test_load_non_existing_config(self):
        """Defaults must be served even when no config file is present."""
        cfg = config.Config(config_file_class=InMemoryConfigStore)
        assert (not cfg.config_present())
        assert (cfg.opts('provisioning', 'node.name.prefix') == 'rally-node')

    def test_load_existing_config(self):
        """Stored values are loaded and application-scope overrides win."""
        cfg = config.Config(config_file_class=InMemoryConfigStore)
        assert (not cfg.config_present())
        sample_config = {'tests': {'sample.key': 'value'}, 'meta': {'config.version': config.Config.CURRENT_CONFIG_VERSION}}
        cfg.config_file.store(sample_config)
        assert cfg.config_present()
        cfg.load_config()
        assert (cfg.opts('provisioning', 'node.name.prefix') == 'rally-node')
        assert (cfg.opts('tests', 'sample.key') == 'value')
        cfg.add(config.Scope.applicationOverride, 'tests', 'sample.key', 'override')
        assert (cfg.opts('tests', 'sample.key') == 'override')

    def test_load_all_opts_in_section(self):
        """all_opts() reflects both stored values and later overrides."""
        cfg = config.Config(config_file_class=InMemoryConfigStore)
        assert (not cfg.config_present())
        # FIX(review): the URL values here had been mangled into broken string
        # literals ("'release.url': ' 'release.cache': ..."), which is a
        # SyntaxError. Restored with self-consistent placeholder URLs; the
        # assertion below uses the same values, so the test stays coherent.
        sample_config = {
            'distributions': {
                'release.url': 'https://example.org/releases',
                'release.cache': 'true',
                'snapshot.url': 'https://example.org/snapshots',
                'snapshot.cache': 'false',
            },
            'system': {'env.name': 'local'},
            'meta': {'config.version': config.Config.CURRENT_CONFIG_VERSION},
        }
        cfg.config_file.store(sample_config)
        assert cfg.config_present()
        cfg.load_config()
        cfg.add(config.Scope.applicationOverride, 'distributions', 'snapshot.cache', 'true')
        assert (cfg.all_opts('distributions') == {
            'release.url': 'https://example.org/releases',
            'release.cache': 'true',
            'snapshot.url': 'https://example.org/snapshots',
            'snapshot.cache': 'true',
        })

    def test_add_all_in_section(self):
        """add_all() copies only the requested section; missing sections are a no-op."""
        source_cfg = config.Config(config_file_class=InMemoryConfigStore)
        sample_config = {'tests': {'sample.key': 'value', 'sample.key2': 'value'}, 'no_copy': {'other.key': 'value'}, 'meta': {'config.version': config.Config.CURRENT_CONFIG_VERSION}}
        source_cfg.config_file.store(sample_config)
        source_cfg.load_config()
        target_cfg = config.Config(config_file_class=InMemoryConfigStore)
        assert (target_cfg.opts('tests', 'sample.key', mandatory=False) is None)
        target_cfg.add_all(source=source_cfg, section='tests')
        assert (target_cfg.opts('tests', 'sample.key') == 'value')
        assert (target_cfg.opts('no_copy', 'other.key', mandatory=False) is None)
        # Copying a non-existent section must not raise.
        target_cfg.add_all(source=source_cfg, section='this section does not exist')
class MetaRule():
    """Combine several SingleRule instances under an any/all relation."""

    def __init__(self, rules, relation):
        """Validate and store *rules* and the *relation* (builtin any or all).

        Raises BadRuleError when the relation is unsupported or any rule has
        the wrong type.
        """
        if relation not in [any, all]:
            raise BadRuleError('only any or all are allowed in MetaRule')
        if not all(isinstance(rule, SingleRule) for rule in rules):
            raise BadRuleError('all rules in MetaRule must be of type Rule')
        self.relation = relation
        self.rules = rules
def gen_sites():
    """Yield (INT tile name, IDELAY site, IOB pad site) triples for every
    IDELAYE2_FINEDELAY site in the part's grid.

    NOTE(review): the column offsets below (+/-3 to the INT tile, -/+1 to the
    pad tile depending on the tile's L/R flavor) encode fabric layout
    conventions — confirm against the fabric documentation.
    """
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    for tile_name in sorted(grid.tiles()):
        loc = grid.loc_of_tilename(tile_name)
        gridinfo = grid.gridinfo_at_loc(loc)
        sites = []
        for (site_name, site_type) in gridinfo.sites.items():
            if (site_type == 'IDELAYE2_FINEDELAY'):
                sites.append(site_name)
        if (len(sites) == 0):
            continue
        # Sort sites by the Y coordinate embedded in the site name.
        sites_y = [int(re.match('IDELAY_X[0-9]+Y([0-9]+)', site).group(1)) for site in sites]
        (sites, _) = zip(*sorted(zip(sites, sites_y), key=(lambda x: x[1])))
        if (gridinfo.tile_type[0] == 'L'):
            int_grid_x = (loc.grid_x + 3)
            pad_grid_x = (loc.grid_x - 1)
            int_tile_type = 'INT_L'
        else:
            int_grid_x = (loc.grid_x - 3)
            pad_grid_x = (loc.grid_x + 1)
            int_tile_type = 'INT_R'
        int_tile_locs = [(int_grid_x, loc.grid_y)]
        pad_gridinfo = grid.gridinfo_at_loc((pad_grid_x, loc.grid_y))
        pad_sites = pad_gridinfo.sites.keys()
        # Same Y-based ordering for the pad sites so the zip below pairs them up.
        pad_sites_y = [int(re.match('IOB_X[0-9]+Y([0-9]+)', site).group(1)) for site in pad_sites]
        (pad_sites, _) = zip(*sorted(zip(pad_sites, pad_sites_y), key=(lambda x: x[1])))
        if (not gridinfo.tile_type.endswith('_SING')):
            # Non-_SING tiles have a second associated INT tile one row below.
            int_tile_locs.append((int_grid_x, (loc.grid_y - 1)))
        assert (len(sites) == len(int_tile_locs)), (tile_name, sites, int_tile_locs)
        assert (len(sites) == len(pad_sites)), (sites, pad_sites)
        for (site_name, pad_site, int_tile_loc) in zip(sites, pad_sites, int_tile_locs):
            int_tile_name = grid.tilename_at_loc(int_tile_loc)
            assert int_tile_name.startswith(int_tile_type), (int_tile_name, site_name, int_tile_loc)
            (yield (int_tile_name, site_name, pad_site))
class ResourceManagerFolder(resource_class_factory('folder', None)):
    """CRM Folder resource wrapper.

    NOTE(review): the bare ('iam_policy') / ('org_policy') expressions below
    are no-ops — they look like argument lists of stripped caching decorators;
    likewise fetch(cls, ...) appears to be missing an @classmethod decorator.
    Confirm against the original source.
    """
    def fetch(cls, client, resource_key, root=True):
        """Fetch the folder via the CRM API; on API failure, synthesize a stub
        resource carrying the error as a warning instead of raising."""
        try:
            (data, metadata) = client.fetch_crm_folder(resource_key)
            return FACTORIES['folder'].create_new(data, metadata=metadata, root=root)
        except (api_errors.ApiExecutionError, ResourceNotSupported) as e:
            err_msg = ('Unable to fetch Folder from API %s: %s, creating fake resource.' % (resource_key, e))
            LOGGER.warning(err_msg)
            data = {'name': resource_key}
            resource = FACTORIES['folder'].create_new(data, root=root)
            resource.add_warning(err_msg)
            return resource
    def key(self):
        # Everything after the first '/' of the 'name' field (presumably
        # 'folders/<id>' — confirm against the API schema).
        return self['name'].split('/', 1)[(- 1)]
    def should_dispatch(self):
        # Folders always fan out to child-resource crawling.
        return True
    ('iam_policy')
    def get_iam_policy(self, client=None):
        """Return the folder's IAM policy, or None (with a warning recorded) on API errors."""
        try:
            (data, _) = client.fetch_crm_folder_iam_policy(self['name'])
            return data
        except (api_errors.ApiExecutionError, ResourceNotSupported) as e:
            err_msg = ('Could not get IAM policy for folder %s: %s' % (self.key(), e))
            LOGGER.warning(err_msg)
            self.add_warning(err_msg)
            return None
    ('org_policy')
    def get_org_policy(self, client=None):
        """Return the folder's org policies as a list, or None on API errors."""
        try:
            org_policies = []
            org_policies_iter = client.iter_crm_folder_org_policies(self['name'])
            for org_policy in org_policies_iter:
                org_policies.append(org_policy)
            return org_policies
        except (api_errors.ApiExecutionError, ResourceNotSupported) as e:
            LOGGER.warning('Could not get Org policy: %s', e)
            self.add_warning(e)
            return None
def extractStreetofeyesCom(item):
    """Build a release message for a streetofeyes.com feed item.

    Returns None for previews/items without chapter or volume info, a release
    message when a known tag matches, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_chapter_info = bool(chp or vol)
    if not has_chapter_info or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in tagmap:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class FilesystemBasedCache(Cache):
    """Cache implementation that stores each bucket as a file on disk.

    Buckets live under a cache root (XDG cache dir by default). Writes go to
    a temp file first and are renamed into place on seal(), so readers never
    observe partially written entries.

    FIX(review): restored the @staticmethod / @classmethod decorators on
    _escape_key and _default_cache_root, which had been stripped. All call
    sites (class-qualified for _escape_key, self-bound for
    _default_cache_root) keep working unchanged.
    """

    _SUBDIR = 'flatpak-node-generator'
    # Any character outside this safe set is escaped as _XX (hex code point).
    _KEY_CHAR_ESCAPE_RE = re.compile('[^A-Za-z0-9._\\-]')

    def __init__(self, cache_root: Optional[Path]=None) -> None:
        self._cache_root = (cache_root or self._default_cache_root())

    @staticmethod
    def _escape_key(key: str) -> str:
        """Turn an arbitrary key into a filesystem-safe filename."""
        return FilesystemBasedCache._KEY_CHAR_ESCAPE_RE.sub((lambda m: f'_{ord(m.group()):02X}'), key)

    class FilesystemBucketReader(Cache.BucketReader):
        """Read a sealed bucket file."""

        def __init__(self, file: IO[bytes]) -> None:
            self.file = file

        def close(self) -> None:
            self.file.close()

        def read_parts(self, size: int) -> Iterator[bytes]:
            """Yield the file contents in chunks of at most *size* bytes."""
            while True:
                data = self.file.read(size)
                if (not data):
                    break
                (yield data)

        def read_all(self) -> bytes:
            return self.file.read()

    class FilesystemBucketWriter(Cache.BucketWriter):
        """Write a bucket to a temp file; seal() atomically publishes it."""

        def __init__(self, file: IO[bytes], temp: Path, target: Path) -> None:
            self.file = file
            self.temp = temp
            self.target = target

        def write(self, data: bytes) -> None:
            self.file.write(data)

        def cancel(self) -> None:
            # Abandon the write: close and delete the temp file.
            self.file.close()
            self.temp.unlink()

        def seal(self) -> None:
            # Atomic rename makes the entry visible only when complete.
            self.file.close()
            self.temp.rename(self.target)

    class FilesystemBucketRef(Cache.BucketRef):
        """Reference to a bucket file derived from the escaped key."""

        def __init__(self, key: str, cache_root: Path) -> None:
            super().__init__(key)
            self._cache_root = cache_root
            self._cache_path = (self._cache_root / FilesystemBasedCache._escape_key(key))

        def open_read(self) -> Optional[Cache.BucketReader]:
            """Return a reader for the bucket, or None on a cache miss."""
            try:
                fp = self._cache_path.open('rb')
            except FileNotFoundError:
                return None
            else:
                return FilesystemBasedCache.FilesystemBucketReader(fp)

        def open_write(self) -> Cache.BucketWriter:
            """Return a writer backed by a fresh temp file in the cache root."""
            target = self._cache_path
            if (not target.parent.exists()):
                target.parent.mkdir(exist_ok=True, parents=True)
            (fd, temp) = tempfile.mkstemp(dir=self._cache_root, prefix='__temp__')
            return FilesystemBasedCache.FilesystemBucketWriter(os.fdopen(fd, 'wb'), Path(temp), target)

    @classmethod
    def _default_cache_root(cls) -> Path:
        """Resolve the default cache directory honoring XDG_CACHE_HOME."""
        xdg_cache_home = os.environ.get('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
        return (Path(xdg_cache_home) / cls._SUBDIR)

    def get(self, key: str) -> Cache.BucketRef:
        return FilesystemBasedCache.FilesystemBucketRef(key, self._cache_root)
class LoopNameGenerator(PipelineStage):
    """Pipeline stage that renames loop variables according to user options."""

    name = 'loop-name-generator'

    def run(self, task: DecompilerTask):
        """Apply while-loop and for-loop variable renaming when configured."""
        options = task.options
        should_rename_while: bool = options.getboolean('loop-name-generator.rename_while_loop_variables', fallback=False)
        candidate_names: List[str] = options.getlist('loop-name-generator.for_loop_variable_names', fallback=[])
        if should_rename_while:
            WhileLoopVariableRenamer(task._ast).rename()
        if candidate_names:
            ForLoopVariableRenamer(task._ast, candidate_names).rename()
class CrawlerRunnerProcess(Process):
    """Run a Scrapy spider inside a dedicated child process.

    A separate process sidesteps Twisted's one-reactor-per-process
    restriction, allowing the parent to launch repeated crawls.
    """

    def __init__(self, spider):
        Process.__init__(self)
        self.runner = CrawlerRunner(get_project_settings())
        self.spider = spider

    def run(self):
        """Crawl the spider and stop the reactor when the crawl completes."""
        crawl_deferred = self.runner.crawl(self.spider)
        # Stop the reactor on success or failure alike.
        crawl_deferred.addBoth(lambda _: reactor.stop())
        # Signal handlers stay with the parent process.
        reactor.run(installSignalHandlers=False)
class SupervisorNode(BaseNode):
    """Supervisor that bridges the environment loop with the backend pub/sub
    graph: rendering control, state buffering, resets and stepping."""

    def __init__(self, env_node: EnvNode, **kwargs):
        super().__init__(**kwargs)
        self.subjects = None
        self.env_node = env_node
        self.last_image = None
        # Signals arrival of a rendered image requested via get_last_image().
        self._image_event = Event()
        self.render_toggle = False
        self.pub_get_last_image = self.backend.Publisher(f'{self.ns}/env/render/get_last_image', 'bool')
        self.sub_set_last_image = self.backend.Subscriber(f'{self.ns}/env/render/set_last_image', 'uint8', self._last_image_callback)
        self.render_toggle_pub = self.backend.Publisher(f'{self.ns}/env/render/toggle', 'bool')
        # Per-state message buffer; entries are consumed (and cleared) on reset.
        self.state_buffer = dict()
        for (cname, i) in self.states.items():
            if isinstance(i['processor'], dict):
                # Deferred import — NOTE(review): presumably avoids a circular
                # import at module load; confirm.
                from eagerx.core.specs import ProcessorSpec
                i['processor'] = initialize_processor(ProcessorSpec(i['processor']))
            if isinstance(i['space'], dict):
                i['space'] = eagerx.Space.from_dict(i['space'])
            assert (i['space'] is not None), f'No space defined for state {cname}.'
            assert i['space'].is_fully_defined, f'The space for state {cname} is not fully defined (low, high, shape, dtype).'
            self.state_buffer[cname] = {'msg': None, 'processor': i['processor'], 'space': i['space']}
        self._step_counter = 0

    def _set_subjects(self, subjects):
        # Late injection of the reactive subjects used by reset().
        self.subjects = subjects

    def start_render(self):
        """Enable rendering (idempotent) and broadcast the toggle."""
        if (not self.render_toggle):
            self.render_toggle = True
            self.render_toggle_pub.publish(self.render_toggle)

    def stop_render(self):
        """Disable rendering (idempotent) and broadcast the toggle."""
        if self.render_toggle:
            self.render_toggle = False
            self.render_toggle_pub.publish(self.render_toggle)

    def get_last_image(self):
        """Request the most recent rendered image and block until it arrives."""
        self._image_event.clear()
        self.pub_get_last_image.publish(True)
        self._image_event.wait()
        return self.last_image

    def _last_image_callback(self, msg):
        # Store the image and release any waiter in get_last_image().
        self.last_image = msg
        self._image_event.set()

    def _get_states(self, reset_msg):
        """Drain the state buffer into a message dict.

        For each state, '<name>/done' is True when no message was buffered;
        otherwise the buffered message is included and the buffer slot cleared.
        """
        msgs = dict()
        for (name, buffer) in self.state_buffer.items():
            if (buffer['msg'] is None):
                msgs[(name + '/done')] = True
            else:
                msgs[(name + '/done')] = False
                msgs[name] = buffer['msg']
                buffer['msg'] = None
        return msgs

    def reset(self):
        """Trigger an environment reset and block until the first observation."""
        self.env_node.obs_event.clear()
        self.env_node.must_reset = True
        # Unblock the env node so it can notice must_reset.
        self.env_node.action_event.set()
        self.subjects['start_reset'].on_next(0)
        self._step_counter = 0
        try:
            flag = self.env_node.obs_event.wait()
            if (not flag):
                raise KeyboardInterrupt
        except (KeyboardInterrupt, SystemExit):
            self.backend.logdebug('[reset] KEYBOARD INTERRUPT')
            raise
        self.backend.logdebug('FIRST OBS RECEIVED!')

    def step(self):
        """Release one environment step and block until its observation."""
        self.env_node.obs_event.clear()
        self.env_node.action_event.set()
        self._step_counter += 1
        try:
            flag = self.env_node.obs_event.wait()
            if (not flag):
                raise KeyboardInterrupt
        except (KeyboardInterrupt, SystemExit):
            self.backend.logdebug('[step] KEYBOARD INTERRUPT')
            raise
        self.backend.logdebug('STEP END')

    def shutdown(self):
        """Release a potentially blocked env node and unregister pub/sub handles."""
        self.env_node.action_event.set()
        self.pub_get_last_image.unregister()
        self.sub_set_last_image.unregister()
        self.render_toggle_pub.unregister()
def xtb_hessian(geom, gfn=None):
    """Compute a Hessian for *geom* using a freshly configured XTB calculator.

    Charge, multiplicity and parallelism are copied from geom's current
    calculator; *gfn* optionally selects the GFN parametrization. The Hessian
    is evaluated on a copy, leaving the input geometry untouched.
    """
    current_calc = geom.calculator
    xtb_kwargs = {
        'charge': current_calc.charge,
        'mult': current_calc.mult,
        'pal': current_calc.pal,
    }
    if gfn is not None:
        xtb_kwargs['gfn'] = gfn
    work_geom = geom.copy()
    work_geom.set_calculator(XTB(**xtb_kwargs))
    return work_geom.hessian
class OptionSeriesSankeyLevelsStates(Options):
    """Accessors for the sankey series levels.states option group.

    Each method lazily creates and returns the corresponding sub-options
    object. NOTE(review): these accessors are usually declared as @property
    in this option-class style — decorators appear stripped; confirm.
    """
    def hover(self) -> 'OptionSeriesSankeyLevelsStatesHover':
        return self._config_sub_data('hover', OptionSeriesSankeyLevelsStatesHover)
    def inactive(self) -> 'OptionSeriesSankeyLevelsStatesInactive':
        return self._config_sub_data('inactive', OptionSeriesSankeyLevelsStatesInactive)
    def normal(self) -> 'OptionSeriesSankeyLevelsStatesNormal':
        return self._config_sub_data('normal', OptionSeriesSankeyLevelsStatesNormal)
    def select(self) -> 'OptionSeriesSankeyLevelsStatesSelect':
        return self._config_sub_data('select', OptionSeriesSankeyLevelsStatesSelect)
def lazy_import():
    """Import the historical-usage model classes on demand and publish them
    into this module's globals (deferred to avoid import-time cycles)."""
    from fastly.model.historical import Historical
    from fastly.model.historical_meta import HistoricalMeta
    from fastly.model.historical_usage_aggregated_response_all_of import HistoricalUsageAggregatedResponseAllOf
    from fastly.model.historical_usage_data import HistoricalUsageData
    exported = (
        ('Historical', Historical),
        ('HistoricalMeta', HistoricalMeta),
        ('HistoricalUsageAggregatedResponseAllOf', HistoricalUsageAggregatedResponseAllOf),
        ('HistoricalUsageData', HistoricalUsageData),
    )
    for name, model_cls in exported:
        globals()[name] = model_cls
def test_very_large_response_protocol_v2(pysoa_client_protocol_v2):
    """An oversized response must surface a JobError whose first error code is
    ERROR_CODE_RESPONSE_TOO_LARGE (protocol v2 client fixture)."""
    with pytest.raises(pysoa_client_protocol_v2.JobError) as error_context:
        pysoa_client_protocol_v2.call_action('meta', 'very_large_response')
    assert (error_context.value.errors[0].code == ERROR_CODE_RESPONSE_TOO_LARGE)
class OptionSeriesPictorialSonificationTracksMappingNoteduration(Options):
    """Accessors for the pictorial sonification tracks.mapping.noteDuration options.

    NOTE(review): each option appears twice — a getter form returning
    _config_get(...) and a setter form calling _config(...). As plain defs the
    second definition shadows the first; these were presumably @property /
    @<name>.setter pairs whose decorators got stripped. Confirm upstream.
    """
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionsHighcharts(OptChart.OptionsChart):
    """Top-level Highcharts options container.

    Each accessor lazily creates and returns its sub-options object via
    _config_sub_data (series_ uses the enumerated variant, since 'series' is a
    list in the Highcharts config). NOTE(review): accessors of this style are
    usually @property — decorators appear stripped; confirm. Trailing
    underscores (series_, global_) avoid clashes with keywords/builtins.
    """
    def defs(self) -> odefs.OptionDefs:
        return self._config_sub_data('defs', odefs.OptionDefs)
    def navigation(self) -> onavigation.OptionNavigation:
        return self._config_sub_data('navigation', onavigation.OptionNavigation)
    def responsive(self) -> oresponsive.OptionResponsive:
        return self._config_sub_data('responsive', oresponsive.OptionResponsive)
    def drilldown(self) -> odrilldown.OptionDrilldown:
        return self._config_sub_data('drilldown', odrilldown.OptionDrilldown)
    def series_(self) -> oseries.OptionSeries:
        return self._config_sub_data_enum('series', oseries.OptionSeries)
    def subtitle(self) -> osubtitle.OptionSubtitle:
        return self._config_sub_data('subtitle', osubtitle.OptionSubtitle)
    def annotations(self) -> oannotations.OptionAnnotations:
        return self._config_sub_data('annotations', oannotations.OptionAnnotations)
    def yAxis(self) -> oyAxis.OptionYaxis:
        return self._config_sub_data('yAxis', oyAxis.OptionYaxis)
    def accessibility(self) -> oaccessibility.OptionAccessibility:
        return self._config_sub_data('accessibility', oaccessibility.OptionAccessibility)
    def credits(self) -> ocredits.OptionCredits:
        return self._config_sub_data('credits', ocredits.OptionCredits)
    def data(self) -> odata.OptionData:
        return self._config_sub_data('data', odata.OptionData)
    def exporting(self) -> oexporting.OptionExporting:
        return self._config_sub_data('exporting', oexporting.OptionExporting)
    def caption(self) -> ocaption.OptionCaption:
        return self._config_sub_data('caption', ocaption.OptionCaption)
    def colorAxis(self) -> ocolorAxis.OptionColoraxis:
        return self._config_sub_data('colorAxis', ocolorAxis.OptionColoraxis)
    def title(self) -> otitle.OptionTitle:
        return self._config_sub_data('title', otitle.OptionTitle)
    def pane(self) -> opane.OptionPane:
        return self._config_sub_data('pane', opane.OptionPane)
    def time(self) -> otime.OptionTime:
        return self._config_sub_data('time', otime.OptionTime)
    def chart(self) -> ochart.OptionChart:
        return self._config_sub_data('chart', ochart.OptionChart)
    def zAxis(self) -> ozAxis.OptionZaxis:
        return self._config_sub_data('zAxis', ozAxis.OptionZaxis)
    def global_(self) -> oglobal.OptionGlobal:
        return self._config_sub_data('global', oglobal.OptionGlobal)
    def noData(self) -> onoData.OptionNodata:
        return self._config_sub_data('noData', onoData.OptionNodata)
    def sonification(self) -> osonification.OptionSonification:
        return self._config_sub_data('sonification', osonification.OptionSonification)
    def legend(self) -> olegend.OptionLegend:
        return self._config_sub_data('legend', olegend.OptionLegend)
    def loading(self) -> oloading.OptionLoading:
        return self._config_sub_data('loading', oloading.OptionLoading)
    def boost(self) -> oboost.OptionBoost:
        return self._config_sub_data('boost', oboost.OptionBoost)
    def tooltip(self) -> otooltip.OptionTooltip:
        return self._config_sub_data('tooltip', otooltip.OptionTooltip)
    def xAxis(self) -> oxAxis.OptionXaxis:
        return self._config_sub_data('xAxis', oxAxis.OptionXaxis)
    def plotOptions(self) -> oplotOptions.OptionPlotoptions:
        return self._config_sub_data('plotOptions', oplotOptions.OptionPlotoptions)
    def lang(self) -> olang.OptionLang:
        return self._config_sub_data('lang', olang.OptionLang)
class Editor(Enums):
    """Builders for Tabulator column editors.

    Each method assembles an editorParams dict and registers it via
    _set_value. Methods whose params can contain JavaScript values
    (select/list/autocomplete/custom) serialize the dict manually and pass
    js_type=True so values are emitted as raw JS rather than JSON.
    """
    def input(self, search: bool=True, element_attributes: dict=None, **kwargs):
        """Configure the plain text 'input' editor."""
        editor_params = {'search': search}
        if (element_attributes is not None):
            editor_params['elementAttributes'] = element_attributes
        if kwargs:
            editor_params.update(kwargs)
        self._set_value(value=editor_params, name='editorParams')
        return self._set_value()
    def textarea(self, vertical_navigation: str='editor', element_attributes: dict=None, **kwargs):
        """Configure the multi-line 'textarea' editor."""
        editor_params = {'verticalNavigation': vertical_navigation, 'elementAttributes': element_attributes}
        if kwargs:
            editor_params.update(kwargs)
        self._set_value(value=editor_params, name='editorParams')
        return self._set_value()
    def number(self, min: float=None, max: float=None, step: int=1, element_attributes: dict=None, vertical_navigation: str='table', **kwargs):
        """Configure the 'number' editor with optional min/max bounds."""
        editor_params = {'step': step, 'verticalNavigation': vertical_navigation, 'elementAttributes': element_attributes}
        if (min is not None):
            editor_params['min'] = min
        if (max is not None):
            editor_params['max'] = max
        if kwargs:
            editor_params.update(kwargs)
        self._set_value(value=editor_params, name='editorParams')
        return self._set_value()
    def range(self, min: float=None, max: float=None, step: int=1, element_attributes: dict=None, **kwargs):
        """Configure the 'range' slider editor with optional min/max bounds."""
        editor_params = {'step': step, 'elementAttributes': element_attributes}
        if (min is not None):
            editor_params['min'] = min
        if (max is not None):
            editor_params['max'] = max
        if kwargs:
            editor_params.update(kwargs)
        self._set_value(value=editor_params, name='editorParams')
        return self._set_value()
    def tick(self, tristate: bool=False, indeterminate_value: str=None, element_attributes: dict=None, **kwargs):
        """Configure the checkbox 'tick' editor (optionally tristate)."""
        editor_params = {'tristate': tristate, 'indeterminateValue': indeterminate_value}
        if (element_attributes is not None):
            editor_params['elementAttributes'] = element_attributes
        if kwargs:
            editor_params.update(kwargs)
        self._set_value(value=editor_params, name='editorParams')
        return self._set_value()
    def stars(self, element_attributes: dict=None, **kwargs):
        """Configure the 'stars' rating editor."""
        editor_params = {}
        if (element_attributes is not None):
            editor_params['elementAttributes'] = element_attributes
        if kwargs:
            editor_params.update(kwargs)
        self._set_value(value=editor_params, name='editorParams')
        return self._set_value()
    def select(self, values: list=True, default_value: str=None, element_attributes: dict=None, vertical_navigation: str='hybrid', **kwargs):
        """Configure the dropdown 'select' editor.

        values=True means 'derive options from the column data'; string params
        are JSON-quoted before the dict is rendered as a raw JS object.
        """
        editor_params = {'values': (json.dumps(values) if (values is True) else values)}
        if (element_attributes is not None):
            editor_params['elementAttributes'] = element_attributes
        if (default_value is not None):
            editor_params['defaultValue'] = default_value
        if (vertical_navigation is not None):
            editor_params['verticalNavigation'] = vertical_navigation
        if kwargs:
            editor_params.update(kwargs)
        for c in ['defaultValue', 'verticalNavigation']:
            if (c in editor_params):
                editor_params[c] = json.dumps(editor_params[c])
        self._set_value(value=('{%s}' % ', '.join([('%s: %s' % (k, v)) for (k, v) in editor_params.items()])), name='editorParams', js_type=True)
        return self._set_value()
    def list(self, values: list=True, default_value: str=None, element_attributes: dict=None, vertical_navigation: str='hybrid', **kwargs):
        """Configure the 'list' editor (same parameter handling as select)."""
        editor_params = {'values': (json.dumps(values) if (values is True) else values)}
        if (element_attributes is not None):
            editor_params['elementAttributes'] = element_attributes
        if (default_value is not None):
            editor_params['defaultValue'] = default_value
        if (vertical_navigation is not None):
            editor_params['verticalNavigation'] = vertical_navigation
        if kwargs:
            editor_params.update(kwargs)
        for c in ['defaultValue', 'verticalNavigation']:
            if (c in editor_params):
                editor_params[c] = json.dumps(editor_params[c])
        self._set_value(value=('{%s}' % ', '.join([('%s: %s' % (k, v)) for (k, v) in editor_params.items()])), name='editorParams', js_type=True)
        return self._set_value()
    def autocompletes(self) -> EditorAutocomplete:
        """Return a fluent helper for building autocomplete editorParams."""
        self._set_value()
        return EditorAutocomplete(self, 'editorParams')
    def autocomplete(self, values: list=True, default_value=None, element_attributes: dict=None, vertical_navigation: str='hybrid', **kwargs):
        """Configure the 'autocomplete' editor; extra boolean flags are also JSON-quoted."""
        editor_params = {'values': (json.dumps(values) if (values is True) else values)}
        if (element_attributes is not None):
            editor_params['elementAttributes'] = element_attributes
        if (default_value is not None):
            editor_params['defaultValue'] = default_value
        if (vertical_navigation is not None):
            editor_params['verticalNavigation'] = vertical_navigation
        if kwargs:
            editor_params.update(kwargs)
        for c in ['showListOnEmpty', 'freetext', 'allowEmpty', 'defaultValue', 'verticalNavigation']:
            if (c in editor_params):
                editor_params[c] = json.dumps(editor_params[c])
        self._set_value(value=('{%s}' % ', '.join([('%s: %s' % (k, v)) for (k, v) in editor_params.items()])), name='editorParams', js_type=True)
        return self._set_value()
    def custom(self, func_name: str, js_funcs: types.JS_FUNCS_TYPES=None, editor_params: dict=None, profile: types.PROFILE_TYPE=None, func_ref: bool=False):
        """Register a custom JavaScript editor function under *func_name*.

        When js_funcs is given, the body is wrapped in Tabulator's editor
        signature unless it already is one (or func_ref is True).
        """
        if (js_funcs is None):
            self._set_value(js_type=True)
        else:
            if (not isinstance(js_funcs, list)):
                js_funcs = [js_funcs]
            str_func = JsUtils.jsConvertFncs(js_funcs, toStr=True, profile=profile)
            if ((not str_func.startswith('function(cell, onRendered, success, cancel, editorParams)')) and (not func_ref)):
                str_func = ('function(cell, onRendered, success, cancel, editorParams){%s}' % str_func)
            self.component.page.extendModule('edit', 'editors', func_name, str_func)
            self._set_value()
        if (editor_params is not None):
            self._set_value(value=editor_params, name='editorParams')
        return self
class OptionPlotoptionsNetworkgraphTooltip(Options):
    """Accessors for the networkgraph plotOptions tooltip configuration.

    Getter forms return the configured (or default) value via _config_get;
    setter forms write via _config. NOTE(review): paired duplicate names are
    presumably stripped @property / @<name>.setter declarations — as plain
    defs the setter shadows the getter; confirm upstream.
    """
    def clusterFormat(self):
        return self._config_get('Clustered points: {point.clusterPointsAmount}')
    def clusterFormat(self, text: str):
        self._config(text, js_type=False)
    def dateTimeLabelFormats(self) -> 'OptionPlotoptionsNetworkgraphTooltipDatetimelabelformats':
        return self._config_sub_data('dateTimeLabelFormats', OptionPlotoptionsNetworkgraphTooltipDatetimelabelformats)
    def distance(self):
        return self._config_get(16)
    def distance(self, num: float):
        self._config(num, js_type=False)
    def followPointer(self):
        return self._config_get(False)
    def followPointer(self, flag: bool):
        self._config(flag, js_type=False)
    def followTouchMove(self):
        return self._config_get(True)
    def followTouchMove(self, flag: bool):
        self._config(flag, js_type=False)
    def footerFormat(self):
        return self._config_get('')
    def footerFormat(self, text: str):
        self._config(text, js_type=False)
    def format(self):
        return self._config_get('undefined')
    def format(self, text: str):
        self._config(text, js_type=False)
    def headerFormat(self):
        return self._config_get(None)
    def headerFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormat(self):
        return self._config_get(None)
    def nullFormat(self, text: str):
        self._config(text, js_type=False)
    def nullFormatter(self):
        return self._config_get(None)
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)
    def pointFormat(self):
        return self._config_get(None)
    def pointFormat(self, text: str):
        self._config(text, js_type=False)
    def pointFormatter(self):
        return self._config_get(None)
    def pointFormatter(self, value: Any):
        self._config(value, js_type=False)
    def valueDecimals(self):
        return self._config_get(None)
    def valueDecimals(self, num: float):
        self._config(num, js_type=False)
    def valuePrefix(self):
        return self._config_get(None)
    def valuePrefix(self, text: str):
        self._config(text, js_type=False)
    def valueSuffix(self):
        return self._config_get(None)
    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
    def xDateFormat(self):
        return self._config_get(None)
    def xDateFormat(self, text: str):
        self._config(text, js_type=False)
def _get_kwargs(*, client: Client) -> Dict[str, Any]:
    """Assemble the request kwargs for ``GET /billing/setup_intent_key``.

    :param client: Configured API client supplying base URL, headers,
        cookies, timeout and redirect policy.
    :return: Keyword arguments ready to be passed to the HTTP library.
    """
    return {
        'method': 'get',
        'url': '{}/billing/setup_intent_key'.format(client.base_url),
        'headers': client.get_headers(),
        'cookies': client.get_cookies(),
        'timeout': client.get_timeout(),
        'follow_redirects': client.follow_redirects,
    }
def filter_ips_rule_data(json):
    """Project the payload onto the whitelisted IPS-rule fields.

    :param json: Raw request payload (dict-like).
    :return: New dict containing only the known option keys whose values
        are present and not None, after ``remove_invalid_fields`` cleanup.
    """
    option_list = ['action', 'application', 'date', 'group', 'location', 'log', 'log_packet', 'metadata', 'name', 'os', 'rev', 'rule_id', 'service', 'severity', 'status']
    json = remove_invalid_fields(json)
    return {key: json[key] for key in option_list if key in json and json[key] is not None}
class LaunchRequestHandler(AbstractRequestHandler):
    """Greets the user when the Alexa skill is launched."""

    def can_handle(self, handler_input):
        """Match only LaunchRequest events."""
        return ask_utils.is_request_type('LaunchRequest')(handler_input)

    def handle(self, handler_input):
        """Speak the greeting and keep the session open with a reprompt."""
        speak_output = "Hi there, I'm a skill that forwards requests to Fixie. Let me know how I can help."
        builder = handler_input.response_builder
        builder.speak(speak_output)
        builder.ask(speak_output)
        return builder.response
def test_enum():
    """Exercise the custom Enum wrapper: construction, comparison, conversion."""
    class TestEnum(Enum):
        ZERO = 0
        ONE = 1
        TWO = 2

    # Members compare equal to their underlying integer values.
    assert TestEnum.ONE == 1
    member = TestEnum(TestEnum.ONE)
    assert isinstance(member, TestEnum)
    # String/int conversions reflect name and value respectively.
    assert str(member) == 'ONE'
    assert int(member) == 1
    assert member == 1
    # enums() is stable across calls and lists all values in order.
    assert TestEnum.enums() == TestEnum.enums()
    assert TestEnum.enums() == [0, 1, 2]
class TestUdp(tests.LimitedTestCase):
    """Tests for eventlet's green DNS ``udp`` query helper (IPv4 and IPv6).

    Every test patches ``socket.recvfrom`` so no real network I/O happens:
    a normal return value fakes a reply, ``socket.timeout`` as side effect
    fakes an unanswered query, and replies from a different source address
    exercise the unexpected-source handling.
    """

    def setUp(self):
        # Minimal DNS message with the QR (response) flag set, serialized
        # once so each test can hand it back as the fake wire payload.
        self.query = greendns.dns.message.Message()
        self.query.flags = greendns.dns.flags.QR
        self.query_wire = self.query.to_wire()
        super().setUp()

    def test_udp_ipv4(self):
        # Reply from the queried IPv4 address: succeeds without error.
        with tests.mock.patch('eventlet.support.greendns.socket.socket.recvfrom', return_value=(self.query_wire, ('127.0.0.1', 53))):
            greendns.udp(self.query, '127.0.0.1')

    def test_udp_ipv4_timeout(self):
        # No reply within the timeout raises dns.exception.Timeout.
        with tests.mock.patch('eventlet.support.greendns.socket.socket.recvfrom', side_effect=socket.timeout):
            with tests.assert_raises(dns.exception.Timeout):
                greendns.udp(self.query, '127.0.0.1', timeout=0.1)

    def test_udp_ipv4_wrong_addr_ignore(self):
        # With ignore_unexpected=True a missing reply still times out.
        with tests.mock.patch('eventlet.support.greendns.socket.socket.recvfrom', side_effect=socket.timeout):
            with tests.assert_raises(dns.exception.Timeout):
                greendns.udp(self.query, '127.0.0.1', timeout=0.1, ignore_unexpected=True)

    def test_udp_ipv4_wrong_addr(self):
        # Reply from a different IPv4 source raises UnexpectedSource.
        with tests.mock.patch('eventlet.support.greendns.socket.socket.recvfrom', return_value=(self.query_wire, ('127.0.0.2', 53))):
            with tests.assert_raises(dns.query.UnexpectedSource):
                greendns.udp(self.query, '127.0.0.1')

    def test_udp_ipv6(self):
        # Reply from the queried IPv6 address (4-tuple): succeeds.
        with tests.mock.patch('eventlet.support.greendns.socket.socket.recvfrom', return_value=(self.query_wire, ('::1', 53, 0, 0))):
            greendns.udp(self.query, '::1')

    def test_udp_ipv6_timeout(self):
        with tests.mock.patch('eventlet.support.greendns.socket.socket.recvfrom', side_effect=socket.timeout):
            with tests.assert_raises(dns.exception.Timeout):
                greendns.udp(self.query, '::1', timeout=0.1)

    def test_udp_ipv6_addr_zeroes(self):
        # A differently-spelled but equivalent IPv6 address must be accepted.
        with tests.mock.patch('eventlet.support.greendns.socket.socket.recvfrom', return_value=(self.query_wire, ('0:00:0000::1', 53, 0, 0))):
            greendns.udp(self.query, '::1')

    def test_udp_ipv6_wrong_addr_ignore(self):
        with tests.mock.patch('eventlet.support.greendns.socket.socket.recvfrom', side_effect=socket.timeout):
            with tests.assert_raises(dns.exception.Timeout):
                greendns.udp(self.query, '::1', timeout=0.1, ignore_unexpected=True)

    def test_udp_ipv6_wrong_addr(self):
        # Reply from a genuinely different IPv6 source raises UnexpectedSource.
        with tests.mock.patch('eventlet.support.greendns.socket.socket.recvfrom', return_value=(self.query_wire, ('ffff:0000::1', 53, 0, 0))):
            with tests.assert_raises(dns.query.UnexpectedSource):
                greendns.udp(self.query, '::1')
def check_instances(estimators=None, preprocessing=None, namespace=None):
    """Validate and normalize estimator / preprocessing specifications.

    Returns a pair ``(out_prep, out_est)`` where ``out_prep`` is a list of
    ``(case_name, [(name, cloned_transformer), ...])`` tuples and ``out_est``
    is a list of ``(case_name_or_None, learner_name, cloned_estimator)``
    tuples. All instances are cloned, so callers receive fresh copies.

    Parameters
    ----------
    estimators : instance, list or dict, optional
        Estimator specification; a bare instance is wrapped in a list.
    preprocessing : list or dict, optional
        Preprocessing cases; a flat list becomes a single case named 'pr'.
    namespace : list, optional
        Names already in use; preprocessing case names are appended to it.
    """
    if (not namespace):
        namespace = list()
    # Fail early on malformed estimator/preprocessing layouts.
    assert_correct_format(estimators, preprocessing)
    preprocessing = _check_instances(preprocessing)
    # A single estimator (anything that is not dict/list) becomes a list.
    if ((estimators is not None) and (not isinstance(estimators, (dict, list)))):
        estimators = [estimators]
    if preprocessing:
        if isinstance(preprocessing, list):
            # Flat list: one implicit case 'pr'; group estimators under it too.
            preprocessing = {'pr': preprocessing}
            estimators = ({'pr': estimators} if estimators else dict())
        # Deterministic case order, and reserve the case names.
        preprocessing = [(n, l) for (n, l) in sorted(preprocessing.items())]
        namespace += [n for (n, l) in preprocessing]
    if estimators:
        estimators = _check_instances(estimators, namespace=namespace)
        estimators = _flatten(estimators)
    (out_prep, out_est, cases) = (list(), list(), list())
    if preprocessing:
        for (preprocess_name, tr) in sorted(preprocessing):
            if tr:
                # Clone each transformer so the caller's originals stay untouched.
                out_prep.append((preprocess_name, [(n, clone(t)) for (n, t) in tr]))
                cases.append(preprocess_name)
    if estimators:
        for (preprocess_name, learner_name, est) in estimators:
            # Estimators whose case has no transformers map to case None.
            pr_name = (preprocess_name if (preprocess_name in cases) else None)
            out_est.append((pr_name, learner_name, clone(est)))
    return (out_prep, out_est)
class WorkbenchPlugin(Plugin):
    """Envisage plugin providing the workbench UI extension points.

    Declares the extension points (action sets, perspectives, preferences
    pages, service offers, views) and contributes the default action set,
    preferences, preferences page and two service offers.
    """

    # Extension-point identifiers (PKG is defined at module level).
    ACTION_SETS = (PKG + '.action_sets')
    PERSPECTIVES = (PKG + '.perspectives')
    PREFERENCES_PAGES = (PKG + '.preferences_pages')
    WORKBENCH_SERVICE_OFFERS = (PKG + '.service_offers')
    VIEWS = (PKG + '.views')
    # Envisage-level extension points this plugin contributes to.
    PREFERENCES = 'envisage.preferences'
    SERVICE_OFFERS = 'envisage.service_offers'
    # Plugin identity.
    id = 'envisage.ui.workbench'
    name = 'Workbench'
    # Extension-point declarations (desc strings are user documentation).
    action_sets = ExtensionPoint(List(Callable), id=ACTION_SETS, desc="\n\n An action set contains the toobars, menus, groups and actions that you\n would like to add to top-level workbench windows (i.e. the main\n application window). You can create new toolbars, menus and groups\n and/or add to existing ones.\n\n Each contribution to this extension point must be a factory that\n creates an action set, where 'factory' means any callable with the\n following signature::\n\n callable(**traits) -> IActionSet\n\n The easiest way to contribute such a factory is to create a class\n that derives from 'envisage.ui.action.api.ActionSet'.\n\n ")
    perspectives = ExtensionPoint(List(Callable), id=PERSPECTIVES, desc="\n\n A perspective is simply an arrangment of views around the (optionally\n hidden) editor area.\n\n Each contribution to this extension point must be a factory that\n creates a perspective, where 'factory' means any callable with the\n following signature::\n\n callable(**traits) -> IPerspective\n\n The easiest way to contribute such a factory is to create a class\n that derives from 'pyface.workbench.api.IPerspective'.\n\n ")
    preferences_pages = ExtensionPoint(List(Callable), id=PREFERENCES_PAGES, desc="\n\n A preferences page appears in the preferences dialog to allow the user\n to manipulate some preference values.\n\n Each contribution to this extension point must be a factory that\n creates a preferences page, where 'factory' means any callable with the\n following signature::\n\n callable(**traits) -> IPreferencesPage\n\n The easiest way to contribute such a factory is to create a class\n that derives from 'apptools.preferences.ui.api.IPreferencesPage'.\n\n ")
    service_offers = ExtensionPoint(List(ServiceOffer), id=WORKBENCH_SERVICE_OFFERS, desc="\n\n Services are simply objects that a plugin wants to make available to\n other plugins. This extension point allows you to offer 'per\n window' services that are created 'on-demand' (where 'on demand' means\n the first time somebody looks up a service of the appropriate\n protocol).\n .\n\n e.g.\n\n my_service_offer = ServiceOffer(\n protocol = 'acme.IMyService',\n factory = an_object_or_a_callable_that_creates_one,\n properties = {'a dictionary' : 'that is passed to the factory'}\n )\n\n Any properties specified are passed as keywrod arguments to the\n factory, i.e. the factory signature is::\n\n callable(**properties)\n\n ")
    views = ExtensionPoint(List(Callable), id=VIEWS, desc="\n\n A view provides information to the user to support their current\n task. Views can contain anything you like(!) and are arranged around\n the (optionally hidden) editor area. The user can re-arrange views as\n he/she sees fit.\n\n Each contribution to this extension point must be a factory that\n creates a view, where 'factory' means any callable with the following\n signature::\n\n callable(**traits) -> IView\n\n The easiest way to contribute such a factory is to create a class\n that derives from 'pyface.workbench.api.View'.\n\n It is also common to use a simple function (especially when a view\n is a representation of a service) e.g::\n\n def foo_view_factory(**traits):\n ' Create a view that is a representation of a service. '\n foo = self.application.get_service('IFoo')\n\n return FooView(foo=foo, **traits)\n\n ")

    # --- Contributions made by this plugin itself -------------------------
    my_action_sets = List(contributes_to=ACTION_SETS)

    def _my_action_sets_default(self):
        # Local import keeps plugin import lightweight until needed.
        from .default_action_set import DefaultActionSet
        return [DefaultActionSet]

    my_preferences = List(contributes_to=PREFERENCES)

    def _my_preferences_default(self):
        # Default preferences shipped inside the package.
        return ['pkgfile://envisage.ui.workbench/preferences.ini']

    my_preferences_pages = List(contributes_to=PREFERENCES_PAGES)

    def _my_preferences_pages_default(self):
        from .workbench_preferences_page import WorkbenchPreferencesPage
        return [WorkbenchPreferencesPage]

    my_service_offers = List(contributes_to=SERVICE_OFFERS)

    def _my_service_offers_default(self):
        # Offer the preferences manager and the workbench itself as services.
        preferences_manager_service_offer = ServiceOffer(protocol='apptools.preferences.ui.preferences_manager.PreferencesManager', factory=self._create_preferences_manager_service)
        workbench_service_offer = ServiceOffer(protocol='envisage.ui.workbench.workbench.Workbench', factory=self._create_workbench_service)
        return [preferences_manager_service_offer, workbench_service_offer]

    def _create_preferences_manager_service(self, **properties):
        # Build the manager from all contributed preferences-page factories.
        from apptools.preferences.ui.api import PreferencesManager
        preferences_manager = PreferencesManager(pages=[factory() for factory in self.preferences_pages])
        return preferences_manager

    def _create_workbench_service(self, **properties):
        # May be None if the application has no workbench attribute yet.
        return getattr(self.application, 'workbench', None)
class OptionPlotoptionsColumnSonificationDefaultspeechoptionsPointgrouping(Options):
    """Point-grouping options for column-series sonification speech tracks.

    NOTE(review): getter and setter share the same name for every option, so
    the setter shadows the getter at class-creation time; this pattern
    normally carries ``@property`` / ``@<name>.setter`` decorators that
    appear to have been stripped — confirm against the original module.
    """

    def algorithm(self):
        # Which point in a group is voiced; default 'last'.
        return self._config_get('last')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Grouping window; default 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property used for grouping; default 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
class TraitChangeEvent:
    """Record describing a single trait-change notification.

    Attributes
    ----------
    object : the object whose trait changed
    name : name of the changed trait
    old : previous value
    new : current value
    """

    def __init__(self, *, object, name, old, new):
        # Keyword-only to keep call sites self-documenting.
        self.object = object
        self.name = name
        self.old = old
        self.new = new

    def __repr__(self):
        """Debug representation echoing all four fields."""
        return '{event.__class__.__name__}(object={event.object!r}, name={event.name!r}, old={event.old!r}, new={event.new!r})'.format(event=self)
class _IndexedCustomCheckListEditor(BaseSourceWithLocation):
    """Registers Index-location interactions for a CustomEditor check list.

    The single MouseClick handler clicks the widget inside the editor
    control's QLayout; ``convert_index`` maps the logical item index to the
    corresponding layout position before the click is dispatched.
    """
    source_class = CustomEditor
    locator_class = Index
    # wrapper._target.source is the editor; wrapper._target.location.index is
    # the logical index being clicked.
    handlers = [(MouseClick, (lambda wrapper, _: _interaction_helpers.mouse_click_qlayout(layout=wrapper._target.source.control.layout(), index=convert_index(layout=wrapper._target.source.control.layout(), index=wrapper._target.location.index), delay=wrapper.delay)))]
class FileSystem():
    """Thin wrapper around a PyFilesystem2 filesystem.

    Backed by a real directory when *base_path* is given, otherwise by an
    in-memory filesystem ('mem://').
    """

    def __init__(self, base_path: 'str | PathLike | None'=None):
        """Open *base_path* as the backing filesystem, or an in-memory one."""
        if base_path:
            self._fs = fs.open_fs(os.fspath(base_path))
        else:
            self._fs = fs.open_fs('mem://')

    def ls(self, glob: 'str | None'=None, regex: str='.*', show_hidden: bool=False):
        """Yield FileStatus entries, optionally narrowed by glob and regex.

        :param glob: Optional glob pattern; when absent, all files are walked.
        :param regex: Regex applied to each (leading-slash-stripped) path.
        :param show_hidden: Accepted for API compatibility; not implemented.
        """
        if glob:
            # FIX: the original comprehension reused ``glob`` as its loop
            # variable, shadowing the parameter; renamed for clarity.
            # Paths are stripped of their leading '/'.
            paths = [match.path[1:] for match in self._fs.glob(glob)]
        else:
            paths = [file_path[1:] for file_path in self._fs.walk.files(path='/')]
        pattern = re.compile(regex)
        result_after_regex_match = [s for s in paths if pattern.match(s)]
        if show_hidden:
            import warnings
            warnings.warn("argument 'show_hidden' not implemented in foundry_dev_tools")
        for result_path in result_after_regex_match:
            # Size/mtime are not available through this wrapper.
            yield FileStatus(result_path, 'size not implemented', 'modified not implemented')

    def open(self, path, mode='w', **kwargs):
        """Open *path* on the backing filesystem (note: default mode is 'w')."""
        return self._fs.open(path, mode, **kwargs)
def _patch_fn(module: ModuleType, name: str, fn: Callable[..., Any], providers_map: ProvidersMap) -> None:
    """Wrap *fn* with dependency injection and rebind it on *module*.

    No-op when *fn* is already patched or declares no reference injections.
    """
    if _is_patched(fn):
        return
    reference_injections, reference_closing = _fetch_reference_injections(fn)
    if not reference_injections:
        return
    patched = _get_patched(fn, reference_injections, reference_closing)
    _bind_injections(patched, providers_map)
    setattr(module, name, patched)
def get_cache(should_log: Optional[bool]=False) -> FidesopsRedis:
    """Return the shared Redis connection, creating and verifying it lazily.

    :param should_log: When True, emit debug logs around creation and the
        ping test.
    :raises common_exceptions.RedisConnectionError: if the connection cannot
        be established (ping raises or returns falsy).
    """
    global _connection
    if _connection is None:
        logger.debug('Creating new Redis connection...')
        _connection = FidesopsRedis(charset=CONFIG.redis.charset, decode_responses=CONFIG.redis.decode_responses, host=CONFIG.redis.host, port=CONFIG.redis.port, db=CONFIG.redis.db_index, username=CONFIG.redis.user, password=CONFIG.redis.password, ssl=CONFIG.redis.ssl, ssl_ca_certs=CONFIG.redis.ssl_ca_certs, ssl_cert_reqs=CONFIG.redis.ssl_cert_reqs)
        if should_log:
            logger.debug('New Redis connection created.')
    if should_log:
        logger.debug('Testing Redis connection...')
    try:
        connected = _connection.ping()
    except ConnectionErrorFromRedis:
        connected = False
    if not connected:
        logger.debug('Redis connection failed.')
        # FIX: corrected typo in the user-facing message ("PrivacyRequsts").
        raise common_exceptions.RedisConnectionError('Unable to establish Redis connection. Fidesops is unable to accept PrivacyRequests.')
    # FIX: the success message was previously logged in the try/else branch
    # before ``connected`` was actually checked; log it only on real success.
    if should_log:
        logger.debug('Redis connection succeeded.')
    return _connection
class PollActivityTaskQueueResponse(betterproto.Message):
    """One activity task returned by a PollActivityTaskQueue call.

    Field numbers mirror the upstream protobuf definition — presumably the
    Temporal workflow service schema; confirm against the .proto source.
    """

    # Opaque token identifying this task for completion/heartbeat calls.
    task_token: bytes = betterproto.bytes_field(1)
    # Workflow that scheduled the activity.
    workflow_namespace: str = betterproto.string_field(2)
    workflow_type: v1common.WorkflowType = betterproto.message_field(3)
    workflow_execution: v1common.WorkflowExecution = betterproto.message_field(4)
    # Activity identity and invocation payloads.
    activity_type: v1common.ActivityType = betterproto.message_field(5)
    activity_id: str = betterproto.string_field(6)
    header: v1common.Header = betterproto.message_field(7)
    input: v1common.Payloads = betterproto.message_field(8)
    heartbeat_details: v1common.Payloads = betterproto.message_field(9)
    # Scheduling/start timestamps and attempt counter.
    scheduled_time: datetime = betterproto.message_field(10)
    current_attempt_scheduled_time: datetime = betterproto.message_field(11)
    started_time: datetime = betterproto.message_field(12)
    attempt: int = betterproto.int32_field(13)
    # Timeouts and retry policy governing this execution.
    schedule_to_close_timeout: timedelta = betterproto.message_field(14)
    start_to_close_timeout: timedelta = betterproto.message_field(15)
    heartbeat_timeout: timedelta = betterproto.message_field(16)
    retry_policy: v1common.RetryPolicy = betterproto.message_field(17)
class FaucetUntaggedPortSwapIPv4InterVLANRouteTest(FaucetUntaggedTest):
    """Inter-VLAN IPv4 routing keeps working after a VLAN port is swapped.

    Two VLANs (vlana, vlanb) are joined by router-1; the test verifies
    host-to-host routing through the FAUCET VIPs, then removes port 1 from
    vlana, adds port 3 with the same native VLAN (cold start), and verifies
    routing again via the new port.
    """
    # Second router MAC, used by vlanb (vlana uses the default FAUCET_MAC).
    FAUCET_MAC2 = '0e:00:00:00:00:02'
    CONFIG_GLOBAL = ('\nvlans:\n    vlana:\n        vid: 100\n        faucet_vips: ["10.100.0.254/24", "169.254.1.1/24"]\n    vlanb:\n        vid: 200\n        faucet_vips: ["10.200.0.254/24", "169.254.2.1/24"]\n        faucet_mac: "%s"\nrouters:\n    router-1:\n        vlans: [vlana, vlanb]\n' % FAUCET_MAC2)
    CONFIG = '\n        arp_neighbor_timeout: 2\n        max_resolve_backoff_time: 1\n        proactive_learn_v4: True\n        interfaces:\n            %(port_1)d:\n                native_vlan: vlana\n            %(port_2)d:\n                native_vlan: vlanb\n'

    def test_untagged(self):
        # Hosts on the two VLAN subnets, each routed via its FAUCET VIP.
        first_host_ip = ipaddress.ip_interface('10.100.0.1/24')
        first_faucet_vip = ipaddress.ip_interface('10.100.0.254/24')
        second_host_ip = ipaddress.ip_interface('10.200.0.1/24')
        second_faucet_vip = ipaddress.ip_interface('10.200.0.254/24')
        (first_host, second_host, third_host) = self.hosts_name_ordered()[:3]

        def test_connectivity(host_a, host_b):
            # Configure addresses/routes, ping both directions, and check the
            # learned neighbor MACs match the per-VLAN FAUCET MACs.
            host_a.setIP(str(first_host_ip.ip), prefixLen=24)
            host_b.setIP(str(second_host_ip.ip), prefixLen=24)
            self.add_host_route(host_a, second_host_ip, first_faucet_vip.ip)
            self.add_host_route(host_b, first_host_ip, second_faucet_vip.ip)
            self.one_ipv4_ping(host_a, second_host_ip.ip)
            self.one_ipv4_ping(host_b, first_host_ip.ip)
            self.assertEqual(self._ip_neigh(host_a, first_faucet_vip.ip, 4), self.FAUCET_MAC)
            self.assertEqual(self._ip_neigh(host_b, second_faucet_vip.ip, 4), self.FAUCET_MAC2)

        test_connectivity(first_host, second_host)
        # Swap vlana from port 1 to port 3 (requires a cold start), then
        # verify routing works through the new port.
        self.change_port_config(self.port_map['port_1'], None, None, restart=False, cold_start=False)
        self.add_port_config(self.port_map['port_3'], {'native_vlan': 'vlana'}, restart=True, cold_start=True)
        test_connectivity(third_host, second_host)
class TestNthChild(util.TestCase):
    """Tests for the CSS ``:nth-child(An+B of S)`` selector syntax."""

    # Mixed <p>/<span> siblings; ids record document order and
    # class="test" marks the elements targeted by the complex selector.
    MARKUP = '\n    <p id="0"></p>\n    <p id="1"></p>\n    <span id="2" class="test"></span>\n    <span id="3"></span>\n    <span id="4" class="test"></span>\n    <span id="5"></span>\n    <span id="6" class="test"></span>\n    <p id="7"></p>\n    <p id="8" class="test"></p>\n    <p id="9"></p>\n    <p id="10" class="test"></p>\n    <span id="11"></span>\n    '

    def test_nth_child_of_s_simple(self):
        """``:nth-child(-n+3 of p)`` matches the first three <p> children."""
        self.assert_selector(self.MARKUP, ':nth-child(-n+3 of p)', ['0', '1', '7'], flags=util.HTML)

    def test_nth_child_of_s_complex(self):
        """Complex ``of S`` argument; the ``of`` keyword is case-insensitive."""
        self.assert_selector(self.MARKUP, ':nth-child(2n + 1 of :is(p, span).test)', ['2', '6', '10'], flags=util.HTML)
        self.assert_selector(self.MARKUP, ':nth-child(2n + 1 OF :is(p, span).test)', ['2', '6', '10'], flags=util.HTML)
class OptionPlotoptionsScatterSonificationTracksMappingPitch(Options):
    """Pitch-mapping options for scatter-series sonification tracks.

    NOTE(review): getter and setter share the same name for every option, so
    the setter shadows the getter at class-creation time; this pattern
    normally carries ``@property`` / ``@<name>.setter`` decorators that
    appear to have been stripped — confirm against the original module.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Point property mapped to pitch; default 'y'.
        return self._config_get('y')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Highest note; default 'c6'.
        return self._config_get('c6')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        # Lowest note; default 'c2'.
        return self._config_get('c2')

    def min(self, text: str):
        self._config(text, js_type=False)

    def scale(self):
        return self._config_get(None)

    def scale(self, value: Any):
        self._config(value, js_type=False)

    def within(self):
        # Range the mapping is normalized within; default 'yAxis'.
        return self._config_get('yAxis')

    def within(self, text: str):
        self._config(text, js_type=False)
class GameWinnersDetectorTest(unittest.TestCase):
    """GameWinnersDetector returns the active players with the top score."""

    def test_get_winners(self):
        """Winners track the best non-folded score; ties share the win."""
        player1 = Player('player-1', 'Player One', 1000.0)
        player2 = Player('player-2', 'Player Two', 1000.0)
        player3 = Player('player-3', 'Player Three', 1000.0)
        player4 = Player('player-4', 'Player Four', 1000.0)
        players = GamePlayers([player1, player2, player3, player4])

        class ScoreMock():
            """Comparable stand-in for a hand score."""

            def __init__(self, value):
                self.value = value

            def cmp(self, other):
                # Three-way comparison: -1, 0 or 1.
                if (self.value < other.value):
                    return (- 1)
                elif (self.value > other.value):
                    return 1
                else:
                    return 0

        class GameScoresMock():
            """Fixed scores: p1=3, p2=p3=2, p4=1."""

            def player_score(self, player_id):
                if (player_id == 'player-1'):
                    return ScoreMock(3)
                elif (player_id == 'player-2'):
                    return ScoreMock(2)
                elif (player_id == 'player-3'):
                    return ScoreMock(2)
                elif (player_id == 'player-4'):
                    return ScoreMock(1)
                else:
                    raise ValueError('Unknown player id')

        winner_detector = GameWinnersDetector(players)
        # Without player1 the tied players 2 and 3 share the win.
        winners = winner_detector.get_winners([player2, player3, player4], GameScoresMock())
        self.assertListEqual([player2, player3], winners)
        # With all four, the single top score (player1) wins alone.
        winners = winner_detector.get_winners([player1, player2, player3, player4], GameScoresMock())
        self.assertListEqual([player1], winners)
        # Folded players are excluded even when passed in; each fold promotes
        # the next-best remaining score.
        players.fold('player-1')
        winners = winner_detector.get_winners([player1, player2, player3, player4], GameScoresMock())
        self.assertListEqual([player2, player3], winners)
        players.fold('player-2')
        winners = winner_detector.get_winners([player1, player2, player3, player4], GameScoresMock())
        self.assertListEqual([player3], winners)
        players.fold('player-3')
        winners = winner_detector.get_winners([player1, player2, player3, player4], GameScoresMock())
        self.assertListEqual([player4], winners)
        # Everyone folded: no winners.
        players.fold('player-4')
        winners = winner_detector.get_winners([player1, player2, player3, player4], GameScoresMock())
        self.assertListEqual([], winners)
def test_brightness_logging_no_max(caplog):
    """The widget warns and falls back to 500 when no maximum brightness is configured."""
    init_log(logging.INFO)
    with tempfile.TemporaryDirectory() as tempdir:
        brightness_file = os.path.join(tempdir, 'brightness')
        with open(brightness_file, 'w') as f:
            f.write('500')
        # No max_brightness_path / max_brightness: the widget must warn.
        _ = qtile_extras.widget.BrightnessControl(max_brightness_path=None, max_brightness=None, device=tempdir)
    assert caplog.record_tuples == [('libqtile', logging.WARNING, 'No maximum brightness defined. Setting to default value of 500. The script may behave unexpectedly.')]
class Solution:
    """LeetCode 412 — Fizz Buzz."""

    def fizzBuzz(self, n: int) -> List[str]:
        """Return the Fizz/Buzz word list for the integers 1..n.

        Multiples of 3 become 'Fizz', of 5 'Buzz', of both 'FizzBuzz';
        everything else is the number itself as a string.
        """
        words = []
        for value in range(1, n + 1):
            token = ''
            if value % 3 == 0:
                token += 'Fizz'
            if value % 5 == 0:
                token += 'Buzz'
            words.append(token or str(value))
        return words
class Doctor(GraphCanvas.Canvas):
    """'Skin Doctor' canvas component: an animated ECG-style trace drawn
    client-side on an HTML5 canvas via the JS builder below."""
    name = 'Skin Doctor'
    _option_cls = OptSkins.OptionsSkin
    # Client-side renderer: draws the bordered screen with grid lines, two
    # circular 'buttons', and animates a heartbeat-like trace on a timer.
    _js__builder__ = '\n var ctx = htmlObj.getContext ( "2d" );\n ctx.save (); ctx.shadowColor = \'#555555\'; ctx.shadowBlur = 10; ctx.shadowOffsetX = 2; ctx.shadowOffsetY = 2;\n ctx.beginPath (); ctx.lineWidth = 1; \n ctx.strokeStyle = \'rgba( 20, 50, 20, 1 )\'; ctx.rect ( 0, 0, htmlObj.width, htmlObj.height ); ctx.fill ();\n ctx.stroke (); ctx.closePath (); ctx.beginPath (); ctx.arc(350, 25, 20, 0, 2 * Math.PI, false );\n ctx.fillStyle = \'rgba( 200, 200, 100, 1 )\'; ctx.fill(); ctx.lineWidth = 1; \n ctx.strokeStyle = \'rgba( 180, 179, 80, 1 )\'; ctx.stroke(); ctx.closePath (); ctx.beginPath ();\n ctx.arc( 350, 85, 20, 0, 2 * Math.PI, false ); ctx.fillStyle = \'rgba( 200, 200, 100, 1 )\'; ctx.fill();\n ctx.lineWidth = 1; ctx.strokeStyle = \'rgba( 180, 179, 80, 1 )\'; ctx.stroke();\n ctx.closePath (); var screenWidth = 400, screenHeight = 150, screenTop = 0, screenLeft = 0;\n \n function screenBackgroundRender ( a ) {\n ctx.beginPath (); ctx.fillStyle = \'rgba( 20, 20, 20, \' + a + \' )\';\n ctx.fillRect ( screenLeft, screenTop, screenWidth, screenHeight ); ctx.closePath (); ctx.beginPath ();\n for ( var j = 10 + screenTop; j < screenTop + screenHeight; j = j + 10 ) {\n ctx.moveTo( screenLeft, j ); ctx.lineTo( screenLeft + screenWidth, j )}\n for ( var i = 10 + screenLeft; i < screenLeft + screenWidth; i = i + 10 ) {\n ctx.moveTo( i, screenTop ); ctx.lineTo( i, screenTop + screenHeight )}\n ctx.lineWidth = 1; ctx.strokeStyle = \'rgba( 20, 50, 20, \' + a + \' )\'; ctx.stroke (); ctx.closePath ()\n };\n \n ctx.shadowBlur = 0; ctx.shadowOffsetX = 0; ctx.shadowOffsetY = 0; \n PosX = screenLeft; PosY = screenTop + screenHeight / 2;\n setInterval ( function () {\n ctx.restore (); screenBackgroundRender ( 0.06 )\n ctx.beginPath (); ctx.moveTo( PosX, PosY );\n PosX = PosX + 1;\n if ( PosX >= screenLeft + screenWidth * 40 / 100 && PosX < screenLeft + screenWidth * 45 / 100 ) {\n PosY = PosY - screenHeight * 3 / 100;\n }\n if ( PosX >= screenLeft + screenWidth * 45 / 100 && PosX < screenLeft + screenWidth * 55 / 100 ) {\n PosY = PosY + screenHeight * 3 / 100;\n }\n if ( PosX >= screenLeft + screenWidth * 55 / 100 && PosX < screenLeft + screenWidth * 60 / 100 ) {\n PosY = PosY - screenHeight * 3 / 100;\n }\n if ( PosX >= screenLeft + screenWidth * 60 / 100 && PosX <= screenLeft + screenWidth ) {\n PosY = screenTop + screenHeight / 2;\n }\n if ( PosX > screenLeft + screenWidth ) { PosX = screenLeft; ctx.moveTo( PosX, PosY )}\n ctx.lineTo( PosX, PosY ); ctx.lineWidth = 2; ctx.strokeStyle = \'#33ff33\'; ctx.stroke (); ctx.closePath ();\n }, 6 );\n '

    def __str__(self):
        """Render the <canvas> tag and register the JS builder on the page."""
        self.page.properties.js.add_builders(self.refresh())
        return ('<canvas %s>Your browser does not support the HTML5 canvas tag.</canvas>' % self.get_attrs(css_class_names=self.style.get_classes()))
class SimpleEditor(Editor):
    """Traits UI editor mapping a boolean trait onto a wx.CheckBox."""

    def init(self, parent):
        """Create the checkbox and hook up change notification."""
        checkbox = wx.CheckBox(parent, -1, '')
        checkbox.Bind(wx.EVT_CHECKBOX, self.update_object)
        self.control = checkbox
        self.set_tooltip()

    def dispose(self):
        """Disconnect the checkbox event handler before teardown."""
        self.control.Unbind(wx.EVT_CHECKBOX)

    def update_object(self, event):
        """Push the widget state into the edited trait as a bool."""
        self.value = self.control.GetValue() != 0

    def update_editor(self):
        """Push the trait value into the widget."""
        self.control.SetValue(self.value)
# NOTE(review): the two leading lines look like pytest marks whose
# ``@pytest.mark`` prefix was lost during extraction (e.g.
# ``@pytest.mark.parametrize``) — restore against the original source.
.skipcomplexnoslate
.parametrize(['family', 'cell'], [('RT', 'tetrahedron'), ('NCF', 'hexahedron')])
def test_hiptmair_hdiv(family, cell):
    """Riesz-map solves with H(div) elements converge in at most 12 iterations."""
    # Finest mesh of the hierarchy for the given cell type.
    mesh = mesh_hierarchy(cell)[(- 1)]
    V = FunctionSpace(mesh, family, degree=1)
    # Both assembled (aij) and matrix-free variants are checked.
    assert (run_riesz_map(V, 'aij') <= 12)
    assert (run_riesz_map(V, 'matfree') <= 12)
class Rule(object):
    """A single KE-cluster audit rule evaluated via a JMESPath expression.

    The rule extracts a value from the cluster's dict representation and
    checks it against ``rule_values`` in either 'whitelist' mode (value must
    be listed) or 'blacklist' mode (value must not be listed).
    """

    def __init__(self, rule_name, rule_index, rule_mode, rule_key, rule_values):
        """Store the rule definition and pre-compile its JMESPath key."""
        self.rule_name = rule_name
        self.rule_index = rule_index
        self.rule_mode = rule_mode
        self.rule_key = rule_key
        self.rule_values = rule_values
        # Compiled once; reused for every cluster this rule inspects.
        self.rule_jmespath = jmespath.compile(self.rule_key)

    def find_violations(self, ke_cluster):
        """Return the list of violations for *ke_cluster* (empty if compliant).

        JMESPath evaluation errors are logged and treated as no violation.
        """
        violations = []
        try:
            actual = self.rule_jmespath.search(ke_cluster.as_dict)
        except jmespath.exceptions.JMESPathError as e:
            LOGGER.warning('JMESPath error processing KE cluster %s: %s', ke_cluster.id, e)
            return violations
        LOGGER.debug('actual jmespath result: %s', actual)
        if (self.rule_mode == 'whitelist'):
            if (actual not in self.rule_values):
                violations.append(self._make_violation(ke_cluster, ('%s has value %s, which is not in the whitelist (%s)' % (self.rule_jmespath.expression, actual, self.rule_values)), actual))
        if (self.rule_mode == 'blacklist'):
            if (actual in self.rule_values):
                violations.append(self._make_violation(ke_cluster, ('%s has value %s, which is in the blacklist (%s)' % (self.rule_jmespath.expression, actual, self.rule_values)), actual))
        return violations

    def _make_violation(self, ke_cluster, violation_reason, actual):
        """Build a RuleViolation record describing a failed check."""
        return RuleViolation(resource_type=resource_mod.ResourceType.KE_CLUSTER, resource_id=ke_cluster.id, full_name=ke_cluster.full_name, rule_name=self.rule_name, rule_index=self.rule_index, rule_mode=self.rule_mode, rule_values=self.rule_values, actual_value=actual, violation_type='KE_VIOLATION', violation_reason=violation_reason, project_id=ke_cluster.parent.id, cluster_name=ke_cluster.name, resource_data=ke_cluster.data, resource_name=ke_cluster.name)

    def __eq__(self, other):
        """Rules are equal when name, index, mode and values all match."""
        if (not isinstance(other, type(self))):
            return NotImplemented
        # BUG FIX: all() takes a single iterable; the original passed four
        # positional arguments, raising TypeError on every comparison.
        return all([
            (self.rule_name == other.rule_name),
            (self.rule_index == other.rule_index),
            (self.rule_mode == other.rule_mode),
            (self.rule_values == other.rule_values),
        ])

    def __ne__(self, other):
        return (not (self == other))

    def __hash__(self):
        # rule_index also participates in __eq__, so equal rules hash equally.
        return hash(self.rule_index)
class BlueIrisEntity(Entity):
    """Base Home Assistant entity for the Blue Iris integration.

    Holds references to the integration's shared managers and re-reads its
    EntityData whenever the per-domain update signal is dispatched.

    NOTE(review): ``unique_id``/``device_info``/``name``/``should_poll``/
    ``extra_state_attributes`` would normally carry ``@property`` decorators
    on an HA entity; they appear to have been stripped in extraction —
    confirm against the original module.
    """

    # Shared plumbing; all populated by ``initialize``.
    hass: HomeAssistant = None
    integration_name: str = None
    entity: EntityData = None
    # Callback returned by the dispatcher to disconnect the listener.
    remove_dispatcher = None
    current_domain: str = None
    ha = None
    entity_manager = None
    device_manager = None
    api = None

    def initialize(self, hass: HomeAssistant, integration_name: str, entity: EntityData, current_domain: str):
        """Bind this entity to the integration's shared managers."""
        self.hass = hass
        self.integration_name = integration_name
        self.entity = entity
        self.remove_dispatcher = None
        self.current_domain = current_domain
        self.ha = get_ha(self.hass, self.integration_name)
        self.entity_manager = self.ha.entity_manager
        self.device_manager = self.ha.device_manager
        self.api = self.ha.api

    def unique_id(self) -> Optional[str]:
        # Stable identifier taken from the backing EntityData.
        return self.entity.unique_id

    def device_info(self):
        # Device registry info resolved by device name.
        return self.device_manager.get(self.entity.device_name)

    def name(self):
        return self.entity.name

    def should_poll(self):
        # Push-based: state updates arrive via dispatcher signals.
        return False

    def extra_state_attributes(self):
        return self.entity.attributes

    async def async_added_to_hass(self):
        """Subscribe to the domain's update signal, then run the local hook."""
        async_dispatcher_connect(self.hass, SIGNALS[self.current_domain], self._schedule_immediate_update)
        (await self.async_added_to_hass_local())

    async def async_will_remove_from_hass(self) -> None:
        """Disconnect the dispatcher (if any), then run the local hook."""
        if (self.remove_dispatcher is not None):
            self.remove_dispatcher()
            self.remove_dispatcher = None
        (await self.async_will_remove_from_hass_local())

    def _schedule_immediate_update(self):
        # Dispatcher callback is sync; hop onto the event loop via a task.
        self.hass.async_create_task(self._async_schedule_immediate_update())

    async def _async_schedule_immediate_update(self):
        """Re-read the EntityData and push the new state to HA if possible."""
        if (self.entity_manager is None):
            _LOGGER.debug(f'Cannot update {self.current_domain} - Entity Manager is None | {self.name}')
        elif (self.entity is not None):
            previous_state = self.entity.state
            entity = self.entity_manager.get_entity(self.current_domain, self.name)
            if (entity is None):
                _LOGGER.debug(f'Skip updating {self.name}, Entity is None')
            elif entity.disabled:
                _LOGGER.debug(f'Skip updating {self.name}, Entity is disabled')
            else:
                self.entity = entity
                if (self.entity is not None):
                    self._immediate_update(previous_state)

    async def async_added_to_hass_local(self):
        # Hook for subclasses; intentionally a no-op here.
        pass

    async def async_will_remove_from_hass_local(self):
        # Hook for subclasses; intentionally a no-op here.
        pass

    def _immediate_update(self, previous_state: bool):
        # previous_state is available for subclasses overriding this method.
        self.async_schedule_update_ha_state(True)
def test_MaterialItem():
    """MaterialItem exposes variants by key and validates the default key."""
    # Two variants that differ only in their reference metadata.
    variant1 = VariantItem(medium=td.PoleResidue(), reference=[ReferenceData(doi='etc.com', journal='paper', url='www')])
    variant2 = VariantItem(medium=td.PoleResidue(), reference=[ReferenceData(doi='etc2.com', journal='paper2', url='www2')])
    material = MaterialItem(name='material', variants=dict(v1=variant1, v2=variant2), default='v1')
    # Indexing by the default variant's key yields the same medium the
    # material exposes directly.
    assert (material['v1'] == material.medium)
    # A default key that names no variant must fail validation.
    with pytest.raises(pydantic.ValidationError):
        material = MaterialItem(name='material', variants=dict(v1=variant1, v2=variant2), default='v3')
class TransformationTree():
    def __init__(self, root_parameters):
        """Build the tree from the computation's root parameters.

        A parameter name appearing twice must agree in type and default;
        the duplicate pair is collapsed into one parameter with an 'io' role.
        """
        self.root_names = []  # signature order of root parameter names
        self.root_parameters = {}  # name -> Parameter at the root
        self.nodes = {}  # name -> Node describing tree structure
        self.leaf_parameters = {}  # name -> Parameter currently at a leaf
        for param in root_parameters:
            self.root_names.append(param.name)
            if ((param.name in self.root_parameters) and (param != self.root_parameters[param.name])):
                # Same name, different parameter: verify the pair is
                # compatible, then merge into a single input/output param.
                new_ann = param.annotation
                old_param = self.root_parameters[param.name]
                old_ann = old_param.annotation
                assert (old_ann.type == new_ann.type)
                assert (old_param.default == param.default)
                new_param = Parameter(param.name, Annotation(new_ann.type, 'io'), default=param.default)
                self.root_parameters[param.name] = new_param
                self.leaf_parameters[param.name] = new_param
            else:
                # First occurrence: create its node and register it as both
                # a root and (initially) a leaf parameter.
                self.nodes[param.name] = Node()
                self.root_parameters[param.name] = param
                self.leaf_parameters[param.name] = param
    def _get_subtree_names(self, names, ignore, visited, leaves_only=False):
        """Depth-first collect node names under *names*, preserving order.

        Parameters
        ----------
        names : list
            Node names to expand, in priority order.
        ignore : list
            Names excluded from this branch (later siblings shadow children
            of earlier siblings with the same name).
        visited : set
            Mutated in place to avoid revisiting shared nodes.
        leaves_only : bool
            When True, emit only names present in ``leaf_parameters``,
            positioned between their 'before' and 'after' child subtrees.
        """
        result = []
        for (i, name) in enumerate(names):
            if ((name in ignore) or (name in visited)):
                continue
            visited.add(name)
            # Later siblings take precedence over same-named children.
            ignore_in_children = names[(i + 1):]
            node = self.nodes[name]
            (children_before, children_after) = node.get_child_names()
            subtree_before = self._get_subtree_names(children_before, ignore_in_children, visited, leaves_only=leaves_only)
            subtree_after = self._get_subtree_names(children_after, ignore_in_children, visited, leaves_only=leaves_only)
            if (not leaves_only):
                result.append(name)
            result += subtree_before
            if (leaves_only and (name in self.leaf_parameters)):
                result.append(name)
            result += subtree_after
        return result
def get_subtree_names(self, root_names=None, leaves_only=False):
if (root_names is None):
root_names = self.root_names
return self._get_subtree_names(root_names, [], set(), leaves_only=leaves_only)
def get_root_annotations(self):
return dict(((name, param.annotation) for (name, param) in self.root_parameters.items()))
def get_root_parameters(self):
return [self.root_parameters[name] for name in self.root_names]
def get_leaf_parameters(self, root_names=None):
leaf_names = self.get_subtree_names(root_names=root_names, leaves_only=True)
return [self.leaf_parameters[name] for name in leaf_names]
def _connect(self, ntr):
    """Attach the node transformation ``ntr`` to the tree.

    Assumes the connection was already validated (see ``connect``): updates
    ``self.leaf_parameters`` for every transformation parameter, creates
    fresh ``Node`` entries for new node names, and records the transformation
    on the connector's node.
    """
    for tr_param in ntr.trf.signature.parameters.values():
        node_name = ntr.node_from_tr[tr_param.name]
        if (node_name == ntr.connector_node_name):
            # The connector stops being a leaf in the connected direction.
            ann = self.leaf_parameters[node_name].annotation
            if (ann.input and ann.output):
                # An 'io' leaf keeps the opposite role: an output connection
                # leaves the input side behind, and vice versa.
                updated_role = ('i' if ntr.output else 'o')
                self.leaf_parameters[node_name] = Parameter(node_name, Annotation(ann.type, role=updated_role))
            else:
                # Single-role leaf is fully consumed by the connection.
                del self.leaf_parameters[node_name]
        elif ((node_name in self.leaf_parameters) and self.leaf_parameters[node_name].annotation.array):
            # An existing array leaf touched from the other direction:
            # merge both roles into a single 'io' leaf.
            ann = self.leaf_parameters[node_name].annotation
            if ((ann.input and ntr.output) or (ann.output and (not ntr.output))):
                self.leaf_parameters[node_name] = Parameter(node_name, Annotation(ann.type, role='io'))
        else:
            # A new leaf introduced by the transformation parameter,
            # renamed to its node name.
            self.leaf_parameters[node_name] = tr_param.rename(node_name)
        if (node_name not in self.nodes):
            self.nodes[node_name] = Node()
    # Record the transformation on the connector's node.
    self.nodes[ntr.connector_node_name] = self.nodes[ntr.connector_node_name].connect(ntr)
def connect(self, comp_connector, trf, comp_from_tr):
    """Validate and attach the transformation ``trf`` at node ``comp_connector``.

    ``comp_from_tr`` maps transformation parameter names to node names.
    Raises ``ValueError`` when the connector is not a visible parameter,
    a node is hidden by earlier transformations, types are incompatible,
    or the connection direction conflicts with a node's annotation.
    """
    ntr = NodeTransformation(comp_connector, trf, comp_from_tr)
    for (tr_name, node_name) in comp_from_tr.items():
        if (node_name not in self.leaf_parameters):
            if (node_name == comp_connector):
                # The connector itself must be an existing leaf parameter.
                raise ValueError((("Parameter '" + node_name) + "' is not a part of the signature"))
            elif (node_name in self.nodes):
                # Known node, but already buried under transformations.
                raise ValueError((("Parameter '" + node_name) + "' is hidden by transformations"))
        if (node_name not in self.leaf_parameters):
            # A genuinely new node name — nothing to validate against yet.
            continue
        node_ann = self.leaf_parameters[node_name].annotation
        tr_ann = trf.signature.parameters[tr_name].annotation
        if (tr_ann.type != node_ann.type):
            raise ValueError("Incompatible types of the transformation parameter '{tr_name}' ({tr_type}) and the node '{node_name}' ({node_type})".format(node_name=node_name, tr_name=tr_name, node_type=node_ann.type, tr_type=tr_ann.type))
        # Scalar (non-array) parameters need no direction checks.
        if (not tr_ann.array):
            continue
        if (node_name == comp_connector):
            # The connection's direction must match the connector's role.
            if (ntr.output and (not node_ann.output)):
                raise ValueError((("'" + node_name) + "' is not an output node"))
            if ((not ntr.output) and (not node_ann.input)):
                raise ValueError((("'" + node_name) + "' is not an input node"))
        elif (ntr.output and node_ann.output):
            # A non-connector array parameter may not collide with an
            # existing output leaf.
            raise ValueError("Cannot connect transformation parameter '{tr_name}' to an existing output node '{node_name}'".format(tr_name=tr_name, node_name=node_name))
    self._connect(ntr)
def reconnect(self, other_tree, translator=None):
    """Replay connections from ``other_tree`` onto this tree.

    Connections whose connector node is not one of this tree's leaves, or
    whose direction the leaf annotation does not support, are skipped.
    An optional ``translator`` renames nodes before the lookup.
    """
    for ntr in other_tree.connections():
        if translator is not None:
            ntr = ntr.translate_node_names(translator)
        leaf = self.leaf_parameters.get(ntr.connector_node_name)
        if leaf is None:
            continue
        ann = leaf.annotation
        # The leaf must support the connection's direction.
        if ann.output if ntr.output else ann.input:
            self._connect(ntr)
def connections(self):
    """Yield every node transformation attached to the nodes of this tree."""
    for node_name in self.get_subtree_names(leaves_only=False):
        yield from self.nodes[node_name].get_connections()
def translate(self, translator):
    """Build a copy of this tree with every root renamed by ``translator``.

    Connections are replayed onto the new tree through the same translator.
    """
    renamed = [
        param.rename(translator(param.name))
        for param in self.get_root_parameters()]
    tree = TransformationTree(renamed)
    tree.reconnect(self, translator=translator)
    return tree
def get_subtree(self, parameters):
    """Build a new tree rooted at ``parameters`` and replay this tree's connections.

    A parameter whose name matches an existing root must carry an identical
    annotation and is reused; any other parameter is taken as given.
    """
    roots = []
    for param in parameters:
        existing = self.root_parameters.get(param.name)
        if existing is not None:
            assert existing.annotation == param.annotation
            roots.append(existing)
        else:
            roots.append(param)
    subtree = TransformationTree(roots)
    subtree.reconnect(self)
    return subtree
def get_kernel_declaration(self, kernel_name, skip_constants=False):
    """Return the kernel declaration and the ordered list of leaf parameter names.

    With ``skip_constants`` set, parameters whose annotation is marked
    constant are excluded from the declaration and from the name list.
    """
    params = self.get_leaf_parameters()
    if skip_constants:
        params = [p for p in params if not p.annotation.constant]
    names = [p.name for p in params]
    return kernel_declaration(kernel_name, params), names
def _get_transformation_module(self, annotation, ntr):
    """Render the node transformation ``ntr`` as a module for its connector.

    Assembles the snippet arguments: an ``Indices`` object over the
    connector's shape, a ``KernelParameter`` for the connector wired for
    same-index access in the connection's direction, and recursively built
    argument objects for every other connected node.
    """
    param = Parameter(ntr.connector_node_name, annotation)
    # First snippet argument: indices over the connector's shape.
    tr_args = [Indices(param.annotation.type.shape)]
    # NOTE(review): collected but never read below — looks like dead code.
    connection_names = []
    for tr_param in ntr.trf.signature.parameters.values():
        connection_name = ntr.node_from_tr[tr_param.name]
        connection_names.append(connection_name)
        if (connection_name == ntr.connector_node_name):
            if ntr.output:
                # Output connection: connector parameter carries load_same.
                load_same = node_connector(ntr.output)
                tr_args.append(KernelParameter(param.name, param.annotation.type, load_same=load_same))
            else:
                # Input connection: connector parameter carries store_same.
                store_same = node_connector(ntr.output)
                tr_args.append(KernelParameter(param.name, param.annotation.type, store_same=store_same))
        else:
            # Other nodes get full argument objects, recursing through
            # their own connections.
            tr_args.append(self._get_kernel_argobject(connection_name, tr_param.annotation))
    subtree_params = self.get_leaf_parameters([ntr.connector_node_name])
    return module_transformation(ntr.output, param, subtree_params, ntr.trf.snippet, tr_args)
def _get_connection_modules(self, output, name, annotation):
    """Build the (by-index, same-index, combined-index) modules for a node.

    The direction is chosen by ``output``.  A node with no transformation
    attached in that direction uses a plain leaf macro as the base module;
    otherwise the transformation's own module is used.
    """
    param = Parameter(name, annotation)
    ntr = self.nodes[name].output_ntr if output else self.nodes[name].input_ntr
    if ntr is None:
        base = module_leaf_macro(output, param)
    else:
        base = self._get_transformation_module(annotation, ntr)
    subtree = self.get_leaf_parameters([name])
    same = module_same_indices(output, param, subtree, base)
    combined = module_combined(output, param, subtree, base)
    return base, same, combined
def _get_kernel_argobject(self, name, annotation):
    """Wrap a node in a ``KernelParameter``.

    Scalars get a bare parameter; arrays additionally receive the load and
    store access modules for both directions.
    """
    if not annotation.array:
        return KernelParameter(name, annotation.type)
    load_mods = self._get_connection_modules(False, name, annotation)
    store_mods = self._get_connection_modules(True, name, annotation)
    return KernelParameter(
        name, annotation.type,
        load_idx=load_mods[0], store_idx=store_mods[0],
        load_same=load_mods[1], store_same=store_mods[1],
        load_combined_idx=load_mods[2], store_combined_idx=store_mods[2])
def get_kernel_argobjects(self):
    """Return kernel argument objects for all root parameters, in root order."""
    # Fix: the original line ended with a stray '|' token (extraction
    # residue), which is a syntax error.
    return [
        self._get_kernel_argobject(name, self.root_parameters[name].annotation)
        for name in self.root_names]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.