code stringlengths 281 23.7M |
|---|
('model')
def check_no_references(progress_controller=None):
    """Fail the publish when the Maya scene contains any file references.

    :param progress_controller: optional progress reporter; a fresh
        ProgressControllerBase is created when omitted.
    :raises PublishError: if the scene has one or more references.
    """
    if progress_controller is None:
        progress_controller = ProgressControllerBase()
    references = pm.listReferences()
    if len(references):
        progress_controller.complete()
        raise PublishError('There should be no <b>References</b> in a <b>Model</b> scene.')
    progress_controller.complete()
def fix_copr(args, opts, copr_full_name):
    """Re-sign all RPMs in one copr ("owner/coprname") and refresh its repos.

    Regenerates the owner's GPG key-pair if needed, exports pubkey.gpg, then
    for each chroot directory: unsigns and re-signs every build dir, re-runs
    createrepo, and invalidates the CDN cache for that chroot.

    :param args: CLI namespace; args.chroot optionally limits which chroots run
    :param opts: backend configuration (destdir, sign_domain, ...)
    :param copr_full_name: "owner/coprname" string
    """
    log.info('Going to fix %s', copr_full_name)
    (owner, coprname) = tuple(copr_full_name.split('/'))
    copr_path = os.path.abspath(os.path.join(opts.destdir, owner, coprname))
    if (not os.path.isdir(copr_path)):
        log.info('Ignoring %s. Directory does not exist.', copr_path)
        return
    log.info('Generate key-pair on copr-keygen (if not generated) for email %s', create_gpg_email(owner, coprname, opts.sign_domain))
    create_user_keys(owner, coprname, opts)
    log.info('Regenerate pubkey.gpg in copr %s', copr_path)
    get_pubkey(owner, coprname, log, opts.sign_domain, os.path.join(copr_path, 'pubkey.gpg'))
    # Build dirs are named like "<8+ digit build id>-<package name>".
    builddir_matcher = re.compile('\\d{8,}-')
    log.info("Re-sign rpms and call createrepo in copr's chroots")
    for chroot in os.listdir(copr_path):
        dir_path = os.path.join(copr_path, chroot)
        if (not os.path.isdir(dir_path)):
            log.debug('Ignoring %s, not a directory', dir_path)
            continue
        # These directories live beside chroots but are not chroots themselves.
        if (chroot in ['srpm-builds', 'modules', 'repodata']):
            log.debug('Ignoring %s, not a chroot', chroot)
            continue
        if args.chroot:
            # --chroot entries may omit the trailing architecture component.
            parts = chroot.split('-')
            parts.pop()
            chroot_without_arch = '-'.join(parts)
            if (not ((chroot in args.chroot) or (chroot_without_arch in args.chroot))):
                log.info('Skipping %s, not included by --chroot (%s)', chroot, ', '.join(args.chroot))
                continue
        log.info('Signing in %s chroot', chroot)
        for builddir_name in os.listdir(dir_path):
            builddir_path = os.path.join(dir_path, builddir_name)
            if (not os.path.isdir(builddir_path)):
                continue
            if (not builddir_matcher.match(builddir_name)):
                log.debug('Skipping %s, not a build dir', builddir_name)
                continue
            log.info('Processing rpms in builddir %s', builddir_path)
            try:
                unsign_rpms_in_dir(builddir_path, opts, log)
                sign_rpms_in_dir(owner, coprname, builddir_path, chroot, opts, log)
            except Exception as e:
                # Best-effort: log the failure and keep fixing remaining dirs.
                log.exception(str(e))
                continue
        log.info('Running add_appdata for %s', dir_path)
        call_copr_repo(dir_path, logger=log, do_stat=True)
        invalidate_aws_cloudfront_data(opts, owner, coprname, chroot)
class TestBinaryJSONField(FieldValues):
    """Fixture values exercising serializers.JSONField(binary=True)."""
    # (input bytes, expected parsed value)
    valid_inputs = [(b'{"a": 1, "3": null, "b": ["some", "list", true, 1.23]}', {'a': 1, 'b': ['some', 'list', True, 1.23], '3': None})]
    # (input, expected list of validation error messages)
    invalid_inputs = [('{"a": "unterminated string}', ['Value must be valid JSON.'])]
    # (python value, expected serialized bytes)
    outputs = [(['some', 'list', True, 1.23], b'["some", "list", true, 1.23]')]
    field = serializers.JSONField(binary=True)
class OptionSeriesAreaSonificationContexttracksMappingGapbetweennotes(Options):
    """Sonification "gap between notes" mapping option group.

    NOTE(review): the original defined each getter/setter pair as two plain
    methods with the same name, so every getter was silently shadowed by its
    setter and unreachable.  Restored the @property/@setter pairs this
    getter/setter shape implies — confirm against the wrapper generator output.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesPictorialSonificationTracksMappingPlaydelay(Options):
    """Sonification "play delay" mapping option group.

    NOTE(review): the original defined each getter/setter pair as two plain
    methods with the same name, so every getter was silently shadowed by its
    setter and unreachable.  Restored the @property/@setter pairs this
    getter/setter shape implies — confirm against the wrapper generator output.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def eta2(x, t, w0):
    """Second-order surface-elevation correction term.

    Combines four sin/cos modes (fundamental and third harmonics in both
    t and x) with coefficients b11, b13, b31, b33 that depend only on w0.
    """
    w4 = w0 ** 4
    inv4 = w0 ** (-4)
    inv8 = w0 ** (-8)
    # Mode coefficients (polynomials in powers of w0).
    b11 = (3.0 * inv8 + 6.0 * inv4 - 5.0 + 2.0 * w4) * (1.0 / 32.0)
    b13 = (9.0 * inv8 + 27.0 * inv4 - 15.0 + w4 + 2 * (w0 ** 8)) * (3.0 / 128.0)
    b31 = (3.0 * inv8 + 18.0 * inv4 - 5.0) * (1.0 / 128.0)
    b33 = ((-9.0) * (w0 ** (-12)) + 3.0 * inv8 - 3.0 * inv4 + 1) * (3.0 / 128.0)
    return (b11 * np.sin(t) * np.cos(x)
            + b13 * np.sin(t) * np.cos(3 * x)
            + b31 * np.sin(3 * t) * np.cos(x)
            + b33 * np.sin(3 * t) * np.cos(3 * x))
def fetch_production(zone_key: str='CA-ON', session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list:
    """Fetch hourly per-fuel electricity production for Ontario from the IESO feed.

    :returns: list of datapoint dicts sorted by datetime; empty list when the
        IESO XML document could not be retrieved.
    """
    (dt, xml) = _fetch_ieso_xml(target_datetime, session, logger, PRODUCTION_URL)
    if (not xml):
        return []
    generators = xml.find((XML_NS_TEXT + 'IMODocBody')).find((XML_NS_TEXT + 'Generators')).findall((XML_NS_TEXT + 'Generator'))

    def production_or_zero(output):
        # A missing EnergyMW element is treated as zero output for that hour.
        tag = output.find((XML_NS_TEXT + 'EnergyMW'))
        if (tag is not None):
            return tag.text
        else:
            return 0

    # One record per generator per hourly <Output> entry.
    all_productions = ({'name': generator.find((XML_NS_TEXT + 'GeneratorName')).text, 'fuel': MAP_GENERATION[generator.find((XML_NS_TEXT + 'FuelType')).text], 'dt': _parse_ieso_hour(output, dt), 'production': float(production_or_zero(output))} for generator in generators for output in generator.find((XML_NS_TEXT + 'Outputs')).findall((XML_NS_TEXT + 'Output')))
    df = pd.DataFrame(all_productions)
    # Sum production per (hour, fuel), then pivot fuels into columns.
    by_fuel = df.groupby(['dt', 'fuel']).sum().unstack()
    by_fuel_dict = by_fuel['production'].to_dict('index')
    data = [{'datetime': time.to_pydatetime(), 'zoneKey': zone_key, 'production': productions, 'storage': {}, 'source': 'ieso.ca'} for (time, productions) in by_fuel_dict.items()]
    data = sorted(data, key=(lambda dp: dp['datetime']))
    return data
class ColumnMissingValuesMetric(Metric[ColumnMissingValuesMetricResult]):
    """Counts occurrences of configured "missing" values in a single column."""
    # Values treated as missing by default; None stands in for null-like
    # values (None/NaN) which are counted via isnull().
    DEFAULT_MISSING_VALUES: ClassVar = ['', np.inf, (- np.inf), None]
    missing_values: frozenset
    column_name: str

    def __init__(self, column_name: str, missing_values: Optional[list]=None, replace: bool=True, options: AnyOptions=None) -> None:
        """
        :param column_name: name of the column to analyse.
        :param missing_values: custom missing values; defaults apply when None.
        :param replace: when False, custom values EXTEND the defaults instead
            of replacing them.
        """
        self.column_name = column_name
        _missing_values: list
        if (missing_values is None):
            _missing_values = self.DEFAULT_MISSING_VALUES
        elif (not replace):
            _missing_values = (self.DEFAULT_MISSING_VALUES + missing_values)
        else:
            _missing_values = missing_values
        self.missing_values = frozenset(_missing_values)
        super().__init__(options=options)

    def _calculate_missing_values_stats(self, column: pd.Series) -> ColumnMissingValues:
        """Count how often each configured missing value occurs in *column*."""
        different_missing_values = {value: 0 for value in self.missing_values}
        number_of_missing_values = 0
        number_of_rows = len(column)
        for value in self.missing_values:
            if (value is None):
                # None means "null-like": counted with pandas isnull().
                missing_values = column.isnull().sum()
            else:
                missing_values = (column == value).sum()
            if (missing_values > 0):
                number_of_missing_values += missing_values
                different_missing_values[value] += missing_values
        share_of_missing_values = (number_of_missing_values / number_of_rows)
        # Present per-value counts sorted by frequency, most common first.
        different_missing_values = {value: count for (value, count) in sorted(different_missing_values.items(), key=(lambda item: item[1]), reverse=True)}
        number_of_different_missing_values = sum([1 for value in different_missing_values if (different_missing_values[value] > 0)])
        return ColumnMissingValues(different_missing_values=different_missing_values, number_of_different_missing_values=number_of_different_missing_values, number_of_missing_values=number_of_missing_values, share_of_missing_values=share_of_missing_values, number_of_rows=number_of_rows)

    def calculate(self, data: InputData) -> ColumnMissingValuesMetricResult:
        """Compute missing-value stats for current and (optional) reference data.

        :raises ValueError: empty missing-values set, or column absent from
            the current/reference dataframe.
        """
        if (not self.missing_values):
            raise ValueError('Missed values list should not be empty.')
        if (self.column_name not in data.current_data):
            raise ValueError(f'Column {self.column_name} is not in current data.')
        current_missing_values = self._calculate_missing_values_stats(data.current_data[self.column_name])
        if (data.reference_data is None):
            reference_missing_values: Optional[ColumnMissingValues] = None
        else:
            if (self.column_name not in data.reference_data):
                raise ValueError(f'Column {self.column_name} is not in reference data.')
            reference_missing_values = self._calculate_missing_values_stats(data.reference_data[self.column_name])
        return ColumnMissingValuesMetricResult(column_name=self.column_name, current=current_missing_values, reference=reference_missing_values)
class TimeBars(Op):
    """Aggregates ticks into fixed-interval bars driven by an external timer."""
    __slots__ = ('_timer', 'bars')
    __doc__ = Tickfilter.timebars.__doc__
    # Accumulated bars; the last element is the bar currently being formed.
    bars: BarList

    def __init__(self, timer, source=None):
        Op.__init__(self, source)
        self._timer = timer
        self._timer.connect(self._on_timer, None, self._on_timer_done)
        self.bars = BarList()

    def on_source(self, time, price, size):
        """Fold one tick (price, size) into the bar currently being formed."""
        if (not self.bars):
            # No bar exists until the first timer event appends one.
            return
        bar = self.bars[(- 1)]
        if isNan(bar.open):
            # First tick of this interval seeds open/high/low.
            bar.open = bar.high = bar.low = price
        bar.high = max(bar.high, price)
        bar.low = min(bar.low, price)
        bar.close = price
        bar.volume += size
        bar.count += 1
        # False: the current bar was updated but not yet closed.
        self.bars.updateEvent.emit(self.bars, False)

    def _on_timer(self, time):
        """Close the current bar (if any) and start a new one at *time*."""
        if self.bars:
            bar = self.bars[(- 1)]
            if (isNan(bar.close) and (len(self.bars) > 1)):
                # No ticks arrived during the interval: carry forward the
                # previous bar's close as a flat bar.
                bar.open = bar.high = bar.low = bar.close = self.bars[(- 2)].close
            # True: the bar is now final.
            self.bars.updateEvent.emit(self.bars, True)
            self.emit(bar)
        self.bars.append(Bar(time))

    def _on_timer_done(self, timer):
        self._timer = None
        self.set_done()
class InputFile():
    """Wrap a file path or file-like object, exposing the stream and a display name.

    Accepts a str path, a pathlib.Path, or an io.IOBase instance.  Paths are
    opened in binary mode; file-like objects get a generated random name.
    NOTE(review): streams opened here are never closed by this class — the
    caller owns the lifetime of the returned file object.
    """

    def __init__(self, file) -> None:
        (self._file, self.file_name) = self._resolve_file(file)

    def _resolve_file(self, file):
        """Return a (stream, name) pair for *file*.

        :raises TypeError: for any unsupported input type.
        """
        # str and Path were handled by two duplicate branches in the
        # original; merged into one since open() accepts both.
        if isinstance(file, (str, Path)):
            _file = open(file, 'rb')
            return (_file, os.path.basename(_file.name))
        elif isinstance(file, IOBase):
            # In-memory streams carry no reliable name; generate one.
            return (file, service_utils.generate_random_token())
        else:
            raise TypeError('File must be a string or a file-like object(pathlib.Path, io.IOBase).')

    def file(self):
        """Return the underlying binary stream."""
        return self._file
class Splitter():
    """Issues i3/sway split or stack commands based on a Splittable's preferences."""

    def __init__(self, context: Context):
        self._context = context

    def handle_split(self, splittable: Splittable):
        """Apply the splittable's split/stack direction to the last-marked container."""
        previous_lasts = self._context.tree.find_marked(splittable.mark_last())
        if (len(previous_lasts) == 0):
            return
        previous_last = previous_lasts[0]
        con_id = previous_last.id
        split_direction = self._safe_enum_value(splittable.split_direction(self._context))
        stack_direction = self._safe_enum_value(splittable.stack_direction(self._context))
        if (split_direction is not None):
            self._context.exec(f'[con_id="{con_id}"] split {split_direction}')
        elif (stack_direction is not None):
            sibling_ids = [sibling.id for sibling in previous_last.parent.descendants()]
            move_direction = ('down' if (stack_direction == 'vertical') else 'right')
            if ((len(sibling_ids) == 1) or self._contains_focused(sibling_ids)):
                self._context.exec(f'[con_id="{con_id}"] move {move_direction}')
            if (self._contains_focused(sibling_ids) and (previous_last.parent.orientation == stack_direction)):
                self._context.exec(f'[con_id="{con_id}"] move {move_direction}')

    def _contains_focused(self, sibling_ids: List[str]):
        # True when exactly two siblings exist and one is the focused container.
        return ((len(sibling_ids) == 2) and (self._context.focused.id in sibling_ids))

    @staticmethod
    def _safe_enum_value(enum: Enum) -> Optional[Any]:
        # BUGFIX: originally declared without ``self`` but called as
        # ``self._safe_enum_value(...)``, which raised TypeError (two
        # positional arguments for a one-parameter function).  Made static.
        return (enum.value if (enum is not None) else None)
class RevisionIdFilter(FilterBase):
    """Drops commits whose Mercurial hash is on a configured blocklist."""

    def __init__(self, revision_hash_list):
        super(RevisionIdFilter, self).__init__()
        # Hashes arrive as text; commit data carries them as ASCII bytes,
        # so encode once up front for direct set membership tests.
        self.unwanted_hg_hashes = {
            revision_hash.encode('ascii', 'strict')
            for revision_hash in revision_hash_list
        }

    def should_drop_commit(self, commit_data):
        """Return True when this commit's hg hash is blocklisted."""
        return commit_data['hg_hash'] in self.unwanted_hg_hashes
class CreateRevisionTest(TestModelMixin, TestBase):
    """Behavior of reversion.create_revision(): creation, nesting, errors, signals."""

    def testCreateRevision(self):
        """Saving a model inside the context creates exactly one revision."""
        with reversion.create_revision():
            obj = TestModel.objects.create()
        self.assertSingleRevision((obj,))

    def testCreateRevisionNested(self):
        """Nested contexts collapse into a single revision."""
        with reversion.create_revision():
            with reversion.create_revision():
                obj = TestModel.objects.create()
        self.assertSingleRevision((obj,))

    def testCreateRevisionEmpty(self):
        """A context with no saves creates no revision."""
        with reversion.create_revision():
            pass
        self.assertNoRevision()

    def testCreateRevisionException(self):
        """An exception inside the context rolls the revision back."""
        try:
            with reversion.create_revision():
                TestModel.objects.create()
                raise Exception('Boom!')
        except Exception:
            pass
        self.assertNoRevision()

    def testCreateRevisionDecorator(self):
        """create_revision() also works as a function decorator."""
        obj = reversion.create_revision()(TestModel.objects.create)()
        self.assertSingleRevision((obj,))

    def testPreRevisionCommitSignal(self):
        """pre_revision_commit fires once per committed revision."""
        _callback = MagicMock()
        reversion.signals.pre_revision_commit.connect(_callback)
        with reversion.create_revision():
            TestModel.objects.create()
        self.assertEqual(_callback.call_count, 1)

    def testPostRevisionCommitSignal(self):
        """post_revision_commit fires once per committed revision."""
        _callback = MagicMock()
        reversion.signals.post_revision_commit.connect(_callback)
        with reversion.create_revision():
            TestModel.objects.create()
        self.assertEqual(_callback.call_count, 1)
def generate_repo_id_and_name_ext(dependent, url, dep_idx):
    """Build the (repo_id, human-readable name) pair for an external runtime dependency."""
    repo_id = 'coprdep:{0}'.format(generate_repo_name(url))
    hostname = app.config['PUBLIC_COPR_HOSTNAME'].split(':')[0]
    name = 'Copr {0}/{1}/{2} external runtime dependency #{3} - {4}'.format(
        hostname,
        dependent.owner_name,
        dependent.name,
        dep_idx,
        generate_repo_name(url),
    )
    return (repo_id, name)
def size(value: Any, unit: str = '%', toStr: bool = False):
    """Normalise a CSS-like size into a (value, unit) pair or a string.

    Accepts False (no size), a (value, unit) tuple, 'auto', a string with a
    unit suffix ('50%', '12px'), or a bare number.  Bare numbers above 100
    with the default '%' unit are reinterpreted as pixels.

    :param value: the size specification.
    :param unit: default unit when *value* carries none.
    :param toStr: when True, return the "<value><unit>" string instead.
    :raises ValueError: for a string with an unrecognised unit suffix.
    """
    if value is False:
        return (None, '')

    if isinstance(value, tuple):
        # Already a (value, unit) pair.
        return '{}{}'.format(value[0], value[1]) if toStr else value

    if value == 'auto':
        return (value, '')

    if isinstance(value, str):
        if value.endswith('%'):
            unit = value[-1:]
            value = int(value[:-1])
        else:
            # Two-character CSS unit suffix.
            unit = value[-2:]
            if unit not in ['cm', 'mm', 'in', 'px', 'pt', 'pc', 'em', 'ex', 'ch', 'vw', 'vh']:
                raise ValueError('Unit not recognised {}'.format(unit))
            value = int(value[:-2])
    elif value is not None and value > 100 and unit == '%':
        # A bare number above 100 cannot be a percentage; assume pixels.
        unit = 'px'

    if toStr:
        return '{}{}'.format(value, unit)
    return (value, unit)
class OptionPlotoptionsPackedbubbleSonificationContexttracksMappingLowpass(Options):
    """Lowpass-filter mapping options (generated Highcharts wrapper class)."""

    # NOTE(review): sibling generated classes expose accessors like these as
    # @property; decorators may have been lost in extraction — confirm.
    def frequency(self) -> 'OptionPlotoptionsPackedbubbleSonificationContexttracksMappingLowpassFrequency':
        # Sub-option group for the filter cutoff frequency.
        return self._config_sub_data('frequency', OptionPlotoptionsPackedbubbleSonificationContexttracksMappingLowpassFrequency)

    def resonance(self) -> 'OptionPlotoptionsPackedbubbleSonificationContexttracksMappingLowpassResonance':
        # Sub-option group for the filter resonance.
        return self._config_sub_data('resonance', OptionPlotoptionsPackedbubbleSonificationContexttracksMappingLowpassResonance)
def iter_generate_delft_training_data_lines_for_document(tei_file: str, raw_file: Optional[str], training_tei_parser: TrainingTeiParser, data_generator: ModelDataGenerator) -> Iterable[str]:
    """Yield DELFT training-data lines for one training-TEI document.

    Labels are taken from the TEI; features come from *raw_file* when given,
    otherwise they are regenerated from the layout tokens via *data_generator*.
    """
    with auto_download_input_file(tei_file, auto_decompress=True) as local_tei_file:
        tei_root = etree.parse(local_tei_file).getroot()
    labeled_layout_tokens_list = training_tei_parser.parse_training_tei_to_labeled_layout_tokens_list(tei_root)
    LOGGER.debug('labeled_layout_tokens_list: %r', labeled_layout_tokens_list)
    # Convert IOB-style tags to the GROBID tag scheme expected downstream.
    translated_tag_result = translate_tag_result_tags_IOB_to_grobid(get_tag_result_for_labeled_layout_tokens_list(labeled_layout_tokens_list))
    LOGGER.debug('translated_tag_result: %r', translated_tag_result)
    if raw_file:
        with auto_download_input_file(raw_file, auto_decompress=True) as local_raw_file:
            with open(local_raw_file, 'r', encoding='utf-8') as raw_fp:
                (texts, features) = load_data_crf_lines(raw_fp)
        # Sanity-check alignment between raw features and TEI-derived labels.
        assert (len(texts) == len(translated_tag_result))
        for (doc_tokens, doc_tag_result) in zip(texts, translated_tag_result):
            assert (len(doc_tokens) == len(doc_tag_result))
    else:
        # No raw features supplied: regenerate them from the layout tokens.
        layout_documents = [LayoutDocument.for_blocks([LayoutBlock.for_tokens([labeled_layout_token.layout_token for labeled_layout_token in labeled_layout_tokens])]) for labeled_layout_tokens in labeled_layout_tokens_list]
        LOGGER.debug('layout_documents: %r', layout_documents)
        data_line_iterable = list(data_generator.iter_data_lines_for_layout_documents(layout_documents))
        (_texts, features) = load_data_crf_lines(data_line_iterable)
        LOGGER.debug('features: %r', features)
    (yield from iter_format_tag_result(tag_result=translated_tag_result, output_format=TagOutputFormats.DATA, texts=None, features=features))
def lazy_import():
    """Deferred import of model classes, avoiding circular imports at module load."""
    from fastly.model.included_with_waf_rule_revision import IncludedWithWafRuleRevision
    from fastly.model.waf_rule_revision_response_data import WafRuleRevisionResponseData
    # Publish both classes into this module's namespace.
    globals().update(
        IncludedWithWafRuleRevision=IncludedWithWafRuleRevision,
        WafRuleRevisionResponseData=WafRuleRevisionResponseData,
    )
class Configuration(JSONSerializable, ABC):
    """Base class for ordered, JSON-serializable configuration objects."""
    __slots__ = ('_key_order',)

    def __init__(self) -> None:
        # Keys in the order they should appear first in ordered_json().
        self._key_order: List[str] = []

    @classmethod
    @abstractmethod
    def from_json(cls, obj: Dict) -> 'Configuration':
        """Build a Configuration instance from its JSON dict representation.

        NOTE(review): the original declared this method with no body (a
        syntax error as written); restored as an abstract alternate
        constructor for subclasses to implement.
        """
        raise NotImplementedError

    def ordered_json(self) -> OrderedDict:
        """Return self.json with _key_order keys first, then any remaining keys."""
        data = self.json
        result = OrderedDict()
        seen_keys = set()
        for key in self._key_order:
            enforce((key not in result), 'Key in results!')
            value = data.get(key)
            # Keys with None values are skipped entirely.
            if (value is not None):
                result[key] = value
                seen_keys.add(key)
        for (key, value) in data.items():
            if (key not in seen_keys):
                result[key] = value
        return result
def test_dependencies_from_to_json():
    """Round-trip: dependencies_to_json then dependencies_from_json preserves objects."""
    version_str = '==0.1.0'
    # BUGFIX: the original URL literal was truncated to an unterminated
    # string (`git_url = '`), a syntax error; any valid git URL serves
    # this round-trip test.
    git_url = 'https://example.com/some/repo.git'
    branch = 'some-branch'
    dep1 = Dependency('package_1', version_str, DEFAULT_PYPI_INDEX_URL, git_url, branch)
    dep2 = Dependency('package_2', version_str)
    expected_obj = {'package_1': dep1, 'package_2': dep2}
    expected_obj_json = dependencies_to_json(expected_obj)
    assert (expected_obj_json == {'package_1': {'version': '==0.1.0', 'index': DEFAULT_PYPI_INDEX_URL, 'git': git_url, 'ref': branch}, 'package_2': {'version': version_str}})
    actual_obj = dependencies_from_json(expected_obj_json)
    assert (expected_obj == actual_obj)
class CustomSessionInterface(SessionInterface):
    """Flask session interface persisting sessions to a cache and the database."""
    # Session lifetime in minutes (48 hours).
    EXPIRES_MINUTES = (48 * 60)
    session_class = CustomSession

    def __init__(self, cache, prefix='session$'):
        self.cache = cache
        # Namespace prefix for cache keys.
        self.prefix = prefix

    def open_session(self, app, request):
        """Load the session named by the request cookie, or start a fresh one."""
        session_id = request.cookies.get(app.config['SESSION_COOKIE_NAME'])
        # Expiry timestamp in milliseconds since the epoch.
        expires = int((now() + (1000 * datetime.timedelta(minutes=self.EXPIRES_MINUTES).total_seconds())))
        if (not session_id):
            return self.session_class(session_id=self.generate_session_id(), new=True, expires=expires)
        else:
            session = self.find_session_for_id(session_id)
            if (session is not None):
                if (now() < session.expires):
                    return self.session_class(initial=session, session_id=session.session_id, expires=session.expires)
            # Unknown or expired id: issue a new session flagged was_invalid.
            return self.session_class(session_id=self.generate_session_id(), new=True, was_invalid=True, expires=expires)

    def save_session(self, app, session, response):
        """Persist or discard the session at the end of the request."""
        if session.was_invalid:
            if (not session):
                self.delete_cookie(app, session, response)
            else:
                # Data was written to a replacement session: store everything.
                self.store_session_db(session)
                self.store_session_cache(session)
                self.store_cookie(app, session, response)
        elif (not session):
            # Session emptied during the request: drop stored copy and cookie.
            if ((not session.new) and session.modified):
                self.delete_session(session.session_id)
            self.delete_cookie(app, session, response)
        elif self.should_set_cookie(app, session):
            self.store_session_db(session)
            self.store_session_cache(session)
            if session.new:
                self.store_cookie(app, session, response)

    def should_set_cookie(self, app, session):
        return session.modified

    def generate_session_id(self):
        # 32-char hex identifier (UUID4 with dashes removed).
        return str(uuid4()).replace('-', '')

    def find_session_for_id(self, session_id):
        """Look up a session in the cache first, falling back to the database."""
        cache_data = self.cache.get((self.prefix + session_id), True)
        if (cache_data is not None):
            return CustomSession(initial=cache_data.data, session_id=session_id, expires=cache_data.expires)
        else:
            with sessioncontext() as s:
                try:
                    session_model = s.query(SessionOrmModel).filter_by(session_id=session_id).one()
                    session = CustomSession(initial=session_model.data, session_id=session_id, expires=session_model.expires)
                    # Re-populate the cache so the next lookup is fast.
                    self.store_session_cache(session)
                    return session
                except NoResultFound:
                    return None

    def store_cookie(self, app, session, response):
        # session.expires is in milliseconds; cookies want a datetime.
        expire_date = datetime.datetime.utcfromtimestamp((session.expires / 1000))
        response.set_cookie(app.config['SESSION_COOKIE_NAME'], session.session_id, expires=expire_date, domain=self.get_cookie_domain(app))

    def delete_cookie(self, app, session, response):
        response.delete_cookie(app.config['SESSION_COOKIE_NAME'], domain=self.get_cookie_domain(app))

    def store_session_db(self, session):
        session_model = SessionOrmModel(session_id=session.session_id, data=session, expires=session.expires)
        with sessioncontext() as s:
            # merge() updates any existing row for this session_id.
            s.add(s.merge(session_model))
            s.commit()

    def store_session_cache(self, session):
        self.cache.set((self.prefix + session.session_id), CustomSessionCacheDict(session, session.expires))

    def delete_session(self, session_id):
        """Remove the session from both the database and the cache."""
        with sessioncontext() as s:
            try:
                session_model = s.query(SessionOrmModel).filter_by(session_id=session_id).one()
                s.delete(session_model)
                s.commit()
            except NoResultFound:
                pass
        self.cache.delete((self.prefix + session_id))
class WatsonCredentialsDialog(Gtk.Dialog):
    """Modal dialog prompting for a Watson username and password."""

    def __init__(self, parent):
        Gtk.Dialog.__init__(self, 'Enter Credentials', parent, 0,
                            (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                             Gtk.STOCK_OK, Gtk.ResponseType.OK))
        self.set_default_size(150, 100)

        username_field = Gtk.Entry()
        username_field.set_placeholder_text('Username')

        password_field = Gtk.Entry()
        password_field.set_placeholder_text('Password')
        # Mask typed characters with '*'.
        password_field.set_visibility(False)
        password_field.set_invisible_char('*')

        # Expose the entries so callers can read the typed values.
        self.username_field = username_field
        self.password_field = password_field

        content = self.get_content_area()
        content.set_margin_top(10)
        content.set_margin_bottom(10)
        content.set_margin_left(10)
        content.set_margin_right(10)
        content.set_spacing(10)
        content.add(username_field)
        content.add(password_field)
        self.show_all()
class RDKitMoleculeSetup(MoleculeSetup):
    """MoleculeSetup backed by an RDKit Mol with explicit hydrogens and a 3D conformer.

    NOTE(review): the original lacked @classmethod/@staticmethod decorators on
    from_mol / get_symmetries_for_rmsd / has_implicit_hydrogens even though
    their signatures and call sites require them; restored here.
    """

    @classmethod
    def from_mol(cls, mol, keep_chorded_rings=False, keep_equivalent_rings=False, assign_charges=True, conformer_id=(- 1)):
        """Build a molecule setup from an RDKit mol.

        :param mol: RDKit Mol with explicit Hs and at least one 3D conformer.
        :param assign_charges: compute Gasteiger charges when True.
        :param conformer_id: conformer to read coordinates from (-1 = default).
        :raises ValueError: implicit Hs present, or no conformer available.
        """
        if cls.has_implicit_hydrogens(mol):
            raise ValueError('RDKit molecule has implicit Hs. Need explicit Hs.')
        if (mol.GetNumConformers() == 0):
            raise ValueError('RDKit molecule does not have a conformer. Need 3D coordinates.')
        rdkit_conformer = mol.GetConformer(conformer_id)
        if (not rdkit_conformer.Is3D()):
            warnings.warn("RDKit molecule not labeled as 3D. This warning won't show again.")
            RDKitMoleculeSetup.warned_not3D = True
        if ((mol.GetNumConformers() > 1) and (conformer_id == (- 1))):
            msg = 'RDKit molecule has multiple conformers. Considering only the first one.'
            print(msg, file=sys.stderr)
        molsetup = cls()
        molsetup.mol = mol
        molsetup.atom_true_count = molsetup.get_num_mol_atoms()
        molsetup.name = molsetup.get_mol_name()
        coords = rdkit_conformer.GetPositions()
        molsetup.init_atom(assign_charges, coords)
        molsetup.init_bond()
        molsetup.perceive_rings(keep_chorded_rings, keep_equivalent_rings)
        molsetup.rmsd_symmetry_indices = cls.get_symmetries_for_rmsd(mol)
        molsetup.modified_atom_positions = []
        return molsetup

    def get_conformer_with_modified_positions(self, new_atom_positions):
        """Return a conformer with the given atom positions replaced.

        Hydrogens whose position was not explicitly set are re-placed from
        the geometry of their (single) heavy-atom neighbor.
        """
        new_mol = Chem.Mol(self.mol)
        new_conformer = Chem.Conformer(self.mol.GetConformer())
        is_set_list = ([False] * self.mol.GetNumAtoms())
        for (atom_index, new_position) in new_atom_positions.items():
            new_conformer.SetAtomPosition(atom_index, new_position)
            is_set_list[atom_index] = True
        new_mol.RemoveAllConformers()
        new_mol.AddConformer(new_conformer, assignId=True)
        for (atom_index, is_set) in enumerate(is_set_list):
            # BUGFIX: the original referenced an undefined name ``atom_idx``
            # here (NameError at runtime); the loop variable is ``atom_index``.
            if ((not is_set) and (new_mol.GetAtomWithIdx(atom_index).GetAtomicNum() == 1)):
                neighbors = new_mol.GetAtomWithIdx(atom_index).GetNeighbors()
                if (len(neighbors) != 1):
                    raise RuntimeError('Expected H to have one neighbors')
                Chem.SetTerminalAtomCoords(new_mol, atom_index, neighbors[0].GetIdx())
        # NOTE(review): SetTerminalAtomCoords edits the conformer stored in
        # new_mol; confirm new_conformer reflects those edits as intended.
        return new_conformer

    def get_mol_with_modified_positions(self, new_atom_positions_list=None):
        """Return a copy of self.mol with one conformer per modified-position set."""
        if (new_atom_positions_list is None):
            new_atom_positions_list = self.modified_atom_positions
        new_mol = Chem.Mol(self.mol)
        new_mol.RemoveAllConformers()
        for new_atom_positions in new_atom_positions_list:
            conformer = self.get_conformer_with_modified_positions(new_atom_positions)
            new_mol.AddConformer(conformer, assignId=True)
        return new_mol

    def get_smiles_and_order(self):
        """Return (smiles, order): canonical SMILES of the H-stripped mol plus a
        mapping from original-mol atom index to 1-based SMILES output order."""
        mol_no_ignore = self.mol
        # Strip hydrogens (including query-matched ones) before writing SMILES.
        ps = Chem.RemoveHsParameters()
        ps.removeWithQuery = True
        mol_noH = Chem.RemoveHs(mol_no_ignore, ps)
        atomic_num_mol_noH = [atom.GetAtomicNum() for atom in mol_noH.GetAtoms()]
        noH_to_H = []
        parents_of_hs = {}
        # Walk heavy atoms of the original mol in parallel with mol_noH,
        # recording which noH indices are surviving hydrogens ('H' markers).
        for (index, atom) in enumerate(mol_no_ignore.GetAtoms()):
            if (atom.GetAtomicNum() == 1):
                continue
            for i in range(len(noH_to_H), len(atomic_num_mol_noH)):
                if (atomic_num_mol_noH[i] > 1):
                    break
                h_atom = mol_noH.GetAtomWithIdx(len(noH_to_H))
                assert (h_atom.GetAtomicNum() == 1)
                neighbors = h_atom.GetNeighbors()
                assert (len(neighbors) == 1)
                parents_of_hs[len(noH_to_H)] = neighbors[0].GetIdx()
                noH_to_H.append('H')
            noH_to_H.append(index)
        # Hydrogens remaining at the tail of mol_noH (after the last heavy atom).
        extra_hydrogens = (len(atomic_num_mol_noH) - len(noH_to_H))
        if (extra_hydrogens > 0):
            assert (set(atomic_num_mol_noH[len(noH_to_H):]) == {1})
        for i in range(extra_hydrogens):
            h_atom = mol_noH.GetAtomWithIdx(len(noH_to_H))
            assert (h_atom.GetAtomicNum() == 1)
            neighbors = h_atom.GetNeighbors()
            assert (len(neighbors) == 1)
            parents_of_hs[len(noH_to_H)] = neighbors[0].GetIdx()
            noH_to_H.append('H')
        # Group surviving Hs by their parent heavy atom.
        hs_by_parent = {}
        for (hidx, pidx) in parents_of_hs.items():
            hs_by_parent.setdefault(pidx, [])
            hs_by_parent[pidx].append(hidx)
        # Resolve each surviving H to its original-mol index.
        for (pidx, hidxs) in hs_by_parent.items():
            siblings_of_h = [atom for atom in mol_no_ignore.GetAtomWithIdx(noH_to_H[pidx]).GetNeighbors() if (atom.GetAtomicNum() == 1)]
            sortidx = [i for (i, j) in sorted(list(enumerate(siblings_of_h)), key=(lambda x: x[1].GetIdx()))]
            if (len(hidxs) == len(siblings_of_h)):
                # All of this parent's Hs survived: positional match.
                for (i, hidx) in enumerate(hidxs):
                    noH_to_H[hidx] = siblings_of_h[sortidx[i]].GetIdx()
            elif (len(hidxs) < len(siblings_of_h)):
                # Some Hs were removed: match survivors by isotope label.
                sibling_isotopes = [siblings_of_h[sortidx[i]].GetIsotope() for i in range(len(siblings_of_h))]
                molnoH_isotopes = [mol_noH.GetAtomWithIdx(hidx) for hidx in hidxs]
                matches = []
                for (i, sibling_isotope) in enumerate(sibling_isotopes):
                    for hidx in hidxs[len(matches):]:
                        if (mol_noH.GetAtomWithIdx(hidx).GetIsotope() == sibling_isotope):
                            matches.append(i)
                            break
                if (len(matches) != len(hidxs)):
                    # BUGFIX: the original error message referenced an undefined
                    # name ``matched``; the list is ``matches``.
                    raise RuntimeError(('Number of matched isotopes %d differs from query Hs: %d' % (len(matches), len(hidxs))))
                for (hidx, i) in zip(hidxs, matches):
                    noH_to_H[hidx] = siblings_of_h[sortidx[i]].GetIdx()
            else:
                raise RuntimeError('nr of Hs in mol_noH bonded to an atom exceeds nr of Hs in mol_no_ignore')
        smiles = Chem.MolToSmiles(mol_noH)
        # RDKit records the SMILES output order in this mol property.
        order_string = mol_noH.GetProp('_smilesAtomOutputOrder')
        order_string = order_string.replace(',]', ']')
        order = json.loads(order_string)
        order = list(np.argsort(order))
        # Map original-mol atom index -> 1-based position in the SMILES output.
        order = {noH_to_H[i]: (order[i] + 1) for i in range(len(order))}
        return (smiles, order)

    def find_pattern(self, smarts):
        """Return substructure matches of the SMARTS pattern in self.mol."""
        p = Chem.MolFromSmarts(smarts)
        return self.mol.GetSubstructMatches(p)

    def get_mol_name(self):
        """Return the mol's _Name property, or None when unset."""
        if self.mol.HasProp('_Name'):
            return self.mol.GetProp('_Name')
        else:
            return None

    def get_num_mol_atoms(self):
        return self.mol.GetNumAtoms()

    def get_equivalent_atoms(self):
        # Canonical ranks without tie-breaking: equal ranks = equivalent atoms.
        return list(Chem.CanonicalRankAtoms(self.mol, breakTies=False))

    @staticmethod
    def get_symmetries_for_rmsd(mol, max_matches=17):
        """Self-matches of the H-stripped mol, used as symmetry maps for RMSD."""
        mol_noHs = Chem.RemoveHs(mol)
        matches = mol.GetSubstructMatches(mol_noHs, uniquify=False, maxMatches=max_matches)
        if (len(matches) == max_matches):
            if mol.HasProp('_Name'):
                molname = mol.GetProp('_Name')
            else:
                molname = ''
            print(('warning: found the maximum nr of matches (%d) in RDKitMolSetup.get_symmetries_for_rmsd' % max_matches))
            print(('Maybe this molecule is "too" symmetric? %s' % molname), Chem.MolToSmiles(mol_noHs))
        return matches

    def init_atom(self, assign_charges, coords):
        """Populate per-atom data: charges, coordinates, PDB info, chirality."""
        if assign_charges:
            copy_mol = Chem.Mol(self.mol)
            for atom in copy_mol.GetAtoms():
                if (atom.GetAtomicNum() == 34):
                    # Se -> S substitution: Gasteiger has no Se parameters.
                    atom.SetAtomicNum(16)
            rdPartialCharges.ComputeGasteigerCharges(copy_mol)
            charges = [a.GetDoubleProp('_GasteigerCharge') for a in copy_mol.GetAtoms()]
        else:
            charges = ([0.0] * self.mol.GetNumAtoms())
        chiral_info = {}
        for data in Chem.FindMolChiralCenters(self.mol, includeUnassigned=True):
            chiral_info[data[0]] = data[1]
        for a in self.mol.GetAtoms():
            idx = a.GetIdx()
            chiral = False
            if (idx in chiral_info):
                chiral = chiral_info[idx]
            # NOTE(review): ``chiral`` is computed above but ``chiral=False``
            # is passed here — possibly deliberate; confirm before changing.
            self.add_atom(idx, coord=coords[idx], element=a.GetAtomicNum(), charge=charges[idx], atom_type=None, pdbinfo=rdkitutils.getPdbInfoNoNull(a), chiral=False, ignore=False)

    def init_bond(self):
        """Populate bonds; aromatic (12) maps to order 5, single bonds rotatable."""
        for b in self.mol.GetBonds():
            idx1 = b.GetBeginAtomIdx()
            idx2 = b.GetEndAtomIdx()
            bond_order = int(b.GetBondType())
            if (bond_order == 12):
                bond_order = 5
            if (bond_order == 1):
                rotatable = True
            else:
                rotatable = False
            self.add_bond(idx1, idx2, order=bond_order, rotatable=rotatable)

    def copy(self):
        """Deep-copy the setup; the RDKit mol is copied via Chem.Mol."""
        newsetup = RDKitMoleculeSetup()
        for (key, value) in self.__dict__.items():
            if (key != 'mol'):
                newsetup.__dict__[key] = deepcopy(value)
        newsetup.mol = Chem.Mol(self.mol)
        return newsetup

    @staticmethod
    def has_implicit_hydrogens(mol):
        """True when any atom declares more Hs than it has explicit H neighbors."""
        for atom in mol.GetAtoms():
            nr_H_neighbors = 0
            for neighbor in atom.GetNeighbors():
                nr_H_neighbors += int((neighbor.GetAtomicNum() == 1))
            if (atom.GetTotalNumHs(includeNeighbors=False) > nr_H_neighbors):
                return True
        return False

    def restrain_to(self, target_mol, kcal_per_angstrom_square=1.0, delay_angstroms=2.0):
        """Add harmonic positional restraints pulling self.mol toward target_mol.

        :raises ImportError: when the optional misctools extension is missing.
        """
        if (not _has_misctools):
            raise ImportError(_import_misctools_error)
        stereo_isomorphism = StereoIsomorphism()
        (mapping, idx) = stereo_isomorphism(target_mol, self.mol)
        lig_to_drive = {b: a for (a, b) in mapping}
        num_real_atoms = target_mol.GetNumAtoms()
        target_positions = target_mol.GetConformer().GetPositions()
        for atom_index in range(len(mapping)):
            target_xyz = target_positions[lig_to_drive[atom_index]]
            restraint = Restraint(atom_index, target_xyz, kcal_per_angstrom_square, delay_angstroms)
            self.restraints.append(restraint)
        return
class ActionDispatcher(BackendDispatcher):
    """Backend dispatcher handing frontend 'action' tasks to action workers."""
    task_type = 'action'
    worker_manager_class = ActionWorkerManager

    def __init__(self, backend_opts):
        super().__init__(backend_opts)
        self.max_workers = backend_opts.actions_max_workers

    def get_frontend_tasks(self):
        """Fetch pending actions from the frontend; empty list on failure."""
        try:
            raw_actions = self.frontend_client.get('pending-actions').json()
        except (FrontendClientException, ValueError) as error:
            self.log.exception('Retrieving an action tasks failed with error: %s', error)
            return []
        tasks = []
        for action in raw_actions:
            tasks.append(ActionQueueTask(Action(self.opts, action, log=self.log)))
        return tasks
def test_can_detect_stuck_states():
    """strict_states=True must reject a machine with a non-final dead-end state."""
    with pytest.raises(InvalidDefinition, match='All non-final states should have at least one outgoing transition.'):

        class CampaignMachine(StateMachine, strict_states=True):
            draft = State(initial=True)
            producing = State()
            paused = State()  # stuck: no transition leaves this state
            closed = State()
            abort = ((draft.to(closed) | producing.to(closed)) | closed.to(closed))
            produce = draft.to(producing)
            pause = producing.to(paused)
class GLUMask(nn.Module):
    """GLU-based mask estimator operating on magnitude or complex spectrograms."""

    def __init__(self, n_freq, n_bottleneck, pool_size=2, kernel_size=3, dropout_p=0.5, mag_spec=True, log_spec=True, n_sublayers=1):
        super().__init__()
        self.mag_spec = mag_spec  # feed the (power) magnitude spectrum only
        self.log_spec = log_spec  # apply log10 compression to the input
        if mag_spec:
            n_inputs = n_freq
        else:
            # Complex input: real and imaginary parts stacked on the freq axis.
            n_inputs = (2 * n_freq)
        self.layers = nn.ModuleList([GLULayer(n_inputs, n_bottleneck, n_sublayers=1, pool_size=pool_size), GLULayer(n_bottleneck, n_bottleneck, n_sublayers=n_sublayers, pool_size=pool_size), nn.Dropout(p=dropout_p), GLULayer(n_bottleneck, n_bottleneck, n_sublayers=n_sublayers, pool_size=pool_size), nn.ConvTranspose1d(in_channels=n_bottleneck, out_channels=n_freq, kernel_size=kernel_size, padding=(kernel_size // 2))])

    def apply_constraints_(self):
        # No weight constraints for this module.
        pass

    def forward(self, X):
        """Estimate a mask in (eps, 1) of shape (*batch, n_freq, n_frames)."""
        batch_shape = X.shape[:(- 2)]
        (n_freq, n_frames) = X.shape[(- 2):]
        X = X.reshape(((- 1), n_freq, n_frames))
        X_pwr = mag_sq(X)
        # Per-sample average power used for normalization (clamped for stability).
        g = torch.clamp(torch.mean(X_pwr, dim=((- 2), (- 1)), keepdim=True), min=1e-05)
        if self.mag_spec:
            X = divide(X_pwr, g)
        else:
            X = divide(X, torch.sqrt(g))
            # Split complex values into real/imag and stack along frequency,
            # matching n_inputs = 2 * n_freq from __init__.
            # NOTE(review): source indentation was lost; these two lines are
            # placed in the complex branch because view_as_real requires a
            # complex tensor — confirm against the original.
            X = torch.view_as_real(X)
            X = torch.cat((X[(..., 0)], X[(..., 1)]), dim=(- 2))
        if self.log_spec:
            X = torch.abs(X)
            weights = torch.log10((X + 1e-07))
        else:
            weights = X
        for (idx, layer) in enumerate(self.layers):
            weights = layer(weights)
        # Sigmoid mask squeezed into (eps, 1) to avoid exact 0/1 values.
        weights = torch.sigmoid(weights)
        weights = ((weights * (1 - 1e-05)) + 1e-05)
        return weights.reshape((batch_shape + (n_freq, n_frames)))
def test_creosote_project_success(venv_manager: VenvManager, capsys: CaptureFixture) -> None:
    """Happy path: every declared dependency is used, so the CLI reports no
    unused dependencies and exits with status 0."""
    venv_path, site_packages_path = venv_manager.create_venv()
    for name in ('dotty-dict', 'loguru', 'pip-requirements-parser', 'toml'):
        venv_manager.create_record(
            site_packages_path=site_packages_path,
            dependency_name=name,
            contents=[f'{name}/__init__.py,sha256=4skFj_sdo33SWqTefV1JBAvZiT4MY_pB5yaRL5DMNVs,240'],
        )
    cli_args = ['--venv', str(venv_path), '--path', 'src', '--deps-file', 'pyproject.toml', '--format', 'no-color']
    exit_code = cli.main(cli_args)
    stderr_lines = capsys.readouterr().err.splitlines()
    expected_lines = [
        'Found dependencies in pyproject.toml: dotty-dict, loguru, pip-requirements-parser, toml',
        'No unused dependencies found! ',
    ]
    assert stderr_lines == expected_lines
    assert exit_code == 0
class IterableList():
    """Wrap a list so that each iteration request yields an independent
    forwarding iterator."""

    def __init__(self, items: List[Any]):
        self.items = items

    def __iter__(self):
        # Hand out a fresh Iterator per call so concurrent iterations over
        # the same wrapper do not interfere with each other.
        return self.Iterator(self.items)

    class Iterator():
        """Iterator that simply delegates to the wrapped list's iterator."""

        def __init__(self, items: List[Any]):
            self._source = iter(items)

        def __iter__(self):
            # An iterator is its own iterable.
            return self

        def __next__(self):
            return next(self._source)
class OptionPlotoptionsNetworkgraphSonificationContexttracksActivewhen(Options):
    """Accessors for the ``activeWhen`` sonification context-track options.

    BUG fix: each getter/setter pair was declared as two plain methods with
    the same name, so the second ``def`` silently shadowed the first and the
    getters were unreachable. The pairs are now proper read/write properties.
    """

    @property
    def crossingDown(self):
        """The ``crossingDown`` option value (None when unset)."""
        return self._config_get(None)

    @crossingDown.setter
    def crossingDown(self, num: float):
        self._config(num, js_type=False)

    @property
    def crossingUp(self):
        """The ``crossingUp`` option value (None when unset)."""
        return self._config_get(None)

    @crossingUp.setter
    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    @property
    def max(self):
        """The ``max`` option value (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """The ``min`` option value (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """The ``prop`` option value (None when unset)."""
        return self._config_get(None)

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesColumnSonificationTracksMapping(Options):
    """Accessors for ``series.column.sonification.tracks.mapping`` options.

    BUG fix: ``text`` was declared twice as two plain methods with the same
    name, so the setter silently shadowed the getter. It is now a read/write
    property. The single-definition sub-option accessors are left as methods
    to preserve the existing call-style interface.
    """
    def frequency(self) -> 'OptionSeriesColumnSonificationTracksMappingFrequency':
        """Sub-options object for the ``frequency`` mapping."""
        return self._config_sub_data('frequency', OptionSeriesColumnSonificationTracksMappingFrequency)
    def gapBetweenNotes(self) -> 'OptionSeriesColumnSonificationTracksMappingGapbetweennotes':
        """Sub-options object for the ``gapBetweenNotes`` mapping."""
        return self._config_sub_data('gapBetweenNotes', OptionSeriesColumnSonificationTracksMappingGapbetweennotes)
    def highpass(self) -> 'OptionSeriesColumnSonificationTracksMappingHighpass':
        """Sub-options object for the ``highpass`` mapping."""
        return self._config_sub_data('highpass', OptionSeriesColumnSonificationTracksMappingHighpass)
    def lowpass(self) -> 'OptionSeriesColumnSonificationTracksMappingLowpass':
        """Sub-options object for the ``lowpass`` mapping."""
        return self._config_sub_data('lowpass', OptionSeriesColumnSonificationTracksMappingLowpass)
    def noteDuration(self) -> 'OptionSeriesColumnSonificationTracksMappingNoteduration':
        """Sub-options object for the ``noteDuration`` mapping."""
        return self._config_sub_data('noteDuration', OptionSeriesColumnSonificationTracksMappingNoteduration)
    def pan(self) -> 'OptionSeriesColumnSonificationTracksMappingPan':
        """Sub-options object for the ``pan`` mapping."""
        return self._config_sub_data('pan', OptionSeriesColumnSonificationTracksMappingPan)
    def pitch(self) -> 'OptionSeriesColumnSonificationTracksMappingPitch':
        """Sub-options object for the ``pitch`` mapping."""
        return self._config_sub_data('pitch', OptionSeriesColumnSonificationTracksMappingPitch)
    def playDelay(self) -> 'OptionSeriesColumnSonificationTracksMappingPlaydelay':
        """Sub-options object for the ``playDelay`` mapping."""
        return self._config_sub_data('playDelay', OptionSeriesColumnSonificationTracksMappingPlaydelay)
    def rate(self) -> 'OptionSeriesColumnSonificationTracksMappingRate':
        """Sub-options object for the ``rate`` mapping."""
        return self._config_sub_data('rate', OptionSeriesColumnSonificationTracksMappingRate)
    @property
    def text(self):
        """The ``text`` mapping value (None when unset)."""
        return self._config_get(None)
    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)
    def time(self) -> 'OptionSeriesColumnSonificationTracksMappingTime':
        """Sub-options object for the ``time`` mapping."""
        return self._config_sub_data('time', OptionSeriesColumnSonificationTracksMappingTime)
    def tremolo(self) -> 'OptionSeriesColumnSonificationTracksMappingTremolo':
        """Sub-options object for the ``tremolo`` mapping."""
        return self._config_sub_data('tremolo', OptionSeriesColumnSonificationTracksMappingTremolo)
    def volume(self) -> 'OptionSeriesColumnSonificationTracksMappingVolume':
        """Sub-options object for the ``volume`` mapping."""
        return self._config_sub_data('volume', OptionSeriesColumnSonificationTracksMappingVolume)
def align(sequences):
    """Flatten nested per-token lengths into a Ragged alignment.

    Each inner value is a token length; the result pairs a flat index array
    (consecutive positions covered by each token) with the length array.
    """
    token_lengths = []
    flat_indices = []
    position = 0
    for lengths_for_sequence in sequences:
        for length in lengths_for_sequence:
            token_lengths.append(length)
            # Positions position .. position+length-1 belong to this token.
            flat_indices.extend(range(position, position + length))
            position += length
    return Ragged(numpy.array(flat_indices, dtype='i'), numpy.array(token_lengths, dtype='i'))
def get_func_args(func):
    """Return the positional-or-keyword parameter names of *func*.

    On Python 3 the ``self`` parameter is filtered out; on Python 2 the
    first argument is dropped only for bound methods.
    """
    if not PY2:
        signature = inspect.signature(func)
        return [
            name
            for name, parameter in signature.parameters.items()
            if parameter.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD and name != 'self'
        ]
    argspec = inspect.getargspec(func)
    return argspec.args[1:] if inspect.ismethod(func) else argspec.args
def test_auth(server):
    """Run the CLI with ``--auth`` and check that the request carries the
    expected Basic Authorization header and the response round-trips."""
    url = str(server.url)
    runner = CliRunner()
    # BUG fix: CliRunner.invoke() takes the Click command as its first
    # positional argument; previously the args list was passed in its place,
    # so the runner tried to execute the list itself.
    # NOTE(review): assuming the command object is named `cli` and is imported
    # by this module -- confirm against the file's imports.
    result = runner.invoke(cli, [url, '-v', '--auth', 'username', 'password'])
    print(result.output)
    assert (result.exit_code == 0)
    assert (remove_date_header(splitlines(result.output)) == [
        "* Connecting to '127.0.0.1'",
        "* Connected to '127.0.0.1' on port 8000",
        'GET / HTTP/1.1',
        f"Host: {server.url.netloc.decode('ascii')}",
        'Accept: */*',
        'Accept-Encoding: gzip, deflate, br',
        'Connection: keep-alive',
        # NOTE(review): in the original, this entry and the Authorization
        # header were fused by adjacent-string-literal concatenation and the
        # user-agent suffix (presumably a version string) was lost -- restore
        # it before relying on this assertion.
        'User-Agent: python- ',
        'Authorization: Basic dXNlcm5hbWU6cGFzc3dvcmQ=',
        '',
        'HTTP/1.1 200 OK',
        'server: uvicorn',
        'content-type: text/plain',
        'Transfer-Encoding: chunked',
        '',
        'Hello, world!',
    ])
class OnScrollEvent(ControlEvent):
    """Scroll notification payload delivered to scroll handlers.

    The terse constructor parameter names mirror the incoming event payload
    and are part of the public interface, so they are preserved.
    """

    def __init__(self, t, p, minse, maxse, vd, sd=None, dir=None, os=None, v=None) -> None:
        # Required fields, present for every scroll event.
        self.event_type: str = t
        self.pixels: float = p
        self.min_scroll_extent: float = minse
        self.max_scroll_extent: float = maxse
        self.viewport_dimension: float = vd
        # Optional fields; default to None when the payload omits them.
        self.scroll_delta: Optional[float] = sd
        self.direction: Optional[str] = dir
        self.overscroll: Optional[float] = os
        self.velocity: Optional[float] = v

    def __str__(self):
        detail_names = (
            'pixels', 'min_scroll_extent', 'max_scroll_extent',
            'viewport_dimension', 'scroll_delta', 'direction',
            'overscroll', 'velocity',
        )
        details = ', '.join(f'{name}={getattr(self, name)}' for name in detail_names)
        return f'{self.event_type}: {details}'
def attach_image(msg, url, file_path, selector, dimensions='1024x1024'):
    """Render *url* via the external grab command and attach the resulting
    PNG inline to the email message *msg*.

    Raises BadAlertImageError when the render command fails or produces an
    empty file.
    """
    # Per-tab render settings: map views need a long settle time before the
    # screenshot is taken; chart/tab views use smaller dimensions.
    if ('selectedTab=map' in url):
        wait = 8000
        dimensions = '1000x600'
    elif ('selectedTab=chart' in url):
        wait = 1000
        dimensions = '800x600'
    elif ('selectedTab' in url):
        wait = 500
        dimensions = '800x600'
    else:
        wait = 1000
    # SECURITY NOTE(review): the command line is built by string interpolation
    # and executed with shell=True; if `url` or `selector` can ever carry
    # untrusted input this is shell-injectable -- prefer an argv list with
    # shell=False. Left as-is pending confirmation of the call sites.
    cmd = '{cmd} "{host}{url}" {file_path} "{selector}" {dimensions} {wait}'
    cmd = cmd.format(cmd=GRAB_CMD, host=settings.GRAB_HOST, url=url, file_path=file_path, selector=selector, dimensions=dimensions, wait=wait)
    response = subprocess.run(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, env=dict(os.environ, OPENSSL_CONF='/etc/ssl'))
    if (response.returncode > 0):
        raise BadAlertImageError(f'''phantomjs command failed with code {response.returncode}
        cmd:
        {cmd}
        stdout:
        {response.stdout}
        stderr:
        {response.stderr}''')
    else:
        logger.debug(('Command %s completed with output %s' % (cmd, response.stdout.strip())))
    if (os.path.getsize(file_path) == 0):
        # BUG fix: previously this reassigned the `msg` parameter (the email
        # message object) to the error string before raising, shadowing it.
        error_message = ('File at %s empty (generated from url %s)' % (file_path, url))
        raise BadAlertImageError(error_message)
    return attach_inline_image_file(msg, file_path, subtype='png')
def truthy(o: Any) -> Optional[bool]:
    """Interpret *o* as a tri-state boolean.

    Returns True/False for recognised yes/no strings (case-insensitive),
    None for unrecognised strings and for None itself, and ``bool(o)`` for
    any other value.
    """
    if isinstance(o, str):
        # Hoisted: the original lowered the string once per membership test.
        lowered = o.lower()
        if lowered in {'y', 'yes', 't', 'true', '1'}:
            return True
        if lowered in {'n', 'no', 'f', 'false', '0'}:
            return False
        return None
    if o is None:
        return None
    return bool(o)
class DOSContractCreateEmptyContractBenchmark(BaseDOSContractBenchmark):
    """Benchmark that measures creating empty contracts through the DOS contract."""
    def name(self) -> str:
        # Human-readable label used to identify this benchmark in reports.
        return 'DOSContract empty contract deployment'
    def _setup_benchmark(self, chain: MiningChain) -> None:
        # One-off setup: deploy the DOS contract and seal it into a block
        # before the timed phase begins.
        self.deploy_dos_contract(chain)
        chain.mine_block()
    def _apply_transaction(self, chain: MiningChain) -> None:
        # The timed operation: create an empty contract via the deployed
        # DOS contract.
        self.create_empty_contract(chain)
class LedgerApiDialogues(BaseLedgerApiDialogues):
    """Dialogue manager for ledger-API conversations owned by this agent."""
    def __init__(self, self_address: Address, **kwargs) -> None:
        """Initialise the dialogues collection for *self_address*.

        NOTE(review): ``**kwargs`` is accepted but not forwarded to the base
        initialiser -- confirm this is intentional.
        """
        def role_from_first_message(message: Message, receiver_address: Address) -> BaseDialogue.Role:
            # This skill always participates as AGENT, regardless of message.
            return LedgerApiDialogue.Role.AGENT
        BaseLedgerApiDialogues.__init__(self, self_address=self_address, role_from_first_message=role_from_first_message)
class TestProcessNextRequests(TestCase):
    """Server request-handling validation tests."""

    def test_emtpy_request_returns_job_response_error(self):
        """An empty request dict must produce field errors for the three
        required top-level keys (actions, control, context)."""
        # NOTE(review): "emtpy" is a typo for "empty"; the method name is kept
        # because renaming can break test-selection tooling and CI filters.
        settings = factories.ServerSettingsFactory()
        server = HandleNextRequestServer(settings=settings)
        server.transport = SimplePassthroughServerTransport(server.service_name)
        server.transport.set_request({})
        server.handle_next_request()
        response = server.transport.get_response()
        # Idiom fix: assertIn gives an informative failure message, unlike
        # assertTrue('errors' in response).
        self.assertIn('errors', response)
        errors = response['errors']
        self.assertEqual(len(errors), 3)
        # Idiom fix: set comprehension instead of set([...]) over a list.
        self.assertEqual({'actions', 'control', 'context'}, {e.get('field', None) for e in errors})
def template_injection_url(uri, scanid):
    """Probe *uri* for server-side template injection with tplmap and record
    a finding when tplmap reports capabilities.

    Raises subprocess.CalledProcessError if tplmap exits non-zero.
    """
    # text=True already yields a str; the original wrapped the result in a
    # redundant str() call.
    output = subprocess.check_output(['python3', './tplmap/tplmap.py', '-u', uri], text=True)
    if ('not injectable' in output.lower()):
        print('Endpoint is not vulnerable')
        return
    # Idiom fix: splitlines() instead of split('\n') (avoids a trailing
    # empty element; scan result is unaffected).
    for line in output.splitlines():
        if ('Capabilities:' in line):
            attack_result = {'id': 25, 'scanid': scanid, 'url': uri, 'alert': 'Template Injection', 'impact': 'High', 'req_headers': 'NA', 'req_body': 'NA', 'res_headers': 'NA', 'res_body': 'NA'}
            dbupdate.insert_record(attack_result)
            print(line)
            break
class ChooseWalletPage(QWizardPage):
    """Wizard page listing recently opened wallets, letting the user open one
    of them, browse for another wallet file, or create a new wallet.

    The recent-wallet list is populated asynchronously by a worker thread
    (see ``_populate_list_in_thread``); ``ListPopulationContext`` carries the
    cross-thread signal and a staleness flag used to cancel stale work.
    """
    HELP_CONTEXT = HelpContext('choose-wallet')
    # When True, isComplete() reports True regardless of table selection
    # (set once an open flow has been committed).
    _force_completed = False
    _list_thread_context: Optional[ListPopulationContext] = None
    _list_thread: Optional[threading.Thread] = None
    # Tracks the commit button being held down; temporarily suppresses
    # isComplete() so the wizard does not advance prematurely.
    _commit_pressed = False
    def __init__(self, parent: WalletWizard) -> None:
        super().__init__(parent)
        self.setTitle(_('Select an existing wallet'))
        self.setButtonText(QWizard.CommitButton, ((' ' + _('Open &Selected Wallet')) + ' '))
        self.setCommitPage(True)
        self._recent_wallet_paths: List[str] = []
        self._recent_wallet_entries: Dict[str, FileState] = {}
        vlayout = QVBoxLayout()
        page = self
        # Local subclass so key presses and context menus can delegate back
        # to the enclosing page via the `page` closure.
        class TableWidget(QTableWidget):
            def keyPressEvent(self, event):
                key = event.key()
                if ((key == Qt.Key_Return) or (key == Qt.Key_Enter)):
                    page._event_key_selection()
                else:
                    super(TableWidget, self).keyPressEvent(event)
            def contextMenuEvent(self, event):
                # "Show in file explorer" actions, only on supported platforms.
                if (not can_show_in_file_explorer()):
                    return
                selected_indexes = self.selectedIndexes()
                if (not len(selected_indexes)):
                    return
                wallet_path = page._recent_wallet_paths[selected_indexes[0].row()]
                entry = page._recent_wallet_entries[wallet_path]
                show_file_action: Optional[QAction] = None
                show_directory_action: Optional[QAction] = None
                menu = QMenu(self)
                if (sys.platform == 'win32'):
                    show_file_action = menu.addAction(SHOW_IN_EXPLORER)
                    show_directory_action = menu.addAction(OPEN_DIRECTORY_IN_EXPLORER)
                elif (sys.platform == 'darwin'):
                    show_file_action = menu.addAction(SHOW_IN_FINDER)
                    show_directory_action = menu.addAction(OPEN_FOLDER_IN_FINDER)
                action = menu.exec_(self.mapToGlobal(event.pos()))
                if (action == show_file_action):
                    show_in_file_explorer(entry.path)
                elif (action == show_directory_action):
                    path = os.path.dirname(entry.path)
                    show_in_file_explorer(path)
        self._wallet_table = TableWidget()
        self._wallet_table.setSelectionMode(QAbstractItemView.SingleSelection)
        self._wallet_table.selectionModel().selectionChanged.connect(self._event_selection_changed)
        self._wallet_table.doubleClicked.connect(self._event_entry_doubleclicked)
        hh = self._wallet_table.horizontalHeader()
        hh.setStretchLastSection(True)
        vh = self._wallet_table.verticalHeader()
        vh.setSectionResizeMode(QHeaderView.ResizeToContents)
        vh.hide()
        self._wallet_table.setColumnCount(1)
        self._wallet_table.setSelectionBehavior(QAbstractItemView.SelectRows)
        self._wallet_table.setStyleSheet('\n            QTableView {\n                selection-background-color: #F5F8FA;\n            }\n            QHeaderView::section {\n                font-weight: bold;\n            }\n        ')
        self._wallet_table.setTabKeyNavigation(False)
        self._wallet_table.setHorizontalHeaderLabels(['Recently Opened Wallets'])
        self._unlocked_pixmap = QPixmap(icon_path('icons8-lock-80.png')).scaledToWidth(40, Qt.SmoothTransformation)
        vlayout.addWidget(self._wallet_table)
        tablebutton_layout = QHBoxLayout()
        self.file_button = QPushButton(((' ' + _('Open &Other Wallet')) + ' '))
        self.file_button.setSizePolicy(QSizePolicy.Maximum, QSizePolicy.Fixed)
        self.file_button.clicked.connect(self._event_click_open_file)
        tablebutton_layout.addStretch()
        tablebutton_layout.addWidget(self.file_button, Qt.AlignRight)
        vlayout.addLayout(tablebutton_layout)
        self.setLayout(vlayout)
        self._on_reset_next_page()
    def _on_reset_next_page(self) -> None:
        # Default next page until a selection says otherwise.
        self._next_page_id = WalletPage.MIGRATE_OLDER_WALLET
    def nextId(self) -> WalletPage:
        return self._next_page_id
    def isFinalPage(self) -> bool:
        return False
    def isComplete(self) -> bool:
        """Whether the wizard's Next/Commit button should be enabled."""
        if self._commit_pressed:
            result = False
        elif self._force_completed:
            result = True
        else:
            result = (len(self._wallet_table.selectedIndexes()) > 0)
        return result
    def validatePage(self) -> bool:
        return self.isComplete()
    def _attempt_open_wallet(self, wallet_path: str, change_page: bool=False) -> bool:
        """Try to open the wallet at *wallet_path*; returns True on success.

        Prompts for the password and either advances to the migration page
        (older wallets) or accepts the wizard with the chosen path.
        """
        if (not os.path.exists(wallet_path)):
            MessageBox.show_error(_('Unable to open a deleted wallet.'))
            return False
        # for/else: reuse a cached entry if one matches, otherwise probe the
        # file fresh (the else clause runs when no break happened).
        entry: Optional[FileState] = None
        for entry in self._recent_wallet_entries.values():
            if (entry.path == wallet_path):
                break
        else:
            entry = create_file_state(wallet_path)
        if (entry is None):
            MessageBox.show_error(_('Unrecognised or unsupported wallet file.'))
            return False
        if entry.is_too_modern:
            MessageBox.show_error(_('The selected wallet cannot be opened as it is from a later version of ElectrumSV.'))
            return False
        password: Optional[str] = None
        wizard: WalletWizard = self.wizard()
        storage = WalletStorage(entry.path)
        try:
            password = request_password(self, storage, entry)
            if (password is None):
                # User cancelled the password prompt.
                return False
            if change_page:
                self._force_completed = True
                if entry.requires_upgrade:
                    self._next_page_id = WalletPage.MIGRATE_OLDER_WALLET
                    migration_page = wizard.page(WalletPage.MIGRATE_OLDER_WALLET)
                    migration_page.set_migration_data(entry, storage, password)
                    # Ownership of `storage` passes to the migration page;
                    # prevent the finally clause from closing it.
                    storage = None
                    wizard.next()
                else:
                    assert (entry.storage_kind == StorageKind.DATABASE), f'not a database {entry.storage_kind}'
                    wizard.set_wallet_path(entry.path)
                    wizard.accept()
        finally:
            if (storage is not None):
                storage.close()
        return True
    def _event_click_create_wallet(self) -> None:
        # "Create New Wallet" custom button handler.
        initial_path = app_state.config.get_preferred_wallet_dirpath()
        create_filepath = create_new_wallet(self, initial_path)
        if (create_filepath is not None):
            wizard: WalletWizard = self.wizard()
            wizard.set_wallet_path(create_filepath)
            self._force_completed = True
            self._next_page_id = (- 1)
            wizard.accept()
    def _event_click_open_file(self) -> None:
        # "Open Other Wallet" button handler: browse for a wallet file.
        initial_dirpath = app_state.config.get_preferred_wallet_dirpath()
        (wallet_filepath, __) = QFileDialog.getOpenFileName(self, 'Select your wallet file', initial_dirpath)
        if wallet_filepath:
            wallet_filepath = os.path.normpath(wallet_filepath)
            self._attempt_open_wallet(wallet_filepath, change_page=True)
    def _event_click_open_selected_file(self) -> None:
        # Commit button released: open the table's selected wallet.
        self._commit_pressed = False
        selected_indexes = self._wallet_table.selectedIndexes()
        wallet_path = self._recent_wallet_paths[selected_indexes[0].row()]
        self._attempt_open_wallet(wallet_path, change_page=True)
    def _event_press_open_selected_file(self) -> None:
        # Commit button pressed (not yet released); see isComplete().
        self._commit_pressed = True
    def _event_selection_changed(self, _selected: QItemSelection, _deselected: QItemSelection) -> None:
        # Route the next page based on whether the selected wallet needs a
        # migration step; -1 means "no further page" (wizard can finish).
        selected_indexes = self._wallet_table.selectedIndexes()
        selected_row = (selected_indexes[0].row() if len(selected_indexes) else (- 1))
        if (selected_row != (- 1)):
            wallet_path = self._recent_wallet_paths[selected_row]
            entry = self._recent_wallet_entries[wallet_path]
            if entry.requires_upgrade:
                self._next_page_id = WalletPage.MIGRATE_OLDER_WALLET
            else:
                self._next_page_id = (- 1)
        else:
            self._clear_selection()
        self.completeChanged.emit()
    def _event_key_selection(self) -> None:
        # Enter/Return in the table opens the selected wallet.
        selected_indexes = self._wallet_table.selectedIndexes()
        if len(selected_indexes):
            self._select_row(selected_indexes[0].row())
    def _event_entry_doubleclicked(self, index: QModelIndex) -> None:
        self._select_row(index.row())
    def _select_row(self, row: int) -> None:
        wallet_path = self._recent_wallet_paths[row]
        self._attempt_open_wallet(wallet_path, change_page=True)
    def _clear_selection(self) -> None:
        # Reset selection-dependent state back to its defaults.
        self._force_completed = False
        self._on_reset_next_page()
        self._commit_pressed = False
        wizard: WalletWizard = self.wizard()
        wizard.set_wallet_path(None)
    def on_enter(self) -> None:
        """Page activation: wire wizard buttons and start the background
        thread that populates the recent-wallet list."""
        self._clear_selection()
        wizard: WalletWizard = self.wizard()
        button = wizard.button(QWizard.CustomButton1)
        button.setVisible(True)
        button.setText(((' ' + _('Create &New Wallet')) + ' '))
        button.clicked.connect(self._event_click_create_wallet)
        button.show()
        cancel_button = wizard.button(QWizard.CancelButton)
        cancel_button.show()
        commit_button = wizard.button(QWizard.CommitButton)
        commit_button.clicked.connect(self._event_click_open_selected_file)
        commit_button.pressed.connect(self._event_press_open_selected_file)
        self._gui_list_reset()
        # Only keep recently-open paths that still exist on disk.
        self._recent_wallet_paths.extend([candidate_path for candidate_path in [os.path.normpath(candidate_path) for candidate_path in app_state.config.get('recently_open', [])] if os.path.exists(candidate_path)])
        self._list_thread_context = ListPopulationContext()
        self._list_thread_context.update_list_entry.connect(self._gui_list_update)
        self._list_thread = threading.Thread(target=self._populate_list_in_thread, args=(self._list_thread_context,))
        self._list_thread.setDaemon(True)
        self._list_thread.start()
        self._wallet_table.setFocus()
    def on_leave(self) -> None:
        """Page deactivation: cancel the population thread and unhook the
        wizard-button signals connected in on_enter()."""
        if (self._list_thread is not None):
            assert (self._list_thread_context is not None)
            self._list_thread_context.update_list_entry.disconnect()
            # Marking the context stale makes the worker exit early.
            self._list_thread_context.stale = True
            self._list_thread = None
        wizard: WalletWizard = self.wizard()
        button = wizard.button(QWizard.CustomButton1)
        button.setVisible(False)
        button.clicked.disconnect(self._event_click_create_wallet)
        commit_button = wizard.button(QWizard.CommitButton)
        commit_button.clicked.disconnect(self._event_click_open_selected_file)
    def _populate_list_in_thread(self, context: ListPopulationContext) -> None:
        # Worker thread: probe each recent path and emit entries to the GUI
        # thread via the context signal; bail out as soon as we become stale.
        for file_path in self._recent_wallet_paths:
            if context.stale:
                return
            entry = create_file_state(file_path)
            if context.stale:
                return
            if (entry is not None):
                context.update_list_entry.emit(entry)
    def _get_file_state(self, wallet_path: str) -> Optional[FileState]:
        """Return a cached FileState if still current, else re-probe the file."""
        if (not os.path.exists(wallet_path)):
            return None
        entry = self._recent_wallet_entries.get(wallet_path)
        if (entry is not None):
            modification_time = os.path.getmtime(entry.path)
            if (entry.modification_time == modification_time):
                return entry
        return create_file_state(wallet_path)
    def _gui_list_reset(self) -> None:
        # Clear cached paths/entries and empty the table widget.
        self._recent_wallet_paths: List[str] = []
        self._recent_wallet_entries: Dict[str, FileState] = {}
        while self._wallet_table.rowCount():
            self._wallet_table.removeRow((self._wallet_table.rowCount() - 1))
    def _gui_list_update(self, entry: FileState) -> None:
        """GUI-thread slot: append one wallet entry row to the table."""
        assert (entry.path is not None)
        row_index = self._wallet_table.rowCount()
        if (entry.path in self._recent_wallet_entries):
            return
        self._wallet_table.insertRow(row_index)
        self._recent_wallet_entries[entry.path] = entry
        row_widget = QWidget()
        row_layout = QHBoxLayout()
        row_layout.setSpacing(0)
        row_layout.setContentsMargins(0, 0, 0, 0)
        row_icon_label = QLabel()
        row_icon_label.setPixmap(self._unlocked_pixmap)
        row_icon_label.setAlignment((Qt.AlignHCenter | Qt.AlignVCenter))
        row_icon_label.setMaximumWidth(80)
        # Wallet name with its directory underneath in grey.
        row_desc_label = QLabel((((entry.name + "<br/><font color='grey'>") + os.path.dirname(entry.path)) + '</font>'))
        row_desc_label.setTextFormat(Qt.RichText)
        row_layout.addWidget(row_icon_label)
        row_layout.addWidget(row_desc_label)
        row_layout.addStretch(1)
        row_widget.setLayout(row_layout)
        self._wallet_table.setCellWidget(row_index, 0, row_widget)
class StatsDictTest(TestCaseBase):
    """StatsDict must agree with numpy's statistics whether samples arrive
    via per-key add() calls or batched extend() calls."""

    def test_add(self) -> None:
        sample_count = 10
        uniform = np.random.rand(sample_count)
        normal = np.random.randn(sample_count)
        stats = StatsDict()
        for u, g in zip(uniform.tolist(), normal.tolist()):
            stats.add('a', u)
            stats.add('b', g)
        self._verify(stats, uniform, normal, sample_count)

    def test_extend(self) -> None:
        sample_count = 10
        uniform = np.random.rand(sample_count)
        normal = np.random.randn(sample_count)
        stats = StatsDict()
        for u, g in zip(uniform.tolist(), normal.tolist()):
            stats.extend({'a': u, 'b': g})
        self._verify(stats, uniform, normal, sample_count)

    def _verify(self, stats, uniform, normal, sample_count) -> None:
        # Population statistics (ddof=0) for key 'a', sample statistics
        # (ddof=1) for key 'b', mirroring the original assertions.
        self.assertEqual(stats['a'].count(), sample_count)
        self.assertEqual(stats['a'].mean(), np.mean(uniform))
        self.assertEqual(stats['a'].var(ddof=0), np.var(uniform, ddof=0))
        self.assertEqual(stats['a'].std(ddof=0), np.std(uniform, ddof=0))
        self.assertEqual(stats['a'].min(), np.min(uniform))
        self.assertEqual(stats['a'].max(), np.max(uniform))
        self.assertEqual(stats['b'].count(), sample_count)
        self.assertEqual(stats['b'].mean(), np.mean(normal))
        self.assertEqual(stats['b'].var(ddof=1), np.var(normal, ddof=1))
        self.assertEqual(stats['b'].std(ddof=1), np.std(normal, ddof=1))
        self.assertEqual(stats['b'].min(), np.min(normal))
        self.assertEqual(stats['b'].max(), np.max(normal))
class DecisionTree(elmdptt.TaskModelInitialization):
    """Luigi task that builds an unfitted sklearn DecisionTreeClassifier
    from task parameters."""
    criterion = luigi.Parameter()
    max_depth = luigi.Parameter()
    # BUG fix: this was `luigi.Parameter` (the class object itself, never
    # instantiated), so luigi would not register or parse this parameter.
    min_samples_leaf = luigi.Parameter()
    random_state = luigi.Parameter()
    max_leaf_nodes = luigi.Parameter()
    def actual_task_code(self):
        """Return the configured (unfitted) DecisionTreeClassifier."""
        model = tree.DecisionTreeClassifier(criterion=self.criterion, max_depth=self.max_depth, min_samples_leaf=self.min_samples_leaf, random_state=self.random_state, max_leaf_nodes=self.max_leaf_nodes)
        return model
def test_full_backfill_if_metric_not_updated_for_a_long_time(dbt_project: DbtProject, test_id: str):
    """When the metric has not been updated for longer than the gap, a later
    run should backfill only the recent window, leaving old metrics intact."""
    # Pretend the previous run happened `date_gap_size` days ago.
    date_gap_size = 15
    utc_today = datetime.utcnow().date()
    data_dates = generate_dates(base_date=(utc_today - timedelta(1)))
    # First run: 5 rows/day, but only for days older than the gap; executed
    # "as of" date_gap_size days ago via the custom_run_started_at var.
    data = [{TIMESTAMP_COLUMN: cur_date.strftime(DATE_FORMAT)} for cur_date in data_dates for _ in range(5) if (cur_date < (utc_today - timedelta(date_gap_size)))]
    test_result = dbt_project.test(test_id, DBT_TEST_NAME, DBT_TEST_ARGS, data=data, as_model=True, materialization='incremental', test_vars={'custom_run_started_at': (datetime.utcnow() - timedelta(date_gap_size)).isoformat()})
    assert (test_result['status'] != 'error')
    # Daily metrics should cover DAYS_BACK days counted back from the stale
    # run date, each with the 5 rows inserted above.
    assert (get_daily_row_count_metrics(dbt_project, test_id) == {cur_date: 5 for cur_date in data_dates if ((utc_today - timedelta((DAYS_BACK + date_gap_size))) <= cur_date < (utc_today - timedelta(date_gap_size)))})
    # Second run "today" with 1 row/day across all dates: recent days get
    # freshly computed counts (1) while the old window keeps its counts (5).
    data = [{TIMESTAMP_COLUMN: cur_date.strftime(DATE_FORMAT)} for cur_date in data_dates]
    test_result = dbt_project.test(test_id, DBT_TEST_NAME, DBT_TEST_ARGS, data=data, as_model=True, materialization='incremental')
    assert (test_result['status'] != 'error')
    assert (get_daily_row_count_metrics(dbt_project, test_id) == {cur_date: (5 if (cur_date < (utc_today - timedelta(DAYS_BACK))) else 1) for cur_date in data_dates if (((utc_today - timedelta((DAYS_BACK + date_gap_size))) <= cur_date < (utc_today - timedelta(date_gap_size))) or (cur_date >= (utc_today - timedelta(DAYS_BACK))))})
class UniqueForMonthTests(TestCase):
    """Validation behaviour of the unique-for-month serializer constraint."""

    def setUp(self):
        # One existing row occupying January 2017 for slug 'existing'.
        self.instance = UniqueForMonthModel.objects.create(slug='existing', published='2017-01-01')

    def test_not_unique_for_month(self):
        # Same slug in the same month as the fixture row -> invalid.
        serializer = UniqueForMonthSerializer(data={'slug': 'existing', 'published': '2017-01-01'})
        assert not serializer.is_valid()
        expected_errors = {'slug': ['This field must be unique for the "published" month.']}
        assert serializer.errors == expected_errors

    def test_unique_for_month(self):
        # Same slug but a different month -> valid.
        serializer = UniqueForMonthSerializer(data={'slug': 'existing', 'published': '2017-02-01'})
        assert serializer.is_valid()
        assert serializer.validated_data == {'slug': 'existing', 'published': datetime.date(2017, 2, 1)}
class TestOFPActionDecMplsTtl(unittest.TestCase):
    """Round-trip tests for OFPActionDecMplsTtl parsing and serialization."""

    # Expected wire values for the dec-MPLS-TTL action header.
    type_ = ofproto.OFPAT_DEC_MPLS_TTL
    len_ = ofproto.OFP_ACTION_MPLS_TTL_SIZE
    fmt = ofproto.OFP_ACTION_HEADER_PACK_STR
    buf = pack(fmt, type_, len_)
    c = OFPActionDecMplsTtl()

    def test_parser(self):
        parsed = self.c.parser(self.buf, 0)
        eq_(parsed.len, self.len_)
        eq_(parsed.type, self.type_)

    def test_serialize(self):
        out = bytearray()
        self.c.serialize(out, 0)
        fields = struct.unpack(self.fmt, six.binary_type(out))
        eq_(fields[0], self.type_)
        eq_(fields[1], self.len_)
class OptionPlotoptionsDumbbellSonificationDefaultspeechoptionsMappingVolume(Options):
    """Accessors for the dumbbell sonification default-speech volume mapping.

    BUG fix: each getter/setter pair was declared as two plain methods with
    the same name, so the second ``def`` silently shadowed the first and the
    getters were unreachable. The pairs are now proper read/write properties.
    """

    @property
    def mapFunction(self):
        """The ``mapFunction`` option value (None when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """The ``mapTo`` option value (None when unset)."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """The ``max`` option value (None when unset)."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """The ``min`` option value (None when unset)."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """The ``within`` option value (None when unset)."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class bsn_set_switch_pipeline_request(bsn_header):
    """BSN (Big Switch Networks) set_switch_pipeline experimenter request.

    Carries a 256-byte NUL-padded pipeline name. Note this is Python 2 era
    generated-style code (str-based packing via ''.join).
    """
    version = 6
    type = 4
    experimenter = 6035143
    subtype = 53

    def __init__(self, xid=None, pipeline=None):
        # Simplified from redundant `if x != None: self.x = x else: ...`
        # branches; behaviour is identical.
        self.xid = xid
        self.pipeline = pipeline if pipeline is not None else ''

    def pack(self):
        """Serialize to wire format; the 16-bit length field at offset 2 is
        patched in once the total message size is known."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(struct.pack('!256s', self.pipeline))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Parse a message from *reader* and return a new instance.

        BUG fix: this took no ``self`` yet was declared as a plain method;
        under Python 2 it could not be called as ``cls.unpack(reader)``
        (unbound-method check) and via an instance the reader argument was
        mis-bound in either version. It is now a proper @staticmethod.
        """
        obj = bsn_set_switch_pipeline_request()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's body (skip 4 header bytes).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 53)
        obj.pipeline = reader.read('!256s')[0].rstrip('\x00')
        return obj

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        if self.xid != other.xid:
            return False
        if self.pipeline != other.pipeline:
            return False
        return True

    def pretty_print(self, q):
        """Write a human-readable rendering of this message to pretty-printer *q*."""
        q.text('bsn_set_switch_pipeline_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if self.xid is not None:
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('pipeline = ')
                q.pp(self.pipeline)
                q.breakable()
        q.text('}')
class DeadPathElimination(PipelineStage):
    """Pipeline stage that removes CFG edges whose branch condition is
    provably unsatisfiable, then prunes the blocks that become unreachable."""
    name = 'dead-path-elimination'

    def __init__(self):
        self._logic_converter: BaseConverter = Z3Converter()
        self._timeout: Optional[int] = None

    def run(self, task: DecompilerTask) -> None:
        """Find and eliminate dead paths in the task's control flow graph."""
        self._timeout = task.options.getint(f'{self.name}.timeout_satisfiable')
        engine = task.options.getstring('logic-engine.engine')
        if (engine == 'delogic'):
            self._logic_converter = DelogicConverter()
        if (task.graph.root is None):
            warning(f'[{self.__class__.__name__}] Can not detect dead blocks because the cfg has no head.')
            return
        if (not (dead_edges := set(self.find_unsatisfyable_edges(task.graph)))):
            return
        self._remove_dead_edges(task.graph, dead_edges)

    def _fix_phi_origin_blocks_on_remove(self, dead_blocks: Set[BasicBlock], graph: ControlFlowGraph) -> None:
        """Drop removed predecessor blocks from every Phi's origin map."""
        for instruction in graph.instructions:
            if (not isinstance(instruction, Phi)):
                continue
            removed_phi_predecessors = [block for block in instruction.origin_block.keys() if (block in dead_blocks)]
            for block in removed_phi_predecessors:
                instruction.remove_from_origin_block(block)

    def _remove_dead_edges(self, cfg: ControlFlowGraph, dead_edges: Set[BasicBlockEdge]):
        """Remove the dead edges, then prune blocks unreachable from the head."""
        original_head: BasicBlock = cfg.root
        for dead_edge in dead_edges:
            self._remove_and_fix_edge(cfg, dead_edge)
        dead_blocks: Set[BasicBlock] = self._find_unreachable_blocks(cfg, original_head)
        self._fix_phi_origin_blocks_on_remove(dead_blocks, cfg)
        cfg.remove_nodes_from(dead_blocks)
        info(f'[{self.__class__.__name__}] Eliminated {len(dead_blocks)} basic blocks from {len(dead_edges)} dead edges.')

    def find_unsatisfyable_edges(self, graph: ControlFlowGraph) -> Iterator[BasicBlockEdge]:
        """Yield out-edges of two-way branches whose condition can never hold."""
        for branch_block in [node for node in graph if (graph.out_degree(node) > 1)]:
            branch_instruction = branch_block.instructions[(- 1)]
            assert isinstance(branch_instruction, GenericBranch), f'Branching basic block without branch instruction at {branch_block.name}'
            if isinstance(branch_instruction, IndirectBranch):
                # Indirect branches (e.g. jump tables) are not analysable here.
                continue
            if (dead_edge := self._get_invalid_branch_edge(graph, branch_block, branch_instruction)):
                (yield dead_edge)

    def _get_invalid_branch_edge(self, graph: ControlFlowGraph, block: BasicBlock, instruction: Branch) -> Optional[BasicBlockEdge]:
        """Return the first unsatisfiable out-edge of *block*, if any."""
        try:
            condition = self._logic_converter.convert(instruction, define_expr=True)
        except ValueError as value_error:
            warning(f'[{self.__class__.__name__}] {str(value_error)}')
            return None
        for edge in graph.get_out_edges(block):
            if self._is_invalid_edge(edge, condition):
                return edge
        return None

    def _is_invalid_edge(self, edge: BasicBlockEdge, condition: Union[(BoolRef, WorldObject)]) -> bool:
        """Check whether the condition (negated for the false edge) is unsatisfiable."""
        if isinstance(edge, FalseCase):
            condition = self._logic_converter.negate(condition)
        return self._logic_converter.is_not_satisfiable(condition, timeout=self._timeout)

    @staticmethod
    def _find_unreachable_blocks(graph: ControlFlowGraph, head: BasicBlock) -> Set[BasicBlock]:
        """Return all blocks not reachable from *head*.

        BUG fix: this was declared without ``self`` and without
        @staticmethod, yet called as ``self._find_unreachable_blocks(cfg,
        head)`` -- the bound call passed three arguments to a two-parameter
        function and raised TypeError.
        """
        reachable_blocks: Set[BasicBlock] = set(graph.iter_postorder(head))
        return (set(graph) - reachable_blocks)

    @staticmethod
    def _remove_and_fix_edge(graph: ControlFlowGraph, dead_edge: BasicBlockEdge) -> None:
        """Remove *dead_edge*, drop the now-pointless branch instruction, and
        downgrade the surviving out-edge to an unconditional edge.

        BUG fix: same missing @staticmethod as _find_unreachable_blocks.
        """
        graph.remove_edge(dead_edge)
        dead_edge.source.instructions = dead_edge.source.instructions[:(- 1)]
        for edge in graph.get_out_edges(dead_edge.source):
            graph.substitute_edge(edge, UnconditionalEdge(edge.source, edge.sink))
def ovlp3d_33(ax, da, A, bx, db, B):
    """Machine-generated kernel returning a 10x10 block of 3D Gaussian
    overlap-type integrals for two shells centered at A and B with exponents
    ax/bx and coefficients da/db.

    The exponential uses the Gaussian product prefactor
    exp(-(ax*bx/(ax+bx)) * |A-B|^2), visible in x22/x23 below.

    NOTE(review): this is common-subexpression-eliminated generated code; the
    bare constants such as ``5.`` and the many ``0.`` factors look like
    truncated generated coefficients -- do not edit the x* intermediates by
    hand, regenerate from the source template instead.
    """
    result = numpy.zeros((10, 10), dtype=float)
    # Common subexpressions (numbering follows the generator).
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (x0 * ((ax * A[0]) + (bx * B[0])))
    x2 = (- x1)
    x3 = (x2 + B[0])
    x4 = (x0 * ((((- 2.0) * x1) + A[0]) + B[0]))
    x5 = (x2 + A[0])
    x6 = (x3 * x5)
    x7 = (x0 + (2.0 * x6))
    x8 = (x3 * x7)
    x9 = (x4 + x8)
    x10 = (x3 * x9)
    x11 = (x3 ** 2)
    x12 = (3.0 * x0)
    x13 = (x12 + (4.0 * x6))
    x14 = (x0 * ((2.0 * x11) + x13))
    x15 = (x5 * x9)
    x16 = (x14 + (2.0 * x15))
    x17 = (x5 * x7)
    x18 = (2.0 * x0)
    x19 = (x18 * ((x17 + (2.0 * x4)) + x8))
    x20 = ((x16 * x3) + x19)
    x21 = (2.0 * x5)
    x22 = ((ax * bx) * x0)
    x23 = (((5. * da) * db) * numpy.exp(((- x22) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x24 = ((x0 ** 1.5) * x23)
    x25 = (0. * x24)
    x26 = (x0 * ((ax * A[1]) + (bx * B[1])))
    x27 = (- x26)
    x28 = (x27 + B[1])
    x29 = 2.
    x30 = (0. * x29)
    x31 = (x24 * x30)
    x32 = (x31 * ((x16 * x5) + x19))
    x33 = (x0 * ((ax * A[2]) + (bx * B[2])))
    x34 = (- x33)
    x35 = (x34 + B[2])
    x36 = (x28 ** 2)
    x37 = (0.5 * x0)
    x38 = (x36 + x37)
    x39 = ((x0 ** 1.5) * x23)
    x40 = (x38 * x39)
    x41 = (x5 ** 2)
    x42 = (x17 + x4)
    x43 = ((x0 * (x13 + (2.0 * x41))) + (x21 * x42))
    x44 = (x30 * x43)
    x45 = 0.
    x46 = (x24 * x45)
    x47 = (x35 ** 2)
    x48 = (x37 + x47)
    x49 = (x39 * x48)
    x50 = (1.5 * x0)
    x51 = (0. * x5)
    x52 = (x51 * (x41 + x50))
    x53 = (x36 + x50)
    x54 = (x28 * x39)
    x55 = (x53 * x54)
    x56 = (x29 * x52)
    x57 = (x47 + x50)
    x58 = (x35 * x39)
    x59 = (x57 * x58)
    x60 = (x27 + A[1])
    x61 = (x20 * x31)
    x62 = (0. * x16)
    x63 = (x28 * x60)
    x64 = (x39 * (x37 + x63))
    x65 = (x24 * x62)
    x66 = (x35 * x60)
    x67 = (x0 * ((((- 2.0) * x26) + A[1]) + B[1]))
    x68 = (x0 + (2.0 * x63))
    x69 = (x28 * x68)
    x70 = (x67 + x69)
    x71 = (0. * x39)
    x72 = (x42 * x71)
    x73 = 1.
    x74 = (0. * x42)
    x75 = (x73 * x74)
    x76 = (x49 * x60)
    x77 = (x12 + (4.0 * x63))
    x78 = (x0 * ((2.0 * x36) + x77))
    x79 = (2.0 * x70)
    x80 = ((x28 * x79) + x78)
    x81 = (x37 + x41)
    x82 = (x39 * x81)
    x83 = (x30 * x82)
    x84 = (0. * x82)
    x85 = (0. * x81)
    x86 = (x29 * x57)
    x87 = (0. * x82)
    x88 = (x34 + A[2])
    x89 = (x28 * x88)
    x90 = (x35 * x88)
    x91 = (x37 + x90)
    x92 = (x39 * x91)
    x93 = (x40 * x88)
    x94 = (x0 * ((((- 2.0) * x33) + A[2]) + B[2]))
    x95 = (x0 + (2.0 * x90))
    x96 = (x35 * x95)
    x97 = (x94 + x96)
    x98 = (x29 * x53)
    x99 = (x12 + (4.0 * x90))
    x100 = (x0 * ((2.0 * x47) + x99))
    x101 = (2.0 * x97)
    x102 = (x100 + (x101 * x35))
    x103 = (x60 ** 2)
    x104 = (x103 + x37)
    x105 = (x104 * x39)
    x106 = ((2.0 * x10) + x14)
    x107 = (x106 * x30)
    x108 = (x60 * x68)
    x109 = (x108 + x67)
    x110 = (x71 * x9)
    x111 = (0. * x9)
    x112 = (x105 * x35)
    x113 = ((x60 * x79) + x78)
    x114 = (x37 + x6)
    x115 = (x114 * x71)
    x116 = (x109 * x73)
    x117 = (0. * x114)
    x118 = (0. * x114)
    x119 = (x18 * ((x108 + (2.0 * x67)) + x69))
    x120 = ((x113 * x28) + x119)
    x121 = (x31 * x5)
    x122 = (x24 * x5)
    x123 = (0. * x113)
    x124 = (0. * x5)
    x125 = (x111 * x73)
    x126 = (x70 * x73)
    x127 = (x39 * x88)
    x128 = (x73 * x97)
    x129 = (x39 * x60)
    x130 = (x122 * x45)
    x131 = (x88 ** 2)
    x132 = (x131 + x37)
    x133 = (x132 * x39)
    x134 = (x133 * x28)
    x135 = (x88 * x95)
    x136 = (x135 + x94)
    x137 = (x136 * x73)
    x138 = (x100 + (x101 * x88))
    x139 = (0. * x138)
    x140 = (x18 * ((x135 + (2.0 * x94)) + x96))
    x141 = ((x138 * x35) + x140)
    x142 = ((0. * x103) + (0. * x50))
    x143 = (x3 * (x11 + x50))
    x144 = (2.0 * x60)
    x145 = ((x0 * ((2.0 * x103) + x77)) + (x109 * x144))
    x146 = (x145 * x30)
    x147 = (x11 + x37)
    x148 = (x147 * x39)
    x149 = (x142 * x29)
    x150 = (x31 * ((x113 * x60) + x119))
    x151 = (x24 * x3)
    x152 = (x151 * x45)
    x153 = ((0. * x143) * x29)
    x154 = (0. * x148)
    x155 = (0. * x147)
    x156 = (0. * x3)
    x157 = ((0. * x131) + (0. * x50))
    x158 = (x157 * x29)
    x159 = (2.0 * x88)
    x160 = ((x0 * ((2.0 * x131) + x99)) + (x136 * x159))
    x161 = (x160 * x30)
    x162 = (x31 * ((x138 * x88) + x140))
    # Assemble the 10x10 block; each entry contracts intermediates with numpy.sum.
    result[(0, 0)] = numpy.sum((x25 * ((x0 * (((4.0 * x10) + (5.0 * x14)) + (6.0 * x15))) + (x20 * x21))))
    result[(0, 1)] = numpy.sum((x28 * x32))
    result[(0, 2)] = numpy.sum((x32 * x35))
    result[(0, 3)] = numpy.sum((x40 * x44))
    result[(0, 4)] = numpy.sum((((x28 * x35) * x43) * x46))
    result[(0, 5)] = numpy.sum((x44 * x49))
    result[(0, 6)] = numpy.sum((x52 * x55))
    result[(0, 7)] = numpy.sum(((x35 * x40) * x56))
    result[(0, 8)] = numpy.sum(((x28 * x49) * x56))
    result[(0, 9)] = numpy.sum((x52 * x59))
    result[(1, 0)] = numpy.sum((x60 * x61))
    result[(1, 1)] = numpy.sum((x62 * x64))
    result[(1, 2)] = numpy.sum((x65 * x66))
    result[(1, 3)] = numpy.sum((x70 * x72))
    result[(1, 4)] = numpy.sum(((x35 * x64) * x75))
    result[(1, 5)] = numpy.sum((x74 * x76))
    result[(1, 6)] = numpy.sum((x80 * x83))
    result[(1, 7)] = numpy.sum(((x35 * x70) * x84))
    result[(1, 8)] = numpy.sum(((x48 * x64) * x85))
    result[(1, 9)] = numpy.sum(((x66 * x86) * x87))
    result[(2, 0)] = numpy.sum((x61 * x88))
    result[(2, 1)] = numpy.sum((x65 * x89))
    result[(2, 2)] = numpy.sum((x62 * x92))
    result[(2, 3)] = numpy.sum((x74 * x93))
    result[(2, 4)] = numpy.sum(((x28 * x75) * x92))
    result[(2, 5)] = numpy.sum((x72 * x97))
    result[(2, 6)] = numpy.sum(((x87 * x89) * x98))
    result[(2, 7)] = numpy.sum(((x38 * x85) * x92))
    result[(2, 8)] = numpy.sum(((x28 * x84) * x97))
    result[(2, 9)] = numpy.sum((x102 * x83))
    result[(3, 0)] = numpy.sum((x105 * x107))
    result[(3, 1)] = numpy.sum((x109 * x110))
    result[(3, 2)] = numpy.sum((x111 * x112))
    result[(3, 3)] = numpy.sum((x113 * x115))
    result[(3, 4)] = numpy.sum(((x116 * x117) * x58))
    result[(3, 5)] = numpy.sum(((x104 * x118) * x49))
    result[(3, 6)] = numpy.sum((x120 * x121))
    result[(3, 7)] = numpy.sum(((x122 * x123) * x35))
    result[(3, 8)] = numpy.sum(((x109 * x124) * x49))
    result[(3, 9)] = numpy.sum(((x112 * x51) * x86))
    result[(4, 0)] = numpy.sum((((x106 * x46) * x60) * x88))
    result[(4, 1)] = numpy.sum(((x125 * x64) * x88))
    result[(4, 2)] = numpy.sum(((x125 * x60) * x92))
    result[(4, 3)] = numpy.sum(((x117 * x126) * x127))
    result[(4, 4)] = numpy.sum(((x114 * x64) * x91))
    result[(4, 5)] = numpy.sum(((x117 * x128) * x129))
    result[(4, 6)] = numpy.sum(((x130 * x80) * x88))
    result[(4, 7)] = numpy.sum(((x124 * x126) * x92))
    result[(4, 8)] = numpy.sum(((x124 * x128) * x64))
    result[(4, 9)] = numpy.sum(((x102 * x130) * x60))
    result[(5, 0)] = numpy.sum((x107 * x133))
    result[(5, 1)] = numpy.sum((x111 * x134))
    result[(5, 2)] = numpy.sum((x110 * x136))
    result[(5, 3)] = numpy.sum(((x118 * x132) * x40))
    result[(5, 4)] = numpy.sum(((x117 * x137) * x54))
    result[(5, 5)] = numpy.sum((x115 * x138))
    result[(5, 6)] = numpy.sum(((x134 * x51) * x98))
    result[(5, 7)] = numpy.sum(((x124 * x136) * x40))
    result[(5, 8)] = numpy.sum(((x122 * x139) * x28))
    result[(5, 9)] = numpy.sum((x121 * x141))
    result[(6, 0)] = numpy.sum(((x129 * x142) * x143))
    result[(6, 1)] = numpy.sum((x146 * x148))
    result[(6, 2)] = numpy.sum(((x148 * x149) * x66))
    result[(6, 3)] = numpy.sum((x150 * x3))
    result[(6, 4)] = numpy.sum(((x145 * x152) * x35))
    result[(6, 5)] = numpy.sum(((x149 * x3) * x76))
    result[(6, 6)] = numpy.sum((x25 * ((x0 * ((((4.0 * x28) * x70) + ((6.0 * x60) * x70)) + (5.0 * x78))) + (x120 * x144))))
    result[(6, 7)] = numpy.sum((x150 * x35))
    result[(6, 8)] = numpy.sum((x146 * x49))
    result[(6, 9)] = numpy.sum(((x142 * x59) * x60))
    result[(7, 0)] = numpy.sum(((x105 * x153) * x88))
    result[(7, 1)] = numpy.sum(((x109 * x154) * x88))
    result[(7, 2)] = numpy.sum(((x104 * x155) * x92))
    result[(7, 3)] = numpy.sum(((x123 * x151) * x88))
    result[(7, 4)] = numpy.sum(((x116 * x156) * x92))
    result[(7, 5)] = numpy.sum(((x105 * x156) * x97))
    result[(7, 6)] = numpy.sum(((x120 * x31) * x88))
    result[(7, 7)] = numpy.sum(((x113 * x71) * x91))
    result[(7, 8)] = numpy.sum(((x109 * x71) * x97))
    result[(7, 9)] = numpy.sum(((x102 * x105) * x30))
    result[(8, 0)] = numpy.sum(((x133 * x153) * x60))
    result[(8, 1)] = numpy.sum(((x132 * x155) * x64))
    result[(8, 2)] = numpy.sum(((x136 * x154) * x60))
    result[(8, 3)] = numpy.sum(((x133 * x156) * x70))
    result[(8, 4)] = numpy.sum(((x137 * x156) * x64))
    result[(8, 5)] = numpy.sum(((x139 * x151) * x60))
    result[(8, 6)] = numpy.sum(((x133 * x30) * x80))
    result[(8, 7)] = numpy.sum(((x136 * x70) * x71))
    result[(8, 8)] = numpy.sum((x139 * x64))
    result[(8, 9)] = numpy.sum(((x141 * x31) * x60))
    result[(9, 0)] = numpy.sum(((x127 * x143) * x157))
    result[(9, 1)] = numpy.sum(((x148 * x158) * x89))
    result[(9, 2)] = numpy.sum((x148 * x161))
    result[(9, 3)] = numpy.sum(((x158 * x3) * x93))
    result[(9, 4)] = numpy.sum(((x152 * x160) * x28))
    result[(9, 5)] = numpy.sum((x162 * x3))
    result[(9, 6)] = numpy.sum(((x157 * x55) * x88))
    result[(9, 7)] = numpy.sum((x161 * x40))
    result[(9, 8)] = numpy.sum((x162 * x28))
    result[(9, 9)] = numpy.sum((x25 * ((x0 * (((5.0 * x100) + ((4.0 * x35) * x97)) + ((6.0 * x88) * x97))) + (x141 * x159))))
    return result
def test_upload(monkeypatch, set_api_key):
    """Uploading a local file through a fetched SimulationTask should complete without error."""
    responses.add(
        responses.GET,
        f'{Env.current.web_api_endpoint}/tidy3d/tasks/3eb06d16-208b-487b-864b-e9b1d3e010a7/detail',
        json={'data': {'taskId': '3eb06d16-208b-487b-864b-e9b1d3e010a7', 'createdAt': '2022-01-01T00:00:00.000Z'}},
        status=200,
    )

    def mock_upload_file(*args, **kwargs):
        # Swallow the real upload; only the call path is under test.
        pass

    monkeypatch.setattr('tidy3d.web.core.task_core.upload_file', mock_upload_file)
    task = SimulationTask.get('3eb06d16-208b-487b-864b-e9b1d3e010a7')
    with tempfile.NamedTemporaryFile() as temp:
        task.upload_file(temp.name, 'temp.json')
class SignatureDuplicationTandem(Signature):
    """Signature of a tandem duplication (type DUP_TAN): the source interval is
    repeated ``copies`` times immediately after its own end."""

    def __init__(self, contig, start, end, copies, fully_covered, signature, read):
        self.contig = contig
        assert end >= start
        self.start = start
        self.end = end
        self.copies = copies
        self.fully_covered = fully_covered
        self.signature = signature
        self.read = read
        self.type = 'DUP_TAN'

    def get_destination(self):
        """Destination interval: directly downstream of the source, spanning copies * source length."""
        contig, start, end = self.get_source()
        return (contig, end, end + self.copies * (end - start))

    def as_string(self, sep='\t'):
        """Render as '<src>SEP<dest>SEP<type;signature;copies>SEP<read>'."""
        source_contig, source_start, source_end = self.get_source()
        dest_contig, dest_start, dest_end = self.get_destination()
        template = sep.join(['{0}:{1}-{2}', '{3}:{4}-{5}', '{6}', '{7}'])
        annotation = '{0};{1};{2}'.format(self.type, self.signature, self.copies)
        return template.format(source_contig, source_start, source_end, dest_contig, dest_start, dest_end, annotation, self.read)
class VarImpl(object):
    """Resolves DEPS-style ``Var()`` references against a local scope, with
    fixed fallbacks for the host platform."""

    def __init__(self, local_scope):
        self._local_scope = local_scope

    def Lookup(self, var_name):
        """Return the value of ``var_name``; raise if it is not defined anywhere."""
        scope_vars = self._local_scope.get('vars', {})
        if var_name in scope_vars:
            return scope_vars[var_name]
        # Hard-coded host defaults used when the scope does not define them.
        host_defaults = {'host_os': 'linux', 'host_cpu': 'x64'}
        if var_name in host_defaults:
            return host_defaults[var_name]
        raise Exception('Var is not defined: %s' % var_name)
def repeat(interval, callback, persistent=True, idstring='', stop=False, store_key=None, *args, **kwargs):
    """Add (or remove) a TickerHandler subscription calling ``callback`` every ``interval`` seconds.

    Args:
        interval: Seconds between calls.
        callback: Callable invoked on every tick; receives *args/**kwargs.
        persistent: Whether the ticker survives a server reload.
        idstring: Discriminator so several tickers may share interval/callback.
        stop: If True, remove the matching ticker instead of adding one.
        store_key: Explicit ticker store key; only used when stopping.

    Returns:
        The ticker's store key when adding; None when stopping.
    """
    global _TICKER_HANDLER
    if _TICKER_HANDLER is None:
        # Deferred import avoids a circular dependency at module load time.
        from evennia.scripts.tickerhandler import TICKER_HANDLER as _TICKER_HANDLER
    if stop:
        _TICKER_HANDLER.remove(interval=interval, callback=callback, idstring=idstring, persistent=persistent, store_key=store_key)
    else:
        # Bug fix: *args/**kwargs were previously accepted but silently dropped;
        # forward them so TickerHandler passes them on to callback on each tick.
        return _TICKER_HANDLER.add(interval, callback, idstring, persistent, *args, **kwargs)
class PlotFrame(DemoFrame):
    """Demo frame laying out several animated Bessel-function plots on a grid.

    The first plot owns the shared index data source and the index/value
    ranges; all later plots reuse them so the plots pan and zoom in sync.
    """
    def _create_component(self):
        """Build and return the OverlayPlotContainer with one AnimatedPlot per palette color."""
        numpoints = 50
        low = (- 5)
        high = 15.0
        x = arange(low, high, ((high - low) / numpoints))
        container = OverlayPlotContainer(bgcolor='lightgray')
        # Shared data source / ranges, established by the first plot created below.
        common_index = None
        index_range = None
        value_range = None
        self.animated_plots = []
        for (i, color) in enumerate(COLOR_PALETTE):
            if (not common_index):
                # First plot: create it from raw x values and capture its index and ranges.
                animated_plot = AnimatedPlot(x_values=x, y_values=jn(i, x), color=color)
                plot = animated_plot.plot
                common_index = plot.index
                index_range = plot.index_mapper.range
                value_range = plot.value_mapper.range
            else:
                # Alternate vertical/horizontal orientation for visual variety.
                if ((i % 2) == 1):
                    orientation = 'v'
                else:
                    orientation = 'h'
                animated_plot = AnimatedPlot(x_values=common_index, y_values=jn(i, x), color=color, orientation=orientation)
                plot = animated_plot.plot
                # Share the first plot's ranges so all plots pan/zoom together.
                plot.index_mapper.range = index_range
                plot.value_mapper.range = value_range
            container.add(plot)
            self.animated_plots.append(animated_plot)
        # Arrange the plots on a 3-column grid with 50px gutters.
        for (i, a_plot) in enumerate(self.animated_plots):
            a_plot.plot.position = [(50 + ((i % 3) * (PLOT_SIZE + 50))), (50 + ((i // 3) * (PLOT_SIZE + 50)))]
        # Drive the animation; onTimer fires every 100 units (presumably ms -- confirm Timer docs).
        self.timer = Timer(100.0, self.onTimer)
        self.container = container
        return container
    def onTimer(self, *args):
        """Timer callback: advance every animated plot by one tick."""
        for plot in self.animated_plots:
            plot.timer_tick()
class LZString():
    """Lazily boots a JS VM hosting the site's crypt_*.js and exposes its LZString decompressor."""

    def __init__(self):
        self.vm = None

    def init(self, html, url):
        """Locate the crypt_*.js script referenced by ``html``, fetch it, and boot the VM once."""
        if self.vm:
            return
        script_path = re.search('src="([^"]+?/crypt_\\w+?\\.js)"', html).group(1)
        script_source = grabhtml(urljoin(url, script_path), referer=url)
        self.vm = VM(f'window = self; {script_source}')

    def decompress_from_base64(self, data):
        """Run LZString.decompressFromBase64 on ``data`` inside the VM."""
        return self.vm.call('LZString.decompressFromBase64', data)
(firedrake.DirichletBC)
def coarsen_bc(bc, self, coefficient_mapping=None):
    """Coarsen a DirichletBC for the next multigrid level.

    NOTE(review): ``self`` is the coarsening callable itself (dispatch-style
    registration; the decorator line above appears truncated by extraction) --
    confirm against the registration mechanism.
    """
    # Coarsen the function space and boundary value with the same callable.
    V = self(bc.function_space(), self, coefficient_mapping=coefficient_mapping)
    val = self(bc.function_arg, self, coefficient_mapping=coefficient_mapping)
    subdomain = bc.sub_domain
    # Rebuild the same BC type on the coarsened space.
    return type(bc)(V, val, subdomain)
class RegistrationSessionRepository(BaseRepository[RegistrationSession], UUIDRepositoryMixin[RegistrationSession], ExpiresAtMixin[RegistrationSession]):
    """Data-access layer for RegistrationSession rows."""

    model = RegistrationSession

    async def get_by_token(self, token: str, *, fresh: bool=True) -> (RegistrationSession | None):
        """Fetch the session matching ``token``; with ``fresh`` (default) only if not yet expired."""
        query = select(RegistrationSession).where(RegistrationSession.token == token)
        if fresh:
            # Restrict to sessions whose expiry lies in the future.
            query = query.where(RegistrationSession.expires_at > datetime.now(UTC))
        return await self.get_one_or_none(query)
def fetch_exchange(zone_key1: ZoneKey, zone_key2: ZoneKey, session: (Session | None)=None, target_datetime: (datetime | None)=None, logger: Logger=getLogger(__name__)) -> list[dict]:
    """Fetch the live cross-border exchange flow between two zones from CAMMESA.

    Raises:
        NotImplementedError: historical queries are unsupported.
        ParserException: unknown exchange pair, HTTP failure, or exchange
            missing from the API payload.
    """
    if target_datetime:
        raise NotImplementedError('This parser is not yet able to parse past dates')
    sorted_zone_keys = ZoneKey('->'.join(sorted([zone_key1, zone_key2])))
    if sorted_zone_keys not in EXCHANGE_NAME_DIRECTION_MAPPING:
        raise ParserException(parser='CAMMESA.py', message='This exchange is not currently implemented', zone_key=sorted_zone_keys)
    http_session = session or Session()
    response = http_session.get(CAMMESA_EXCHANGE_ENDPOINT)
    if not response.ok:
        raise ParserException(parser='CAMMESA.py', message=f'Exception when fetching exchange for {sorted_zone_keys}: error when calling url={CAMMESA_EXCHANGE_ENDPOINT}', zone_key=sorted_zone_keys)
    exchange_name, expected_angle = EXCHANGE_NAME_DIRECTION_MAPPING[sorted_zone_keys]
    features = response.json()['features']
    exchange_data = next((feature['properties'] for feature in features if feature['properties']['nombre'] == exchange_name), None)
    if exchange_data is None:
        raise ParserException(parser='CAMMESA.py', message=f'Exception when fetching exchange for {sorted_zone_keys}: exchange not found', zone_key=sorted_zone_keys)
    # The arrow angle embedded in the icon URL encodes the flow direction.
    given_angle = int(exchange_data['url'][6:])
    flow = int(exchange_data['text'])
    if given_angle != expected_angle:
        # Arrow points the other way: flow is opposite to the mapping's convention.
        flow = -flow
    # 'fecha' carries a +-HHMM offset without a colon; insert one for fromisoformat.
    raw_timestamp = exchange_data['fecha']
    exchange_datetime = datetime.fromisoformat(raw_timestamp[:-2] + ':' + raw_timestamp[-2:])
    exchanges = ExchangeList(logger)
    exchanges.append(zoneKey=sorted_zone_keys, datetime=exchange_datetime, netFlow=flow, source=SOURCE)
    return exchanges.to_list()
def compare_and_update_pips(pips, new_pips):
    """Merge wire information from ``new_pips`` into ``pips`` in place.

    Both arguments map pip names to dicts with 'src_wire' and 'dst_wire'
    (either a wire name or None) plus the flags 'is_pseudo', 'is_directional'
    and 'can_invert'. For every pip, a wire known on both sides must agree;
    a wire known only in ``new_pips`` fills in the corresponding None in
    ``pips``. Flag values must match exactly.

    Raises:
        AssertionError: if the key sets, a known wire, or a flag disagree.
    """
    assert pips.keys() == new_pips.keys(), repr((pips.keys(), new_pips.keys()))
    for name in pips:
        old = pips[name]
        new = new_pips[name]
        # Identical merge rule for both endpoints of the pip (deduplicated).
        for wire_key in ('src_wire', 'dst_wire'):
            if old[wire_key] is not None and new[wire_key] is not None:
                assert old[wire_key] == new[wire_key], repr((old[wire_key], new[wire_key]))
            elif old[wire_key] is None and new[wire_key] is not None:
                old[wire_key] = new[wire_key]
        for flag in ['is_pseudo', 'is_directional', 'can_invert']:
            assert old[flag] == new[flag], (flag, old[flag], new[flag])
class _Uninitialized(object):
    """Singleton placeholder meaning 'no value yet'; unpickling yields the same instance."""

    def __new__(cls):
        # Reuse the module-level singleton once it exists; only the very first
        # instantiation actually allocates a new object.
        if Uninitialized is not None:
            return Uninitialized
        return object.__new__(cls)

    def __repr__(self):
        return '<uninitialized>'

    def __reduce_ex__(self, protocol):
        # Pickle as a call to _Uninitialized(), which funnels back to the singleton.
        return (_Uninitialized, ())
def get_linked_addon_note(anki_nid: int) -> Optional[Tuple[(SiacNote, int)]]:
    """Return the add-on note linked to the given Anki note, plus its PDF page.

    Returns:
        (note, page) where page is -1 when no page was stored, or None when no
        add-on note is linked to ``anki_nid``.
    """
    c = _get_connection()
    # Security fix: parameterized query instead of f-string interpolation,
    # ruling out SQL injection regardless of the caller-supplied value.
    res = c.execute('select distinct notes.*, notes_pdf_page.page from notes join notes_pdf_page on notes.id = notes_pdf_page.siac_nid where notes_pdf_page.nid = ?', (anki_nid,)).fetchall()
    c.close()
    if res is not None and len(res) == 0:
        return None
    page = res[0][-1]
    if page is None:
        page = -1
    return (_to_notes([res[0][:-1]])[0], page)
def gen_sites():
    """Yield (INT tile name, CLB site name) pairs for every CLB tile in the grid.

    The interconnect tile sits one grid column to the right of *_L CLB tiles
    and one to the left of *_R tiles; tiles whose neighbour is not an INT_*
    tile are skipped.
    """
    db = Database(util.get_db_root(), util.get_part())
    grid = db.grid()
    clb_types = ('CLBLL_L', 'CLBLL_R', 'CLBLM_L', 'CLBLM_R')
    for tile_name in sorted(grid.tiles()):
        loc = grid.loc_of_tilename(tile_name)
        gridinfo = grid.gridinfo_at_loc(loc)
        if gridinfo.tile_type not in clb_types:
            continue
        site_name = sorted(gridinfo.sites.keys())[0]
        # *_L tiles face the INT tile on their right, *_R tiles on their left.
        column_offset = 1 if gridinfo.tile_type[-1] == 'L' else -1
        int_tile_name = grid.tilename_at_loc(((loc.grid_x + column_offset), loc.grid_y))
        if int_tile_name.startswith('INT_'):
            yield (int_tile_name, site_name)
class UiReadout(object):
    """Consumes per-file status tokens from the hash queue and renders a tqdm
    progress readout on a background thread."""

    def __init__(self, hashQueue, monitorQueue):
        self.log = logging.getLogger('Main.UI')
        self.hashQueue = hashQueue
        self.processingHashQueue = monitorQueue
        self.stopOnEmpty = False  # set by gracefulShutdown(); exit once the queue drains
        self.stopped = False      # set by run() on exit so shutdown can wait for it

    def run(self):
        """Main loop: drain status tokens and update the progress bar until the
        global run flag clears (or the queue drains after a graceful shutdown).

        Removed dead code from the original: an unused ``commits`` counter, a
        ``pbar = tqdm`` binding that was immediately overwritten, and a bare
        no-op ``processed`` expression statement.
        """
        skipped = 0
        match = 0
        clean = 0
        processed = 0
        qmax = 0
        pbar = tqdm.tqdm()
        while scanner.runState.run:
            try:
                item = self.hashQueue.get(timeout=0.1)
                pbar.update()
                if item == 'skipped':
                    skipped += 1
                elif item == 'hash_match':
                    match += 1
                elif item == 'clean':
                    clean += 1
                elif item == 'processed':
                    processed += 1
                else:
                    # Unknown token: make it loudly visible on the console.
                    print()
                    print()
                    print('WAT?')
                    print()
                    print(item)
                    print()
                    print()
                # Track the monitored queue's high-water mark to estimate the total.
                qmax = max(qmax, self.processingHashQueue.qsize())
                pbar.total = ((((qmax + skipped) + match) + clean) + processed)
                pbar.set_description(('Hasher: %s remaining, %s skipped, %s match, %s clean, %s processed' % (self.processingHashQueue.qsize(), skipped, match, clean, processed)))
            except queue.Empty:
                if self.stopOnEmpty:
                    break
        self.log.info('UI Thread Exiting')
        self.stopped = True

    def startThread(self):
        """Launch run() on a background thread."""
        self.log.info('Starting thread')
        dbTh = threading.Thread(target=self.run)
        dbTh.start()
        self.log.info('Thread started')

    def gracefulShutdown(self):
        """Ask the readout loop to exit when its queue empties, then wait for it."""
        self.stopOnEmpty = True
        while not self.stopped:
            time.sleep(0.5)
def laguerre_recurrence_coefficients(a, order):
    """Three-term recurrence coefficients for generalized Laguerre polynomials L_n^{(a)}.

    Args:
        a: Laguerre parameter; must satisfy a > -1 (the weight x^a e^{-x} is
            integrable only then).
        order: Highest recurrence index; the result has order+1 rows.

    Returns:
        An (order+1, 2) array ``ab``: row 0 holds (a+1, Gamma(a+1)); row n
        holds (2n + a + 1, n * (n + a)) for n = 1..order.

    Raises:
        ValueError: if a <= -1.
    """
    # Bug fix: validate before allocating, and the message now matches the
    # check (a == -1 is rejected, so the requirement is strictly greater).
    if a <= -1:
        raise ValueError('First input must be > -1!')
    nn = int(order) + 1
    ab = np.zeros((nn, 2))
    ab[0, 0] = a + 1
    ab[0, 1] = gamma(a + 1)
    if nn == 1:
        return ab
    for i in range(0, order):
        n = float(i + 1)
        ab[i + 1, 0] = 2 * n + a + 1
        ab[i + 1, 1] = n * (n + a)
    return ab
def _guess_extension(content_type):
    """Map a MIME content type to a canonical file extension ('' if unknown).

    NOTE(review): presumably an inlined, deterministic replacement for
    mimetypes.guess_extension so results do not depend on the host's mime
    registry -- confirm before consolidating.
    """
    return {'application/javascript': '.js', 'application/msword': '.doc', 'application/octet-stream': '.bin', 'application/oda': '.oda', 'application/pdf': '.pdf', 'application/pkcs7-mime': '.p7c', 'application/postscript': '.ps', 'application/vnd.apple.mpegurl': '.m3u', 'application/vnd.ms-excel': '.xls', 'application/vnd.ms-powerpoint': '.ppt', 'application/x-bcpio': '.bcpio', 'application/x-cpio': '.cpio', 'application/x-csh': '.csh', 'application/x-dvi': '.dvi', 'application/x-gtar': '.gtar', 'application/x-hdf': '.hdf', 'application/x-latex': '.latex', 'application/x-mif': '.mif', 'application/x-netcdf': '.nc', 'application/x-pkcs12': '.p12', 'application/x-pn-realaudio': '.ram', 'application/x-python-code': '.pyc', 'application/x-sh': '.sh', 'application/x-shar': '.shar', 'application/x-shockwave-flash': '.swf', 'application/x-sv4cpio': '.sv4cpio', 'application/x-sv4crc': '.sv4crc', 'application/x-tar': '.tar', 'application/x-tcl': '.tcl', 'application/x-tex': '.tex', 'application/x-texinfo': '.texinfo', 'application/x-troff': '.tr', 'application/x-troff-man': '.man', 'application/x-troff-me': '.me', 'application/x-troff-ms': '.ms', 'application/x-ustar': '.ustar', 'application/x-wais-source': '.src', 'application/xml': '.xml', 'application/zip': '.zip', 'audio/basic': '.au', 'audio/mpeg': '.mp3', 'audio/x-aiff': '.aif', 'audio/x-pn-realaudio': '.ra', 'audio/x-wav': '.wav', 'image/gif': '.gif', 'image/ief': '.ief', 'image/jpeg': '.jpe', 'image/png': '.png', 'image/svg+xml': '.svg', 'image/tiff': '.tiff', 'image/vnd.microsoft.icon': '.ico', 'image/x-cmu-raster': '.ras', 'image/x-ms-bmp': '.bmp', 'image/x-portable-anymap': '.pnm', 'image/x-portable-bitmap': '.pbm', 'image/x-portable-graymap': '.pgm', 'image/x-portable-pixmap': '.ppm', 'image/x-rgb': '.rgb', 'image/x-xbitmap': '.xbm', 'image/x-xpixmap': '.xpm', 'image/x-xwindowdump': '.xwd', 'message/rfc822': '.eml', 'text/css': '.css', 'text/csv': '.csv', 'text/html': '.html', 'text/plain': '.txt', 
    'text/richtext': '.rtx', 'text/tab-separated-values': '.tsv', 'text/x-python': '.py', 'text/x-setext': '.etx', 'text/x-sgml': '.sgml', 'text/x-vcard': '.vcf', 'text/xml': '.xml', 'video/mp4': '.mp4', 'video/mpeg': '.mpeg', 'video/quicktime': '.mov', 'video/webm': '.webm', 'video/x-msvideo': '.avi', 'video/x-sgi-movie': '.movie'}.get(content_type, '')
def extractInfinityTranslations(item):
    """Build a release message for a recognised Infinity Translations series.

    Returns None for previews or items with no chapter information, a release
    message for a known series tag, and False for everything else (preserving
    the original None/False distinction relied on by the caller).
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol or frag) or 'preview' in item['title'].lower():
        return None
    # Known series, checked in the original priority order.
    tag_to_series = (
        ('ETC', 'Emperor of The Cosmos'),
        ('SAS', 'Strongest Abandoned Son'),
        ('Disb', 'Death is The Beginning'),
    )
    for tag, series_name in tag_to_series:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix)
    return False
.django_db
def test_award_update_contract_txn_with_list():
    """update_procurement_awards must copy FPDS option values onto each award,
    for both single-award and multi-award calls, and report the updated count.

    NOTE(review): the bare `.django_db` above looks like a truncated
    `@pytest.mark.django_db` decorator from extraction.
    """
    awards = [baker.make('search.AwardSearch', award_id=i, total_obligation=0, generated_unique_award_id=f'AWARD_{i}') for i in range(5)]
    baker.make('search.TransactionSearch', transaction_id=10, award=awards[0], is_fpds=True, base_and_all_options_value=1000, base_exercised_options_val=100, generated_unique_award_id=awards[0].generated_unique_award_id)
    # Single-award update.
    count = update_procurement_awards((awards[0].award_id,))
    awards[0].refresh_from_db()
    assert (count == 1)
    assert (awards[0].base_and_all_options_value == 1000)
    baker.make('search.TransactionSearch', transaction_id=11, is_fpds=True, award=awards[1], base_and_all_options_value=4000, base_exercised_options_val=400, generated_unique_award_id=awards[1].generated_unique_award_id)
    baker.make('search.TransactionSearch', transaction_id=12, is_fpds=True, award=awards[2], base_and_all_options_value=5000, base_exercised_options_val=500, generated_unique_award_id=awards[2].generated_unique_award_id)
    # Batch update of two awards at once.
    count = update_procurement_awards((awards[1].award_id, awards[2].award_id))
    awards[1].refresh_from_db()
    awards[2].refresh_from_db()
    assert (count == 2)
    assert (awards[1].base_and_all_options_value == 4000)
    assert (awards[1].base_exercised_options_val == 400)
    assert (awards[2].base_and_all_options_value == 5000)
    assert (awards[2].base_exercised_options_val == 500)
class TestGetLayoutGraphicWithSimilarCoordinates():
    """Behaviour of get_layout_graphic_with_similar_coordinates."""

    def test_should_return_the_best_matching_graphic(self):
        candidates = [
            LayoutGraphic(coordinates=LayoutPageCoordinates(x=10, y=10, width=200, height=100)),
            LayoutGraphic(coordinates=LayoutPageCoordinates(x=10, y=10, width=100, height=100)),
            LayoutGraphic(coordinates=LayoutPageCoordinates(x=100, y=10, width=100, height=100)),
        ]
        selected = get_layout_graphic_with_similar_coordinates(candidates, BoundingBox(x=10, y=10, width=90, height=100))
        assert selected == candidates[1]

    def test_should_ignore_matches_below_threshold(self):
        candidates = [LayoutGraphic(coordinates=LayoutPageCoordinates(x=10, y=10, width=100, height=100))]
        selected = get_layout_graphic_with_similar_coordinates(candidates, BoundingBox(x=10, y=10, width=10, height=1000))
        assert selected is None

    def test_should_ignore_graphics_without_coordinates(self):
        candidates = [LayoutGraphic(coordinates=None)]
        selected = get_layout_graphic_with_similar_coordinates(candidates, BoundingBox(x=10, y=10, width=10, height=1000))
        assert selected is None

    def test_should_ignore_svg_graphics(self):
        candidates = [LayoutGraphic(coordinates=LayoutPageCoordinates.from_bounding_box(BOUNDING_BOX_1), graphic_type='svg')]
        selected = get_layout_graphic_with_similar_coordinates(candidates, BOUNDING_BOX_1, ignored_graphic_types={'svg'})
        assert selected is None
class String(Value):
    """A string value that is emitted bare when safe, quoted-and-escaped otherwise."""

    # Strings fully matching this need no quoting at all.
    unescapable = re.compile('^[^\\\\():<>"*{} \\t\\r\\n]+$')
    # Characters that must be written as escape sequences inside quotes.
    escapes = {'\t': '\\t', '\r': '\\r', '"': '\\"'}

    def _render(self):
        if self.unescapable.match(self.value) is not None:
            return str(self.value)
        pattern = '[{}]'.format(''.join(re.escape(ch) for ch in sorted(self.escapes)))
        escaped = re.sub(pattern, lambda m: self.escapes[m.group()], self.value)
        return '"{}"'.format(escaped)
class ExamplePlotApp(HasTraits):
    """Minimal Traits app demonstrating a ToolbarPlot with pan/zoom tools and domain limits."""

    plot = Instance(Plot)

    def _plot_default(self):
        index = numpy.arange(1.0, 10.0, 0.01)
        series1 = (100.0 + index) / ((100.0 - (20 * (index ** 2))) + (5.0 * (index ** 4)))
        series2 = (100.0 + index) / ((100.0 - (20 * (index ** 2))) + (5.0 * (index ** 3)))
        data = ArrayPlotData(index=index)
        data.set_data('series1', series1)
        data.set_data('series2', series2)
        toolbar_plot = ToolbarPlot(data)
        # Only series1 is rendered; series2 remains available in the data source.
        curve = toolbar_plot.plot(('index', 'series1'), color='auto')[0]
        curve.tools.append(PanTool(curve))
        curve.tools.append(ZoomTool(curve))
        # Restrict panning/zooming along the index axis.
        curve.index_mapper.domain_limits = (3.3, 6.6)
        return toolbar_plot

    traits_view = View(Item('plot', editor=ComponentEditor(), width=600, height=600, show_label=False), resizable=True)
_deserializable
class CacheInitConfig(BaseConfig):
    """Validated settings controlling cache initialization."""

    def __init__(self, similarity_threshold: Optional[float]=0.8, auto_flush: Optional[int]=20):
        # Reject thresholds outside the unit interval.
        if similarity_threshold < 0 or similarity_threshold > 1:
            raise ValueError(f'similarity_threshold {similarity_threshold} should be between 0 and 1')
        self.similarity_threshold = similarity_threshold
        self.auto_flush = auto_flush

    def from_config(config: Optional[Dict[(str, Any)]]):
        """Build a CacheInitConfig from a plain dict, or with defaults when None."""
        if config is None:
            return CacheInitConfig()
        return CacheInitConfig(similarity_threshold=config.get('similarity_threshold', 0.8), auto_flush=config.get('auto_flush', 20))
def _start():
    """Read delay/prefix and the [input] section from the patch config and log them.

    Relies on module-level globals (patch, monitor, ...) set up elsewhere; the
    names bound here are deliberately global so other module functions see them.
    """
    global patch, name, path, monitor
    global delay, prefix, input_name, input_variable
    delay = patch.getfloat('general', 'delay')
    prefix = patch.getstring('output', 'prefix')
    # items() yields (name, value) pairs; zip(*...) transposes them into two tuples.
    (input_name, input_variable) = list(zip(*patch.config.items('input')))
    for (name, variable) in zip(input_name, input_variable):
        monitor.info(('%s = %s' % (name, variable)))
    # Debug aid: reports any names that ended up local instead of global
    # (e.g. the loop's `variable`).
    if len(locals()):
        print(('LOCALS: ' + ', '.join(locals().keys())))
class MyETW(etw.ETW):
    """ETW consumer wired to a single provider, with hooks around capture start/stop."""

    def __init__(self, event_callback):
        providers = [etw.ProviderInfo('Some Provider', etw.GUID('{-1111-1111-1111-}'))]
        super().__init__(providers=providers, event_callback=event_callback)

    def start(self):
        # Prepare local state before the underlying trace session starts.
        self.do_capture_setup()
        super().start()

    def stop(self):
        super().stop()
        # Tear down only after the trace session has fully stopped.
        self.do_capture_teardown()

    def do_capture_setup(self):
        """Hook: pre-capture setup (no-op by default)."""
        pass

    def do_capture_teardown(self):
        """Hook: post-capture cleanup (no-op by default)."""
        pass
class BuildKubernetesSchema(StrictSchema):
    """Schema validating serialized Kubernetes helper objects by class path."""
    # Dotted path of the Airflow kubernetes class being deserialized.
    class_name = ma.fields.String(required=True)
    _schema
    def check_valid_class(self, data):
        """Reject any class_name outside the allow-list.

        NOTE(review): the bare `_schema` line above looks like a truncated
        `@validates_schema` decorator from extraction -- confirm.
        """
        ALLOWED_CLASS = ['airflow.contrib.kubernetes.volume.Volume', 'airflow.contrib.kubernetes.volume_mount.VolumeMount', 'airflow.contrib.kubernetes.secret.Secret', 'airflow.contrib.kubernetes.pod.Resources']
        if (data.get('class_name') not in ALLOWED_CLASS):
            raise ma.ValidationError('`class_name` must be one of `{}`'.format('`, `'.join(ALLOWED_CLASS)), ['class_name'])
def test_that_index_file_is_read(tmpdir):
    """A GENERAL_OBSERVATION's INDEX_FILE must become the observation's indices."""
    with tmpdir.as_cwd():
        # Minimal ert config referencing the observation definitions below.
        with open('config.ert', 'w', encoding='utf-8') as fh:
            fh.writelines(dedent('\n JOBNAME my_name%d\n NUM_REALIZATIONS 10\n OBS_CONFIG observations\n GEN_DATA RES RESULT_FILE:out\n '))
        # Every second index 0..8 is active.
        with open('obs_idx.txt', 'w', encoding='utf-8') as fh:
            fh.write('0\n2\n4\n6\n8')
        # One "<value> <error>" line per active index.
        with open('obs_data.txt', 'w', encoding='utf-8') as fh:
            for i in range(5):
                fh.write(f'''{float(i)} 0.1
''')
        with open('observations', 'w', encoding='utf-8') as fo:
            fo.writelines(dedent('\n GENERAL_OBSERVATION OBS {\n DATA = RES;\n INDEX_FILE = obs_idx.txt;\n OBS_FILE = obs_data.txt;\n };'))
        observations = ErtConfig.from_file('config.ert').enkf_obs
        assert (observations['OBS'].observations[0].indices.tolist() == [0, 2, 4, 6, 8])
_lock(timeout=300, key_args=(2,), wait_for_release=True, base_name='vm_status_changed')
def _vm_status_check(task_id, node_uuid, uuid, state, state_cache=None, vm=None, change_time=None, force_change=False, **kwargs):
    """Validate a VM status report from a compute node and persist the change.

    Implements the deploy state machine: CREATING -> DEPLOYING_START /
    DEPLOYING_DUMMY -> DEPLOYING_FINISH -> RUNNING/STOPPED. Reports that do
    not match the awaited transition are ignored; genuine changes are saved
    via _save_vm_status().

    NOTE(review): the bare `_lock(...)` line above looks like a truncated
    decorator from extraction -- confirm against the original file.
    """
    if (state_cache is None):
        # No cached state supplied: read the authoritative state from the DB
        # and refresh the cache.
        try:
            vm = Vm.objects.select_related('slavevm').get(uuid=uuid)
        except Vm.DoesNotExist:
            logger.warn('Got status of undefined vm (%s) - ignoring', uuid)
            return
        else:
            state_cache = vm.status
            cache.set(Vm.status_key(uuid), vm.status)
    else:
        state_cache = int(state_cache)
    if (state_cache == state):
        logger.info('Ignoring new status %s of vm %s because it is already saved', state, uuid)
        return
    # Flags describing which phase of the deploy workflow this report advances.
    deploy = False
    deploy_finish = False
    deploy_over = False
    deploy_dummy = False
    if force_change:
        logger.warn('Detected FORCED status change for vm %s', uuid)
    elif (state_cache == Vm.CREATING):
        # Awaiting first RUNNING after creation.
        if (state == Vm.RUNNING):
            logger.warn('Detected new status %s for vm %s. We were waiting for this. Switching state to (A) "running (2)" or (B) "deploying_start (12)" or (C) "deploying_dummy (14)" and running vm_deploy(force_stop=True).', state, uuid)
            deploy = True
        else:
            logger.warn('Detected new status %s for vm %s, but vm waiting for deploy (%s). Awaiting running state.', state, uuid, state_cache)
            return
    elif (state_cache == Vm.DEPLOYING_DUMMY):
        # Dummy deploy ends when the VM reports STOPPED.
        if (state == Vm.STOPPED):
            logger.warn('Detected new status %s for vm %s. We were waiting for this. Dummy deploy is finished. Switching state to "stopped".', state, uuid)
            deploy_over = True
        else:
            logger.warn('Detected new status %s for vm %s, but vm is dummy deploying (%s). Awaiting stopped state.', state, uuid, state_cache)
            return
    elif (state_cache == Vm.DEPLOYING_START):
        # First deploy phase ends when the VM reports STOPPED.
        if (state == Vm.STOPPED):
            logger.warn('Detected new status %s for vm %s. We were waiting for this. Switching state to "deploying_finish (13)" and running vm_deploy task.', state, uuid)
            deploy_finish = True
        else:
            logger.warn('Detected new status %s for vm %s, but vm is deploying (%s). Awaiting stopped state.', state, uuid, state_cache)
            return
    elif (state_cache == Vm.DEPLOYING_FINISH):
        # Final deploy phase ends when the VM reports RUNNING again.
        if (state == Vm.RUNNING):
            logger.warn('Detected new status %s for vm %s. We were waiting for this. Deploy is finished. Switching state to "running".', state, uuid)
            deploy_over = True
        else:
            logger.warn('Detected new status %s for vm %s, but vm waiting for finishing deploy (%s). Awaiting running state.', state, uuid, state_cache)
            return
    elif (state_cache not in Vm.STATUS_KNOWN):
        logger.debug('Detected unknown cached status %s for vm %s', state_cache, uuid)
        return
    logger.warn('Detected status change %s->%s for vm %s', state_cache, state, uuid)
    try:
        if (not vm):
            vm = Vm.objects.select_related('node', 'slavevm').get(uuid=uuid)
    except Vm.DoesNotExist:
        logger.error('Status of undefined vm (%s) changed', uuid)
        return
    # Sanity check: the report must come from the node this VM lives on.
    if (vm.node.uuid != node_uuid):
        logger.error('Detected status change for vm %s on node %s, but the vm should be on %s!', uuid, vm.node.uuid, node_uuid)
        return
    if deploy:
        # First RUNNING after creation: decide between full deploy, dummy
        # deploy (blank VM) or plain running.
        if vm.is_deploy_needed():
            vm.status = Vm.DEPLOYING_START
            vm.save_status(status_change_time=change_time)
            return
        elif vm.is_blank():
            vm.status = Vm.DEPLOYING_DUMMY
            vm.save_status(status_change_time=change_time)
            deploy_dummy = True
        else:
            deploy_over = True
    if deploy_finish:
        vm.status = Vm.DEPLOYING_FINISH
        vm.save_status(status_change_time=change_time)
    if (deploy_finish or deploy_dummy):
        # Kick off the asynchronous deploy task; dummy deploys force-stop the VM.
        (_tid, _err) = vm_deploy(vm, force_stop=deploy_dummy)
        if _err:
            logger.error('Got error when creating deploy task. Task: %s. Error: %s.', _tid, _err)
        else:
            logger.warn('Created deploy task: %s.', _tid)
        return
    if vm.is_changing_status():
        logger.warn('Detected running vm_status task (pending state) for vm %s', uuid)
    _save_vm_status(task_id, vm, state, old_state=state_cache, deploy_over=deploy_over, change_time=change_time, **kwargs)
    return state
class TestFilteredTraitObserverEqualHash(unittest.TestCase):
    """Tests for FilteredTraitObserver equality and hashing semantics."""

    def test_not_equal_filter(self):
        """Observers differing only in their filter compare unequal."""
        left = FilteredTraitObserver(notify=True, filter=DummyFilter(return_value=True))
        right = FilteredTraitObserver(notify=True, filter=DummyFilter(return_value=False))
        self.assertNotEqual(left, right)

    def test_not_equal_notify(self):
        """Observers differing only in the notify flag compare unequal."""
        shared_filter = mock.Mock()
        left = FilteredTraitObserver(notify=False, filter=shared_filter)
        right = FilteredTraitObserver(notify=True, filter=shared_filter)
        self.assertNotEqual(left, right)

    def test_equal_filter_notify(self):
        """Matching filter and notify imply equal observers with equal hashes."""
        left = FilteredTraitObserver(notify=True, filter=DummyFilter(return_value=True))
        right = FilteredTraitObserver(notify=True, filter=DummyFilter(return_value=True))
        self.assertEqual(left, right)
        self.assertEqual(hash(left), hash(right))

    def test_not_equal_type(self):
        """An object of a different type is never equal, even with matching attributes."""
        shared_filter = mock.Mock()
        observer = FilteredTraitObserver(notify=True, filter=shared_filter)
        lookalike = mock.Mock()
        lookalike.notify = True
        lookalike.filter = shared_filter
        self.assertNotEqual(observer, lookalike)

    def test_slots(self):
        """The observer uses __slots__: no instance __dict__ or __weakref__."""
        observer = FilteredTraitObserver(notify=True, filter=DummyFilter(return_value=True))
        with self.assertRaises(AttributeError):
            observer.__dict__
        with self.assertRaises(AttributeError):
            observer.__weakref__

    def test_eval_repr_roundtrip(self):
        """repr() is evaluable and round-trips to an equal observer."""
        observer = FilteredTraitObserver(notify=True, filter=DummyFilter(return_value=True))
        self.assertEqual(eval(repr(observer)), observer)
def test_protobuf_envelope_serializer():
    """Encoding then decoding an envelope with protobuf yields an equal envelope."""
    codec = ProtobufEnvelopeSerializer()
    context = EnvelopeContext(connection_id=None, uri=URI('/uri'))
    original = Envelope(to='to', sender='sender', protocol_specification_id=PublicId('author', 'name', '0.1.0'), message=b'message', context=context)
    round_tripped = codec.decode(codec.encode(original))
    assert round_tripped == original
def GetMatrix(pos, quat) -> np.ndarray:
    """Build a 4x4 homogeneous transform from a translation and a quaternion.

    Parameters
    ----------
    pos : sequence of 3 floats
        Translation, written into the last column of the matrix.
    quat : np.ndarray
        Length-4 quaternion array; the index layout assumed by the formula
        below places the scalar part at index 3 — TODO confirm against callers.

    Returns
    -------
    np.ndarray
        The 4x4 transform. When the quaternion norm is below machine epsilon
        the rotation is degenerate and a plain identity is returned (note the
        translation is dropped in that case as well).
    """
    q = quat.copy()
    norm_sq = np.dot(q, q)
    if norm_sq < np.finfo(q.dtype).eps:
        # Near-zero quaternion: no meaningful rotation can be extracted.
        return np.identity(4)
    q = q * np.sqrt(2.0 / norm_sq)
    # All pairwise products, pre-scaled by 2/|q|^2.
    q = np.outer(q, q)
    matrix = np.array(
        [
            [(1.0 - q[1, 1]) - q[2, 2], -(q[2, 3] - q[1, 0]), q[1, 3] + q[2, 0], pos[0]],
            [q[2, 3] + q[1, 0], -((1.0 - q[1, 1]) - q[3, 3]), q[1, 2] - q[3, 0], pos[1]],
            [-(q[1, 3] - q[2, 0]), q[1, 2] + q[3, 0], -((1.0 - q[2, 2]) - q[3, 3]), pos[2]],
            [0.0, 0.0, 0.0, 1.0],
        ],
        dtype=float,
    )
    return matrix
def test_ref_lp_from_decorator_with_named_outputs():
    """A referenced launch plan declared with a NamedTuple return exposes named outputs."""
    # BUG FIX: the decorator was being invoked as a bare statement (missing '@'),
    # so its result was discarded and ref_lp1 remained a plain function with no
    # python_interface attribute. Applying it with '@' restores the wrapping.
    # NOTE(review): the name _launch_plan looks truncated in this copy —
    # presumably flytekit's reference_launch_plan; confirm against the file's
    # imports.
    @_launch_plan(project='project', domain='domain', name='name', version='version')
    def ref_lp1(p1: str, p2: str) -> typing.NamedTuple('RefLPOutput', o1=int, o2=str):
        ...
    assert ref_lp1.python_interface.outputs == {'o1': int, 'o2': str}
# NOTE(review): the five bare statements below look like decorator lines whose
# '@' prefixes and name beginnings (e.g. @login_required / @require_POST) were
# lost in this copy — as written they are no-op expressions. Confirm and
# restore against version control.
_required
_required
_required(UserAdminPermission)
_required
_POST
def dc_user_modal_form(request):
    """Create or update a user from the admin modal form (POST handler)."""
    # Preserve the current query string so list redirects keep their filters.
    qs = request.GET.copy()
    if (request.POST['action'] == 'update'):
        user = get_edited_user(request, request.POST['adm-username'])
    else:
        user = None
    form = AdminUserModalForm(request, user, request.POST, prefix='adm')
    if form.is_valid():
        args = (form.cleaned_data['username'],)
        status = form.save(args=args)
        if (status == 204):
            # 204: nothing changed — empty response, no flash message.
            return HttpResponse(None, status=status)
        elif (status in (200, 201)):
            if (form.action == 'create'):
                messages.success(request, _('User was successfully created'))
                return redirect('dc_user_profile', username=form.cleaned_data['username'])
            else:
                messages.success(request, _('User profile was successfully updated'))
                redirect_to = 'dc_user_list'
                if user:
                    # Re-fetch: form.save() may have changed the user's row.
                    user = user.__class__.objects.get(pk=user.pk)
                    # If the requester edited away their own admin rights they
                    # can no longer view the user list — send them home instead.
                    if ((request.user == user) and (not user.is_admin(dc=request.dc))):
                        redirect_to = '/'
                return redirect(redirect_to, query_string=qs)
    # Invalid form (or unexpected save status): re-render the modal with errors.
    return render(request, 'gui/dc/user_dc_form.html', {'form': form})
class MinHashLSHEnsemble(object):
    """LSH Ensemble index for containment search over MinHash sketches.

    Partitions indexed sets by size into ``num_part`` buckets; each partition
    holds one MinHashLSH per candidate ``r`` parameter so a near-optimal
    (b, r) pair can be chosen per query.
    """

    def __init__(self, threshold: float=0.9, num_perm: int=128, num_part: int=16, m: int=8, weights: Tuple[(float, float)]=(0.5, 0.5), storage_config: Optional[Dict]=None, prepickle: Optional[bool]=None) -> None:
        """Validate parameters and build the per-partition MinHashLSH indexes.

        Raises ValueError for out-of-range threshold, permutations, partition
        count, ``m``, or weights that are invalid or do not sum to 1.0.
        """
        if ((threshold > 1.0) or (threshold < 0.0)):
            raise ValueError('threshold must be in [0.0, 1.0]')
        if (num_perm < 2):
            raise ValueError('Too few permutation functions')
        if (num_part < 1):
            raise ValueError('num_part must be at least 1')
        if ((m < 2) or (m > num_perm)):
            raise ValueError('m must be in the range of [2, num_perm]')
        if any((((w < 0.0) or (w > 1.0)) for w in weights)):
            raise ValueError('Weight must be in [0.0, 1.0]')
        if (sum(weights) != 1.0):
            raise ValueError('Weights must sum to 1.0')
        self.threshold = threshold
        self.h = num_perm
        self.m = m
        # rs: the distinct r values the optimal-parameter table can pick from.
        rs = self._init_optimal_params(weights)
        storage_config = ({'type': 'dict'} if (not storage_config) else storage_config)
        basename = storage_config.get('basename', _random_name(11))
        # One MinHashLSH per (partition, r); storage names are disambiguated
        # by partition and r via _get_storage_config.
        self.indexes = [dict(((r, MinHashLSH(num_perm=self.h, params=(int((self.h / r)), r), storage_config=self._get_storage_config(basename, storage_config, partition, r), prepickle=prepickle)) for r in rs)) for partition in range(0, num_part)]
        # Size bounds per partition; filled in by index().
        self.lowers = [None for _ in self.indexes]
        self.uppers = [None for _ in self.indexes]

    def _init_optimal_params(self, weights):
        """Precompute (b, r) parameters over a grid of size ratios x/q.

        Returns the set of distinct r values used across the grid.
        """
        (false_positive_weight, false_negative_weight) = weights
        # Log-spaced grid of containment size ratios.
        self.xqs = np.exp(np.linspace((- 5), 5, 10))
        self.params = np.array([_optimal_param(self.threshold, self.h, self.m, xq, false_positive_weight, false_negative_weight) for xq in self.xqs], dtype=int)
        rs = set()
        for (_, r) in self.params:
            rs.add(r)
        return rs

    def _get_optimal_param(self, x, q):
        """Return the precomputed (b, r) for indexed-set size x and query size q."""
        i = np.searchsorted(self.xqs, (float(x) / float(q)), side='left')
        if (i == len(self.params)):
            # Ratio beyond the grid: clamp to the last entry.
            i = (i - 1)
        return self.params[i]

    def _get_storage_config(self, basename, base_config, partition, r):
        """Derive a storage config whose basename is unique per (partition, r)."""
        config = dict(base_config)
        config['basename'] = b'-'.join([basename, struct.pack('>H', partition), struct.pack('>H', r)])
        return config

    def index(self, entries: Iterable[Tuple[(Hashable, MinHash, int)]]) -> None:
        """Bulk-index (key, minhash, size) triples; callable once per instance.

        Entries are partitioned by set size into size ranges chosen by
        optimal_partitions, then inserted into every r-index of their
        partition. Raises ValueError on a non-positive size, empty input, or
        if the index is already populated.
        """
        if (not self.is_empty()):
            raise ValueError('Cannot call index again on a non-empty index')
        if (not isinstance(entries, list)):
            # Materialize a generator while validating sizes as we go.
            queue = deque([])
            for (key, minhash, size) in entries:
                if (size <= 0):
                    raise ValueError('Set size must be positive')
                queue.append((key, minhash, size))
            entries = list(queue)
        if (len(entries) == 0):
            raise ValueError('entries is empty')
        # Distinct sizes (sorted ascending) with their frequencies.
        (sizes, counts) = np.array(sorted(Counter((e[2] for e in entries)).most_common())).T
        partitions = optimal_partitions(sizes, counts, len(self.indexes))
        for (i, (lower, upper)) in enumerate(partitions):
            (self.lowers[i], self.uppers[i]) = (lower, upper)
        # Walk entries in size order, advancing to the next partition when the
        # current partition's upper size bound is exceeded.
        entries.sort(key=(lambda e: e[2]))
        curr_part = 0
        for (key, minhash, size) in entries:
            if (size > self.uppers[curr_part]):
                curr_part += 1
            for r in self.indexes[curr_part]:
                self.indexes[curr_part][r].insert(key, minhash)

    def query(self, minhash: MinHash, size: int) -> Generator[(Hashable, None, None)]:
        """Yield keys of indexed sets likely containing the query set.

        Each partition is queried with the (b, r) tuned for that partition's
        upper size bound; duplicate keys across partitions are not removed.
        """
        for (i, index) in enumerate(self.indexes):
            u = self.uppers[i]
            if (u is None):
                # Partition unused (index() assigned it no size range).
                continue
            (b, r) = self._get_optimal_param(u, size)
            for key in index[r]._query_b(minhash, b):
                (yield key)

    def __contains__(self, key: Hashable) -> bool:
        """True if *key* was indexed in any partition."""
        return any((any(((key in index[r]) for r in index)) for index in self.indexes))

    def is_empty(self) -> bool:
        """True if no entries have been indexed yet."""
        return all((all((index[r].is_empty() for r in index)) for index in self.indexes))
def evaluate(base, string):
    """Parse one or two colors out of *string* and compute their difference.

    Returns ``(colors, delta)`` where ``colors`` holds the parsed color
    objects and ``delta`` is a human-readable distance label. Any failure —
    including having fewer than two colors when the distance is computed —
    falls back to ``([], 'Delta E 2000: 0')``.
    """
    colors = []
    try:
        color = string.strip()
        second = None
        method = None
        (first, method, more) = parse_color(base, color)
        if (first and (more is not None)):
            if (more is False):
                # Unparsable trailing content after the first color: reject.
                first = None
            else:
                # Something follows the first color: try to parse a second one.
                (second, method, more) = parse_color(base, color, start=first.end, second=True)
                if ((not second) or (more is False)):
                    first = None
                    second = None
        # BUG FIX: the original had an unreachable duplicate `elif first:`
        # branch after this `if first:`; collapsed into the single branch.
        if first:
            first = first.color
            if second:
                second = second.color
        delta = 'Delta E 2000: 0'
        if (method is None):
            method = '2000'
        if first:
            colors.append(first)
        if second:
            colors.append(second)
        # NOTE: with fewer than two colors the indexing below raises and the
        # except clause yields the default result — deliberate fallback.
        if (method == 'euclidean'):
            delta = 'Distance: {}'.format(colors[0].distance(colors[1]))
        else:
            delta = 'Delta E {}: {}'.format(method, colors[0].delta_e(colors[1], method=method))
    except Exception:
        delta = 'Delta E 2000: 0'
        colors = []
    return (colors, delta)
def async_mock_offchain_lookup_request_response(
    monkeypatch: 'MonkeyPatch',
    http_method: Literal[('GET', 'POST')] = 'GET',
    mocked_request_url: str = None,
    mocked_status_code: int = 200,
    mocked_json_data: str = '0x',
    json_data_field: str = 'data',
    sender: str = None,
    calldata: str = None,
) -> None:
    """Patch aiohttp so requests to *mocked_request_url* get a canned response.

    BUG FIX / RECONSTRUCTION: this copy was garbled — the ``http_method``
    parameter name and its uses had been deleted, producing syntax errors
    (`if ( == 'post')`, `method= url=`, a broken setattr f-string). The
    identifier and the patch target were restored; NOTE(review): confirm both
    against version control.
    """

    class AsyncMockedResponse():
        # Mimics the minimal aiohttp ClientResponse surface the caller needs.
        status = mocked_status_code

        def __await__(self) -> Generator[(Any, Any, Any)]:
            (yield)
            return self

        @staticmethod
        async def json() -> Dict[(str, str)]:
            return {json_data_field: mocked_json_data}

        @staticmethod
        def raise_for_status() -> None:
            raise Exception('called raise_for_status()')

    async def _mock_specific_request(*args: Any, **kwargs: Any) -> Union[('ClientResponse', AsyncMockedResponse)]:
        # args[0] is the patched ClientSession instance, args[1] the URL.
        url_from_args = args[1]
        if (url_from_args == mocked_request_url):
            assert (kwargs['timeout'] == ClientTimeout(10))
            if (http_method.lower() == 'post'):
                assert (kwargs['data'] == {'data': calldata, 'sender': sender})
            return AsyncMockedResponse()
        # Any other URL passes through a real (cached) session untouched.
        session = (await async_cache_and_return_session(url_from_args))
        return (await session.request(method=http_method, url=url_from_args, **kwargs))

    monkeypatch.setattr(f'aiohttp.ClientSession.{http_method.lower()}', _mock_specific_request)
def expand_braces(patterns: AnyStr, flags: int, limit: int) -> Iterable[AnyStr]:
    """Yield each pattern, brace-expanded via bracex when the BRACE flag is set.

    A lone str/bytes pattern is treated as a one-element sequence. Expansion
    failures other than ExpansionLimitException fall back to yielding the
    pattern unexpanded; the limit error always propagates.
    """
    pattern_seq = [patterns] if isinstance(patterns, (str, bytes)) else patterns
    if not (flags & BRACE):
        # Brace expansion disabled: pass patterns through untouched.
        yield from pattern_seq
        return
    for pattern in pattern_seq:
        try:
            yield from bracex.iexpand(pattern, keep_escapes=True, limit=limit)
        except bracex.ExpansionLimitException:
            raise
        except Exception:
            # Malformed braces: treat the pattern as a literal.
            yield pattern
class TestHtml5NthOfSelectors(util.PluginTestCase):
    """Spellcheck-plugin tests for CSS ':nth-child(... of S)' ignore selectors."""

    def setup_fs(self):
        """Write the shared HTML fixture (each element holds one nonsense word)."""
        template = self.dedent('\n <!DOCTYPE html>\n <html>\n <head>\n <meta content="text/html; charset=UTF-8">\n </head>\n <body>\n <p>aaaa</p>\n <p>bbbb</p>\n <span class="test">cccc</span>\n <span>dddd</span>\n <span class="test">eeee</span>\n <span>ffff</span>\n <span class="test">gggg</span>\n <p>hhhh</p>\n <p class="test">iiii</p>\n <p>jjjj</p>\n <p class="test">kkkk</p>\n <span>llll</span>\n </body>\n </html>\n ')
        self.mktemp('test.txt', template, 'utf-8')

    def test_css_child_of_s(self):
        """':nth-child(an+b of S)' counts only elements matching S."""
        config = self.dedent("\n matrix:\n - name: html_css\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.html:\n mode: html5\n ignores:\n - ':nth-child(2n + 1 of :is(p, span).test)'\n ").format(self.tempdir)
        self.mktemp('.html5.yml', config, 'utf-8')
        # The ignored (odd-of-.test) words are absent from the expected list.
        self.assert_spellcheck('.html5.yml', ['aaaa', 'bbbb', 'dddd', 'eeee', 'ffff', 'hhhh', 'iiii', 'jjjj', 'llll'])

    def test_css_child_of_s_vs_schild(self):
        """':nth-child(-n+3 of p)' differs from 'p:nth-child(-n+3)'.

        The former counts positions among p elements only; the latter filters
        p elements by their position among all siblings.
        """
        config = self.dedent("\n matrix:\n - name: html_css\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.html:\n mode: html5\n ignores:\n - ':nth-child(-n+3 of p)'\n ").format(self.tempdir)
        self.mktemp('.html5.yml', config, 'utf-8')
        self.assert_spellcheck('.html5.yml', ['cccc', 'dddd', 'eeee', 'ffff', 'gggg', 'iiii', 'jjjj', 'kkkk', 'llll'])
        config = self.dedent("\n matrix:\n - name: html_css\n sources:\n - '{}/**/*.txt'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.html:\n mode: html5\n ignores:\n - 'p:nth-child(-n+3)'\n ").format(self.tempdir)
        self.mktemp('.html5.yml', config, 'utf-8')
        self.assert_spellcheck('.html5.yml', ['cccc', 'dddd', 'eeee', 'ffff', 'gggg', 'hhhh', 'iiii', 'jjjj', 'kkkk', 'llll'])
def test_contract_init_with_w3_function_name(w3, function_name_tester_contract_abi, function_name_tester_contract):
    """A contract function named ``w3`` must not clash with the w3 instance."""
    factory = w3.eth.contract(abi=function_name_tester_contract_abi)
    instance = factory(function_name_tester_contract.address)
    assert instance.functions.w3().call() is True
def __create_marker_context_menu():
    """Register the jump/move/remove entries of the playback-marker context menu."""

    def on_jumpto_item_activate(widget, name, parent, context):
        # Seek playback to the activated marker's position.
        marker_position = context['current-marker'].props.position
        player.PLAYER.set_progress(marker_position)

    def on_remove_item_activate(widget, name, parent, context):
        providers.unregister('playback-markers', context['current-marker'])

    jump_item = menu.simple_menu_item('jumpto-marker', [], _('_Jump to'), 'go-jump', on_jumpto_item_activate)
    # Each later item is anchored after the previous one by name.
    move_item = MoveMarkerMenuItem('move-marker', [jump_item.name])
    remove_item = menu.simple_menu_item('remove-marker', [move_item.name], _('_Remove Marker'), 'list-remove', on_remove_item_activate)
    for entry in (jump_item, move_item, remove_item):
        providers.register('playback-marker-context-menu', entry)
class LanguageErrorMessage():
    """Canned, user-facing error messages for language handling.

    Each entry is called as ``LanguageErrorMessage.NAME(...)`` and returns a
    formatted message string.

    BUG FIX: these were plain lambdas stored as class attributes; accessed via
    an *instance* they would bind the instance as the first argument and
    produce garbage messages. Static methods keep class-level calls identical
    while fixing instance-level access.
    """

    @staticmethod
    def LANGUAGE_REQUIRED(input_lang):
        return f"This provider doesn't auto-detect languages, please provide a valid {input_lang}"

    @staticmethod
    def LANGUAGE_NOT_SUPPORTED(lang, input_lang):
        return f'Provider does not support selected {input_lang}: `{lang}`'

    @staticmethod
    def LANGUAGE_GENERIQUE_REQUESTED(lang, suggested_lang, input_lang):
        return f"Provider does not support selected {input_lang}:'{lang}'. Please try a more general language: '{suggested_lang}'"

    @staticmethod
    def LANGUAGE_SYNTAX_ERROR(lang):
        return f"Invalid language format for: '{lang}'."
class ReceiverStreamsCombiner():
    """Multiplexes many receiver streams into a single output batch queue.

    One asyncio task per registered receiver awaits its next batch; the
    converter() loop collects finished tasks, forwards their batches, and
    reschedules the receivers.
    """

    def __init__(self, output_batch_queue: OutputBatchQueue):
        self.output_batch_queue = output_batch_queue
        # One pending task per receiver, each resolving to (receiver, batch).
        self.tasks: Dict[(BaseReceiver, Awaitable[Tuple[(BaseReceiver, dm.ResponseBatch)]])] = {}
        self.running = True
        # Receivers scheduled for removal; actually dropped inside converter().
        self.receivers_to_delete: List[BaseReceiver] = []

    def add_listener(self, receiver: BaseReceiver) -> None:
        """Start listening on *receiver* by scheduling its first receive()."""
        logger.debug('Add listener')
        self.tasks[receiver] = asyncio.create_task(self.__converter_function(receiver, receiver.receive()))

    async def __converter_function(self, receiver: BaseReceiver, future: Awaitable[dm.ResponseBatch]) -> Tuple[(BaseReceiver, dm.ResponseBatch)]:
        """Await a receiver's pending batch and tag it with its receiver.

        BUG FIX: 'self' was missing from the signature, so every bound call
        shifted the arguments by one and raised TypeError.
        """
        batch = (await future)
        return (receiver, batch)

    async def remove_listener(self, receiver: BaseReceiver):
        """Mark *receiver* for removal; converter() drops it after its next batch."""
        logger.debug('Try to remove listener')
        self.receivers_to_delete.append(receiver)
        logger.debug('Listener is removed')

    def stop(self):
        """Ask the converter loop to exit after its current iteration."""
        logger.warning('Receiver stream combiner will be stoped')
        self.running = False

    async def converter(self):
        """Main loop: await finished receives, forward batches, reschedule."""
        while self.running:
            (await asyncio.sleep(0.1))
            tasks_to_process = self.tasks.values()
            if (not tasks_to_process):
                continue
            (done, _) = (await asyncio.wait(tasks_to_process, return_when=asyncio.FIRST_COMPLETED))
            for task in done:
                (receiver, batch) = task.result()
                if (batch is not None):
                    (await self.output_batch_queue.put(batch))
                    # Batch delivered: release the model instance for reuse.
                    model_instance = receiver.get_model_instance()
                    model_instance.lock = False
                    model_instance.current_processing_batch = None
                if (receiver in self.receivers_to_delete):
                    logger.debug('Remove listener')
                    self.tasks.pop(receiver)
                    receiver.close()
                if (receiver in self.tasks):
                    # Still registered: schedule the next receive for it.
                    self.tasks[receiver] = asyncio.create_task(self.__converter_function(receiver, receiver.receive()))
# BUG FIX: the parametrize line had lost its '@pytest.mark' prefix, leaving a
# bare '.parametrize(...)' syntax error.
@pytest.mark.parametrize('num_modes, log_level', [(101, 'WARNING'), (100, None)])
def test_monitor_num_modes(log_capture, num_modes, log_level):
    """A ModeMonitor should log a warning only when num_modes exceeds 100."""
    # NOTE(review): freqs=np.linspace(.0, .0, 100) is a zero-width span —
    # looks like garbled constants in this copy; confirm the intended range.
    monitor = td.ModeMonitor(size=(td.inf, 0, td.inf), freqs=np.linspace(.0, .0, 100), name='test', mode_spec=td.ModeSpec(num_modes=num_modes))
    assert_log_level(log_capture, log_level)
# NOTE(review): the two bare statements below look like decorator lines whose
# '@' prefixes and name beginnings (e.g. @login_required /
# @user_passes_test(is_organizer, 'index')) were lost in this copy — as
# written they are no-op expressions. Confirm and restore against version
# control.
_required
_passes_test(is_organizer, 'index')
def change_activity_status(request, event_slug, activity_id, status, justification=None):
    """Set an activity's status, clear its scheduling, and notify by email.

    A status change invalidates the current schedule, so start/end dates and
    room are reset. Email failure is shown to the user but does not abort the
    request.
    """
    event = get_object_or_404(Event, event_slug=event_slug)
    activity = get_object_or_404(Activity, id=activity_id)
    activity.status = status
    # Clear scheduling: the activity must be re-scheduled after the change.
    activity.start_date = None
    activity.end_date = None
    activity.room = None
    activity.justification = justification
    activity.save()
    try:
        utils_email.send_activity_email(event, activity, justification)
    except SMTPException as error:
        logger.error(error)
        messages.error(request, _("The email couldn't sent successfully, please retry later or contact a organizer"))
    safe_continue = reverse('activity_detail', args=[event_slug, activity.pk])
    return goto_next_or_continue(request.GET.get('next'), safe_continue)
class AmazonTranslationApi(TranslationInterface):
    """AWS (Comprehend/Translate) implementation of the translation interface."""

    def translation__language_detection(self, text) -> ResponseType[LanguageDetectionDataClass]:
        """Detect the dominant language(s) of *text* via AWS Comprehend.

        CONSISTENCY FIX: route the AWS call through handle_amazon_call, as
        translation__automatic_translation below does, so boto3 errors are
        handled uniformly instead of propagating raw.
        """
        payload = {'Text': text}
        response = handle_amazon_call(self.clients['text'].detect_dominant_language, **payload)
        items: Sequence[InfosLanguageDetectionDataClass] = []
        for lang in response['Languages']:
            items.append(InfosLanguageDetectionDataClass(language=lang['LanguageCode'], display_name=get_language_name_from_code(isocode=lang['LanguageCode']), confidence=lang['Score']))
        return ResponseType[LanguageDetectionDataClass](original_response=response, standardized_response=LanguageDetectionDataClass(items=items))

    def translation__automatic_translation(self, source_language: str, target_language: str, text: str) -> ResponseType[AutomaticTranslationDataClass]:
        """Translate *text* between the given language codes via AWS Translate."""
        payload = {'Text': text, 'SourceLanguageCode': source_language, 'TargetLanguageCode': target_language}
        response = handle_amazon_call(self.clients['translate'].translate_text, **payload)
        standardized: AutomaticTranslationDataClass = AutomaticTranslationDataClass(text=response['TranslatedText'])
        return ResponseType[AutomaticTranslationDataClass](original_response=response, standardized_response=standardized)
def test_link_in_headers(response_with_header_link):
    """Pagination should follow the rel="next" link taken from response headers."""
    strategy = LinkPaginationStrategy(LinkPaginationConfiguration(source='headers', rel='next'))
    initial_request: SaaSRequestParams = SaaSRequestParams(method=HTTPMethod.GET, path='/customers', query_params={'page': 'abc'})
    next_request: Optional[SaaSRequestParams] = strategy.get_next_request(initial_request, {}, response_with_header_link, 'customers')
    expected = SaaSRequestParams(method=HTTPMethod.GET, path='/customers', query_params={'page': 'def'})
    assert next_request == expected
def test_reference():
    """Schemas can reference other schemas registered in shared definitions."""
    registry = typesystem.Definitions()
    album_schema = typesystem.Schema(fields={'title': typesystem.String(max_length=100), 'release_date': typesystem.Date(), 'artist': typesystem.Reference('Artist', definitions=registry)})
    artist_schema = typesystem.Schema(fields={'name': typesystem.String(max_length=100)})
    registry['Artist'] = artist_schema
    validated = album_schema.validate({'title': 'Double Negative', 'release_date': '2018-09-14', 'artist': {'name': 'Low'}})
    expected = {'title': 'Double Negative', 'release_date': datetime.date(2018, 9, 14), 'artist': {'name': 'Low'}}
    assert validated == expected
class TicketingManager():
    """Order-level ticketing helpers (pricing, Stripe, PayPal).

    BUG FIX: the methods below take no ``self`` but carried no decorator, so
    they only worked when called on the class object and broke on instances;
    ``@staticmethod`` makes both call styles work without changing behavior.
    """

    @staticmethod
    def calculate_update_amount(order):
        """Recompute an order's total from its tickets, fees, and discount.

        Per-ticket service fees apply only when the ticket does not absorb
        them, capped at the fee schedule's maximum. Discounts apply per ticket
        listed in the discount code; an 'amount' discount cannot push the
        total below zero. The order is saved and returned.
        """
        discount = None
        if order.discount_code_id:
            discount = order.discount_code
        amount = 0
        total_discount = 0
        fees = TicketFees.query.filter_by(currency=order.event.payment_currency).first()
        for order_ticket in order.order_tickets:
            with db.session.no_autoflush:
                if (order_ticket.ticket.is_fee_absorbed or (not fees)):
                    ticket_amount = (order_ticket.ticket.price * order_ticket.quantity)
                    amount += (order_ticket.ticket.price * order_ticket.quantity)
                else:
                    # Percentage service fee, capped at the schedule's maximum.
                    order_fee = ((fees.service_fee * (order_ticket.ticket.price * order_ticket.quantity)) / 100)
                    if (order_fee > fees.maximum_fee):
                        ticket_amount = ((order_ticket.ticket.price * order_ticket.quantity) + fees.maximum_fee)
                        amount += ((order_ticket.ticket.price * order_ticket.quantity) + fees.maximum_fee)
                    else:
                        ticket_amount = ((order_ticket.ticket.price * order_ticket.quantity) + order_fee)
                        amount += ((order_ticket.ticket.price * order_ticket.quantity) + order_fee)
                if (discount and (str(order_ticket.ticket.id) in discount.tickets.split(','))):
                    if (discount.type == 'amount'):
                        total_discount += (discount.value * order_ticket.quantity)
                    else:
                        total_discount += ((discount.value * ticket_amount) / 100)
        if discount:
            if (discount.type == 'amount'):
                order.amount = max((amount - total_discount), 0)
            elif (discount.type == 'percent'):
                order.amount = (amount - ((discount.value * amount) / 100.0))
        else:
            order.amount = amount
        save_to_db(order)
        return order

    @staticmethod
    def create_payment_intent_for_order_stripe(order):
        """Create (or fetch) a Stripe payment intent for *order*.

        Returns (True, intent) on success; on ConflictError the order is
        expired, its attendees are removed, and (False, error) is returned.
        """
        try:
            payment_intent = StripePaymentsManager.get_payment_intent_stripe(order)
            order.stripe_payment_intent_id = payment_intent['id']
            db.session.commit()
            return (True, payment_intent)
        except ConflictError as e:
            order.status = 'expired'
            save_to_db(order)
            delete_related_attendees_for_order(order)
            return (False, e)

    @staticmethod
    def charge_paypal_order_payment(order, paypal_payer_id, paypal_payment_id):
        """Execute a PayPal payment for *order* and finalize its state.

        On success the order is marked completed and (True, message) is
        returned; on failure it is expired, its attendees are removed, and
        (False, error) is returned.
        """
        order.paypal_token = paypal_payment_id
        save_to_db(order)
        (status, error) = PayPalPaymentsManager.execute_payment(paypal_payer_id, paypal_payment_id)
        if status:
            order.paid_via = 'paypal'
            order.status = 'completed'
            order.transaction_id = paypal_payment_id
            order.completed_at = datetime.utcnow()
            save_to_db(order)
            on_order_completed(order)
            return (True, 'Charge successful')
        order.status = 'expired'
        save_to_db(order)
        delete_related_attendees_for_order(order)
        return (False, error)
def _parse_expand_colnames(adapter, fieldlist):
    """Expand a field list into per-column metadata tuples plus a table map.

    Non-Field entries contribute ``None`` placeholders so positions line up
    with the input; each Field contributes
    (tablename, fieldname, table, field, field.type, field._itype), and its
    table is recorded in the returned {tablename: table} mapping.
    """
    parsed = []
    tables = {}
    for item in fieldlist:
        if not isinstance(item, Field):
            # Keep positional alignment with the input list.
            parsed.append(None)
            continue
        owner_table = item.table
        owner_name = owner_table._tablename
        parsed.append((owner_name, item.name, owner_table, item, item.type, item._itype))
        tables[owner_name] = owner_table
    return (parsed, tables)
class TestModelingModelEMAHook(unittest.TestCase):
    """Exercises the EMA (exponential moving average) training hook end to end."""

    def test_ema_hook(self):
        """With decay 0.0 the EMA state should track the model's latest weights."""
        runner = default_runner.Detectron2GoRunner()
        cfg = runner.get_default_cfg()
        cfg.MODEL.DEVICE = 'cpu'
        cfg.MODEL_EMA.ENABLED = True
        # Decay 0.0 means the EMA copies the current weights on every update.
        cfg.MODEL_EMA.DECAY = 0.0
        # Negative factor disables decay warm-up.
        cfg.MODEL_EMA.DECAY_WARM_UP_FACTOR = (- 1)
        model = TestArch()
        ema.may_build_model_ema(cfg, model)
        self.assertTrue(hasattr(model, 'ema_state'))
        ema_hook = ema.EMAHook(cfg, model)
        # Drive the hook through one simulated training step.
        ema_hook.before_train()
        ema_hook.before_step()
        model.set_const_weights(2.0)
        ema_hook.after_step()
        ema_hook.after_train()
        ema_checkpointers = ema.may_get_ema_checkpointer(cfg, model)
        self.assertEqual(len(ema_checkpointers), 1)
        # Applying the stored EMA state to a fresh model must reproduce the
        # trained model's weights (since decay is zero).
        out_model = TestArch()
        ema_checkpointers['ema_state'].apply_to(out_model)
        self.assertTrue(_compare_state_dict(out_model, model))
class port_mod(message):
    """OpenFlow 1.2 (wire version 3) OFPT_PORT_MOD message.

    Loxi-style generated message asking the switch to modify a port's
    behaviour (config/mask/advertise bits).

    BUG FIX: the two value_name_map literals in pretty_print() had lost the
    key of the OFPPC_BSN_MIRROR_DEST entry (a SyntaxError); restored as
    2147483648 (0x80000000, the BSN extension bit). unpack() is now a
    @staticmethod, matching how it is called (port_mod.unpack(reader)).

    NOTE(review): pack() joins struct output with str padding ('\\x00' * n)
    and ''.join — Python-2-era byte handling; confirm the runtime before
    porting to Python 3.
    """
    version = 3   # OFP_VERSION for OpenFlow 1.2
    type = 16     # OFPT_PORT_MOD

    def __init__(self, xid=None, port_no=None, hw_addr=None, config=None, mask=None, advertise=None):
        # Each field falls back to its wire default when not supplied.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (port_no != None):
            self.port_no = port_no
        else:
            self.port_no = 0
        if (hw_addr != None):
            self.hw_addr = hw_addr
        else:
            self.hw_addr = [0, 0, 0, 0, 0, 0]
        if (config != None):
            self.config = config
        else:
            self.config = 0
        if (mask != None):
            self.mask = mask
        else:
            self.mask = 0
        if (advertise != None):
            self.advertise = advertise
        else:
            self.advertise = 0
        return

    def pack(self):
        """Serialize to the OpenFlow wire format, back-patching the length."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        # Placeholder length, patched once the total size is known.
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(util.pack_port_no(self.port_no))
        packed.append(('\x00' * 4))
        packed.append(struct.pack('!6B', *self.hw_addr))
        packed.append(('\x00' * 2))
        packed.append(struct.pack('!L', self.config))
        packed.append(struct.pack('!L', self.mask))
        packed.append(struct.pack('!L', self.advertise))
        packed.append(('\x00' * 4))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize a port_mod from *reader*, validating version and type."""
        obj = port_mod()
        _version = reader.read('!B')[0]
        assert (_version == 3)
        _type = reader.read('!B')[0]
        assert (_type == 16)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Constrain further reads to this message's declared length.
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        obj.port_no = util.unpack_port_no(reader)
        reader.skip(4)
        obj.hw_addr = list(reader.read('!6B'))
        reader.skip(2)
        obj.config = reader.read('!L')[0]
        obj.mask = reader.read('!L')[0]
        obj.advertise = reader.read('!L')[0]
        reader.skip(4)
        return obj

    def __eq__(self, other):
        """Field-wise equality against another port_mod."""
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.port_no != other.port_no):
            return False
        if (self.hw_addr != other.hw_addr):
            return False
        if (self.config != other.config):
            return False
        if (self.mask != other.mask):
            return False
        if (self.advertise != other.advertise):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump of the message onto pretty-printer *q*."""
        q.text('port_mod {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('port_no = ')
                q.text(util.pretty_port(self.port_no))
                q.text(',')
                q.breakable()
                q.text('hw_addr = ')
                q.text(util.pretty_mac(self.hw_addr))
                q.text(',')
                q.breakable()
                q.text('config = ')
                value_name_map = {1: 'OFPPC_PORT_DOWN', 4: 'OFPPC_NO_RECV', 32: 'OFPPC_NO_FWD', 64: 'OFPPC_NO_PACKET_IN', 2147483648: 'OFPPC_BSN_MIRROR_DEST'}
                q.text(util.pretty_flags(self.config, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('mask = ')
                value_name_map = {1: 'OFPPC_PORT_DOWN', 4: 'OFPPC_NO_RECV', 32: 'OFPPC_NO_FWD', 64: 'OFPPC_NO_PACKET_IN', 2147483648: 'OFPPC_BSN_MIRROR_DEST'}
                q.text(util.pretty_flags(self.mask, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('advertise = ')
                q.text(('%#x' % self.advertise))
                q.breakable()
        q.text('}')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.