code stringlengths 281 23.7M |
|---|
def _extract_around_indexes_returns_correct_array_with_mode_average(indexes, before, after, extended_indexes):
    """Check extract_around_indexes with AVERAGE mode against a NumPy reference.

    Builds one window of `before + after + 1` samples around each index, and
    asserts the library result equals the element-wise mean of those windows.

    indexes: iterable of center positions into the 100-sample random trace.
    before/after: number of samples kept on each side of every index.
    extended_indexes: selects how the reference windows are gathered
        (fancy indexing vs. plain slicing).
    """
    data = np.random.rand(100)
    result = scared.signal_processing.extract_around_indexes(data, indexes, before, after, mode=scared.signal_processing.ExtractMode.AVERAGE)
    expected = []
    for i in indexes:
        if extended_indexes:
            # Fancy-index form — NOTE(review): unlike the slice below, negative
            # positions wrap around; presumably used when windows may extend
            # past the array edges. TODO confirm against the library docs.
            expected_data = data[np.arange((i - before), ((i + after) + 1))]
        else:
            expected_data = data[(i - before):((i + after) + 1)]
        expected.append(expected_data)
    expected = np.array(expected)
    # AVERAGE mode: mean across windows, one value per sample position.
    expected = np.mean(expected, axis=0)
    assert np.array_equal(result, expected)
class LoadCompanyCommitsAction(Action):
    """CLI action that loads commits from a company's repository events for a given day."""

    # Extends the base Action params with a required --company/-c string option.
    params = (Action.params + (ActionParam(name='company', short_name='c', type=str, required=True, description='Company name'),))

    # NOTE(review): takes `cls` — likely @classmethod upstream; the decorator
    # appears stripped in this view. Same for help_text below.
    def name(cls) -> str:
        return 'load-company-commits'

    def help_text(cls) -> str:
        return 'Load company repositories events commits'

    def _execute(self, day: datetime, company: str):
        # Delegate the actual loading to the domain-level helper.
        return load_company_repositories_events_commits(date=day, company=company)
def basic_object_class_multiple_faba_with_loan_value_with_two_object_classes(award_count_sub_schedule, award_count_submission, defc_codes):
    """Test fixture: two loan awards, each with one FinancialAccountsByAwards row
    under a distinct major object class ('001' and '002').

    Both FABA rows share parent award id 'basic award', DEFC code 'M', the first
    available submission, gross outlays of 8 and an obligated amount of 1.
    The three parameters are upstream fixtures that seed the required tables.
    """
    award1 = _loan_award(45)
    award2 = _loan_award(87)
    # Each helper returns the major class plus its minor children; [0] is the major.
    object_class1 = major_object_class_with_children('001', [1])
    object_class2 = major_object_class_with_children('002', [2])
    baker.make('awards.FinancialAccountsByAwards', award=award1, parent_award_id='basic award', disaster_emergency_fund=DisasterEmergencyFundCode.objects.filter(code='M').first(), submission=SubmissionAttributes.objects.all().first(), object_class=object_class1[0], gross_outlays_delivered_orders_paid_total_cpe=8, transaction_obligated_amount=1)
    baker.make('awards.FinancialAccountsByAwards', award=award2, parent_award_id='basic award', disaster_emergency_fund=DisasterEmergencyFundCode.objects.filter(code='M').first(), submission=SubmissionAttributes.objects.all().first(), object_class=object_class2[0], gross_outlays_delivered_orders_paid_total_cpe=8, transaction_obligated_amount=1)
class PDFDocument():
    """A parsed PDF document.

    Built on top of a PDFParser: locates the cross-reference tables, reads
    trailers to find /Root, /Info and /Encrypt, sets up decryption when the
    document is encrypted, and exposes indirect-object lookup via getobj().
    """

    # Maps the encryption dict's /V (version) value to a security-handler class.
    security_handler_registry = {1: PDFStandardSecurityHandler, 2: PDFStandardSecurityHandler}
    # Versions 4 and 5 need optional crypto support to be importable.
    if (AES is not None):
        security_handler_registry[4] = PDFStandardSecurityHandlerV4
    if (SHA256 is not None):
        security_handler_registry[5] = PDFStandardSecurityHandlerV5
    debug = 0  # set truthy to emit logging trace output

    def __init__(self, parser, password=b'', caching=True, fallback=True):
        """Parse the document structure.

        parser: PDFParser positioned over the document data.
        password: password for encrypted documents (bytes).
        caching: cache parsed indirect objects for reuse.
        fallback: if True (or forced when no valid xref is found), scan the
            whole file for objects instead of trusting the xref tables.
        Raises PDFSyntaxError if no trailer provides a /Root object.
        """
        self.caching = caching
        self.xrefs = []
        self.info = []
        self.catalog = None
        self.encryption = None
        self.decipher = None
        self._parser = None
        self._cached_objs = {}
        self._parsed_objs = {}
        self._parser = parser
        self._parser.set_document(self)
        # Permissive defaults; tightened by the security handler if encrypted.
        self.is_printable = self.is_modifiable = self.is_extractable = True
        try:
            pos = self.find_xref(parser)
            self.read_xref_from(parser, pos, self.xrefs)
        except PDFNoValidXRef:
            # Broken or missing xref: force the brute-force fallback scan.
            fallback = True
        if fallback:
            parser.fallback = True
            xref = PDFXRefFallback()
            xref.load(parser)
            self.xrefs.append(xref)
        for xref in self.xrefs:
            trailer = xref.get_trailer()
            if (not trailer):
                continue
            if ('Encrypt' in trailer):
                self.encryption = (list_value(trailer['ID']), dict_value(trailer['Encrypt']))
                self._initialize_password(password)
            if ('Info' in trailer):
                self.info.append(dict_value(trailer['Info']))
            if ('Root' in trailer):
                # Found the document catalog: stop at the first trailer with one.
                self.catalog = dict_value(trailer['Root'])
                break
        else:
            raise PDFSyntaxError('No /Root object! - Is this really a PDF?')
        if (self.catalog.get('Type') is not LITERAL_CATALOG):
            if STRICT:
                raise PDFSyntaxError('Catalog not found!')
        return

    def _initialize_password(self, password=b''):
        """Select a security handler from self.encryption and set up decryption.

        Raises PDFEncryptionError for unknown filters or algorithm versions.
        """
        (docid, param) = self.encryption
        if (literal_name(param.get('Filter')) != 'Standard'):
            raise PDFEncryptionError(('Unknown filter: param=%r' % param))
        v = int_value(param.get('V', 0))
        factory = self.security_handler_registry.get(v)
        if (factory is None):
            raise PDFEncryptionError(('Unknown algorithm: param=%r' % param))
        # The handler constructor verifies the password (raising on mismatch).
        handler = factory(docid, param, password)
        self.decipher = handler.decrypt
        self.is_printable = handler.is_printable()
        self.is_modifiable = handler.is_modifiable()
        self.is_extractable = handler.is_extractable()
        # Encryption dict successfully processed: no fallback scan needed.
        self._parser.fallback = False
        return

    def _getobj_objstm(self, stream, index, objid):
        """Fetch object number `index` out of an object stream (/Type /ObjStm)."""
        if (stream.objid in self._parsed_objs):
            (objs, n) = self._parsed_objs[stream.objid]
        else:
            (objs, n) = self._get_objects(stream)
            if self.caching:
                self._parsed_objs[stream.objid] = (objs, n)
        # The first 2*n parsed tokens are the (objid, offset) header pairs;
        # the actual objects follow them.
        i = ((n * 2) + index)
        try:
            obj = objs[i]
        except IndexError:
            raise PDFSyntaxError(('index too big: %r' % index))
        return obj

    def _get_objects(self, stream):
        """Parse all tokens out of an object stream's data.

        Returns (objs, n): the flat token list and the declared /N object
        count (0 when /N is missing and STRICT is off).
        """
        if (stream.get('Type') is not LITERAL_OBJSTM):
            if STRICT:
                raise PDFSyntaxError(('Not a stream object: %r' % stream))
        try:
            n = stream['N']
        except KeyError:
            if STRICT:
                raise PDFSyntaxError(('N is not defined: %r' % stream))
            n = 0
        parser = PDFStreamParser(stream.get_data())
        parser.set_document(self)
        objs = []
        try:
            while 1:
                (_, obj) = parser.nextobject()
                objs.append(obj)
        except PSEOF:
            pass  # end of stream data: everything consumed
        return (objs, n)

    KEYWORD_OBJ = KWD(b'obj')

    def _getobj_parse(self, pos, objid):
        """Parse the indirect object at byte offset `pos`, checking its object id."""
        self._parser.seek(pos)
        (_, objid1) = self._parser.nexttoken()
        if (objid1 != objid):
            raise PDFSyntaxError(('objid mismatch: %r=%r' % (objid1, objid)))
        (_, genno) = self._parser.nexttoken()
        (_, kwd) = self._parser.nexttoken()
        if (kwd is not self.KEYWORD_OBJ):
            raise PDFSyntaxError(('Invalid object spec: offset=%r' % pos))
        (_, obj) = self._parser.nextobject()
        return obj

    def getobj(self, objid):
        """Return the (decrypted) indirect object with the given id.

        Tries each xref in order; raises PDFObjectNotFound when none of them
        resolves the id, PDFException when the document was never initialized.
        """
        assert (objid != 0)
        if (not self.xrefs):
            raise PDFException('PDFDocument is not initialized')
        if self.debug:
            logging.debug(('getobj: objid=%r' % objid))
        if (objid in self._cached_objs):
            (obj, genno) = self._cached_objs[objid]
        else:
            for xref in self.xrefs:
                try:
                    (strmid, index, genno) = xref.get_pos(objid)
                except KeyError:
                    continue
                try:
                    if (strmid is not None):
                        # Object lives inside an object stream.
                        stream = stream_value(self.getobj(strmid))
                        obj = self._getobj_objstm(stream, index, objid)
                    else:
                        obj = self._getobj_parse(index, objid)
                        # Only directly-stored objects are deciphered here;
                        # object-stream contents come from already-decrypted data.
                        if self.decipher:
                            obj = decipher_all(self.decipher, objid, genno, obj)
                    if isinstance(obj, PDFStream):
                        obj.set_objid(objid, genno)
                    break
                except (PSEOF, PDFSyntaxError):
                    # This xref's position was bad; try the next one.
                    continue
            else:
                raise PDFObjectNotFound(objid)
            if self.debug:
                logging.debug(('register: objid=%r: %r' % (objid, obj)))
            if self.caching:
                self._cached_objs[objid] = (obj, genno)
        return obj

    def get_outlines(self):
        """Return a generator of (level, title, dest, action, se) outline entries.

        Raises PDFNoOutlines when the catalog has no /Outlines tree.
        """
        if ('Outlines' not in self.catalog):
            raise PDFNoOutlines

        def search(entry, level):
            # Depth-first walk of the outline tree: First/Last point at the
            # child list, Next at the following sibling.
            entry = dict_value(entry)
            if ('Title' in entry):
                if (('A' in entry) or ('Dest' in entry)):
                    title = decode_text(bytes_value(entry['Title']))
                    dest = entry.get('Dest')
                    action = entry.get('A')
                    se = entry.get('SE')
                    (yield (level, title, dest, action, se))
            if (('First' in entry) and ('Last' in entry)):
                for x in search(entry['First'], (level + 1)):
                    (yield x)
            if ('Next' in entry):
                for x in search(entry['Next'], level):
                    (yield x)
            return
        return search(self.catalog['Outlines'], 0)

    def lookup_name(self, cat, key):
        """Look up `key` in the catalog's name tree for category `cat` (e.g. 'Dests').

        Raises KeyError((cat, key)) when the key cannot be found.
        """
        try:
            names = dict_value(self.catalog['Names'])
        except (PDFTypeError, KeyError):
            raise KeyError((cat, key))
        d0 = dict_value(names[cat])

        def lookup(d):
            # Recursive descent over one name-tree node: /Limits prunes the
            # branch, /Names holds leaf [key, value, ...] pairs, /Kids holds
            # child nodes.
            if ('Limits' in d):
                (k1, k2) = list_value(d['Limits'])
                if ((key < k1) or (k2 < key)):
                    return None
            if ('Names' in d):
                objs = list_value(d['Names'])
                names = dict(choplist(2, objs))
                return names[key]
            if ('Kids' in d):
                for c in list_value(d['Kids']):
                    v = lookup(dict_value(c))
                    if v:
                        return v
            raise KeyError((cat, key))
        return lookup(d0)

    def get_dest(self, name):
        """Resolve a named destination: name tree first, then the legacy /Dests dict.

        Raises PDFDestinationNotFound when the name is absent from both.
        """
        try:
            obj = self.lookup_name('Dests', name)
        except KeyError:
            if ('Dests' not in self.catalog):
                raise PDFDestinationNotFound(name)
            d0 = dict_value(self.catalog['Dests'])
            if (name not in d0):
                raise PDFDestinationNotFound(name)
            obj = d0[name]
        return obj

    def find_xref(self, parser):
        """Scan backwards from EOF for 'startxref' and return the xref byte offset."""
        prev = None
        for line in parser.revreadlines():
            line = line.strip()
            if self.debug:
                logging.debug(('find_xref: %r' % line))
            if (line == b'startxref'):
                break
            if line:
                # Reading in reverse: the last non-blank line seen before
                # 'startxref' is the offset that follows it in the file.
                prev = line
        else:
            raise PDFNoValidXRef('Unexpected EOF')
        if self.debug:
            logging.info(('xref found: pos=%r' % prev))
        return int(prev)

    def read_xref_from(self, parser, start, xrefs):
        """Read the xref table or xref stream at offset `start` into `xrefs`,
        then recursively follow the trailer's /XRefStm and /Prev links."""
        parser.seek(start)
        parser.reset()
        try:
            (pos, token) = parser.nexttoken()
        except PSEOF:
            raise PDFNoValidXRef('Unexpected EOF')
        if self.debug:
            logging.info(('read_xref_from: start=%d, token=%r' % (start, token)))
        if isinstance(token, int):
            # An integer here is the object number of a cross-reference stream.
            parser.seek(pos)
            parser.reset()
            xref = PDFXRefStream()
            xref.load(parser)
        else:
            if (token is parser.KEYWORD_XREF):
                parser.nextline()
            xref = PDFXRef()
            xref.load(parser)
        xrefs.append(xref)
        trailer = xref.get_trailer()
        if self.debug:
            logging.info(('trailer: %r' % trailer))
        if ('XRefStm' in trailer):
            pos = int_value(trailer['XRefStm'])
            self.read_xref_from(parser, pos, xrefs)
        if ('Prev' in trailer):
            pos = int_value(trailer['Prev'])
            self.read_xref_from(parser, pos, xrefs)
        return
class OptionPlotoptionsArcdiagramPointEvents(Options):
    """Highcharts `plotOptions.arcdiagram.point.events` option group.

    Each event exposes a getter (returns the configured value, default None)
    and a setter (stores the handler; js_type=False means it is stored as a
    plain value, not raw JavaScript).

    NOTE(review): every getter/setter pair uses the same name; upstream these
    are decorated with @property / @<name>.setter, and the decorators appear
    stripped in this view — as written, each later def shadows the earlier one.
    Confirm against the generator before relying on getter behavior.
    """

    def click(self):
        return self._config_get(None)

    def click(self, value: Any):
        self._config(value, js_type=False)

    def drag(self):
        return self._config_get(None)

    def drag(self, value: Any):
        self._config(value, js_type=False)

    def dragStart(self):
        return self._config_get(None)

    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    def drop(self):
        return self._config_get(None)

    def drop(self, value: Any):
        self._config(value, js_type=False)

    def mouseOut(self):
        return self._config_get(None)

    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    def mouseOver(self):
        return self._config_get(None)

    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    def remove(self):
        return self._config_get(None)

    def remove(self, value: Any):
        self._config(value, js_type=False)

    def select(self):
        return self._config_get(None)

    def select(self, value: Any):
        self._config(value, js_type=False)

    def unselect(self):
        return self._config_get(None)

    def unselect(self, value: Any):
        self._config(value, js_type=False)

    def update(self):
        return self._config_get(None)

    def update(self, value: Any):
        self._config(value, js_type=False)
def main():
    """Ansible module entry point for the fortios firewall.ssh local-key resource.

    Builds the argument spec from the versioned schema, opens the httpapi
    connection, runs the schema version check, applies the configuration and
    exits with the result (or fails with FAIL_SOCKET_MSG / an error payload).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_ssh_local_key': {'required': False, 'type': 'dict', 'default': None, 'no_log': True, 'options': {}}}
    # Copy the schema-derived sub-options into the resource dict; the mkey
    # attribute ('name') is the resource identifier and must be required.
    for attribute_name in module_spec['options']:
        fields['firewall_ssh_local_key']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_ssh_local_key']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_ssh_local_key')
        (is_error, has_changed, result, diff) = fortios_firewall_ssh(module.params, fos, module.check_mode)
    else:
        # No persistent connection available: this module requires httpapi.
        module.fail_json(**FAIL_SOCKET_MSG)
    # Fixed typo in the user-facing warning: "FortOS" -> "FortiOS".
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortiOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
@pytest.mark.xfail(raises=NotImplementedError, reason='Interpolation of UFL expressions into mixed functions not supported')
def test_mixed_function_interpolation(parentmesh, vertexcoords, tfs):
    """Interpolate a mixed function (tensor x scalar) onto a vertex-only mesh
    and compare each component against a NumPy reference.

    Fixed: the decorator line was truncated to `.xfail(...)`; restored the full
    `@pytest.mark.xfail(...)` form so the module parses and the expected
    failure is actually registered with pytest.
    """
    if (parentmesh.name == 'immersedsphere'):
        vertexcoords = immersed_sphere_vertexcoords(parentmesh, vertexcoords)
    (tfs_fam, tfs_deg, tfs_typ) = tfs
    vm = VertexOnlyMesh(parentmesh, vertexcoords, missing_points_behaviour=None)
    # Use the coordinates as actually adopted by the vertex-only mesh.
    vertexcoords = vm.coordinates.dat.data_ro.reshape((- 1), parentmesh.geometric_dimension())
    V1 = tfs_typ(parentmesh, tfs_fam, tfs_deg)
    V2 = FunctionSpace(parentmesh, 'CG', 1)
    V = (V1 * V2)
    W1 = TensorFunctionSpace(vm, 'DG', 0)
    W2 = FunctionSpace(vm, 'DG', 0)
    W = (W1 * W2)
    x = SpatialCoordinate(parentmesh)
    v = Function(V)
    (v1, v2) = v.subfunctions
    # Tensor component: outer(x, x) at each vertex.
    expr1 = outer(x, x)
    assert (W1.shape == expr1.ufl_shape)
    interpolate(expr1, v1)
    result1 = np.asarray([np.outer(vertexcoords[i], vertexcoords[i]) for i in range(len(vertexcoords))])
    if (len(result1) == 0):
        # Keep the expected array shape consistent when no vertices were found.
        result1 = result1.reshape((vertexcoords.shape + (parentmesh.geometric_dimension(),)))
    # Scalar component: sum of the coordinate components.
    expr2 = reduce(add, SpatialCoordinate(parentmesh))
    interpolate(expr2, v2)
    result2 = np.sum(vertexcoords, axis=1)
    w_v = interpolate(v, W)
    (w_v1, w_v2) = w_v.subfunctions
    assert np.allclose(w_v1.dat.data_ro, result1)
    assert np.allclose(w_v2.dat.data_ro, result2)
    # Same check through the Interpolator API.
    A_w = Interpolator(TestFunction(V), W)
    w_v = Function(W)
    A_w.interpolate(v, output=w_v)
    (w_v1, w_v2) = w_v.subfunctions
    assert np.allclose(w_v1.dat.data_ro, result1)
    assert np.allclose(w_v2.dat.data_ro, result2)
def construct_data_date_range(provided_filters: dict) -> str:
    """Build a fiscal-year date-range label from the provided filters.

    FY 2020+ uses period-based labels ('FY2021P01', optionally '-Pnn' or
    '-Qn'); earlier (or missing) fiscal years use quarter-based labels
    ('FY2019Q1', optionally '-Qn').
    """
    fiscal_year = provided_filters.get('fy')
    period = provided_filters.get('period')
    quarter = provided_filters.get('quarter')
    if fiscal_year is not None and fiscal_year > 2019:
        # FY2020 itself still starts on a quarter label; later years on P01.
        label = f'FY{fiscal_year}Q1' if fiscal_year == 2020 else f'FY{fiscal_year}P01'
        if period is None:
            label += f'-Q{quarter}'
        elif period != 1:
            label += f'-P{period:0>2}'
    else:
        label = f'FY{fiscal_year}Q1'
        if quarter != 1:
            label += f'-Q{quarter}'
    return label
def create_cosmwasm_store_code_msg(contract_path: str, sender_address: Address) -> MsgStoreCode:
    """Build a CosmWasm MsgStoreCode for the contract at `contract_path`.

    The wasm binary is read from disk and gzip-compressed at maximum level (9)
    before being placed in the message, as expected by the chain.
    """
    with open(contract_path, 'rb') as contract_file:
        raw_wasm = contract_file.read()
    compressed_wasm = gzip.compress(raw_wasm, 9)
    return MsgStoreCode(sender=str(sender_address), wasm_byte_code=compressed_wasm)
class Huber():
    """Callable computing the mean Huber loss between predictions and targets.

    Quadratic for absolute errors up to `delta`, linear beyond it.
    """

    def __call__(self, y_pred: torch.Tensor, y_true: torch.Tensor, delta: float=1.0) -> torch.Tensor:
        """Return the scalar mean Huber loss; inputs must share one shape."""
        y_pred = convert_to_tensor(y_pred)
        y_true = convert_to_tensor(y_true)
        check_same_shape(y_pred, y_true)
        residual = torch.abs(y_pred - y_true)
        # Split each error into its clipped (quadratic) and excess (linear) parts.
        clipped = torch.clamp(residual, max=delta)
        excess = residual - clipped
        per_element = 0.5 * clipped.pow(2) + delta * excess
        return per_element.mean()
class OptionSeriesErrorbarAccessibility(Options):
    """Highcharts `series.errorbar.accessibility` option group.

    NOTE(review): the paired defs with identical names look like @property /
    @<name>.setter pairs whose decorators were stripped — as written the later
    def shadows the earlier one. Confirm against the generator.
    """

    def description(self):
        return self._config_get(None)

    def description(self, text: str):
        self._config(text, js_type=False)

    def descriptionFormat(self):
        return self._config_get(None)

    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def exposeAsGroupOnly(self):
        return self._config_get(None)

    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)

    # Sub-option groups are materialized lazily via _config_sub_data.
    def keyboardNavigation(self) -> 'OptionSeriesErrorbarAccessibilityKeyboardnavigation':
        return self._config_sub_data('keyboardNavigation', OptionSeriesErrorbarAccessibilityKeyboardnavigation)

    def point(self) -> 'OptionSeriesErrorbarAccessibilityPoint':
        return self._config_sub_data('point', OptionSeriesErrorbarAccessibilityPoint)
class SetIpv6DSCP_NonZeroECNandFlabel(BaseModifyPacketTest):
    """OpenFlow set-field test: rewrite the IPv6 DSCP while the ECN bits and
    flow label are non-zero, verifying only the DSCP bits change."""

    def runTest(self):
        actions = [ofp.action.set_field(ofp.oxm.ip_dscp(1))]
        # Traffic class packs DSCP<<2 | ECN: tc=17 is DSCP 4, ECN 1.
        # Setting DSCP=1 must yield tc = (1 << 2) | 1 = 5, flow label untouched.
        pkt = simple_tcpv6_packet(ipv6_tc=17, ipv6_fl=10)
        exp_pkt = simple_tcpv6_packet(ipv6_tc=5, ipv6_fl=10)
        self.verify_modify(actions, pkt, exp_pkt)
class TestColumnValueMedian(BaseFeatureDataQualityMetricsTest):
    """Data-quality test: the column's median (p50) must approximately match
    the reference data's median (within the `approx` relative tolerance)."""

    name: ClassVar = 'Median Value'

    def get_stat(self, current: NumericCharacteristics):
        # The statistic under test is the 50th percentile.
        return current.p50

    def get_condition_from_reference(self, reference: Optional[ColumnCharacteristics]) -> TestValueCondition:
        """Build the pass condition from reference stats; raises ValueError when
        the reference is missing or not numeric/bool."""
        if (reference is not None):
            if (not isinstance(reference, NumericCharacteristics)):
                raise ValueError(f'{self.column_name} should be numerical or bool')
            # Accept medians within a 0.1 relative tolerance of the reference.
            return TestValueCondition(eq=approx(reference.p50, 0.1))
        raise ValueError('Neither required test parameters nor reference data has been provided.')

    def calculate_value_for_test(self) -> Optional[Numeric]:
        """Return the current data's median, validating the characteristics type."""
        features_stats = self.metric.get_result().current_characteristics
        if (not isinstance(features_stats, NumericCharacteristics)):
            raise ValueError(f'{self.column_name} should be numerical or bool')
        return features_stats.p50

    def get_description(self, value: Numeric) -> str:
        """Human-readable summary shown in the test report."""
        return f'The median value of the column **{self.column_name}** is {value:.3g}. The test threshold is {self.get_condition()}.'
class MySQLSchema(ConnectionConfigSecretsSchema):
    """Pydantic schema describing the secrets needed for a MySQL connection."""

    host: str = Field(title='Host', description='The hostname or IP address of the server where the database is running.')
    port: int = Field(3306, title='Port', description='The network port number on which the server is listening for incoming connections (default: 3306).')
    username: Optional[str] = Field(None, title='Username', description='The user account used to authenticate and access the database.')
    # Marked sensitive so the value is masked in API responses/logs.
    password: Optional[str] = Field(None, title='Password', description='The password used to authenticate and access the database.', sensitive=True)
    dbname: str = Field(description='The name of the specific database within the database server that you want to connect to.', title='Database')
    ssh_required: bool = Field(False, title='SSH required', description='Indicates whether an SSH tunnel is required for the connection. Enable this option if your MySQL server is behind a firewall and requires SSH tunneling for remote connections.')
    # Fields that must be supplied for the connection config to be considered complete.
    _required_components: List[str] = ['host', 'dbname']
class EnvironmentVarGuard(MutableMapping):
    """Mapping/context manager over os.environ that records every change and
    restores the original values on exit."""

    def __init__(self):
        self._environ = os.environ
        # envvar -> value before the first modification (None if it was unset).
        self._changed = {}

    def __getitem__(self, envvar):
        return self._environ[envvar]

    def __setitem__(self, envvar, value):
        # Record the pre-existing value only the first time a key is touched.
        self._changed.setdefault(envvar, self._environ.get(envvar))
        self._environ[envvar] = value

    def __delitem__(self, envvar):
        self._changed.setdefault(envvar, self._environ.get(envvar))
        # Deleting a missing variable is a no-op, matching the guard's contract.
        self._environ.pop(envvar, None)

    def keys(self):
        return self._environ.keys()

    def __iter__(self):
        return iter(self._environ)

    def __len__(self):
        return len(self._environ)

    def set(self, envvar, value):
        """Convenience alias for item assignment."""
        self[envvar] = value

    def unset(self, envvar):
        """Convenience alias for item deletion."""
        del self[envvar]

    def copy(self):
        """Return a flat dict snapshot of the current environment."""
        return dict(self)

    def __enter__(self):
        return self

    def __exit__(self, *ignore_exc):
        # Put every recorded variable back to its pre-guard state.
        for envvar, original in self._changed.items():
            if original is None:
                self._environ.pop(envvar, None)
            else:
                self._environ[envvar] = original
        os.environ = self._environ
def get_pins():
    """Return the board's pin table: 32 GPIOs with ADC, active-low and 3.3V
    function annotations applied."""
    pins = pinutils.generate_pins(0, 31)
    # ADC1 channels 0-7 map onto these GPIOs in order.
    for channel, pin_name in enumerate(('PD2', 'PD3', 'PD4', 'PD5', 'PD28', 'PD29', 'PD30', 'PD31')):
        pinutils.findpin(pins, pin_name, True)['functions']['ADC1_IN%d' % channel] = 0
    # These two pins are active-low.
    for pin_name in ('PD16', 'PD19'):
        pinutils.findpin(pins, pin_name, True)['functions']['NEGATED'] = 0
    # Every pin runs at 3.3V.
    for pin in pins:
        pin['functions']['3.3'] = 0
    return pins
def genotype(candidates, bam, type, options):
    """Assign genotypes to SV candidates by counting reference-supporting reads.

    For each candidate above the minimum score, fetches alignments around the
    variant locus from `bam`, counts reads that support the reference allele
    (vs. the candidate's own supporting reads), and sets the candidate's
    support_fraction, genotype ('1/1', '0/1', '0/0' or './.'), ref_reads and
    alt_reads in place.

    candidates: list of SV candidate objects (mutated in place).
    bam: pysam-style alignment file.
    type: SV type string ('INS', 'DUP_INT', 'DEL', 'INV', ...).
    options: namespace with minimum_score, min_mapq, minimum_depth,
        homozygous_threshold and heterozygous_threshold.
    """
    num_candidates = len(candidates)
    for (nr, candidate) in enumerate(candidates):
        # Progress logging every 10k candidates.
        if (((nr + 1) % 10000) == 0):
            logging.info('Processed {0} of {1} candidates'.format((nr + 1), num_candidates))
        if (candidate.score < options.minimum_score):
            continue
        if ((type == 'INS') or (type == 'DUP_INT')):
            # Insertions are genotyped at their destination point (zero-length span).
            (contig, start, end) = candidate.get_destination()
            end = start
        else:
            (contig, start, end) = candidate.get_source()
        contig_length = bam.get_reference_length(contig)
        # Look at alignments within 1kb of the variant, clamped to the contig.
        alignment_it = bam.fetch(contig=contig, start=max(0, (start - 1000)), stop=min(contig_length, (end + 1000)))
        reads_supporting_variant = set([sig.read for sig in candidate.members])
        reads_supporting_reference = set()
        aln_no = 0
        # Examine at most 500 usable alignments to bound runtime.
        while (aln_no < 500):
            try:
                current_alignment = next(alignment_it)
            except StopIteration:
                break
            if (current_alignment.query_name in reads_supporting_variant):
                continue
            if (current_alignment.is_unmapped or current_alignment.is_secondary or (current_alignment.mapping_quality < options.min_mapq)):
                continue
            aln_no += 1
            if ((type == 'DEL') or (type == 'INV')):
                # A read supports the reference if it spans one breakpoint with
                # enough overlap into the variant interval.
                minimum_overlap = min(((end - start) / 2), 2000)
                if (((current_alignment.reference_start < (end - minimum_overlap)) and (current_alignment.reference_end > (end + 100))) or ((current_alignment.reference_start < (start - 100)) and (current_alignment.reference_end > (start + minimum_overlap)))):
                    reads_supporting_reference.add(current_alignment.query_name)
            if ((type == 'INS') or (type == 'DUP_INT')):
                # For insertions: any read cleanly spanning the insertion point.
                if ((current_alignment.reference_start < (start - 100)) and (current_alignment.reference_end > (end + 100))):
                    reads_supporting_reference.add(current_alignment.query_name)
        if ((len(reads_supporting_variant) + len(reads_supporting_reference)) >= options.minimum_depth):
            candidate.support_fraction = (len(reads_supporting_variant) / (len(reads_supporting_variant) + len(reads_supporting_reference)))
            # Thresholds partition the support fraction into hom-alt / het / hom-ref.
            if (candidate.support_fraction >= options.homozygous_threshold):
                candidate.genotype = '1/1'
            elif ((candidate.support_fraction >= options.heterozygous_threshold) and (candidate.support_fraction < options.homozygous_threshold)):
                candidate.genotype = '0/1'
            elif (candidate.support_fraction < options.heterozygous_threshold):
                candidate.genotype = '0/0'
            else:
                candidate.genotype = './.'
        elif ((len(reads_supporting_variant) + len(reads_supporting_reference)) > 0):
            # Some coverage, but below the minimum depth: fraction known, genotype not callable.
            candidate.support_fraction = (len(reads_supporting_variant) / (len(reads_supporting_variant) + len(reads_supporting_reference)))
            candidate.genotype = './.'
        else:
            # No usable coverage at all.
            candidate.support_fraction = '.'
            candidate.genotype = './.'
        candidate.ref_reads = len(reads_supporting_reference)
        candidate.alt_reads = len(reads_supporting_variant)
def extractNekoyashiki(item):
    """Map a Nekoyashiki feed item to a release message based on its tags.

    Returns None for previews or items without a volume/chapter number, a
    release message for recognized series tags, or False when nothing matches.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    # Skip items that carry no chapter/volume info or are preview posts.
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    if ('rakudai kishi no eiyuutan' in item['tags']):
        return buildReleaseMessageWithType(item, 'Rakudai Kishi no Eiyuutan', vol, chp, frag=frag, postfix=postfix)
    # Both tag spellings ('was'/'wa') map onto the same series title.
    if (('Ore no Pet was Seijo-sama' in item['tags']) or ('Ore no Pet wa Seijo-sama' in item['tags'])):
        return buildReleaseMessageWithType(item, 'Ore no Pet was Seijo-sama', vol, chp, frag=frag, postfix=postfix)
    # OEL (original English-language) title: mark with tl_type='oel'.
    if ('M-chan wars' in item['tags']):
        return buildReleaseMessageWithType(item, 'M-chan Wars: Rise and Fall of the Cat Tyrant', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    if (('Etranger of the Sky' in item['tags']) or ('Tenkyuu no Etranger' in item['tags'])):
        return buildReleaseMessageWithType(item, 'Spear of Thunder Etranger of the Sky', vol, chp, frag=frag, postfix=postfix)
    if ('Yamato Nadeshiko' in item['tags']):
        return buildReleaseMessageWithType(item, 'Yamato Nadeshiko, Koibana no Gotoku', vol, chp, frag=frag, postfix=postfix)
    if ('Youhei Monogatari' in item['tags']):
        return buildReleaseMessageWithType(item, 'Youhei Monogatari ~Junsuinaru Hangyakusha (Rebellion)~', vol, chp, frag=frag, postfix=postfix)
    if ('Qualidea Code' in item['tags']):
        return buildReleaseMessageWithType(item, 'Qualidea Code', vol, chp, frag=frag, postfix=postfix)
    if ('The Brander Female Fencer' in item['tags']):
        return buildReleaseMessageWithType(item, 'The Brander Female Fencer', vol, chp, frag=frag, postfix=postfix)
    if ('The Elf is a Freeloader' in item['tags']):
        return buildReleaseMessageWithType(item, 'The Elf is a Freeloader', vol, chp, frag=frag, postfix=postfix)
    return False
class TestScheduler(unittest.TestCase):
    """Tests for the scheduler_fn.on_schedule decorator and its HTTP wrapper."""

    def test_on_schedule_decorator(self):
        """The decorator must attach a __firebase_endpoint__ with the schedule
        string and timezone it was configured with."""
        schedule = '* * * * *'
        tz = 'America/Los_Angeles'
        example_func = Mock(__name__='example_func')
        decorated_func = scheduler_fn.on_schedule(schedule='* * * * *', timezone=scheduler_fn.Timezone(tz))(example_func)
        endpoint = getattr(decorated_func, '__firebase_endpoint__')
        self.assertIsNotNone(endpoint)
        self.assertIsNotNone(endpoint.scheduleTrigger)
        self.assertEqual(endpoint.scheduleTrigger.get('schedule'), schedule)
        self.assertEqual(endpoint.scheduleTrigger.get('timeZone'), tz)

    def test_on_schedule_call(self):
        """Invoking the decorated function with Cloud Scheduler headers must
        return 200 and pass a parsed ScheduledEvent to the wrapped function."""
        with Flask(__name__).test_request_context('/'):
            environ = EnvironBuilder(headers={'X-CloudScheduler-JobName': 'example-job', 'X-CloudScheduler-ScheduleTime': '2023-04-13T12:00:00-07:00'}).get_environ()
            mock_request = Request(environ)
            example_func = Mock(__name__='example_func')
            decorated_func = scheduler_fn.on_schedule(schedule='* * * * *')(example_func)
            response = decorated_func(mock_request)
            self.assertEqual(response.status_code, 200)
            # The -07:00 offset must round-trip into the expected tz-aware datetime.
            example_func.assert_called_once_with(scheduler_fn.ScheduledEvent(job_name='example-job', schedule_time=datetime(2023, 4, 13, 12, 0, tzinfo=scheduler_fn.Timezone('America/Los_Angeles'))))

    def test_on_schedule_call_with_no_headers(self):
        """Without scheduler headers the call still succeeds: job_name is None
        and schedule_time is defaulted."""
        with Flask(__name__).test_request_context('/'):
            environ = EnvironBuilder().get_environ()
            mock_request = Request(environ)
            example_func = Mock(__name__='example_func')
            decorated_func = scheduler_fn.on_schedule(schedule='* * * * *')(example_func)
            response = decorated_func(mock_request)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(example_func.call_count, 1)
            self.assertIsNone(example_func.call_args[0][0].job_name)
            self.assertIsNotNone(example_func.call_args[0][0].schedule_time)

    def test_on_schedule_call_with_exception(self):
        """An exception raised by the wrapped function must surface as a 500
        response whose body is the exception message."""
        with Flask(__name__).test_request_context('/'):
            environ = EnvironBuilder(headers={'X-CloudScheduler-JobName': 'example-job', 'X-CloudScheduler-ScheduleTime': '2023-04-13T12:00:00-07:00'}).get_environ()
            mock_request = Request(environ)
            example_func = Mock(__name__='example_func', side_effect=Exception('Test exception'))
            decorated_func = scheduler_fn.on_schedule(schedule='* * * * *')(example_func)
            response = decorated_func(mock_request)
            self.assertEqual(response.status_code, 500)
            self.assertEqual(response.data, b'Test exception')
class HomeListing(AbstractCrudObject):
    """Graph API HomeListing node (auto-generated FB Business SDK CRUD object).

    NOTE(review): this class continues beyond the visible chunk; only the
    portions shown here are documented.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        # Marker attribute used by SDK internals to identify this node type.
        self._isHomeListing = True
        super(HomeListing, self).__init__(fbid, parent_id, api)
class Field(AbstractObject.Field):
    """Graph API field names available on a HomeListing node (name -> API key)."""
    ac_type = 'ac_type'
    additional_fees_description = 'additional_fees_description'
    address = 'address'
    agent_company = 'agent_company'
    agent_email = 'agent_email'
    agent_fb_page_id = 'agent_fb_page_id'
    agent_name = 'agent_name'
    agent_phone = 'agent_phone'
    applinks = 'applinks'
    area_size = 'area_size'
    area_unit = 'area_unit'
    availability = 'availability'
    category_specific_fields = 'category_specific_fields'
    co_2_emission_rating_eu = 'co_2_emission_rating_eu'
    currency = 'currency'
    days_on_market = 'days_on_market'
    description = 'description'
    energy_rating_eu = 'energy_rating_eu'
    furnish_type = 'furnish_type'
    group_id = 'group_id'
    heating_type = 'heating_type'
    home_listing_id = 'home_listing_id'
    id = 'id'
    image_fetch_status = 'image_fetch_status'
    images = 'images'
    laundry_type = 'laundry_type'
    listing_type = 'listing_type'
    max_currency = 'max_currency'
    max_price = 'max_price'
    min_currency = 'min_currency'
    min_price = 'min_price'
    name = 'name'
    num_baths = 'num_baths'
    num_beds = 'num_beds'
    num_rooms = 'num_rooms'
    num_units = 'num_units'
    parking_type = 'parking_type'
    partner_verification = 'partner_verification'
    pet_policy = 'pet_policy'
    price = 'price'
    property_type = 'property_type'
    sanitized_images = 'sanitized_images'
    unit_price = 'unit_price'
    url = 'url'
    visibility = 'visibility'
    year_built = 'year_built'
class ImageFetchStatus():
    """Enum-style constants for the image_fetch_status field."""
    direct_upload = 'DIRECT_UPLOAD'
    fetched = 'FETCHED'
    fetch_failed = 'FETCH_FAILED'
    no_status = 'NO_STATUS'
    outdated = 'OUTDATED'
    partial_fetch = 'PARTIAL_FETCH'
class Visibility():
    """Enum-style constants for the visibility field."""
    published = 'PUBLISHED'
    staging = 'STAGING'
def get_endpoint(cls):
    # Graph API edge name under the parent catalog.
    # NOTE(review): takes `cls` — upstream this is a @classmethod; the
    # decorator appears stripped in this view.
    return 'home_listings'
def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Create a home listing by delegating to the parent ProductCatalog edge."""
    from facebook_business.adobjects.productcatalog import ProductCatalog
    return ProductCatalog(api=self._api, fbid=parent_id).create_home_listing(fields, params, batch, success, failure, pending)
def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a DELETE on this node; returns the request (batch/pending) or its result."""
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        # Queue on the batch; callbacks fire when the batch executes.
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        # Caller will execute the request later.
        return request
    else:
        self.assure_call()
        return request.execute()
def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a GET on this node; returns the request (batch/pending) or the fetched node."""
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=HomeListing, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """Issue a POST update on this node; returns the request (batch/pending) or its result."""
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    # Writable params and their expected Graph API types (checked client-side).
    param_types = {'address': 'Object', 'availability': 'string', 'currency': 'string', 'description': 'string', 'images': 'list<Object>', 'listing_type': 'string', 'name': 'string', 'num_baths': 'float', 'num_beds': 'float', 'num_units': 'float', 'price': 'float', 'property_type': 'string', 'url': 'string', 'year_built': 'unsigned int'}
    enums = {}
    request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=HomeListing, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def get_augmented_realities_metadata(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /augmented_realities_metadata edge of this listing."""
    from facebook_business.utils import api_utils
    if ((batch is None) and ((success is not None) or (failure is not None))):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    param_types = {}
    enums = {}
    request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/augmented_realities_metadata', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
    request.add_params(params)
    request.add_fields(fields)
    if (batch is not None):
        request.add_to_batch(batch, success=success, failure=failure)
        return request
    elif pending:
        return request
    else:
        self.assure_call()
        return request.execute()
def get_channels_to_integrity_status(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /channels_to_integrity_status edge of this HomeListing.

    Returns the FacebookRequest when batched or pending, otherwise the
    executed response parsed as CatalogItemChannelsToIntegrityStatus items.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.catalogitemchannelstointegritystatus import CatalogItemChannelsToIntegrityStatus
    edge_request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/channels_to_integrity_status',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=CatalogItemChannelsToIntegrityStatus,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=CatalogItemChannelsToIntegrityStatus, api=self._api),
    )
    edge_request.add_params(params)
    edge_request.add_fields(fields)
    if batch is not None:
        edge_request.add_to_batch(batch, success=success, failure=failure)
        return edge_request
    if pending:
        return edge_request
    self.assure_call()
    return edge_request.execute()
def get_videos_metadata(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
    """GET the /videos_metadata edge of this HomeListing.

    Returns the FacebookRequest when batched or pending, otherwise the
    executed response parsed as DynamicVideoMetadata items.
    """
    from facebook_business.utils import api_utils
    if batch is None and (success is not None or failure is not None):
        api_utils.warning('`success` and `failure` callback only work for batch call.')
    from facebook_business.adobjects.dynamicvideometadata import DynamicVideoMetadata
    edge_request = FacebookRequest(
        node_id=self['id'],
        method='GET',
        endpoint='/videos_metadata',
        api=self._api,
        param_checker=TypeChecker({}, {}),
        target_class=DynamicVideoMetadata,
        api_type='EDGE',
        response_parser=ObjectParser(target_class=DynamicVideoMetadata, api=self._api),
    )
    edge_request.add_params(params)
    edge_request.add_fields(fields)
    if batch is not None:
        edge_request.add_to_batch(batch, success=success, failure=failure)
        return edge_request
    if pending:
        return edge_request
    self.assure_call()
    return edge_request.execute()
# Maps every HomeListing field name to its Graph API value type; consumed by
# the SDK's TypeChecker when validating requests/responses for this node.
_field_types = {'ac_type': 'string', 'additional_fees_description': 'string', 'address': 'Object', 'agent_company': 'string', 'agent_email': 'string', 'agent_fb_page_id': 'Page', 'agent_name': 'string', 'agent_phone': 'string', 'applinks': 'CatalogItemAppLinks', 'area_size': 'unsigned int', 'area_unit': 'string', 'availability': 'string', 'category_specific_fields': 'CatalogSubVerticalList', 'co_2_emission_rating_eu': 'Object', 'currency': 'string', 'days_on_market': 'unsigned int', 'description': 'string', 'energy_rating_eu': 'Object', 'furnish_type': 'string', 'group_id': 'string', 'heating_type': 'string', 'home_listing_id': 'string', 'id': 'string', 'image_fetch_status': 'ImageFetchStatus', 'images': 'list<string>', 'laundry_type': 'string', 'listing_type': 'string', 'max_currency': 'string', 'max_price': 'string', 'min_currency': 'string', 'min_price': 'string', 'name': 'string', 'num_baths': 'float', 'num_beds': 'float', 'num_rooms': 'float', 'num_units': 'unsigned int', 'parking_type': 'string', 'partner_verification': 'string', 'pet_policy': 'string', 'price': 'string', 'property_type': 'string', 'sanitized_images': 'list<string>', 'unit_price': 'Object', 'url': 'string', 'visibility': 'Visibility', 'year_built': 'int'}
def _get_field_enum_info(cls):
    """Return the allowed values for each enum-typed HomeListing field."""
    return {
        'ImageFetchStatus': HomeListing.ImageFetchStatus.__dict__.values(),
        'Visibility': HomeListing.Visibility.__dict__.values(),
    }
def create_solver(solver_parameters, *, pmat=None, appctx=None):
    """Build a NonlinearVariationalSolver for the residual F with solution w.

    Defaults to a single linear solve (snes_type=ksponly) with ksp_rtol=1e-7
    unless overridden by *solver_parameters*.  *pmat* supplies a separate
    preconditioning Jacobian (Jp); *appctx* is forwarded to the solver.
    NOTE(review): F, w, bcs and nullspace are free variables resolved from
    the enclosing module scope — confirm they are defined before calling.
    """
    opts = dict(solver_parameters) if solver_parameters is not None else {}
    opts.setdefault('snes_type', 'ksponly')
    opts.setdefault('ksp_rtol', 1e-07)
    problem = NonlinearVariationalProblem(F, w, bcs=bcs, Jp=pmat)
    return NonlinearVariationalSolver(problem, nullspace=nullspace,
                                      options_prefix='', solver_parameters=opts,
                                      appctx=appctx)
@pytest.mark.skipcomplex
def test_p_independence_hdiv(mesh):
    """Iteration counts of the FDM-star solvers must stay bounded as the
    polynomial degree grows (p-independence) for the H(div) Riesz map.

    NOTE(review): the source contained a bare '.skipcomplex' line — restored
    here as the '@pytest.mark.skipcomplex' decorator it evidently was;
    confirm against the upstream test module.
    """
    # Raviart-Thomas-type family matching the mesh dimension.
    family = 'NCF' if mesh.topological_dimension() == 3 else 'RTCF'
    expected = [6, 6]
    solvers = [fdmstar, facetstar]
    for degree in range(3, 6):
        element = FiniteElement(family, cell=mesh.ufl_cell(), degree=degree, variant='fdm')
        V = FunctionSpace(mesh, element)
        problem = build_riesz_map(V, div)
        for sp, expected_it in zip(solvers, expected):
            assert solve_riesz_map(problem, sp) <= expected_it
def _create_plot_component():
    """Build the Chaco demo component: a colormapped scatter plot with
    pan/zoom tools, a selection overlay, and a colorbar, side by side
    in an HPlotContainer.
    """
    # Random demo data: sorted x, uniform y, integer color bins 0-6.
    numpts = 1000
    x = sort(random(numpts))
    y = random(numpts)
    color = randint(0, 7, numpts)
    pd = ArrayPlotData()
    pd.set_data('index', x)
    pd.set_data('value', y)
    pd.set_data('color', color)
    plot = Plot(pd)
    # 'accent' is presumably a Chaco colormap factory — confirm import site.
    plot.plot(('index', 'value', 'color'), type='cmap_scatter', name='my_plot', color_mapper=accent, marker='square', fill_alpha=0.5, marker_size=6, outline_color='black', border_visible=True, bgcolor='white')
    plot.title = 'Colormapped Scatter Plot with Range-selectable Data Points'
    plot.padding = 50
    plot.x_grid.visible = False
    plot.y_grid.visible = False
    plot.x_axis.font = 'sans-serif 16'
    plot.y_axis.font = 'sans-serif 16'
    # The renderer created by plot() above, needed for overlays/colorbar.
    cmap_renderer = plot.plots['my_plot'][0]
    plot.tools.append(PanTool(plot, constrain_key='shift'))
    zoom = ZoomTool(component=plot, tool_mode='box', always_on=False)
    plot.overlays.append(zoom)
    # Fades unselected points when a color range is selected on the colorbar.
    selection = ColormappedSelectionOverlay(cmap_renderer, fade_alpha=0.35, selection_type='mask')
    cmap_renderer.overlays.append(selection)
    colorbar = create_colorbar(plot.color_mapper)
    colorbar.plot = cmap_renderer
    # Align colorbar vertically with the plot area.
    colorbar.padding_top = plot.padding_top
    colorbar.padding_bottom = plot.padding_bottom
    container = HPlotContainer(use_backbuffer=True)
    container.add(plot)
    container.add(colorbar)
    container.bgcolor = 'lightgray'
    return container
def url_is_alive(url):
    """Best-effort liveness check: send an HTTP HEAD request to *url*.

    Returns True if the server answered the HEAD request without error,
    False on any failure (the traceback is logged via xbmc). The browser
    User-Agent works around servers that reject unknown clients.
    """
    try:
        # Request(method=...) replaces the get_method lambda hack (Py >= 3.3).
        req = request.Request(url, method='HEAD')
        req.add_header('User-Agent', 'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)')
        # Context manager closes the response; the original leaked it.
        with request.urlopen(req, timeout=10):
            return True
    except Exception:
        # Narrowed from a bare except: no longer swallows KeyboardInterrupt/SystemExit.
        xbmc.log(msg=format_exc(), level=xbmc.LOGERROR)
        return False
def main():
    """Ansible module entry point for the FortiOS gtp_rat_timeout_profile resource.

    Builds the argument spec from the versioned schema, talks to the device
    over the httpapi connection, applies the present/absent state, and exits
    with changed/diff information (warning on schema/version mismatches).
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # Primary key of the resource; its option is forced to required below.
    mkeyname = 'name'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'gtp_rat_timeout_profile': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy every schema-derived option under the resource dict.
    for attribute_name in module_spec['options']:
        fields['gtp_rat_timeout_profile']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['gtp_rat_timeout_profile']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=False)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    # A socket path means we are running over the persistent httpapi connection.
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'gtp_rat_timeout_profile')
        (is_error, has_changed, result, diff) = fortios_gtp(module.params, fos)
    else:
        module.fail_json(**FAIL_SOCKET_MSG)
    # Version mismatches are reported as warnings, not hard failures.
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
@pytest.mark.parametrize('input_features', [None, variables_arr, np.array(variables_arr), variables_str, np.array(variables_str), variables_user])
def test_with_pipe_and_skl_transformer_input_array(df_vartypes, input_features):
    """get_feature_names_out on a sklearn Pipeline fed a numpy array: falls
    back to the generated names (variables_arr) when input_features is None,
    otherwise echoes the supplied names.

    NOTE(review): the source contained a bare '.parametrize(...)' line —
    restored as '@pytest.mark.parametrize'; confirm against upstream.
    """
    pipe = Pipeline([('imputer', SimpleImputer(strategy='constant')), ('transformer', MockTransformer())])
    pipe.fit(df_vartypes.to_numpy())
    if input_features is None:
        assert pipe.get_feature_names_out(input_features=input_features) == variables_arr
    else:
        assert pipe.get_feature_names_out(input_features=input_features) == list(input_features)
def extractEndlessazureHomeBlog(item):
    """Parse a feed item from the endlessazure blog into a release message.

    Returns None for previews or items without a volume/chapter number,
    False when no known tag matched, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp,
                                               frag=frag, postfix=postfix,
                                               tl_type=tl_type)
    return False
def tunl(alph1: float, alph2: float, U: float, strict: bool=False):
    """Thermally averaged tunnelling factor via 6-point Gauss-Legendre quadrature.

    `alph1`/`alph2` are the two well parameters and `U` the barrier parameter.
    With `strict=True` the inputs are asserted to lie in the model's validity
    range (note: assertions vanish under ``python -O``).
    NOTE(review): the formula and magic constants (0.014, 3.2, ...) are
    reproduced verbatim from the original — confirm against the model source.
    """
    if strict:
        pair = np.array((alph1, alph2))
        assert (0.5 <= pair).all() and (pair <= 20).all() and (0 < U <= 16)
        if (pair >= 8).all():
            assert U <= 12
        if (pair >= 16).all():
            assert U <= 10
    # 6-point Gauss-Legendre abscissae and weights on [-1, 1].
    nodes = np.array((-0.9324695, -0.6612094, -0.2386192, 0.2386192, 0.6612094, 0.9324695))
    weights = np.array((0.1713245, 0.3607616, 0.4679139, 0.4679139, 0.3607616, 0.1713245))
    pi = np.pi
    u_over_2pi = U / (2 * pi)
    barrier = (0.125 * pi) * U * ((1 / np.sqrt(alph1)) + (1.0 / np.sqrt(alph2))) ** 2
    v1 = u_over_2pi * alph1
    v2 = u_over_2pi * alph2
    disc = (4 * alph1) * alph2 - pi ** 2
    # Oscillatory vs. hyperbolic regime depending on the discriminant sign.
    cosh_term = np.cosh(np.sqrt(disc)) if disc >= 0 else np.cos(np.sqrt(-disc))
    # Integration window [e_lo, e_hi], capped at 3.2.
    e_lo = -min(v1, v2)
    e_hi = min(barrier * (np.log((2.0 * (1.0 + cosh_term)) / 0.014) / (2 * pi)) ** 2 - (v1 + v2) / 2, 3.2)
    half_range = (e_hi - e_lo) / 2
    midpoint = (e_hi + e_lo) / 2
    energies = half_range * nodes + midpoint
    act1 = pi * np.sqrt((energies + v1) / barrier)
    act2 = pi * np.sqrt((energies + v2) / barrier)
    cosh_diff = np.cosh(act1 - act2)
    cosh_sum = np.cosh(act1 + act2)
    quad = ((weights * np.exp(-energies)) * (cosh_sum - cosh_diff) / (cosh_sum + cosh_term)).sum()
    return half_range * quad + np.exp(-e_hi)
class TestGetRuleTargets:
    """Endpoint tests for listing the targets of a policy rule.

    NOTE(review): the source contained bare "(scope='function')" and
    '.usefixtures(...)' lines — restored as '@pytest.fixture' /
    '@pytest.mark.usefixtures' decorators; confirm against upstream.
    """

    @pytest.fixture(scope='function')
    def rule(self, policy: Policy) -> Rule:
        """First access-type rule of the policy under test."""
        return policy.get_rules_for_action(ActionType.access.value)[0]

    @pytest.fixture(scope='function')
    def rule_target(self, db, policy: Policy, rule: Rule) -> RuleTarget:
        """The single RuleTarget attached to the rule."""
        rule_targets: List[RuleTarget] = RuleTarget.filter(db=db, conditions=(RuleTarget.rule_id == rule.id)).all()
        assert (len(rule_targets) == 1)
        return rule_targets[0]

    @pytest.fixture(scope='function')
    def url(self, policy: Policy, rule: Rule) -> str:
        """URL of the rule-target list endpoint for this policy/rule."""
        return (V1_URL_PREFIX + RULE_TARGET_LIST.format(policy_key=policy.key, rule_key=rule.key))

    def test_get_rule_targets_unauthenticated(self, url, api_client):
        """Unauthenticated requests are rejected with 401."""
        resp = api_client.get(url)
        assert (resp.status_code == 401)

    def test_get_rule_targets_wrong_scope(self, url, api_client: TestClient, generate_auth_header):
        """A token without the rule-read scope gets 403."""
        auth_header = generate_auth_header(scopes=[scopes.POLICY_READ])
        resp = api_client.get(url, headers=auth_header)
        assert (resp.status_code == 403)

    @pytest.mark.usefixtures('policy_drp_action')
    def test_get_rule_targets(self, db, api_client: TestClient, generate_auth_header, rule_target: RuleTarget, url):
        """Only the targets of the addressed rule are returned, even when
        other policies/rules/targets exist in the database."""
        all_policies = Policy.query(db=db).all()
        assert (len(all_policies) > 1)
        all_rules = Rule.query(db=db).all()
        assert (len(all_rules) > 1)
        all_rule_targets = RuleTarget.query(db=db).all()
        assert (len(all_rule_targets) > 1)
        auth_header = generate_auth_header(scopes=[scopes.RULE_READ])
        resp = api_client.get(url, headers=auth_header)
        assert (resp.status_code == 200)
        data = resp.json()
        assert ('items' in data)
        assert (data['total'] == 1)
        rule_target_data = data['items'][0]
        assert (rule_target_data['name'] == rule_target.name)
        assert (rule_target_data['key'] == rule_target.key)
        assert (rule_target_data['data_category'] == rule_target.data_category)
def _apply_response_formatters(method: RPCEndpoint, result_formatters: Formatters, error_formatters: Formatters, response: RPCResponse) -> RPCResponse:
    """Apply the method's result/error formatter to a JSON-RPC response.

    Handles three shapes: plain results, subscription notifications (result
    nested under ``params``), and error responses. Responses with no matching
    formatter are returned unchanged.
    """
    def _format_response(response_type: Literal['result', 'error', 'params'], method_response_formatter: Callable[..., Any]) -> RPCResponse:
        if response_type == 'params':
            # Subscription notification: the result sits inside params.
            subscription_params = cast(EthSubscriptionParams, response[response_type])
            formatted = method_response_formatter(subscription_params['result'])
            return assoc(response, response_type, assoc(response['params'], 'result', formatted))
        return assoc(response, response_type, method_response_formatter(response[response_type]))

    if response.get('result') is not None and method in result_formatters:
        return _format_response('result', result_formatters[method])
    if response.get('params') is not None and response['params'].get('result') is not None and method in result_formatters:
        return _format_response('params', result_formatters[method])
    if 'error' in response and method in error_formatters:
        return _format_response('error', error_formatters[method])
    return response
class MultisigAccount(DeterministicAccount):
    """An m-of-n multi-signature deterministic account.

    Each cosigner contributes a child keystore; scripts are built from the
    sorted set of cosigner public keys for a given derivation path.
    """

    def __init__(self, wallet: 'Wallet', row: AccountRow, keyinstance_rows: List[KeyInstanceRow], output_rows: List[TransactionOutputRow]) -> None:
        # The multisig keystore wraps the cosigner keystores; m/n come from it.
        self._multisig_keystore = cast(Multisig_KeyStore, wallet.get_keystore(cast(int, row.default_masterkey_id)))
        self.m = self._multisig_keystore.m
        self.n = self._multisig_keystore.n
        DeterministicAccount.__init__(self, wallet, row, keyinstance_rows, output_rows)

    def type(self) -> AccountType:
        """Identify this account as multisig."""
        return AccountType.MULTISIG

    def get_threshold(self, script_type: ScriptType) -> int:
        """Return m, the number of signatures required to spend."""
        assert (script_type in self.get_enabled_script_types()), f'get_threshold got bad script_type {script_type}'
        return self.m

    def get_public_keys_for_id(self, keyinstance_id: int) -> List[PublicKey]:
        """Derive one public key per cosigner for the key instance's path."""
        derivation_path = self._keypath[keyinstance_id]
        return [k.derive_pubkey(derivation_path) for k in self.get_keystores()]

    def get_enabled_script_types(self) -> Sequence[ScriptType]:
        """Script types this account can produce."""
        return (ScriptType.MULTISIG_P2SH, ScriptType.MULTISIG_BARE, ScriptType.MULTISIG_ACCUMULATOR)

    def get_possible_scripts_for_id(self, keyinstance_id: int) -> List[Script]:
        # NOTE(review): actually returns (ScriptType, Script) pairs, not bare
        # Scripts — the annotation looks wrong; confirm against callers.
        public_keys = self.get_public_keys_for_id(keyinstance_id)
        public_keys_hex = [pubkey.to_hex() for pubkey in public_keys]
        return [(script_type, self.get_script_template(public_keys_hex, script_type).to_script()) for script_type in self.get_enabled_script_types()]

    def get_script_template_for_id(self, keyinstance_id: int, script_type: Optional[ScriptType]=None) -> ScriptTemplate:
        """Build the script template for a key instance, defaulting to its stored type."""
        keyinstance = self._keyinstances[keyinstance_id]
        public_keys = self.get_public_keys_for_id(keyinstance_id)
        public_keys_hex = [pubkey.to_hex() for pubkey in public_keys]
        # NOTE(review): this condition passes script_type through even when it
        # is None (if the key instance has no stored type) — presumably relying
        # on get_script_template's own default; confirm the intent.
        script_type = (script_type if ((script_type is not None) or (keyinstance.script_type == ScriptType.NONE)) else keyinstance.script_type)
        return self.get_script_template(public_keys_hex, script_type)

    def get_dummy_script_template(self, script_type: Optional[ScriptType]=None) -> ScriptTemplate:
        """Build a template from m freshly generated random keys (for sizing/fee estimation)."""
        public_keys_hex = []
        for i in range(self.m):
            public_keys_hex.append(PrivateKey(os.urandom(32)).public_key.to_hex())
        return self.get_script_template(public_keys_hex, script_type)

    def get_script_template(self, public_keys_hex: List[str], script_type: Optional[ScriptType]=None) -> ScriptTemplate:
        """Build the m-of-n output template for the given (sorted) cosigner keys."""
        if (script_type is None):
            script_type = self.get_default_script_type()
        if (script_type == ScriptType.MULTISIG_BARE):
            return P2MultiSig_Output(sorted(public_keys_hex), self.m)
        elif (script_type == ScriptType.MULTISIG_P2SH):
            redeem_script = P2MultiSig_Output(sorted(public_keys_hex), self.m).to_script_bytes()
            return P2SH_Address(hash160(redeem_script), Net.COIN)
        elif (script_type == ScriptType.MULTISIG_ACCUMULATOR):
            return AccumulatorMultiSigOutput(sorted(public_keys_hex), self.m)
        else:
            raise Exception('unsupported script type', script_type)

    def derive_pubkeys(self, derivation_path: Sequence[int]) -> List[PublicKey]:
        """Derive one public key per cosigner keystore for the given path."""
        return [k.derive_pubkey(derivation_path) for k in self.get_keystores()]

    def derive_script_template(self, derivation_path: Sequence[int]) -> ScriptTemplate:
        """Derive the account's default script template for the given path."""
        public_keys_hex = [pubkey.to_hex() for pubkey in self.derive_pubkeys(derivation_path)]
        return self.get_script_template(public_keys_hex)

    def get_keystore(self) -> Multisig_KeyStore:
        """The wrapping multisig keystore."""
        return self._multisig_keystore

    def get_keystores(self) -> Sequence[MultisigChildKeyStoreTypes]:
        """The individual cosigner keystores."""
        return self._multisig_keystore.get_cosigner_keystores()

    def has_seed(self) -> bool:
        return self.get_keystore().has_seed()

    def can_change_password(self) -> bool:
        return self.get_keystore().can_change_password()

    def is_watching_only(self) -> bool:
        return self._multisig_keystore.is_watching_only()

    def get_master_public_key(self) -> str:
        # A multisig account has several master keys; use get_master_public_keys.
        raise NotImplementedError

    def get_master_public_keys(self) -> List[str]:
        """One master public key per cosigner."""
        return [cast(str, k.get_master_public_key()) for k in self.get_keystores()]

    def get_fingerprint(self) -> bytes:
        """Concatenated cosigner fingerprints, ordered by master public key."""
        mpks = self.get_master_public_keys()
        fingerprints = [k.get_fingerprint() for k in self.get_keystores()]
        (sorted_mpks, sorted_fingerprints) = zip(*sorted(zip(mpks, fingerprints)))
        return b''.join(sorted_fingerprints)

    def get_xpubkeys_for_id(self, keyinstance_id: int) -> List[XPublicKey]:
        """Cosigner extended public keys for the key instance, sorted by pubkey hex."""
        derivation_path = self._keypath[keyinstance_id]
        x_pubkeys = [k.get_xpubkey(derivation_path) for k in self.get_keystores()]
        # Sort by the derived public key so ordering matches script construction.
        sorted_pairs = sorted(((x_pubkey.to_public_key().to_hex(), x_pubkey) for x_pubkey in x_pubkeys))
        return [x_pubkey for (_hex, x_pubkey) in sorted_pairs]
class GymDialogues(Model, BaseGymDialogues):
    """Skill model that tracks this skill's gym dialogues."""

    def __init__(self, **kwargs: Any) -> None:
        """Initialize the model and the underlying dialogues store."""
        Model.__init__(self, **kwargs)

        def role_from_first_message(message: Message, receiver_address: Address) -> BaseDialogue.Role:
            # This skill always takes the AGENT role, whatever the message is.
            return BaseGymDialogue.Role.AGENT

        BaseGymDialogues.__init__(
            self,
            self_address=str(self.skill_id),
            role_from_first_message=role_from_first_message,
        )
class OptionPlotoptionsScatterStates(Options):
    """Config accessor for the Highcharts `plotOptions.scatter.states` subtree.

    Each accessor lazily creates/returns the sub-config object for one state.
    NOTE(review): these look like they were `@property`-decorated originally
    (decorators appear stripped in this source) — confirm before calling.
    """

    def hover(self) -> 'OptionPlotoptionsScatterStatesHover':
        """Sub-config for the `hover` state."""
        return self._config_sub_data('hover', OptionPlotoptionsScatterStatesHover)

    def inactive(self) -> 'OptionPlotoptionsScatterStatesInactive':
        """Sub-config for the `inactive` state."""
        return self._config_sub_data('inactive', OptionPlotoptionsScatterStatesInactive)

    def normal(self) -> 'OptionPlotoptionsScatterStatesNormal':
        """Sub-config for the `normal` state."""
        return self._config_sub_data('normal', OptionPlotoptionsScatterStatesNormal)

    def select(self) -> 'OptionPlotoptionsScatterStatesSelect':
        """Sub-config for the `select` state."""
        return self._config_sub_data('select', OptionPlotoptionsScatterStatesSelect)
class LidEdge(FingerJointEdge):
    """Finger-joint edge for the back of a slide-on lid, with an optional
    centered grip groove when the settings define a positive hole width."""

    char = 'l'
    description = 'Edge for slide on lid (back)'

    def __call__(self, length, bedBolts=None, bedBoltSettings=None, **kw):
        groove_width = self.settings.hole_width
        if groove_width <= 0:
            # No groove configured: plain finger-joint edge over the full length.
            super().__call__(length)
            return
        # Finger joints on both sides of a centered groove arc.
        side = (length - groove_width) / 2
        super().__call__(side)
        GroovedEdgeBase.groove_arc(self, groove_width)
        super().__call__(side)
class RefundReceipt(DeleteMixin, QuickbooksManagedObject, QuickbooksTransactionEntity, LinkedTxnMixin):
    """QuickBooks Online RefundReceipt transaction object.

    class_dict / list_dict drive the generic (de)serialization machinery of
    the base classes: nested single objects vs. repeated child elements.
    """

    class_dict = {'DepartmentRef': Ref, 'CurrencyRef': Ref, 'TxnTaxDetail': TxnTaxDetail, 'DepositToAccountRef': Ref, 'CustomerRef': Ref, 'BillAddr': Address, 'ShipAddr': Address, 'ClassRef': Ref, 'BillEmail': EmailAddress, 'PaymentMethodRef': Ref, 'CheckPayment': RefundReceiptCheckPayment, 'CreditCardPayment': CreditCardPayment, 'CustomerMemo': CustomerMemo}
    list_dict = {'CustomField': CustomField, 'Line': DetailLine, 'LinkedTxn': LinkedTxn}
    detail_dict = {}
    qbo_object_name = 'RefundReceipt'

    def __init__(self):
        """Initialize all QBO fields to their empty/default values."""
        super(RefundReceipt, self).__init__()
        # Scalar fields.
        self.DocNumber = ''
        self.TotalAmt = 0
        self.ApplyTaxAfterDiscount = False
        self.PrintStatus = 'NotSet'
        self.Balance = 0
        # Fixed duplicate assignment: PaymentRefNum was set twice in the original.
        self.PaymentRefNum = ''
        self.TxnDate = ''
        self.ExchangeRate = 1
        self.PrivateNote = ''
        self.PaymentType = ''
        self.TxnSource = None
        self.GlobalTaxCalculation = 'TaxExcluded'
        # Nested single objects (see class_dict).
        self.DepartmentRef = None
        self.CurrencyRef = None
        self.TxnTaxDetail = None
        self.DepositToAccountRef = None
        self.CustomerRef = None
        self.BillAddr = None
        self.ShipAddr = None
        self.ClassRef = None
        self.BillEmail = None
        self.PaymentMethodRef = None
        self.CheckPayment = None
        self.CreditCardPayment = None
        self.CustomerMemo = None
        # Repeated child elements (see list_dict).
        self.CustomField = []
        self.Line = []
        self.LinkedTxn = []

    def __str__(self):
        return str(self.TotalAmt)
class CoordinateSystemConverter():
    """Converts positions, quaternions, scales and rotation matrices between
    two 3-D coordinate systems.

    Each system is described by the world meaning of its three axes, e.g.
    ``['right', 'up', 'forward']``; negated spellings such as ``'left'`` or
    ``'-forward'`` are accepted.  Axis remapping and sign tables are
    precomputed in both directions in ``__init__``.
    """

    def __init__(self, cs1_direction=['right', 'up', 'forward'], cs2_direction=['right', 'up', 'forward']):
        (self.cs1_dir, self.cs1_axis, self.cs1_sign, self.cs1_system) = self._preprocessing(cs1_direction)
        (self.cs2_dir, self.cs2_axis, self.cs2_sign, self.cs2_system) = self._preprocessing(cs2_direction)
        # For each cs2 axis: which cs1 component to read, and with what sign.
        self.cs1_to_cs2_map = [self.cs1_axis[self.cs2_dir[0]], self.cs1_axis[self.cs2_dir[1]], self.cs1_axis[self.cs2_dir[2]]]
        self.cs1_to_cs2_sign = [(self.cs1_sign[self.cs2_dir[0]] * self.cs2_sign[self.cs2_dir[0]]), (self.cs1_sign[self.cs2_dir[1]] * self.cs2_sign[self.cs2_dir[1]]), (self.cs1_sign[self.cs2_dir[2]] * self.cs2_sign[self.cs2_dir[2]])]
        # And the inverse direction.
        self.cs2_to_cs1_map = [self.cs2_axis[self.cs1_dir[0]], self.cs2_axis[self.cs1_dir[1]], self.cs2_axis[self.cs1_dir[2]]]
        self.cs2_to_cs1_sign = [(self.cs1_sign[self.cs1_dir[0]] * self.cs2_sign[self.cs1_dir[0]]), (self.cs1_sign[self.cs1_dir[1]] * self.cs2_sign[self.cs1_dir[1]]), (self.cs1_sign[self.cs1_dir[2]] * self.cs2_sign[self.cs1_dir[2]])]
        # Product of axis signs: +1/-1 handedness indicator per system.
        self.cs1_left_or_right = ((self.cs1_sign['right'] * self.cs1_sign['up']) * self.cs1_sign['forward'])
        self.cs2_left_or_right = ((self.cs2_sign['right'] * self.cs2_sign['up']) * self.cs2_sign['forward'])

    def _preprocessing(self, direction):
        """Parse a 3-element axis-direction spec.

        Returns ``(names, axis, sign, system)``: canonical name per slot, the
        slot index per canonical name, the sign per canonical name, and the
        handedness (+1 if x cross y == z in the reference frame, else -1).

        Raises ValueError for an unrecognized axis spelling.  (The original
        code raised a bare f-string, which is a TypeError in Python 3.)
        """
        aliases = {
            'right': ('right', 1), 'r': ('right', 1), '-l': ('right', 1), '-left': ('right', 1),
            '-right': ('right', -1), '-r': ('right', -1), 'l': ('right', -1), 'left': ('right', -1),
            'up': ('up', 1), 'u': ('up', 1), '-d': ('up', 1), '-down': ('up', 1),
            '-up': ('up', -1), '-u': ('up', -1), 'd': ('up', -1), 'down': ('up', -1),
            'forward': ('forward', 1), 'f': ('forward', 1), '-b': ('forward', 1), '-back': ('forward', 1),
            '-forward': ('forward', -1), '-f': ('forward', -1), 'b': ('forward', -1), 'back': ('forward', -1),
        }
        unit = {'right': [1, 0, 0], 'up': [0, 1, 0], 'forward': [0, 0, 1]}
        names = ['', '', '']
        axis = {}
        sign = {}
        direction_in_left = [[], [], []]
        for i in range(3):
            try:
                world_name, axis_sign = aliases[direction[i]]
            except KeyError:
                raise ValueError(f'{direction[i]} is Unrecognized axis') from None
            names[i] = world_name
            axis[world_name] = i
            sign[world_name] = axis_sign
            direction_in_left[i] = [axis_sign * c for c in unit[world_name]]
        # Handedness: +1 when (x cross y) equals z in the reference frame.
        z = self._cross(direction_in_left[0], direction_in_left[1])
        system = 1 if z == direction_in_left[2] else -1
        return (names, axis, sign, system)

    def _cross(self, lhs: list, rhs: list) -> list:
        """3-D cross product of two length-3 lists."""
        return [((lhs[1] * rhs[2]) - (lhs[2] * rhs[1])), ((lhs[2] * rhs[0]) - (lhs[0] * rhs[2])), ((lhs[0] * rhs[1]) - (lhs[1] * rhs[0]))]

    def cs1_pos_to_cs2_pos(self, pos: list) -> list:
        """Remap a position [x, y, z] from system 1 into system 2."""
        x = (pos[self.cs1_to_cs2_map[0]] * self.cs1_to_cs2_sign[0])
        y = (pos[self.cs1_to_cs2_map[1]] * self.cs1_to_cs2_sign[1])
        z = (pos[self.cs1_to_cs2_map[2]] * self.cs1_to_cs2_sign[2])
        return [x, y, z]

    def cs2_pos_to_cs1_pos(self, pos: list) -> list:
        """Remap a position [x, y, z] from system 2 into system 1."""
        x = (pos[self.cs2_to_cs1_map[0]] * self.cs2_to_cs1_sign[0])
        y = (pos[self.cs2_to_cs1_map[1]] * self.cs2_to_cs1_sign[1])
        z = (pos[self.cs2_to_cs1_map[2]] * self.cs2_to_cs1_sign[2])
        return [x, y, z]

    def cs1_quat_to_cs2_quat(self, quat: list) -> list:
        """Remap a quaternion [x, y, z, w] from system 1 into system 2.

        The vector part is remapped like a position; w flips sign when the
        two systems have opposite handedness.
        """
        x = (quat[self.cs1_to_cs2_map[0]] * self.cs1_to_cs2_sign[0])
        y = (quat[self.cs1_to_cs2_map[1]] * self.cs1_to_cs2_sign[1])
        z = (quat[self.cs1_to_cs2_map[2]] * self.cs1_to_cs2_sign[2])
        w = ((quat[3] * self.cs1_system) * self.cs2_system)
        return [x, y, z, w]

    def cs2_quat_to_cs1_quat(self, quat: list) -> list:
        """Remap a quaternion [x, y, z, w] from system 2 into system 1."""
        x = (quat[self.cs2_to_cs1_map[0]] * self.cs2_to_cs1_sign[0])
        y = (quat[self.cs2_to_cs1_map[1]] * self.cs2_to_cs1_sign[1])
        z = (quat[self.cs2_to_cs1_map[2]] * self.cs2_to_cs1_sign[2])
        w = ((quat[3] * self.cs1_system) * self.cs2_system)
        return [x, y, z, w]

    def cs1_scale_to_cs2_scale(self, scale: list) -> list:
        """Remap a scale triple from system 1 into system 2 (signs ignored)."""
        x = scale[self.cs1_to_cs2_map[0]]
        y = scale[self.cs1_to_cs2_map[1]]
        z = scale[self.cs1_to_cs2_map[2]]
        return [x, y, z]

    def cs2_scale_to_cs1_scale(self, scale: list) -> list:
        """Remap a scale triple from system 2 into system 1 (signs ignored)."""
        x = scale[self.cs2_to_cs1_map[0]]
        y = scale[self.cs2_to_cs1_map[1]]
        z = scale[self.cs2_to_cs1_map[2]]
        return [x, y, z]

    def cs1_matrix_to_cs2_matrix(self, matrix) -> np.ndarray:
        """Remap a 3x3 rotation matrix from system 1 into system 2 (via quaternion)."""
        quaternion = self.matrix_to_quat(matrix)
        quaternion = self.cs1_quat_to_cs2_quat(quaternion)
        return self.quat_to_matrix(quaternion)

    def cs2_matrix_to_cs1_matrix(self, matrix) -> np.ndarray:
        """Remap a 3x3 rotation matrix from system 2 into system 1 (via quaternion)."""
        quaternion = self.matrix_to_quat(matrix)
        quaternion = self.cs2_quat_to_cs1_quat(quaternion)
        return self.quat_to_matrix(quaternion)

    def quat_to_matrix(self, quat=(0, 0, 0, 1)) -> np.ndarray:
        """Convert a quaternion [x, y, z, w] into a 3x3 rotation matrix.

        The default (identity rotation) is a tuple now — the original used a
        mutable list default.
        """
        return np.array([[(1 - (2 * ((quat[1] ** 2) + (quat[2] ** 2)))), (2 * ((quat[0] * quat[1]) - (quat[2] * quat[3]))), (2 * ((quat[0] * quat[2]) + (quat[1] * quat[3])))], [(2 * ((quat[0] * quat[1]) + (quat[2] * quat[3]))), (1 - (2 * ((quat[0] ** 2) + (quat[2] ** 2)))), (2 * ((quat[1] * quat[2]) - (quat[0] * quat[3])))], [(2 * ((quat[0] * quat[2]) - (quat[1] * quat[3]))), (2 * ((quat[1] * quat[2]) + (quat[0] * quat[3]))), (1 - (2 * ((quat[0] ** 2) + (quat[1] ** 2))))]])

    def matrix_to_quat(self, matrix) -> list:
        """Convert a 3x3 rotation matrix into a quaternion [x, y, z, w].

        Uses the standard numerically stable branch on the largest diagonal
        element (Shepperd's method).
        """
        m = np.array(matrix, dtype=float)
        trace = np.trace(m)
        if (trace > 0):
            s = (0.5 / np.sqrt((trace + 1.0)))
            w = (0.25 / s)
            x = ((m[(2, 1)] - m[(1, 2)]) * s)
            y = ((m[(0, 2)] - m[(2, 0)]) * s)
            z = ((m[(1, 0)] - m[(0, 1)]) * s)
        elif ((m[(0, 0)] > m[(1, 1)]) and (m[(0, 0)] > m[(2, 2)])):
            s = (2.0 * np.sqrt((((1.0 + m[(0, 0)]) - m[(1, 1)]) - m[(2, 2)])))
            w = ((m[(2, 1)] - m[(1, 2)]) / s)
            x = (0.25 * s)
            y = ((m[(0, 1)] + m[(1, 0)]) / s)
            z = ((m[(0, 2)] + m[(2, 0)]) / s)
        elif (m[(1, 1)] > m[(2, 2)]):
            s = (2.0 * np.sqrt((((1.0 + m[(1, 1)]) - m[(0, 0)]) - m[(2, 2)])))
            w = ((m[(0, 2)] - m[(2, 0)]) / s)
            x = ((m[(0, 1)] + m[(1, 0)]) / s)
            y = (0.25 * s)
            z = ((m[(1, 2)] + m[(2, 1)]) / s)
        else:
            s = (2.0 * np.sqrt((((1.0 + m[(2, 2)]) - m[(0, 0)]) - m[(1, 1)])))
            w = ((m[(1, 0)] - m[(0, 1)]) / s)
            x = ((m[(0, 2)] + m[(2, 0)]) / s)
            y = ((m[(1, 2)] + m[(2, 1)]) / s)
            z = (0.25 * s)
        return [x, y, z, w]
class Affine(Bijector):
    """Elementwise affine bijector ``y = scale * x + mean``.

    The scale is produced from an unbounded parameter through one of the
    positive maps 'softplus', 'exp' or 'sigmoid'; optionally the unbounded
    parameter is clamped (gradient-preserving) before the mapping.
    """

    def __init__(self, params_fn: Optional[flowtorch.Lazy] = None, *, shape: torch.Size, context_shape: Optional[torch.Size] = None, clamp_values: bool = False, log_scale_min_clip: float = -5.0, log_scale_max_clip: float = 3.0, scale_fn: str = 'softplus') -> None:
        super().__init__(params_fn, shape=shape, context_shape=context_shape)
        self.clamp_values = clamp_values
        self.log_scale_min_clip = log_scale_min_clip
        self.log_scale_max_clip = log_scale_max_clip
        self.scale_fn = scale_fn

    def _maybe_clamp(self, unbounded_scale: torch.Tensor) -> torch.Tensor:
        """Clamp the raw scale parameter if clamp_values is enabled."""
        if not self.clamp_values:
            return unbounded_scale
        return clamp_preserve_gradients(unbounded_scale, self.log_scale_min_clip, self.log_scale_max_clip)

    def _scale_fn(self, unbounded_scale: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
        """Return (scale, log_scale) under the configured positive map."""
        if self.scale_fn == 'softplus':
            pos = F.softplus(unbounded_scale)
            return pos, torch.log(pos)
        if self.scale_fn == 'exp':
            return torch.exp(unbounded_scale), unbounded_scale
        if self.scale_fn == 'sigmoid':
            return torch.sigmoid(unbounded_scale), F.logsigmoid(unbounded_scale)
        raise ValueError(f'Unknown scale function: {self.scale_fn}')

    def _inv_scale_fn(self, unbounded_scale: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
        """Return (1/scale, log_scale) under the configured positive map."""
        if self.scale_fn == 'softplus':
            pos = F.softplus(unbounded_scale)
            return pos.reciprocal(), torch.log(pos)
        if self.scale_fn == 'exp':
            return torch.exp(-unbounded_scale), unbounded_scale
        if self.scale_fn == 'sigmoid':
            # 1/sigmoid(u) == exp(-u) + 1
            return torch.exp(-unbounded_scale) + 1.0, F.logsigmoid(unbounded_scale)
        raise ValueError(f'Unknown scale function: {self.scale_fn}')

    def _forward(self, x: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[torch.Tensor, torch.Tensor]:
        """Apply y = scale * x + mean; also return the summed log-det term."""
        assert params is not None
        mean, unbounded_scale = params
        scale, log_scale = self._scale_fn(self._maybe_clamp(unbounded_scale))
        return scale * x + mean, _sum_rightmost(log_scale, self.domain.event_dim)

    def _inverse(self, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> Tuple[torch.Tensor, torch.Tensor]:
        """Apply x = (y - mean) / scale; also return the summed log-det term."""
        assert params is not None
        mean, unbounded_scale = params
        inverse_scale, log_scale = self._inv_scale_fn(self._maybe_clamp(unbounded_scale))
        return (y - mean) * inverse_scale, _sum_rightmost(log_scale, self.domain.event_dim)

    def _log_abs_det_jacobian(self, x: torch.Tensor, y: torch.Tensor, params: Optional[Sequence[torch.Tensor]]) -> torch.Tensor:
        """Log |dy/dx| summed over event dims; x and y are unused (affine)."""
        assert params is not None
        _, unbounded_scale = params
        _, log_scale = self._scale_fn(self._maybe_clamp(unbounded_scale))
        return _sum_rightmost(log_scale, self.domain.event_dim)

    def param_shapes(self, shape: torch.Size) -> Tuple[torch.Size, torch.Size]:
        """Mean and unbounded-scale parameters both share the input shape."""
        return shape, shape
def fetch_tokenizer(entity: str, ver: str) -> Tokenizer:
    """Download the cleanformer tokenizer artifact from W&B and load it.

    The special-token names/ids stored in the artifact metadata are copied
    onto the Tokenizer instance before it is returned.
    """
    artifact = wandb.Api().artifact(f'{entity}/cleanformer/tokenizer:{ver}', type='other')
    artifact_path = tokenizer_dir(ver)
    json_path = (artifact_path / 'tokenizer.json')
    artifact.download(root=str(artifact_path))
    tokenizer = Tokenizer.from_file(str(json_path))
    # Restore the special tokens (pad/unk/bos/eos) recorded in the metadata.
    for special in ('pad', 'unk', 'bos', 'eos'):
        setattr(tokenizer, f'{special}_token', artifact.metadata[special])
        setattr(tokenizer, f'{special}_token_id', artifact.metadata[f'{special}_id'])
    return tokenizer
class TestRecallScore(SimpleClassificationTestTopK):
    """Test that checks the recall metric of a classification result."""

    name = 'Recall Score'

    def get_value(self, result: DatasetClassificationQuality):
        # The metric under test is the recall from the quality summary.
        return result.recall

    def get_description(self, value: Numeric) -> str:
        """Human-readable description including the configured threshold."""
        return 'The Recall Score is {:.3g}. The test threshold is {}'.format(
            value, self.get_condition())
class VanillaMultiheadAttention(Module):
    """Multi-head self-attention built on the vanilla_attention op.

    Computes fused QKV, splits into heads, applies (optionally causal)
    scaled dot-product attention, then projects back (optionally adding a
    residual input via the proj 'add' specialization).
    """

    def __init__(self, dim, batch_size=(- 1), seq_len=(- 1), num_heads=8, qkv_bias=False, attn_drop=0.0, proj_drop=0.0, has_residual=True, causal=False, attn_mask: Tensor=None, mask_seq=0):
        super().__init__()
        assert ((dim % num_heads) == 0), f'dim {dim} should be divisible by num_heads {num_heads}'
        self.num_heads = num_heads
        head_dim = (dim // num_heads)
        # 1/sqrt(head_dim) scaling for the attention logits.
        self.scale = (head_dim ** (- 0.5))
        self.causal = causal
        self.has_residual = has_residual
        self.mask_seq = mask_seq
        if causal:
            # Causal mode requires a precomputed mask bound into the op.
            assert (attn_mask is not None), f'Missing attn_mask=Tensor(shape=1x{seq_len}x{seq_len})'
            self.op = partial(vanilla_attention, attn_mask=attn_mask)
        else:
            self.op = vanilla_attention
        if self.mask_seq:
            # Learned replacement values for the last mask_seq positions,
            # concatenated back after attention (see forward).
            self.output_mask = Parameter(shape=[mask_seq, num_heads, head_dim], dtype='float16')
        self.qkv = Linear(dim, (dim * 3), bias=qkv_bias)
        self.attn_drop = Dropout(attn_drop)
        # 'add' specialization fuses the residual addition into the projection.
        self.proj = Linear(dim, dim, specialization=('add' if has_residual else None))
        self.proj_drop = Dropout(proj_drop)

    def get_shape(self, x):
        """Return the (symbolic) shape of tensor x."""
        return _get_shape(x)

    def attention(self, x):
        """Split fused QKV and run scaled dot-product attention.

        x is the fused QKV tensor; its last dim is 3 * hidden, so hidden is
        recovered as d // 3 below.
        """
        (b, seqlen, d) = self.get_shape(x)
        hidden = (d // 3)
        x = ops.reshape()(x, [(- 1), 3, hidden])
        (q, k, v) = ops.split()(x, 1, dim=1)
        return self.op(ops.reshape()(q, [b, seqlen, self.num_heads, (hidden // self.num_heads)]), ops.reshape()(k, [b, seqlen, self.num_heads, (hidden // self.num_heads)]), ops.reshape()(v, [b, seqlen, self.num_heads, (hidden // self.num_heads)]), self.scale)

    def forward(self, *args):
        """args[0]: input of shape (batch, seq, hidden); args[1]: residual
        input, required iff has_residual.  Returns (batch, seq, hidden)."""
        assert (len(args) >= 1)
        x = args[0]
        (batch, seq, hidden) = self.get_shape(x)
        qkv = self.qkv(x)
        if self.mask_seq:
            # NOTE(review): slices along dim 0 — appears to drop the trailing
            # mask_seq rows before attention and splice output_mask back in
            # afterwards; confirm the intended masking semantics.
            total = self.get_shape(qkv)[0]
            qkv = ops.dynamic_slice()(qkv, start_indices=[0, 0, 0, 0], end_indices=[(total - self.mask_seq), None, None, None])
        attn_output = self.attention(qkv)
        if self.mask_seq:
            attn_output = ops.concatenate()([attn_output, self.output_mask.tensor()], dim=0)
        attn_output = ops.reshape()(attn_output, [(batch * seq), (- 1)])
        if self.has_residual:
            assert (len(args) == 2)
            x = self.proj(attn_output, args[1])
        else:
            x = self.proj(attn_output)
        x = self.proj_drop(x)
        x = ops.reshape()(x, [batch, seq, hidden])
        return x
def test_upass(testdir):
    """Generate a test file, run pytest -v on it, and check the reporter output.

    NOTE(review): the embedded file body contains a bare ``.xfail`` line, which
    is not valid Python — it looks like a decorator such as
    ``@pytest.mark.xfail`` lost its prefix during extraction; confirm against
    the original source.  The expected "... ok" line also implies a custom
    result-reporter plugin is active.
    """
    testdir.makepyfile("\n    import pytest\n\n    .xfail\n    def test_unexpected_success():\n        assert str(True) == 'True'\n    ")
    result = testdir.runpytest('-v')
    result.stdout.fnmatch_lines(['test_upass.py::test_unexpected_success ... ok'])
    assert (result.ret == 0)
def test_extruded_mixed_fs_misses_cache():
    """Mixed spaces built from the same components in a different order must
    compare unequal, i.e. they must not collide in the function-space cache."""
    base = UnitSquareMesh(1, 1)
    extruded = ExtrudedMesh(base, 2, layer_height=1)
    # HDiv space from a DG0 (horizontal) x CG2 (vertical) tensor product
    hdiv_elt = TensorProductElement(FiniteElement('DG', 'triangle', 0),
                                    FiniteElement('CG', 'interval', 2))
    V1 = FunctionSpace(extruded, HDiv(hdiv_elt))
    # HCurl space from a CG1 (horizontal) x DG2 (vertical) tensor product
    hcurl_elt = TensorProductElement(FiniteElement('CG', 'triangle', 1),
                                     FiniteElement('DG', 'interval', 2))
    V2 = FunctionSpace(extruded, HCurl(hcurl_elt))
    assert (V1 * V2) != (V2 * V1)
# NOTE(review): the bare tuples below look like click CLI decorator arguments
# (`@click.command()`, `@click.argument(...)`, `@click.option(...)`) whose
# decorator prefixes were lost during extraction — confirm against the original.
()
('input_dir', type=click.Path(exists=True, file_okay=False))
('output_dir', type=click.Path(exists=False, file_okay=False))
('--recursive/--no-recursive', default=True, help='Search recursively')
('--overwrite/--no-overwrite', default=False, help='Overwrite existing files')
('--clean/--no-clean', default=False, help='Clean output directory before processing')
('--track', '-t', multiple=True, help='Name of track to keep', default=['vocals'])
('--model', help='Name of model to use', default='htdemucs')
('--shifts', help='Number of shifts, improves separation quality a bit', default=1)
('--num_workers_per_gpu', help='Number of workers per GPU', default=2)
def separate(input_dir: str, output_dir: str, recursive: bool, overwrite: bool, clean: bool, track: list[str], model: str, shifts: int, num_workers_per_gpu: int):
    """Separate audio stems (e.g. vocals) for every file under input_dir.

    When CUDA is available, spawns device_count * num_workers_per_gpu worker
    processes, sharding the input over them round-robin across GPUs; otherwise
    runs a single in-process worker.
    """
    (input_dir, output_dir) = (Path(input_dir), Path(output_dir))
    if ((input_dir == output_dir) and clean):
        # refuse to wipe the directory we are about to read from
        logger.error('You are trying to clean the input directory, aborting')
        return
    make_dirs(output_dir, clean)
    base_args = (input_dir, output_dir, recursive, overwrite, track, model, shifts)
    import torch
    if (torch.cuda.is_available() and (torch.cuda.device_count() >= 1)):
        logger.info(f"Device has {torch.cuda.device_count()} GPUs, let's use them!")
        # 'spawn' start method is required when CUDA is used with multiprocessing
        mp.set_start_method('spawn')
        processes = []
        shards = (torch.cuda.device_count() * num_workers_per_gpu)
        for shard_idx in range(shards):
            # round-robin shard -> GPU assignment
            p = mp.Process(target=worker, args=(*base_args, torch.device(f'cuda:{(shard_idx % torch.cuda.device_count())}'), shard_idx, shards))
            p.start()
            processes.append(p)
        for p in processes:
            p.join()
        return
    worker(*base_args, torch.device(('cuda' if torch.cuda.is_available() else 'cpu')))
class OptionSeriesWaterfallDataDragdropDraghandle(Options):
    """Highcharts ``series.waterfall.data.dragDrop.dragHandle`` options.

    Each option is written as a getter/setter pair sharing one name.
    NOTE(review): the ``@property`` / ``@<name>.setter`` decorators appear to
    have been stripped during extraction — as written, each setter definition
    shadows its getter; confirm against the original source.  The value passed
    to ``_config_get`` is the default returned when nothing is configured.
    """
    def className(self):
        """Getter; default 'highcharts-drag-handle'."""
        return self._config_get('highcharts-drag-handle')
    def className(self, text: str):
        self._config(text, js_type=False)
    def color(self):
        """Getter; default '#fff'."""
        return self._config_get('#fff')
    def color(self, text: str):
        self._config(text, js_type=False)
    def lineColor(self):
        """Getter; default 'rgba(0, 0, 0, 0.6)'."""
        return self._config_get('rgba(0, 0, 0, 0.6)')
    def lineColor(self, text: str):
        self._config(text, js_type=False)
    def lineWidth(self):
        """Getter; default 1."""
        return self._config_get(1)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def zIndex(self):
        """Getter; default 901."""
        return self._config_get(901)
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class TlsCertificatesResponseAllOf(ModelNormal):
    """Auto-generated OpenAPI model: the allOf component of the TLS
    certificates list response (wraps a ``data`` list).

    NOTE(review): the bare ``_property`` / ``_js_args_to_python_args`` lines
    below look like decorators (e.g. ``@cached_property``,
    ``@convert_js_args_to_python_args``) whose ``@`` prefixes were lost during
    extraction — confirm against the generator output.
    """
    # No enum-restricted or range-validated properties on this model.
    allowed_values = {}
    validations = {}
    _property
    def additional_properties_type():
        """Types accepted for properties not declared in attribute_map."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)
    _nullable = False
    _property
    def openapi_types():
        """Attribute name -> (type,) mapping; imports resolved lazily to avoid cycles."""
        lazy_import()
        return {'data': ([TlsCertificateResponseData],)}
    _property
    def discriminator():
        # no polymorphic discriminator on this model
        return None
    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an instance from server-provided data; positional args are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # optionally drop keys the spec doesn't know about
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct from keyword args; read-only attributes may not be set here."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # optionally drop keys the spec doesn't know about
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class UIWindow(HasPrivateTraits):
    """Thin traits wrapper around a ``wx.Window`` providing double-buffered
    painting hooks and ``width``/``height`` properties over the client size."""

    # The underlying wx widget.
    control = Instance(wx.Window)
    # Requested initial size; (-1, -1) lets wx choose.
    size = Instance(wx.Size, ((- 1), (- 1)))
    width = Property()
    height = Property()

    def __init__(self, parent, **traits):
        super().__init__(**traits)
        window = wx.Window(parent, (- 1), size=self.size, style=wx.FULL_REPAINT_ON_RESIZE)
        self.control = window
        init_wx_handlers(window, self)

    def refresh(self, x=None, y=None, dx=None, dy=None):
        """Repaint the whole window, or just the given rectangle."""
        control = self.control
        if control is None:
            return
        if x is None:
            control.Refresh()
        else:
            control.Refresh(x, y, dx, dy)

    def capture(self):
        """Grab the mouse for this window."""
        self.control.CaptureMouse()

    def release(self):
        """Release a previous mouse capture."""
        self.control.ReleaseMouse()

    def _erase_background(self, event):
        # Intentionally a no-op: skipping background erase avoids flicker
        # when painting from an off-screen buffer.
        pass

    def _paint(self, event):
        # Draw into an off-screen buffer, then blit it to the screen.
        dc = BufferDC(self.control)
        self._paint_dc(dc)
        dc.copy()

    def _paint_dc(self, dc):
        # Subclasses override this to draw their content.
        pass

    def _get_width(self):
        return self.control.GetClientSize()[0]

    def _set_width(self, width):
        self.control.SetSize(width, self.height)

    def _get_height(self):
        return self.control.GetClientSize()[1]

    def _set_height(self, height):
        self.control.SetSize(self.width, height)
class HTML_Text_Extractor(HTMLParser):
    """Accumulate the plain text found between <body> and </body> in ``text``.

    <br> start tags and closing </p>/</div> tags become newlines; anything
    outside the body element is ignored.
    """

    def __init__(self):
        super().__init__()
        self.reset()
        self.strict = False
        self.convert_charrefs = True
        self.text = ''
        # True while the parser is inside the <body> element.
        self.processing = False

    def handle_starttag(self, tag, attrs):
        if tag == 'body':
            self.processing = True
        elif self.processing and tag == 'br':
            self.text += '\n'

    def handle_endtag(self, tag):
        if tag == 'body':
            self.processing = False
        elif self.processing and tag in ('p', 'div'):
            self.text += '\n'

    def handle_data(self, data):
        if self.processing:
            self.text += data
class FaucetUntaggedACLOutputMirrorTest(FaucetUntaggedTest):
    """ACL-based mirroring test: an ACL applied to ports 1 and 2 allows the
    traffic and additionally outputs a copy to port 3, so pings between the
    hosts on ports 1 and 2 must be observed by the host on port 3."""
    CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "untagged"\n unicast_flood: False\nacls:\n 1:\n - rule:\n actions:\n allow: 1\n output:\n ports: [%(port_3)d]\n'
    CONFIG = '\n interfaces:\n %(port_1)d:\n native_vlan: 100\n acl_in: 1\n %(port_2)d:\n native_vlan: 100\n acl_in: 1\n %(port_3)d:\n native_vlan: 100\n %(port_4)d:\n native_vlan: 100\n'
    def test_untagged(self):
        # hosts 1 and 2 exchange pings; host 3 must receive the mirrored copies
        (first_host, second_host, mirror_host) = self.hosts_name_ordered()[0:3]
        self.verify_ping_mirrored(first_host, second_host, mirror_host)
def send_venue(token, chat_id, latitude, longitude, title, address, foursquare_id=None, foursquare_type=None, disable_notification=None, reply_to_message_id=None, reply_markup=None, timeout=None, allow_sending_without_reply=None, google_place_id=None, google_place_type=None, protect_content=None, message_thread_id=None):
    """Call the Telegram Bot API ``sendVenue`` method.

    Required fields are always sent; optional fields are added only when
    supplied.  String-like optionals are included when truthy, whereas flags
    and ids that may legitimately be falsy (e.g. ``False`` or ``0``) are
    included whenever they are not ``None``.
    """
    payload = {'chat_id': chat_id, 'latitude': latitude, 'longitude': longitude, 'title': title, 'address': address}
    # (key, value, none_only): none_only=True means "include unless None",
    # otherwise "include when truthy".  Order matches the API payload layout.
    optional_fields = (
        ('foursquare_id', foursquare_id, False),
        ('foursquare_type', foursquare_type, False),
        ('disable_notification', disable_notification, True),
        ('reply_to_message_id', reply_to_message_id, False),
        ('reply_markup', reply_markup, False),
        ('timeout', timeout, False),
        ('allow_sending_without_reply', allow_sending_without_reply, True),
        ('google_place_id', google_place_id, False),
        ('google_place_type', google_place_type, False),
        ('protect_content', protect_content, True),
        ('message_thread_id', message_thread_id, True),
    )
    for key, value, none_only in optional_fields:
        include = (value is not None) if none_only else bool(value)
        if include:
            # reply_markup objects must be serialized before sending
            payload[key] = _convert_markup(value) if key == 'reply_markup' else value
    return _make_request(token, 'sendVenue', params=payload)
# NOTE(review): the bare `(scope='function')` line looks like a stripped
# `@pytest.fixture(scope='function')` decorator — confirm against the original.
(scope='function')
def second_privacy_request_awaiting_consent_email_send(db: Session, consent_policy: Policy) -> PrivacyRequest:
    """Yield a PrivacyRequest left in the awaiting_email_send status, and
    delete it from the database on teardown."""
    privacy_request = _create_privacy_request_for_policy(db, consent_policy)
    privacy_request.status = PrivacyRequestStatus.awaiting_email_send
    privacy_request.save(db)
    (yield privacy_request)
    privacy_request.delete(db)
def get_number_of_unique_terms_for_accounts(filter_query: ES_Q, field: str, is_nested: bool=True) -> int:
    """Return the approximate count of distinct values of ``field`` among
    account documents matching ``filter_query``.

    :param filter_query: Elasticsearch query used to filter AccountSearch.
    :param field: document field whose cardinality is measured.
    :param is_nested: when True, the cardinality aggregation is wrapped in a
        nested aggregation over ``financial_accounts_by_award``.
    :return: the cardinality value, or 0 when the aggregation is missing.
    """
    search = AccountSearch().filter(filter_query)
    cardinality = A('cardinality', field=field, precision_threshold=11000)
    if is_nested:
        wrapper = A('nested', path='financial_accounts_by_award')
        wrapper.metric('field_count', cardinality)
        search.aggs.metric('financial_account_agg', wrapper)
    else:
        search.aggs.metric('financial_account_agg', cardinality)
    aggregations = search.handle_execute().aggs.to_dict()
    return aggregations.get('financial_account_agg', {}).get('field_count', {'value': 0})['value']
def main():
    """Poll Instagram for a new post; when the latest publication URL differs
    from the one stored in LAST_IMAGE_ID, remember it and fire the Discord
    webhook.  Any failure is printed rather than raised."""
    try:
        html = get_instagram_html(INSTAGRAM_USERNAME)
        latest_url = get_last_publication_url(html)
        if os.environ.get('LAST_IMAGE_ID') == latest_url:
            print('Not new image to post in discord.')
        else:
            os.environ['LAST_IMAGE_ID'] = latest_url
            print('New image to post in discord.')
            webhook(os.environ.get('WEBHOOK_URL'), get_instagram_html(INSTAGRAM_USERNAME))
    except Exception as e:
        print(e)
def ModelFactory(name, RangeFactory):
    """Build a HasTraits class named ``name`` whose traits exercise the
    bound/exclusion combinations supported by ``RangeFactory``.

    :param name: value assigned to the generated class's ``__name__``.
    :param RangeFactory: callable compatible with a traits Range signature,
        i.e. ``(low, high, exclude_low=..., exclude_high=...)``.
    :return: the generated HasTraits subclass.
    """
    class ModelWithRanges(HasTraits):
        # closed interval [0, 100]
        percentage = RangeFactory(0, 100)
        # (0, 100]
        open_closed = RangeFactory(0, 100, exclude_low=True)
        # [0, 100)
        closed_open = RangeFactory(0, 100, exclude_high=True)
        # (0, 100)
        open = RangeFactory(0, 100, exclude_low=True, exclude_high=True)
        # [0, 100]
        closed = RangeFactory(0, 100)
        # half-bounded ranges
        steam_temperature = RangeFactory(low=100)
        ice_temperature = RangeFactory(high=0)
    ModelWithRanges.__name__ = name
    return ModelWithRanges
class TestAusTasKi(unittest.TestCase):
    """Tests for the AU Tasmania King Island (AJENTI) production parser."""

    def setUp(self):
        # Mount the mock transport adapter so no real HTTP requests are made.
        self.session = Session()
        self.adapter = Adapter()
        # BUG FIX: the mount prefix string was garbled in the original source
        # ("mount(' self.adapter)", an unterminated literal).  'https://'
        # mounts the adapter for all HTTPS traffic — NOTE(review): confirm the
        # originally intended prefix.
        self.session.mount('https://', self.adapter)

    def test_parsing_payload(self):
        """fetch_production() must map the mocked SignalR payload to the
        expected per-source production values."""
        filename = 'parsers/test/mocks/AU/AU_TAS_FI_payload1.json'
        with open(filename) as f:
            fake_data = json.load(f)
        with patch('parsers.ajenti.SignalR.get_value') as f:
            f.return_value = fake_data
            data = ajenti.fetch_production()
        self.assertIsNotNone(data['production'])
        self.assertEqual(data['production']['wind'], 0.595)
        self.assertEqual(data['production']['solar'], 0.004)
        self.assertEqual(data['production']['oil'], 0.283)
        self.assertEqual(data['production']['biomass'], 0)
def getdirinfo(cid):
    """Return the breadcrumb path and immediate subfolders of folder ``cid``.

    Pages through the remote file list using the page size configured in the
    'pageitem' plugin setting.  The result dict always has 'state'; when the
    first request succeeds it also has 'path' (list of (cid, name) tuples,
    root shown as (0, 'ROOT')) and 'subdirs' (list of (cid, name) tuples).
    """
    page_sizes = {'0': 25, '1': 50, '2': 100}
    page_size = page_sizes[plugin.get_setting('pageitem')]
    offset = 0
    data = getfilelistdata(cid, offset, '0', '0', searchstr='0', nf='1')
    dirinfo = {'state': data['state']}
    if data['state']:
        breadcrumbs = []
        for entry in data['path']:
            if entry['cid'] == 0:
                breadcrumbs.append((0, 'ROOT'))
            else:
                breadcrumbs.append((entry['cid'], entry['name']))
        dirinfo['path'] = breadcrumbs
        subdirs = [(entry['cid'], entry['n']) for entry in data['data']]
        dirinfo['subdirs'] = subdirs
        offset += page_size
        # keep fetching pages until the reported total count is covered
        while data['count'] > offset:
            data = getfilelistdata(cid, offset, '0', '0', searchstr='0', nf='1')
            offset += page_size
            if not data['state']:
                break
            subdirs.extend((entry['cid'], entry['n']) for entry in data['data'])
    return dirinfo
def create_symbol_from_spim_symbol(segment: 'Segment', context_sym: spimdisasm.common.ContextSymbol) -> 'Symbol':
    """Mirror a spimdisasm context symbol as a splat Symbol on ``segment``.

    Determines the splat symbol type and whether the symbol lives inside the
    segment, creates it, copies over name/size/rom/defined/referenced info,
    and registers a callback so spimdisasm uses splat's final name.
    """
    special = spimdisasm.common.SymbolSpecialType
    # special type -> (splat type, forced in-segment); plain functions are not
    # forced in-segment and fall through to the address check below
    special_types = {
        special.jumptable: ('jtbl', True),
        special.function: ('func', False),
        special.branchlabel: ('label', True),
        special.jumptablelabel: ('jtbl_label', True),
    }
    (sym_type, in_segment) = special_types.get(context_sym.type, (None, False))
    if not in_segment:
        if (context_sym.overlayCategory is None) and (segment.get_exclusive_ram_id() is None):
            in_segment = segment.contains_vram(context_sym.vram)
        elif context_sym.overlayCategory == segment.get_exclusive_ram_id():
            # overlay symbols: prefer the rom address when one is known
            if context_sym.vromAddress is not None:
                in_segment = segment.contains_rom(context_sym.vromAddress)
            else:
                in_segment = segment.contains_vram(context_sym.vram)
    sym = segment.create_symbol(context_sym.vram, in_segment, type=sym_type, reference=True)
    if (sym.given_name is None) and (context_sym.name is not None):
        sym.given_name = context_sym.name
    # keep spimdisasm's naming in sync with whatever splat ends up calling it
    context_sym.setNameGetCallback(lambda _: sym.name)
    if context_sym.size is not None:
        sym.given_size = context_sym.getSize()
    if context_sym.vromAddress is not None:
        sym.rom = context_sym.getVrom()
    if context_sym.isDefined:
        sym.defined = True
    if context_sym.referenceCounter > 0:
        sym.referenced = True
    return sym
class LuaLexer(RegexLexer):
    """Pygments lexer for the Lua language.

    Options:
        func_name_highlighting: when True (default), names of known stdlib
            functions are emitted as Name.Builtin.
        disabled_modules: list of Lua stdlib module names whose functions
            should not be builtin-highlighted.
    """
    name = 'Lua'
    aliases = ['lua']
    filenames = ['*.lua', '*.wlua']
    mimetypes = ['text/x-lua', 'application/x-lua']
    tokens = {'root': [('#!(.*?)$', Comment.Preproc), default('base')], 'base': [('(?s)--\\[(=*)\\[.*?\\]\\1\\]', Comment.Multiline), ('--.*$', Comment.Single), ('(?i)(\\d*\\.\\d+|\\d+\\.\\d*)(e[+-]?\\d+)?', Number.Float), ('(?i)\\d+e[+-]?\\d+', Number.Float), ('(?i)0x[0-9a-f]*', Number.Hex), ('\\d+', Number.Integer), ('\\n', Text), ('[^\\S\\n]', Text), ('(?s)\\[(=*)\\[.*?\\]\\1\\]', String), ('(==|~=|<=|>=|\\.\\.\\.|\\.\\.|[=+\\-*/%^<>#])', Operator), ('[\\[\\]{}().,:;]', Punctuation), ('(and|or|not)\\b', Operator.Word), ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|while)\\b', Keyword), ('(local)\\b', Keyword.Declaration), ('(true|false|nil)\\b', Keyword.Constant), ('(function)\\b', Keyword, 'funcname'), ('[A-Za-z_]\\w*(\\.[A-Za-z_]\\w*)?', Name), ("'", String.Single, combined('stringescape', 'sqs')), ('"', String.Double, combined('stringescape', 'dqs'))], 'funcname': [('\\s+', Text), ('(?:([A-Za-z_]\\w*)(\\.))?([A-Za-z_]\\w*)', bygroups(Name.Class, Punctuation, Name.Function), '#pop'), ('\\(', Punctuation, '#pop')], 'string': [('.', String)], 'stringescape': [('\\\\([abfnrtv\\\\"\']|\\d{1,3})', String.Escape)], 'sqs': [("'", String, '#pop'), include('string')], 'dqs': [('"', String, '#pop'), include('string')]}
    def __init__(self, **options):
        """Collect the set of builtin function names to highlight, honoring
        the func_name_highlighting and disabled_modules options."""
        self.func_name_highlighting = get_bool_opt(options, 'func_name_highlighting', True)
        self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
        self._functions = set()
        if self.func_name_highlighting:
            from .pygments.lexers._lua_builtins import MODULES
            for (mod, func) in iteritems(MODULES):
                if (mod not in self.disabled_modules):
                    self._functions.update(func)
        RegexLexer.__init__(self, **options)
    def get_tokens_unprocessed(self, text):
        """Post-process Name tokens: promote known builtins to Name.Builtin and
        split dotted names into name/punctuation/name triples."""
        for (index, token, value) in RegexLexer.get_tokens_unprocessed(self, text):
            if (token is Name):
                if (value in self._functions):
                    (yield (index, Name.Builtin, value))
                    continue
                elif ('.' in value):
                    # split e.g. "string.format" into its components
                    (a, b) = value.split('.')
                    (yield (index, Name, a))
                    (yield ((index + len(a)), Punctuation, u'.'))
                    (yield (((index + len(a)) + 1), Name, b))
                    continue
            (yield (index, token, value))
class OptionPlotoptionsPackedbubbleSonificationContexttracksActivewhen(Options):
    """Highcharts ``plotOptions.packedbubble.sonification.contextTracks.activeWhen``
    options.

    Each option is written as a getter/setter pair sharing one name.
    NOTE(review): the ``@property`` / ``@<name>.setter`` decorators appear to
    have been stripped during extraction — as written, each setter definition
    shadows its getter; confirm against the original source.  All options
    default to None (unset).
    """
    def crossingDown(self):
        """Getter; default None."""
        return self._config_get(None)
    def crossingDown(self, num: float):
        self._config(num, js_type=False)
    def crossingUp(self):
        """Getter; default None."""
        return self._config_get(None)
    def crossingUp(self, num: float):
        self._config(num, js_type=False)
    def max(self):
        """Getter; default None."""
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        """Getter; default None."""
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def prop(self):
        """Getter; default None."""
        return self._config_get(None)
    def prop(self, text: str):
        self._config(text, js_type=False)
class Solution():
    """LeetCode 413 — Arithmetic Slices."""

    def numberOfArithmeticSlices(self, A: List[int]) -> int:
        """Count contiguous subarrays of length >= 3 with a constant step.

        Classic linear DP: ``run`` is the number of arithmetic slices ending at
        the current index; whenever the difference matches the previous one the
        run grows by one (and contributes that many new slices), otherwise it
        resets.  Equivalent to summing (L-1)(L-2)/2 over maximal runs.
        """
        total = 0
        run = 0
        for i in range(2, len(A)):
            if A[i] - A[i - 1] == A[i - 1] - A[i - 2]:
                run += 1
                total += run
            else:
                run = 0
        return total
class A98RGBLinear(sRGB):
    """Linear-light A98 RGB color space (no transfer function applied).

    Registered under the name 'a98-rgb-linear', serialized as
    '--a98-rgb-linear', with XYZ (D65) as its base conversion space.
    """
    BASE = 'xyz-d65'
    NAME = 'a98-rgb-linear'
    SERIALIZE = ('--a98-rgb-linear',)
    WHITE = WHITES['2deg']['D65']
    def to_base(self, coords: Vector) -> Vector:
        """Convert linear A98 RGB coordinates to XYZ (D65)."""
        return lin_a98rgb_to_xyz(coords)
    def from_base(self, coords: Vector) -> Vector:
        """Convert XYZ (D65) coordinates to linear A98 RGB."""
        return xyz_to_lin_a98rgb(coords)
class Wapiti(BaseScanner):
    """htcap scanner module that shells out to wapiti to test each crawled
    request for reflected XSS and stores any findings as vulnerabilities."""

    def init(self, argv):
        """Parse module arguments and verify the wapiti executable is runnable.

        Options:
            -h       print usage and exit
            -x PATH  directory containing the wapiti executable
        """
        self.wapiti_bin = None
        self.wapiti_cmd = 'wapiti'
        try:
            (opts, args) = getopt.getopt(argv, 'hx:')
        except getopt.GetoptError as err:
            print(str(err))
            self.exit(1)
        for (o, v) in opts:
            if (o == '-h'):
                self.usage()
                self.exit(0)
            elif (o == '-x'):
                self.wapiti_bin = v
        if self.wapiti_bin:
            self.wapiti_cmd = os.path.join(self.wapiti_bin, self.wapiti_cmd)
        try:
            # probe the executable; any failure means wapiti is unusable
            self.utils.execmd(self.wapiti_cmd)
        except Exception:
            # was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit propagate
            print(('Wapiti executable not found %s' % self.wapiti_cmd))
            self.exit(1)

    def get_settings(self):
        """Crawler settings requested by this scanner module."""
        return dict(request_types='xhr,link,form,jsonp,redirect,fetch', num_threads=10)

    def usage(self):
        """Print command-line usage for this module."""
        print("htcap wapiti module\nusage: scan wapiti <db_file> [options]\nOptions are:\n -h this help\n -x PATH set wapiti bin dir (by default is's supposed to be in $PATH)")

    class Scan(ScannerThread):
        def run(self):
            """Scan a single crawled request with wapiti and save any reported
            Cross Site Scripting findings."""
            request = self.request
            tmp_dir = self.tmp_dir
            url = request.url
            if ((request.method == 'POST') and request.data):
                # wapiti's xss:get module works on query params, so fold POST
                # data into the query string
                url += ('?' + request.data)
            out_file = (tmp_dir + '/output.json')
            cookie_file = (tmp_dir + '/cookies.json')
            with open(cookie_file, 'w') as cf:
                jsn = self.convert_cookies(request.cookies)
                cf.write(jsn)
            cmd = ['--url', url, '--timeout', '30', '--module', 'xss:get', '--scope', 'page', '--format', 'json', '--output', out_file, '--verify-ssl', '0']
            if self.request.referer:
                cmd.extend(('--header', ('Referer:%s' % self.request.referer)))
            if self.scanner.proxy:
                proto = self.scanner.proxy['proto']
                if proto.startswith('socks'):
                    proto = 'socks'
                cmd.extend(('--proxy', ('%s://%s:%s/' % (proto, self.scanner.proxy['host'], self.scanner.proxy['port']))))
            # per-request headers first (unless overridden), then scanner-wide ones
            extra_headers = []
            for hn in self.request.extra_headers:
                if (hn not in self.scanner.extra_headers):
                    extra_headers.append(((hn + ':') + self.request.extra_headers[hn]))
            for hn in self.scanner.extra_headers:
                extra_headers.append(((hn + ':') + self.scanner.extra_headers[hn]))
            for header in extra_headers:
                cmd.extend(('--header', header))
            if (len(request.cookies) > 0):
                cmd.extend(('--cookie', cookie_file))
            out = None
            try:
                cmd_out = self.utils.execmd(self.scanner.wapiti_cmd, cmd)
                out = cmd_out['out']
            except Exception as e:
                self.sprint(e)
            if (not os.path.exists(out_file)):
                # wapiti produced no report (crashed or nothing to scan)
                return
            with open(out_file, 'r') as fil:
                jsn = fil.read()
            report = []
            try:
                report = json.loads(jsn)['vulnerabilities']['Cross Site Scripting']
            except Exception as e:
                # BUG FIX: was `print(err)`, which raised NameError because
                # `err` is not defined in this scope
                print(e)
            for vuln in report:
                self.save_vulnerabilities([{'type': 'xss_reflected', 'description': json.dumps(vuln)}])

        def convert_cookies(self, cookies):
            """Convert crawler cookies into wapiti's domain/path-keyed JSON
            format and return the serialized string."""
            wcookies = {}
            for cookie in cookies:
                domain = cookie.domain
                if domain:
                    if (not domain.startswith('.')):
                        domain = ('.%s' % domain)
                elif cookie.setter:
                    # fall back to the host that set the cookie
                    domain = cookie.setter.hostname
                if (not (domain in list(wcookies.keys()))):
                    wcookies[domain] = {}
                if (not (cookie.path in list(wcookies[domain].keys()))):
                    wcookies[domain][cookie.path] = {}
                wcookies[domain][cookie.path][cookie.name] = dict(version=0, expires=cookie.expires, secure=cookie.secure, value=cookie.value, port=None)
            return json.dumps(wcookies)
class TestUnpackList(unittest.TestCase):
    """Tests for loxi.generic_util.unpack_list."""

    def test_simple(self):
        """Length-prefixed records must be split into one bytes object each."""
        def read_record(reader):
            # first byte is the record's total length, including itself
            (length,) = reader.peek('!B')
            return reader.read('!%ds' % length)[0]
        buf = b'\x04abc\x03de\x02f\x01'
        reader = loxi.generic_util.OFReader(buf)
        records = loxi.generic_util.unpack_list(reader, read_record)
        self.assertEqual([b'\x04abc', b'\x03de', b'\x02f', b'\x01'], records)
def test_database_urls_only_backfills_none_parts():
    """DATABASE_URL / DATA_BROKER_DATABASE_URL must backfill only the DB parts
    that are unset (None), leaving already-configured parts (here the hosts
    and passwords) untouched."""
    env = {
        ENV_CODE_VAR: _UnitTestDbPartsNoneConfig.ENV_CODE,
        'DATABASE_URL': 'postgres://dummy::12345/fresh_new_db_name',
        'DATA_BROKER_DATABASE_URL': 'postgres://broker:-foobar:54321/fresh_new_db_name_broker',
    }
    with mock.patch.dict(os.environ, env, clear=True):
        cfg = _UnitTestDbPartsNoneConfig(_env_file=None)
        # USAspending side: every part populated, URL parts backfilled
        assert cfg.DATABASE_URL is not None
        for part in ('HOST', 'PORT', 'NAME', 'USER', 'PASSWORD'):
            assert getattr(cfg, f'USASPENDING_DB_{part}') is not None
        assert cfg.USASPENDING_DB_HOST == 'foobar'
        assert cfg.USASPENDING_DB_PORT == '12345'
        assert cfg.USASPENDING_DB_NAME == 'fresh_new_db_name'
        assert cfg.USASPENDING_DB_USER == 'dummy'
        assert cfg.USASPENDING_DB_PASSWORD.get_secret_value() == 'pwd'
        # Broker side
        assert cfg.DATA_BROKER_DATABASE_URL is not None
        for part in ('HOST', 'PORT', 'NAME', 'USER', 'PASSWORD'):
            assert getattr(cfg, f'BROKER_DB_{part}') is not None
        assert cfg.BROKER_DB_HOST == 'broker-foobar'
        assert cfg.BROKER_DB_PORT == '54321'
        assert cfg.BROKER_DB_NAME == 'fresh_new_db_name_broker'
        assert cfg.BROKER_DB_USER == 'broker'
        assert cfg.BROKER_DB_PASSWORD.get_secret_value() == 'pass'
def write_grid_property(name, grid, filename, file_format, shape, buffer):
    """Wrap ``buffer`` as a 3D array and write it out as a grid property.

    :param name: property name to store in the file.
    :param grid: grid the property values belong to.
    :param filename: destination path.
    :param file_format: xtgeo file format identifier passed as ``fformat``.
    :param shape: (ncol, nrow, nlay) of the property values.
    :param buffer: buffer-protocol object holding the raw values.
    :return: the ndarray view constructed over ``buffer``.
    """
    values = np.ndarray(shape=shape, buffer=buffer, dtype=buffer.dtype)
    (ncol, nrow, nlay) = shape
    prop = xtgeo.GridProperty(ncol=ncol, nrow=nrow, nlay=nlay, values=values, grid=grid, name=name)
    prop.to_file(filename, fformat=file_format)
    return values
class Settings():
    """All tunable game settings, grouped by subsystem (screen, ship,
    bullets, alien fleet)."""

    def __init__(self):
        """Initialize every setting to its default value."""
        defaults = dict(
            # screen
            screen_width=1200,
            screen_height=800,
            bg_color=(230, 230, 230),
            # ship
            ship_speed=1.5,
            ship_limit=3,
            # bullets
            bullet_speed=2.5,
            bullet_width=3,
            bullet_height=15,
            bullet_color=(60, 60, 60),
            bullets_allowed=3,
            # alien fleet; fleet_direction presumably encodes horizontal
            # direction (+1/-1) — confirm in the fleet-movement logic
            alien_speed=1.0,
            fleet_drop_speed=10,
            fleet_direction=1,
        )
        for attr_name, value in defaults.items():
            setattr(self, attr_name, value)
def get_available_plugin() -> List[str]:
    """Return plugin groups for CI sharding.

    Lists the plugin directories (minus '.isort.cfg' and the plugins that must
    be tested alone), shuffles them, joins them into comma-separated groups of
    four, then appends the test-alone plugins as their own entries.
    """
    excluded = ['.isort.cfg']
    excluded.extend(test_alone_plugins)
    plugin_dirs = sorted(os.listdir(os.path.join(BASE, 'plugins')))
    plugins = [entry for entry in plugin_dirs if entry not in excluded]
    # shuffle so the grouping differs between runs
    random.shuffle(plugins)
    groups = [','.join(batch) for batch in chunk(plugins, 4)]
    groups.extend(test_alone_plugins)
    return groups
# NOTE(review): the bare tuples before the class and methods look like stripped
# `@unittest.skipIf(...)` decorators — confirm against the original source.
(no_gui_test_assistant, 'No GuiTestAssistant')
class TestGetColor(unittest.TestCase, GuiTestAssistant):
    """Tests for the get_color() modal color-dialog helper."""
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_close(self):
        """Cancelling the dialog must produce a None result."""
        tester = ModalDialogTester((lambda : get_color(None, 'rebeccapurple')))
        tester.open_and_wait(when_opened=(lambda x: x.close(accept=False)))
        self.assertEqual(tester.result, None)
    (no_modal_dialog_tester, 'ModalDialogTester unavailable')
    def test_close_show_alpha(self):
        """Cancelling with the alpha channel shown must also produce None."""
        tester = ModalDialogTester((lambda : get_color(None, 'rebeccapurple', True)))
        tester.open_and_wait(when_opened=(lambda x: x.close(accept=False)))
        self.assertEqual(tester.result, None)
class ERC20TransferFromBenchmark(BaseERC20Benchmark):
    """Benchmark transferFrom() calls on the simple ERC20 token.

    Setup deploys the token, funds addr1 (nonce 1), and approves addr2
    (nonce 2), so subsequent benchmark transactions can call
    transferFrom(addr1 -> addr2).
    """
    def __init__(self) -> None:
        super().__init__()
    def name(self) -> str:
        """Display name used in benchmark reports."""
        return 'ERC20 TransferFrom'
    def _setup_benchmark(self, chain: MiningChain) -> None:
        """Deploy the token, then prepare balance and allowance in one mined block."""
        self._next_nonce = None
        (txn, callback) = self._deploy_simple_token(chain)
        (_, receipts, computations) = chain.mine_all([txn])
        assert (len(receipts) == 1)
        assert (len(computations) == 1)
        callback(receipts[0], computations[0])
        # fund addr1 and grant addr2 an allowance in a single mined block
        actions = [self._erc_transfer(self.addr1, chain, nonce=1), self._erc_approve(self.addr2, chain, nonce=2)]
        (transactions, callbacks) = zip(*actions)
        (_, receipts, computations) = chain.mine_all(transactions)
        for (callback, receipt, computation) in zip(callbacks, receipts, computations):
            callback(receipt, computation)
    def _next_transaction(self, chain: MiningChain) -> None:
        # NOTE(review): annotated `-> None` but this clearly returns txn_info —
        # the annotation looks wrong; confirm the intended return type.
        txn_info = self._erc_transfer_from(self.addr1, self.addr2, chain, self._next_nonce)
        txn = txn_info[0]
        # remember the follow-on nonce so successive calls stay sequential
        self._next_nonce = (txn.nonce + 1)
        return txn_info
class CustomDialog(QDialog):
    """Modal form dialog that stacks labeled option widgets and enables its
    OK button only while every registered option reports itself valid."""
    # Background color intended for invalid fields.
    # NOTE(review): not referenced anywhere in this class — presumably consumed
    # by the option widgets; confirm.
    INVALID_COLOR = QColor(255, 235, 235)
    def __init__(self, title: str='Title', description: str='Description', parent: Optional[QWidget]=None) -> None:
        """Create the dialog shell: title, centered description, spacing rows."""
        QDialog.__init__(self, parent)
        # Widgets whose validity gates the OK button.
        self._option_list: List[QWidget] = []
        self.setModal(True)
        self.setWindowTitle(title)
        self._layout = QFormLayout()
        self._layout.setFieldGrowthPolicy(QFormLayout.FieldGrowthPolicy.ExpandingFieldsGrow)
        self._layout.setSizeConstraint(QLayout.SizeConstraint.SetFixedSize)
        label = QLabel(description)
        label.setAlignment(Qt.AlignmentFlag.AlignHCenter)
        self._layout.addRow(self.createSpace(5))
        self._layout.addRow(label)
        self._layout.addRow(self.createSpace(10))
        # Created by addButtons(); remains None until then.
        self.ok_button = None
        self.setLayout(self._layout)
    def notValid(self, msg: str) -> None:
        """Disable OK; ``msg`` is currently unused (no message is shown)."""
        if self.ok_button:
            self.ok_button.setEnabled(False)
    def valid(self) -> None:
        """Enable the OK button."""
        if self.ok_button:
            self.ok_button.setEnabled(True)
    def optionValidationChanged(self) -> None:
        """Re-check every registered option and gate the OK button accordingly."""
        valid = True
        for option in self._option_list:
            if (hasattr(option, 'isValid') and (not option.isValid())):
                valid = False
                self.notValid('One or more options are incorrectly set!')
        if valid:
            self.valid()
    def showAndTell(self) -> int:
        """Run an initial validation pass, show the dialog modally, and return
        its exec result code."""
        self.optionValidationChanged()
        return self.exec_()
    def createSpace(self, size: int=5) -> QWidget:
        """Return an empty widget used as vertical spacing in the form."""
        qw = QWidget()
        qw.setMinimumSize(QSize(size, size))
        return qw
    def addLabeledOption(self, label: Any, option_widget: QWidget) -> None:
        """Add a labeled option row and subscribe to its validation signal(s)."""
        self._option_list.append(option_widget)
        if hasattr(option_widget, 'validationChanged'):
            option_widget.validationChanged.connect(self.optionValidationChanged)
        if hasattr(option_widget, 'getValidationSupport'):
            validation_support = option_widget.getValidationSupport()
            validation_support.validationChanged.connect(self.optionValidationChanged)
        self._layout.addRow(f'{label}:', option_widget)
    def addWidget(self, widget: Union[(QWidget, QLayout, None)], label: str=''):
        """Add an arbitrary widget row, normalizing the label to end with ':'."""
        if (not label.endswith(':')):
            label = f'{label}:'
        self._layout.addRow(label, widget)
    def addButtons(self) -> None:
        """Append the OK/Cancel row; OK starts disabled until validation passes."""
        buttons = QDialogButtonBox((QDialogButtonBox.Ok | QDialogButtonBox.Cancel), Qt.Orientation.Horizontal, self)
        self.ok_button = buttons.button(QDialogButtonBox.Ok)
        if self.ok_button:
            self.ok_button.setEnabled(False)
        self._layout.addRow(self.createSpace(10))
        self._layout.addRow(buttons)
        buttons.accepted.connect(self.accept)
        buttons.rejected.connect(self.reject)
def get_epoch_for_url(url, netloc=None):
    """Return the rewalk epoch for ``url``.

    :param url: URL whose netloc determines the rewalk interval.
    :param netloc: optional pre-split netloc; derived from ``url`` when falsy.
    :return: a far-future constant (2**30) when the interval is 0 (meaning
        "never rewalk"), otherwise the epoch computed from the netloc's
        interval.
    :raises AssertionError: if no netloc can be determined.
    """
    if (not netloc):
        netloc = urllib.parse.urlsplit(url).netloc
    # BUG FIX: validate before use — the original asserted only after already
    # passing a possibly-empty netloc to get_interval_for_netloc
    assert netloc
    interval = get_interval_for_netloc(netloc)
    if (interval == 0):
        # interval 0 means "never revisit": push the epoch far into the future
        return (2 ** 30)
    return common.util.rewalk_epoch.get_epoch_from_netloc_interval(netloc, interval)
class ESP32H2BETA2StubLoader(ESP32H2BETA2ROM):
    """Loader used after the software stub has been uploaded to an
    ESP32-H2 (beta 2): same command set as the ROM loader, with stub-specific
    flash write size and status reply length."""
    FLASH_WRITE_SIZE = 16384
    STATUS_BYTES_LENGTH = 2
    IS_STUB = True
    def __init__(self, rom_loader):
        """Adopt the already-open serial connection/state from the ROM loader
        and discard any pending input."""
        self.secure_download_mode = rom_loader.secure_download_mode
        self._port = rom_loader._port
        self._trace_enabled = rom_loader._trace_enabled
        self.cache = rom_loader.cache
        self.flush_input()
# NOTE(review): the leading `.django_db(...)` looks like a stripped
# `@pytest.mark.django_db(transaction=True)` decorator — confirm.
.django_db(transaction=True)
def test_download_awards_with_all_prime_awards(client, _award_download_data):
    """Request a bulk download covering all prime award types and verify the
    row/column counts reported by the status endpoint."""
    # avoid resolving a real DB connection string during file generation
    download_generation.retrieve_db_string = Mock(return_value=get_database_dsn_string())
    filters = {'agency': 'all', 'prime_award_types': list(award_type_mapping.keys()), 'date_type': 'action_date', 'date_range': {'start_date': '2016-10-01', 'end_date': '2017-09-30'}}
    dl_resp = client.post('/api/v2/bulk_download/awards', content_type='application/json', data=json.dumps({'filters': filters, 'columns': []}))
    assert (dl_resp.status_code == status.HTTP_200_OK)
    resp = client.get('/api/v2/download/status/?file_name={}'.format(dl_resp.json()['file_name']))
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json()['total_rows'] == 6)
    assert (resp.json()['total_columns'] == 407)
def add_MsgServicer_to_server(servicer, server):
    """Register the cosmos.slashing.v1beta1 Msg service handlers on a gRPC
    server (auto-generated registration glue; only the Unjail RPC is exposed).

    :param servicer: object implementing the service's RPC methods.
    :param server: grpc.Server to register the handlers on.
    """
    rpc_method_handlers = {'Unjail': grpc.unary_unary_rpc_method_handler(servicer.Unjail, request_deserializer=cosmos_dot_slashing_dot_v1beta1_dot_tx__pb2.MsgUnjail.FromString, response_serializer=cosmos_dot_slashing_dot_v1beta1_dot_tx__pb2.MsgUnjailResponse.SerializeToString)}
    generic_handler = grpc.method_handlers_generic_handler('cosmos.slashing.v1beta1.Msg', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
def test_definitions_to_json_schema():
    """Schemas registered in Definitions must serialize under
    components/schemas, with cross-schema links emitted as $ref."""
    definitions = typesystem.Definitions()
    artist_schema = typesystem.Schema(fields={'name': typesystem.String(max_length=100)})
    album_schema = typesystem.Schema(
        fields={
            'title': typesystem.String(max_length=100),
            'release_date': typesystem.Date(),
            'artist': typesystem.Reference(to='Artist', definitions=definitions),
        }
    )
    definitions['Artist'] = artist_schema
    definitions['Album'] = album_schema
    expected = {
        'components': {
            'schemas': {
                'Artist': {
                    'type': 'object',
                    'properties': {'name': {'type': 'string', 'minLength': 1, 'maxLength': 100}},
                    'required': ['name'],
                },
                'Album': {
                    'type': 'object',
                    'properties': {
                        'title': {'type': 'string', 'minLength': 1, 'maxLength': 100},
                        'release_date': {'type': 'string', 'minLength': 1, 'format': 'date'},
                        'artist': {'$ref': '#/components/schemas/Artist'},
                    },
                    'required': ['title', 'release_date', 'artist'],
                },
            }
        }
    }
    assert typesystem.to_json_schema(definitions) == expected
class Dealer():
    """Deals cards from a deck to players (hole cards) and to the table
    (community cards)."""

    def __init__(self, **deck_kwargs):
        self.deck = Deck(**deck_kwargs)

    def deal_card(self) -> Card:
        """Draw one random card from the deck."""
        return self.deck.pick(random=True)

    def deal_private_cards(self, players: List[Player]):
        """Give every player two hole cards, dealt one round at a time."""
        for _round in range(2):
            for player in players:
                player.add_private_card(self.deal_card())

    def deal_community_cards(self, table: PokerTable, n_cards: int):
        """Place ``n_cards`` community cards on the table.

        :raises ValueError: if ``n_cards`` is not positive.
        """
        if n_cards <= 0:
            raise ValueError(f'Positive n of cards must be specified, but got {n_cards}')
        for _ in range(n_cards):
            table.add_community_card(self.deal_card())

    def deal_flop(self, table: PokerTable):
        """Deal the three flop cards."""
        return self.deal_community_cards(table, 3)

    def deal_turn(self, table: PokerTable):
        """Deal the single turn card."""
        return self.deal_community_cards(table, 1)

    def deal_river(self, table: PokerTable):
        """Deal the single river card."""
        return self.deal_community_cards(table, 1)
def publish(message: 'base.BodhiMessage', force: bool=False):
    """Queue *message* for delivery on session commit, or send it immediately.

    When *force* is true the session queue is bypassed and the message is
    published right away with retry.
    """
    if force:
        _publish_with_retry(message)
        return
    session = Session()
    # Lazily create the per-session list of pending messages, then enqueue.
    session.info.setdefault('messages', []).append(message)
    _log.debug('Queuing message %r for delivery on session commit', message.id)
def test_right_to_left(mesh, DGDPC0, W):
    """Advect a step inflow profile leftwards with an upwind DG scheme and
    check the solution reproduces the inflow data."""
    u0 = project(as_vector(((-1.0), 0.0, 0.0)), W)
    coords = SpatialCoordinate(mesh)
    # Step profile: 1.0 on the middle band of the y-axis, 0.5 elsewhere.
    inflow = Function(DGDPC0)
    inflow.interpolate(conditional(And((real(coords[1]) > 0.25), (real(coords[1]) < 0.75)), 1.0, 0.5))
    n = FacetNormal(mesh)
    # Upwinded normal velocity.
    un = 0.5 * (dot(u0, n) + abs(dot(u0, n)))
    D = TrialFunction(DGDPC0)
    phi = TestFunction(DGDPC0)
    # Cell term + interior-facet upwind flux + outflow boundary term.
    a = ((-inner(D, dot(u0, grad(phi)))) * dx) \
        + (inner(((un('+') * D('+')) - (un('-') * D('-'))), jump(phi)) * dS_v) \
        + (inner((un * D), phi) * ds_v(1))
    L = (-inner((inflow * dot(u0, n)), phi)) * ds_v(2)
    out = Function(DGDPC0)
    solve((a == L), out)
    assert max(abs((out.dat.data - inflow.dat.data))) < 1e-07
def to_string(x, quote_string=True):
    """Render *x* as a config-style string.

    Strings are single-quoted unless *quote_string* is False; booleans are
    lowercased; None becomes 'null'; dicts render as comma-joined 'k:v' pairs
    and lists/tuples as comma-joined items; anything else is str()'d first.
    """
    if isinstance(x, string_types):
        return ("'%s'" % x) if quote_string else ('%s' % x)
    # bool must be tested before int/float: bool is a subclass of int.
    if isinstance(x, bool):
        return str(x).lower()
    if isinstance(x, (int, float)):
        return str(x)
    if isinstance(x, NoneType):
        return 'null'
    if isinstance(x, dict):
        pairs = (
            '%s:%s' % (to_string(k, quote_string=False), to_string(v, quote_string=False))
            for (k, v) in iteritems(x)
        )
        return to_string(','.join(pairs))
    if isinstance(x, (list, tuple)):
        return to_string(','.join(to_string(item, quote_string=False) for item in x))
    # Fallback: stringify and recurse, which lands in the string branch above.
    return to_string(str(x))
class thread_credits(threading.Thread):
    # Background thread that scrolls the credits text character-by-character
    # inside a fixed region of the terminal, pacing the whole roll so it
    # takes ~174 seconds regardless of the text length.
    # Relies on module globals: credits, credits_height, credits_width,
    # credits_pos_x, print_lock, is_draw_end, cursor_x/cursor_y and move().
    def run(self):
        global print_lock
        global cursor_x, cursor_y
        credit_x = 0            # column of the next character on the current line
        i = 0                   # characters emitted so far (drives pacing)
        length = len(credits)
        last_credits = ['']     # sliding window of the most recent completed lines
        startTime = time.time()
        for ch in credits:
            # Wall-clock deadline by which this character should be drawn.
            currentTime = (startTime + ((174.0 / length) * i))
            i += 1
            if (ch == '\n'):
                credit_x = 0
                last_credits.append('')
                # Keep only as many lines as fit in the credits region.
                if (len(last_credits) > credits_height):
                    last_credits = last_credits[(- credits_height):]
                print_lock.acquire()
                # is_draw_end signals the UI is shutting down; bail out cleanly.
                if is_draw_end:
                    print_lock.release()
                    break
                # Blank the rows above the retained lines...
                for y in range(2, ((2 + credits_height) - len(last_credits))):
                    move(credits_pos_x, y, False, False)
                    print((' ' * credits_width), end='')
                # ...then redraw each retained line, padded to the full width.
                for k in range(len(last_credits)):
                    y = (((2 + credits_height) - len(last_credits)) + k)
                    move(credits_pos_x, y, False, False)
                    print(last_credits[k], end='')
                    print((' ' * (credits_width - len(last_credits[k]))), end='')
                # Restore the user's cursor before releasing the console lock.
                move(cursor_x, cursor_y, False, False)
                print_lock.release()
            else:
                last_credits[(- 1)] += ch
                print_lock.acquire()
                if is_draw_end:
                    print_lock.release()
                    break
                # Draw the new character on the bottom line of the region.
                move((credits_pos_x + credit_x), (credits_height + 1), False, False)
                print(ch, end='')
                move(cursor_x, cursor_y, False, False)
                print_lock.release()
                credit_x += 1
            # Sleep in small steps until this character's deadline passes.
            while (time.time() < currentTime):
                time.sleep(0.01)
class LineEdit(QtWidgets.QLineEdit):
    """Line edit that accepts drag-and-drop of plain text or file URLs."""

    def __init__(self, *args, **kwargs):
        super(LineEdit, self).__init__(*args, **kwargs)
        self.setAcceptDrops(True)

    def dragEnterEvent(self, e):
        # Accept only payloads we can turn into a path.
        data = e.mimeData()
        if data.hasFormat('text/plain') or data.hasUrls():
            e.accept()
        else:
            e.ignore()

    def dropEvent(self, e):
        data = e.mimeData()
        path = ''
        if data.hasFormat('text/plain'):
            # NOTE(review): this branch strips 'file://' (two slashes) while
            # the URL branch strips 'file:///' (three) — confirm intended.
            path = data.text().replace('file://', '').strip()
        elif data.hasUrls():
            first_url = data.urls()[0]
            path = first_url.toString().replace('file:///', '').strip()
        self.setText(path)
def _set_invalid_error_string(source_node: NodeBase, pc_map: Dict) -> None:
    """Attach a human-readable 'dev' revert reason to *pc_map*.

    Looks up the AST node covering the pc's source offset; index accesses and
    division/modulus operations get a descriptive reason, anything else is
    left untouched.
    """
    try:
        node = source_node.children(include_children=False, offset_limits=pc_map['offset'])[0]
    except IndexError:
        # No AST node covers this offset; nothing to annotate.
        return
    if node.nodeType == 'IndexAccess':
        pc_map['dev'] = 'Index out of range'
    elif node.nodeType == 'BinaryOperation':
        reasons = {'/': 'Division by zero', '%': 'Modulus by zero'}
        if node.operator in reasons:
            pc_map['dev'] = reasons[node.operator]
def getfqdn() -> str:
    # Return the machine's fully-qualified domain name, lowercased and
    # stripped, caching the result in the module-level `fqdn` global.
    # Falls back from socket.getfqdn() through gethostname() to the
    # HOSTNAME/HOST environment variables, and finally to ''.
    global fqdn
    if (not fqdn):
        fqdn = socket.getfqdn()
    # 'localhost.localdomain' is a useless placeholder on some distros.
    if (fqdn == 'localhost.localdomain'):
        fqdn = socket.gethostname()
    if (not fqdn):
        fqdn = os.environ.get('HOSTNAME')
    if (not fqdn):
        fqdn = os.environ.get('HOST')
    # environ.get may have returned None; normalise to ''.
    if (fqdn is None):
        fqdn = ''
    fqdn = fqdn.lower().strip()
    return fqdn
def wait_for_node_initialization(is_initialized, backend, wait_time=0.3):
    """Block until every entry in *is_initialized* is truthy.

    Sleeps 0.1s before the first check and *wait_time* between subsequent
    checks; while waiting, logs (once) the names that are still pending.
    """
    first_pass = True
    while True:
        # Short initial sleep, then the configured polling interval.
        sleep(0.1 if first_pass else wait_time)
        first_pass = False
        pending = [name for (name, flag) in is_initialized.items() if not flag]
        if not pending:
            break
        backend.loginfo_once('Waiting for nodes "%s" to be initialized.' % str(pending))
_dataclasses.dataclass(frozen=True)  # NOTE(review): looks like a stripped '@' decorator — confirm
class IntParam(Param[int]):
    """Integer parameter: resolved from the environment, then the default, then 0."""

    def value(self) -> int:
        # Environment variable takes precedence over the declared default.
        if _os.environ.get(self.name) is not None:
            return int(_os.environ[self.name])
        if self.default is not None:
            # Defaults may be lazily-evaluated Expression objects.
            if isinstance(self.default, Expression):
                return self.default.value
            return self.default
        return int()
class Envs(FlyteIdlEntity):
    """Wrapper around a string-to-string environment-variable mapping,
    convertible to and from its Flyte IDL protobuf representation."""

    def __init__(self, envs: Dict[str, str]):
        self._envs = envs

    # BUG FIX: restored @property — to_flyte_idl accesses `self.envs.items()`,
    # which raises AttributeError when `envs` is a plain (uncalled) method.
    @property
    def envs(self) -> Dict[str, str]:
        return self._envs

    def to_flyte_idl(self) -> _common_pb2.Envs:
        """Serialize to the protobuf Envs message (one KeyValuePair per entry)."""
        return _common_pb2.Envs(
            values=[_literals_pb2.KeyValuePair(key=k, value=v) for (k, v) in self.envs.items()]
        )

    # BUG FIX: restored @classmethod — the first parameter is `cls` and the
    # method constructs an instance, so it must be a class method. The return
    # annotation also said _common_pb2.Envs while the method returns cls(...).
    @classmethod
    def from_flyte_idl(cls, pb2: _common_pb2.Envs) -> 'Envs':
        """Deserialize an Envs from its protobuf message."""
        return cls(envs={kv.key: kv.value for kv in pb2.values})
class TestWebUIBuilds(CoprsTestCase):
    # NOTE(review): the two lines below appear to be decorators whose '@name'
    # prefix was lost during extraction (the second is presumably
    # @pytest.mark.usefixtures) — confirm against the original file.
    ('u1')
    .usefixtures('f_users', 'f_users_api', 'f_mock_chroots', 'f_db')
    def test_isolation_option_set(self):
        # End-to-end: create a project with isolation='simple' and verify the
        # project's edit page renders that value as the selected option.
        chroot = 'fedora-rawhide-i386'
        project = 'test'
        self.web_ui.new_project(project, [chroot], isolation='simple')
        route = '/coprs/{username}/{coprname}/edit/'.format(username=self.transaction_username, coprname=project)

        def get_selected(html):
            # Pull the currently-selected <option> out of the isolation <select>.
            soup = BeautifulSoup(html, 'html.parser')
            return soup.find('select', id='isolation').find('option', attrs={'selected': True})
        resp = self.test_client.get(route)
        assert (get_selected(resp.data)['value'] == 'simple')
class RemoveTask(Task):
    """Scheduled task that removes a Notion block at a given time.

    Can be constructed either from explicit arguments or rehydrated from a
    serialized dict (see ``to_dic`` / ``_from_dic``).
    """

    def __init__(self, block_id_to_remove=None, datetime_to_run=None, client=None, dic=None):
        if client:
            self.notion_client = client
        if dic:
            # Rehydrate from a serialized dict; the base class drives _from_dic.
            super().__init__(type='RemoveTask', dic=dic)
        elif (block_id_to_remove and datetime_to_run):
            super().__init__(type='RemoveTask', task_id=block_id_to_remove, datetime_to_run=datetime_to_run)
            self.block_id_to_remove = block_id_to_remove

    def __str__(self):
        # BUG FIX: __str__ previously print()ed the message and implicitly
        # returned None, so str(task) raised TypeError. Return the string.
        return 'Task of type {0} at this time: {1}'.format(self.type, self.datetime_to_run)

    def run_task(self):
        """Fetch the target block through the Notion client and remove it."""
        print('Removing block with id {0}'.format(self.block_id_to_remove))
        block = self.notion_client.get_block(self.block_id_to_remove)
        block.remove()
        print('Block removed succesfully!')

    def _from_dic(self, dic):
        # Restore state from the dict form produced by to_dic().
        self.type = dic['type']
        self.task_id = dic['task_id']
        self.datetime_to_run = datetime.strptime(dic['datetime_to_run'], '%Y-%m-%d %H:%M:%S')
        self.block_id_to_remove = dic['block_id_to_remove']

    def to_dic(self):
        """Serialize to a plain dict (inverse of _from_dic) and cache it."""
        dic = {'type': self.type, 'task_id': self.task_id, 'block_id_to_remove': self.block_id_to_remove, 'datetime_to_run': str(self.datetime_to_run)}
        self.task_dictionary = dic
        return self.task_dictionary
class Plugin(plugin.PluginProto):
    # RPIEasy task plugin for the BH1750 I2C ambient-light (lux) sensor,
    # with optional 2-second oversampling and outlier-filtered averaging.
    PLUGIN_ID = 10
    PLUGIN_NAME = 'Environment - BH1750 Lux sensor'
    PLUGIN_VALUENAME1 = 'Lux'

    def __init__(self, taskindex):
        plugin.PluginProto.__init__(self, taskindex)
        self.dtype = rpieGlobals.DEVICE_TYPE_I2C
        self.vtype = rpieGlobals.SENSOR_TYPE_SINGLE
        self.readinprogress = 0          # re-entrancy guard for sensor reads
        self.valuecount = 1
        self.senddataoption = True
        self.timeroption = True
        self.timeroptional = False
        self.formulaoption = True
        self._nextdataservetime = 0      # millis() when the next value is due
        self.lastread = 0                # millis() of the last raw sensor read
        self.samples = 3
        self.preread = (self.samples * 2000)  # ms before serve time to start oversampling
        self.LARR = []                   # collected lux samples for averaging
        self.i2cbus = None

    def plugin_init(self, enableplugin=None):
        # (Re)initialize the I2C bus and reset the sampling state.
        plugin.PluginProto.plugin_init(self, enableplugin)
        self.LARR = []
        self.uservar[0] = 0
        if self.enabled:
            try:
                try:
                    i2cl = self.i2c
                except:
                    i2cl = (- 1)
                self.i2cbus = gpios.HWPorts.i2c_init(i2cl)
                if (i2cl == (- 1)):
                    # No explicit bus configured: fall back to the default bus.
                    self.i2cbus = gpios.HWPorts.i2cbus
                if (self.i2cbus is not None):
                    # Schedule the first read slightly before the interval so
                    # oversampling has time to collect samples.
                    if (self.interval > 2):
                        nextr = (self.interval - 2)
                    else:
                        nextr = self.interval
                    self._lastdataservetime = (rpieTime.millis() - (nextr * 1000))
                    self.lastread = 0
                else:
                    misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, 'I2C can not be initialized!')
                    self.enabled = False
            except Exception as e:
                misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, str(e))
                self.enabled = False
                self.i2cbus = None

    def webform_load(self):
        # Render the settings form: I2C address selector plus oversampling toggle.
        choice1 = self.taskdevicepluginconfig[0]
        options = ['0x23', '0x5c']
        optionvalues = [35, 92]
        webserver.addFormSelector('I2C address', 'plugin_010_addr', 2, options, optionvalues, None, int(choice1))
        webserver.addFormNote("Enable <a href='pinout'>I2C bus</a> first, than <a href='i2cscanner'>search for the used address</a>!")
        webserver.addFormCheckBox('Oversampling', 'plugin_010_over', self.timer2s)
        return True

    def webform_save(self, params):
        # Persist the submitted form values into the plugin config.
        par = webserver.arg('plugin_010_addr', params)
        if (par == ''):
            par = 0
        self.taskdevicepluginconfig[0] = int(par)
        if (webserver.arg('plugin_010_over', params) == 'on'):
            self.timer2s = True
        else:
            self.timer2s = False
        return True

    def timer_two_second(self):
        # Oversampling hook: collect an extra sample every 2s when we are
        # inside the pre-read window before the next scheduled serve time.
        if (self.timer2s and self.initialized and (self.readinprogress == 0) and self.enabled):
            if ((self._nextdataservetime - rpieTime.millis()) <= self.preread):
                self.readinprogress = 1
                self.p010_get_value()
                self.readinprogress = 0
        return self.timer2s

    def plugin_read(self):
        # Serve a value: average the collected samples, discarding outliers
        # when the spread is large, then publish and reschedule.
        result = False
        if (self.initialized and (self.readinprogress == 0)):
            self.readinprogress = 1
            self.p010_get_value()
            if (len(self.LARR) == 1):
                self.set_value(1, self.LARR[0], False)
            if (len(self.LARR) > 1):
                alux = round((sum(self.LARR) / len(self.LARR)), 2)
                if ((max(self.LARR) - min(self.LARR)) > 8):
                    # Large spread: keep only samples within `difft` of the
                    # mean (difft clamped to [1, 10]) and re-average.
                    difft = abs((max(self.LARR) - alux))
                    if (difft > abs((alux - min(self.LARR)))):
                        difft = abs((alux - min(self.LARR)))
                    if (difft < 1):
                        difft = 1
                    if (difft > 10):
                        difft = 10
                    TARR2 = []
                    for i in range(0, len(self.LARR)):
                        if (abs((alux - self.LARR[i])) <= difft):
                            TARR2.append(self.LARR[i])
                    if (len(TARR2) > 0):
                        alux = round((sum(TARR2) / len(TARR2)), 2)
                self.set_value(1, alux, False)
            self.plugin_senddata()
            self.LARR = []
            self._lastdataservetime = rpieTime.millis()
            self._nextdataservetime = (self._lastdataservetime + (self.interval * 1000))
            result = True
            self.readinprogress = 0
        return result

    def p010_get_value(self):
        # Read one raw sample from the sensor, at most once every 2 seconds.
        if (rpieTime.millis() >= (self.lastread + 2000)):
            lux = None
            try:
                # 33 (0x21) is the BH1750 one-time high-resolution-mode command
                # used as the read register here — confirm against datasheet.
                lux = gpios.HWPorts.i2c_read_block(int(self.taskdevicepluginconfig[0]), 33, bus=self.i2cbus)
            except:
                lux = None
            if (lux != None):
                self.LARR.append(round(self.convertToNumber(lux), 2))
            self.lastread = rpieTime.millis()

    def convertToNumber(self, data):
        # Combine the raw high/low bytes and apply the BH1750 1.2 scale factor.
        res = 0
        if (len(data) == 1):
            res = data[0]
        elif (len(data) > 1):
            res = (data[1] + (256 * data[0]))
        return (res / 1.2)
class OptionPlotoptionsPackedbubble(Options):
    # Generated accessor class for the Highcharts plotOptions.packedbubble
    # configuration. Each option appears as a getter (returning the default
    # via _config_get) and a setter (writing via _config); sub-option objects
    # are exposed through _config_sub_data.
    #
    # NOTE(review): each getter/setter pair shares one name — in the original
    # generated source these carry @property / @<name>.setter decorators that
    # appear to have been lost in extraction; as written, the second def
    # silently overrides the first. Confirm against the generator's output.
    def accessibility(self) -> 'OptionPlotoptionsPackedbubbleAccessibility':
        return self._config_sub_data('accessibility', OptionPlotoptionsPackedbubbleAccessibility)
    def allowPointSelect(self):
        return self._config_get(False)
    def allowPointSelect(self, flag: bool):
        self._config(flag, js_type=False)
    def animation(self):
        return self._config_get(True)
    def animation(self, flag: bool):
        self._config(flag, js_type=False)
    def animationLimit(self):
        return self._config_get(250)
    def animationLimit(self, num: float):
        self._config(num, js_type=False)
    def className(self):
        return self._config_get(None)
    def className(self, text: str):
        self._config(text, js_type=False)
    def clip(self):
        return self._config_get(True)
    def clip(self, flag: bool):
        self._config(flag, js_type=False)
    def color(self):
        return self._config_get(None)
    def color(self, text: str):
        self._config(text, js_type=False)
    def colorAxis(self):
        return self._config_get(0)
    def colorAxis(self, num: float):
        self._config(num, js_type=False)
    def colorIndex(self):
        return self._config_get(None)
    def colorIndex(self, num: float):
        self._config(num, js_type=False)
    def colorKey(self):
        return self._config_get('z')
    def colorKey(self, text: str):
        self._config(text, js_type=False)
    def crisp(self):
        return self._config_get(False)
    def crisp(self, flag: bool):
        self._config(flag, js_type=False)
    def cursor(self):
        return self._config_get(None)
    def cursor(self, text: str):
        self._config(text, js_type=False)
    def custom(self):
        return self._config_get(None)
    def custom(self, value: Any):
        self._config(value, js_type=False)
    def dashStyle(self):
        return self._config_get('Solid')
    def dashStyle(self, text: str):
        self._config(text, js_type=False)
    def dataLabels(self) -> 'OptionPlotoptionsPackedbubbleDatalabels':
        return self._config_sub_data('dataLabels', OptionPlotoptionsPackedbubbleDatalabels)
    def description(self):
        return self._config_get(None)
    def description(self, text: str):
        self._config(text, js_type=False)
    def displayNegative(self):
        return self._config_get(True)
    def displayNegative(self, flag: bool):
        self._config(flag, js_type=False)
    def draggable(self):
        return self._config_get(True)
    def draggable(self, flag: bool):
        self._config(flag, js_type=False)
    def enableMouseTracking(self):
        return self._config_get(True)
    def enableMouseTracking(self, flag: bool):
        self._config(flag, js_type=False)
    def events(self) -> 'OptionPlotoptionsPackedbubbleEvents':
        return self._config_sub_data('events', OptionPlotoptionsPackedbubbleEvents)
    def findNearestPointBy(self):
        return self._config_get('xy')
    def findNearestPointBy(self, text: str):
        self._config(text, js_type=False)
    def getExtremesFromAll(self):
        return self._config_get(False)
    def getExtremesFromAll(self, flag: bool):
        self._config(flag, js_type=False)
    def inactiveOtherPoints(self):
        return self._config_get(False)
    def inactiveOtherPoints(self, flag: bool):
        self._config(flag, js_type=False)
    def includeInDataExport(self):
        return self._config_get(None)
    def includeInDataExport(self, flag: bool):
        self._config(flag, js_type=False)
    def label(self) -> 'OptionPlotoptionsPackedbubbleLabel':
        return self._config_sub_data('label', OptionPlotoptionsPackedbubbleLabel)
    def layoutAlgorithm(self) -> 'OptionPlotoptionsPackedbubbleLayoutalgorithm':
        return self._config_sub_data('layoutAlgorithm', OptionPlotoptionsPackedbubbleLayoutalgorithm)
    def legendSymbol(self):
        return self._config_get('rectangle')
    def legendSymbol(self, text: str):
        self._config(text, js_type=False)
    def linecap(self):
        # NOTE(review): passes the *builtin* `round` as the default — the
        # generator probably meant the string 'round'; confirm.
        return self._config_get(round)
    def linecap(self, value: Any):
        self._config(value, js_type=False)
    def lineWidth(self):
        return self._config_get(0)
    def lineWidth(self, num: float):
        self._config(num, js_type=False)
    def linkedTo(self):
        return self._config_get(None)
    def linkedTo(self, text: str):
        self._config(text, js_type=False)
    def marker(self) -> 'OptionPlotoptionsPackedbubbleMarker':
        return self._config_sub_data('marker', OptionPlotoptionsPackedbubbleMarker)
    def maxSize(self):
        return self._config_get('50%')
    def maxSize(self, num: float):
        self._config(num, js_type=False)
    def minSize(self):
        return self._config_get('10%')
    def minSize(self, num: float):
        self._config(num, js_type=False)
    def negativeColor(self):
        return self._config_get(None)
    def negativeColor(self, text: str):
        self._config(text, js_type=False)
    def onPoint(self) -> 'OptionPlotoptionsPackedbubbleOnpoint':
        return self._config_sub_data('onPoint', OptionPlotoptionsPackedbubbleOnpoint)
    def opacity(self):
        return self._config_get(1)
    def opacity(self, num: float):
        self._config(num, js_type=False)
    def parentNode(self) -> 'OptionPlotoptionsPackedbubbleParentnode':
        return self._config_sub_data('parentNode', OptionPlotoptionsPackedbubbleParentnode)
    def point(self) -> 'OptionPlotoptionsPackedbubblePoint':
        return self._config_sub_data('point', OptionPlotoptionsPackedbubblePoint)
    def pointDescriptionFormat(self):
        return self._config_get(None)
    def pointDescriptionFormat(self, value: Any):
        self._config(value, js_type=False)
    def pointDescriptionFormatter(self):
        return self._config_get(None)
    def pointDescriptionFormatter(self, value: Any):
        self._config(value, js_type=False)
    def pointInterval(self):
        return self._config_get(1)
    def pointInterval(self, num: float):
        self._config(num, js_type=False)
    def pointIntervalUnit(self):
        return self._config_get(None)
    def pointIntervalUnit(self, value: Any):
        self._config(value, js_type=False)
    def pointStart(self):
        return self._config_get(0)
    def pointStart(self, num: float):
        self._config(num, js_type=False)
    def relativeXValue(self):
        return self._config_get(False)
    def relativeXValue(self, flag: bool):
        self._config(flag, js_type=False)
    def selected(self):
        return self._config_get(False)
    def selected(self, flag: bool):
        self._config(flag, js_type=False)
    def showCheckbox(self):
        return self._config_get(False)
    def showCheckbox(self, flag: bool):
        self._config(flag, js_type=False)
    def showInLegend(self):
        return self._config_get(None)
    def showInLegend(self, flag: bool):
        self._config(flag, js_type=False)
    def sizeBy(self):
        return self._config_get('area')
    def sizeBy(self, text: str):
        self._config(text, js_type=False)
    def skipKeyboardNavigation(self):
        return self._config_get(None)
    def skipKeyboardNavigation(self, flag: bool):
        self._config(flag, js_type=False)
    def softThreshold(self):
        return self._config_get(False)
    def softThreshold(self, flag: bool):
        self._config(flag, js_type=False)
    def sonification(self) -> 'OptionPlotoptionsPackedbubbleSonification':
        return self._config_sub_data('sonification', OptionPlotoptionsPackedbubbleSonification)
    def stacking(self):
        return self._config_get(None)
    def stacking(self, text: str):
        self._config(text, js_type=False)
    def states(self) -> 'OptionPlotoptionsPackedbubbleStates':
        return self._config_sub_data('states', OptionPlotoptionsPackedbubbleStates)
    def stickyTracking(self):
        return self._config_get(False)
    def stickyTracking(self, flag: bool):
        self._config(flag, js_type=False)
    def threshold(self):
        return self._config_get(0)
    def threshold(self, num: float):
        self._config(num, js_type=False)
    def tooltip(self) -> 'OptionPlotoptionsPackedbubbleTooltip':
        return self._config_sub_data('tooltip', OptionPlotoptionsPackedbubbleTooltip)
    def turboThreshold(self):
        return self._config_get(0)
    def turboThreshold(self, num: float):
        self._config(num, js_type=False)
    def useSimulation(self):
        return self._config_get(True)
    def useSimulation(self, flag: bool):
        self._config(flag, js_type=False)
    def visible(self):
        return self._config_get(True)
    def visible(self, flag: bool):
        self._config(flag, js_type=False)
    def zoneAxis(self):
        return self._config_get('y')
    def zoneAxis(self, text: str):
        self._config(text, js_type=False)
    def zones(self) -> 'OptionPlotoptionsPackedbubbleZones':
        return self._config_sub_data('zones', OptionPlotoptionsPackedbubbleZones)
    def zThreshold(self):
        return self._config_get(0)
    def zThreshold(self, num: float):
        self._config(num, js_type=False)
.integration
# NOTE(review): the line above and the '.usefixtures' line below appear to be
# pytest mark decorators whose '@pytest.mark' prefix was lost in extraction —
# confirm against the original file.
class TestFidesConnectorIntegration():
    # Integration tests for FidesConnector against a live postgres example DB.
    def test_test_connection(self, test_fides_connector: FidesConnector):
        assert (test_fides_connector.test_connection() == ConnectionTestStatus.succeeded)
    .usefixtures('postgres_integration_db', 'postgres_example_test_dataset_config_read_access')
    def test_retrieve_data(self, test_fides_connector: FidesConnector, policy_local_storage: Policy, monkeypatch, authenticated_fides_client, async_api_client, api_client):
        # Build a pending privacy request and route HTTP through the test clients.
        privacy_request = PrivacyRequest(id=f'test_fides_connector_retrieve_data{uuid.uuid4()}', policy=policy_local_storage, status=PrivacyRequestStatus.pending)
        privacy_request.cache_identity(identity={'email': 'customer-'})
        node = TraversalNode(generate_node('fides_dataset', 'fides_collection', 'test_field'))
        monkeypatch.setattr(Client, 'send', api_client.send)
        monkeypatch.setattr(request_service, 'get_async_client', (lambda : async_api_client))
        result = test_fides_connector.retrieve_data(node=node, policy=policy_local_storage, privacy_request=privacy_request, input_data=[])
        assert (len(result) == 1)
        # Every access rule should surface the expected example-dataset rows.
        for rule in policy_local_storage.get_rules_for_action(action_type=ActionType.access):
            result_data = result[0][rule.key]
            assert_rows_match(result_data['postgres_example_test_dataset:address'], min_size=2, keys=['street', 'city', 'state', 'zip'])
            assert_rows_match(result_data['postgres_example_test_dataset:orders'], min_size=1, keys=['customer_id'])
            assert_rows_match(result_data['postgres_example_test_dataset:payment_card'], min_size=1, keys=['name', 'ccn', 'customer_id'])
            assert_rows_match(result_data['postgres_example_test_dataset:customer'], min_size=1, keys=['name', 'email'])
            assert (result_data['postgres_example_test_dataset:customer'][0]['email'] == 'customer-')
# NOTE(review): the line below looks like a decorator whose '@name' prefix was
# lost in extraction (likely hypothesis's '@given(gridproperties())') — confirm.
(gridproperties())
def test_get_dataframe_filled(gridproperties):
    # With activeonly=False the dataframe must contain one row per grid cell.
    gridproperties_list = list(gridproperties)
    assume((len(gridproperties_list) > 0))
    df = xtgeo.gridproperties_dataframe(gridproperties, ijk=False, activeonly=False)
    assert (len(df.index) == ((gridproperties.ncol * gridproperties.nrow) * gridproperties.nlay))
# NOTE(review): the line below appears to be '@pytest.mark.skipif(...)' with
# the '@pytest.mark' prefix lost in extraction — confirm.
.skipif((os.name == 'nt'), reason='Theme error writing content1: filename, directory name, or volume label syntax is incorrect')
def test_toc_urllink(cli: CliRunner, temp_with_override, file_regression):
    # Build a book whose ToC contains a URL entry and regression-check the
    # rendered toctree HTML.
    path_output = temp_with_override.joinpath('mybook').absolute()
    p_toc = path_books.joinpath('toc')
    path_toc = p_toc.joinpath('_toc_urllink.yml')
    result = cli.invoke(build, [p_toc.as_posix(), '--path-output', path_output.as_posix(), '--toc', path_toc.as_posix()])
    print(result.output)
    assert (result.exit_code == 0)
    path_toc_directive = path_output.joinpath('_build', 'html', 'index.html')
    soup = BeautifulSoup(path_toc_directive.read_text(encoding='utf8'), 'html.parser')
    toc = soup.find_all('div', class_='toctree-wrapper')
    assert (len(toc) == 1)
    file_regression.check(toc[0].prettify(), extension='.html', encoding='utf8')
class Bijection():
    """Bundle of a compiled module plus the numpy dtypes describing an RNG
    bijection's word, key and counter layouts."""

    def __init__(self, module, word_dtype, key_dtype, counter_dtype):
        self.module = module
        self.word_dtype = word_dtype
        # Word counts are read off the shape of the structured 'v' field.
        self.key_words = key_dtype.fields['v'][0].shape[0]
        self.counter_words = counter_dtype.fields['v'][0].shape[0]
        self.counter_dtype = counter_dtype
        self.key_dtype = key_dtype
        # Map a word dtype (class or dtype instance) to its raw-sampler name.
        self.raw_functions = {
            numpy.uint32: 'get_raw_uint32',
            numpy.dtype('uint32'): 'get_raw_uint32',
            numpy.uint64: 'get_raw_uint64',
            numpy.dtype('uint64'): 'get_raw_uint64',
        }

    def __process_modules__(self, process):
        # Rebuild around the processed module, keeping all dtypes intact.
        return Bijection(process(self.module), self.word_dtype, self.key_dtype, self.counter_dtype)
class PacketdrillSuite(Suite):
    """Suite adapter for packetdrill: stdout carries one '<name> <code>' pair
    per line, where exit code '0' means the case passed."""
    NAME = 'packetdrill'

    def discover_cases(self) -> List[DiscoveredTestCase]:
        # Packetdrill exposes a single synthetic case: did the run exit(0)?
        return [DiscoveredTestCase(name='exec', description='does the test exit(0)')]

    def parse(self, stdout: List[str], stderr: List[str], returncode: int) -> List[TestCaseResult]:
        """Translate stdout lines into TestCaseResults, skipping malformed lines."""
        results: List[TestCaseResult] = []
        for line in stdout:
            parts = line.split()
            if len(parts) != 2:
                # Not a 'name code' pair; ignore.
                continue
            case = TestCaseResult(name=parts[0], status=TestStatus.FAILED)
            if parts[1] == '0':
                case.status = TestStatus.PASSED
            results.append(case)
        return results
class ConditionHandler(Handler):
    # Lifts Binary Ninja MLIL comparison instructions into decompiler
    # Condition objects carrying the matching OperationType.
    def register(self):
        # Map every MLIL comparison class (signed, unsigned, float) to
        # lift_condition with the appropriate OperationType bound.
        # NOTE(review): FcmpO (ordered) and FcmpUo (unordered) both map to
        # OperationType.equal — looks suspicious; confirm this is intended.
        self._lifter.HANDLERS.update({mediumlevelil.MediumLevelILCmpE: partial(self.lift_condition, operation=OperationType.equal), mediumlevelil.MediumLevelILCmpNe: partial(self.lift_condition, operation=OperationType.not_equal), mediumlevelil.MediumLevelILCmpSge: partial(self.lift_condition, operation=OperationType.greater_or_equal), mediumlevelil.MediumLevelILCmpSgt: partial(self.lift_condition, operation=OperationType.greater), mediumlevelil.MediumLevelILCmpSle: partial(self.lift_condition, operation=OperationType.less_or_equal), mediumlevelil.MediumLevelILCmpSlt: partial(self.lift_condition, operation=OperationType.less), mediumlevelil.MediumLevelILCmpUge: partial(self.lift_condition, operation=OperationType.greater_or_equal_us), mediumlevelil.MediumLevelILCmpUgt: partial(self.lift_condition, operation=OperationType.greater_us), mediumlevelil.MediumLevelILCmpUle: partial(self.lift_condition, operation=OperationType.less_or_equal_us), mediumlevelil.MediumLevelILCmpUlt: partial(self.lift_condition, operation=OperationType.less_us), mediumlevelil.MediumLevelILFcmpE: partial(self.lift_condition, operation=OperationType.equal), mediumlevelil.MediumLevelILFcmpNe: partial(self.lift_condition, operation=OperationType.not_equal), mediumlevelil.MediumLevelILFcmpGe: partial(self.lift_condition, operation=OperationType.greater_or_equal), mediumlevelil.MediumLevelILFcmpGt: partial(self.lift_condition, operation=OperationType.greater), mediumlevelil.MediumLevelILFcmpLe: partial(self.lift_condition, operation=OperationType.less_or_equal), mediumlevelil.MediumLevelILFcmpLt: partial(self.lift_condition, operation=OperationType.less), mediumlevelil.MediumLevelILFcmpO: partial(self.lift_condition, operation=OperationType.equal), mediumlevelil.MediumLevelILFcmpUo: partial(self.lift_condition, operation=OperationType.equal)})

    def lift_condition(self, condition: mediumlevelil.MediumLevelILBinaryBase, operation: OperationType=None, **kwargs) -> Condition:
        # Recursively lift both operands, then wrap them in a Condition.
        return Condition(operation, [self._lifter.lift(condition.left, parent=condition), self._lifter.lift(condition.right, parent=condition)])
def _factorise(req, intervals=None):
    # Factorise a list of request dicts into a compact Tree.
    # Interval-valued fields named in *intervals* are first normalised and
    # split on the union of all interval boundaries (so identical
    # sub-intervals can be shared), then the requests are compiled into a
    # Table, processed into a tree, re-joined on the intervals and compacted.
    if (intervals is None):
        intervals = []
    if (not isinstance(intervals, (list, tuple, set))):
        # Accept a single interval name as a convenience.
        intervals = [intervals]
    if intervals:
        # Normalise each interval-valued field to interval objects.
        for r in req:
            for i in intervals:
                if (i in r):
                    r[i] = _as_interval(r[i])
    for i in intervals:
        # Collect every boundary appearing for this interval field...
        dates = set()
        for r in req:
            for interval in r.get(i, []):
                dates.add(interval.start)
                dates.add(interval.end)
        # ...and split every request's intervals on that boundary set.
        for r in req:
            if (i in r):
                splits = []
                for interval in r.get(i, []):
                    splits.extend(interval.split(dates))
                r[i] = splits
    req = [_as_requests(r) for r in req]
    names = list({name for r in req for name in r.keys()})
    cols = defaultdict(list)
    if names:
        # Expand each request into table columns, one field at a time.
        for r in req:
            _scan(r, cols, names[0], names[1:])
    table = Table()
    for (n, c) in cols.items():
        table.column(n, c)
    tree = table.process()
    # Merge adjacent sub-intervals back together per interval field.
    for i in intervals:
        tree._join_intervals(i)
    return tree.compact()
def _is_assignment_with_simple_binary_operation(assignment: Assignment) -> bool:
    """Return True for assignments of the form ``x = x +/- term`` where one
    operand is a (possibly negated) constant and the other is a (possibly
    negated) variable equal to the assignment's destination."""
    value = assignment.value
    if not isinstance(value, BinaryOperation):
        return False
    if value.operation not in {OperationType.plus, OperationType.minus}:
        return False
    has_constant_operand = any(
        isinstance(operand, Constant) or _is_negated_constant_variable(operand, Constant)
        for operand in value.operands
    )
    has_variable_operand = any(
        isinstance(operand, Variable) or _is_negated_constant_variable(operand, Variable)
        for operand in value.operands
    )
    return (
        has_constant_operand
        and has_variable_operand
        and assignment.destination == _get_variable_in_binary_operation(value)
    )
# NOTE(review): the bare '_tuple' below looks like a decorator whose '@' was
# lost in extraction (e.g. '@to_tuple') — confirm against the original file.
_tuple
def alice_nominates_bob_and_ron_then_they_kick_her(chain):
    # Clique-style header fixture: Alice and Bob vote Ron in, then Bob and
    # Ron vote Alice out; yields each successive header for the test chain.
    header = PARAGON_GENESIS_HEADER
    header = make_next_header(chain, header, ALICE_PK)
    (yield header)
    # Alice nominates Bob.
    header = make_next_header(chain, header, ALICE_PK, BOB, NONCE_AUTH)
    (yield header)
    # Bob and Alice both nominate Ron.
    header = make_next_header(chain, header, BOB_PK, RON, NONCE_AUTH)
    (yield header)
    header = make_next_header(chain, header, ALICE_PK, RON, NONCE_AUTH)
    (yield header)
    # Bob and Ron vote to drop Alice.
    header = make_next_header(chain, header, BOB_PK, ALICE, NONCE_DROP)
    (yield header)
    header = make_next_header(chain, header, RON_PK, ALICE, NONCE_DROP)
    (yield header)
    header = make_next_header(chain, header, BOB_PK)
    (yield header)
class OptionPlotoptionsItemSonificationTracksMappingRate(Options):
    # Generated accessor class for the sonification tracks mapping.rate
    # option group; getters return defaults via _config_get, setters write
    # via _config.
    # NOTE(review): each getter/setter pair shares one name — the original
    # generated source presumably carries @property / @<name>.setter
    # decorators lost in extraction; as written, the second def overrides
    # the first. Confirm against the generator's output.
    def mapFunction(self):
        return self._config_get(None)
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)
    def mapTo(self):
        return self._config_get(None)
    def mapTo(self, text: str):
        self._config(text, js_type=False)
    def max(self):
        return self._config_get(None)
    def max(self, num: float):
        self._config(num, js_type=False)
    def min(self):
        return self._config_get(None)
    def min(self, num: float):
        self._config(num, js_type=False)
    def within(self):
        return self._config_get(None)
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the four lines below appear to be mock.patch decorators whose
# '@mock.patch' prefix was lost in extraction (they correspond, innermost to
# outermost, to the mock_* parameters) — confirm against the original file.
(_BUILD_NIGHTHAWK_BENCHMARKS)
(_BUILD_NIGHTHAWK_BINARIES)
(_BUILD_ENVOY_BINARY)
('src.lib.cmd_exec.run_command')
def test_nh_build_failure(mock_cmd, mock_envoy_build, mock_nh_bin_build, mock_nh_bench_build):
    # A failing nighthawk-benchmark build must abort the benchmark before the
    # Envoy build is ever attempted, and surface the CalledProcessError.
    mock_nh_bench_build.side_effect = subprocess.CalledProcessError(1, 'bar')
    mock_envoy_path = '/home/ubuntu/envoy/bazel-bin/source/exe/envoy-static'
    mock_envoy_build.return_value = mock_envoy_path
    job_control = generate_test_objects.generate_default_job_control()
    generate_test_objects.generate_envoy_source(job_control)
    generate_test_objects.generate_nighthawk_source(job_control)
    generate_test_objects.generate_environment(job_control)
    benchmark = binary_benchmark.Benchmark(job_control, 'test_benchmark')
    with pytest.raises(Exception) as build_exception:
        benchmark.execute_benchmark()
    assert (str(build_exception.value) == "Command 'bar' returned non-zero exit status 1.")
    # Envoy binary path must never have been set on failure.
    assert (not benchmark._envoy_binary_path)
    mock_nh_bin_build.assert_called_once()
    mock_envoy_build.assert_not_called()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.