code stringlengths 281 23.7M |
|---|
def get_metadata(content):
metadata = {}
lines = content.split('\n')
idx = 0
for (idx, line) in enumerate(lines):
header = line.split(':', 1)
if (len(header) == 2):
metadata[header[0].strip().lower()] = header[1].strip()
continue
if (line in ('---', '', '.... |
def _clone(package_id, path_str='.'):
(org, repo, version) = _split_id(package_id)
source_path = _get_data_folder().joinpath(f'packages/{org}/{repo}{version}')
if (not source_path.exists()):
raise FileNotFoundError(f"Package '{_format_pkg(org, repo, version)}' is not installed")
dest_path = Path... |
class bfd_state(bsn_tlv):
type = 177
def __init__(self, value=None):
if (value != None):
self.value = value
else:
self.value = 0
return
def pack(self):
packed = []
packed.append(struct.pack('!H', self.type))
packed.append(struct.pack('!... |
def _load_url_contents(logger: logging.Logger, url: str, stream1: TextIO, stream2: Optional[TextIO]=None) -> bool:
saved = False
try:
with requests.get(url) as r:
if (r.status_code == 200):
encoded = b''
try:
for chunk in r.iter_content(chu... |
class Count(BaseObject):
def __init__(self, api=None, agent=None, total=None, visitor=None, **kwargs):
self.api = api
self.agent = agent
self.total = total
self.visitor = visitor
for (key, value) in kwargs.items():
setattr(self, key, value)
for key in self... |
def test_repeating_definitions_are_correctly_distinguished(basic_block_with_repeating_stmts):
s0 = Assignment(Variable('b'), Constant(0))
s1 = Assignment(Variable('a'), Variable('b'))
s2 = Assignment(Variable('b'), Constant(1))
rd = ReachingDefinitions(basic_block_with_repeating_stmts)
nodes = [n fo... |
class OptionSeriesSolidgaugeSonificationContexttracksMappingLowpass(Options):
def frequency(self) -> 'OptionSeriesSolidgaugeSonificationContexttracksMappingLowpassFrequency':
return self._config_sub_data('frequency', OptionSeriesSolidgaugeSonificationContexttracksMappingLowpassFrequency)
def resonance(s... |
class OptionSeriesFunnel3dSonificationContexttracksMappingRate(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
s... |
def test_call_with_deep_context_kwargs():
class Regularizer():
def __init__(self, alpha):
self.alpha = alpha
class Loss():
def __init__(self, regularizer):
self.regularizer = regularizer
class ClassificationTask():
def __init__(self, loss):
self.lo... |
class FileMonitor(object):
def __init__(self, print_only_open=True):
self.openfiles = []
self.oldopen = builtins.open
self.oldfile = getattr(builtins, 'file', io.FileIO)
self.do_print_only_open = print_only_open
self.in_use = False
class File(self.oldfile):
... |
class AdCustomizationRuleSpec(AbstractObject):
def __init__(self, api=None):
super(AdCustomizationRuleSpec, self).__init__()
self._isAdCustomizationRuleSpec = True
self._api = api
class Field(AbstractObject.Field):
caption = 'caption'
customization_spec = 'customization_s... |
def select_cats_gatya_banner(save_stats: dict[(str, Any)]) -> list[int]:
is_jp = helper.is_jp(save_stats)
file_data = game_data_getter.get_file_latest('DataLocal', 'GatyaDataSetR1.csv', is_jp)
if (file_data is None):
helper.colored_text('Failed to get gatya banners')
return []
data = hel... |
def recalculate_priority_queue(is_addon_start: bool=False):
current = _get_priority_list_with_last_prios()
scores = []
to_decrease_delay = []
now = datetime.now()
for (nid, last_prio, last_prio_creation, _, reminder, delay) in current:
if (last_prio is None):
if (not _specific_sc... |
def test_flyte_file_in_dataclassjsonmixin():
remote_path = 's3://tmp/file'
f1 = FlyteFile(remote_path)
f2 = FlyteFile('/tmp/file')
f2._remote_source = remote_path
o = TestFileStruct_flyte_file(a=f1, b=TestInnerFileStruct_flyte_file(a=JPEGImageFile('s3://tmp/file.jpeg'), b=[f1], c={'hello': f1}, d=[f... |
def gen_sites():
xy_fun = util.create_xy_fun('BUFR_')
db = Database(util.get_db_root(), util.get_part())
grid = db.grid()
for tile_name in sorted(grid.tiles()):
loc = grid.loc_of_tilename(tile_name)
gridinfo = grid.gridinfo_at_loc(loc)
sites = []
xs = []
ys = []
... |
def test_get_mime_type_from_extension_success():
transformer = TypeEngine.get_transformer(FlyteFile)
assert (transformer.get_mime_type_from_extension('html') == 'text/html')
assert (transformer.get_mime_type_from_extension('jpeg') == 'image/jpeg')
assert (transformer.get_mime_type_from_extension('png') ... |
_postgres
def test_relations_set(pgs):
doc1 = DoctorCustom.new(name='test1')
doc1.save()
doc2 = DoctorCustom.new(name='test2')
doc2.save()
pat1 = PatientCustom.new(name='test1')
pat1.save()
pat1.symptoms.create(name='test1a')
pat2 = PatientCustom.new(name='test2')
pat2.save()
pat... |
def test_sim_power_spectrum():
freq_range = [3, 50]
ap_params = [50, 2]
pe_params = [10, 0.5, 2, 20, 0.3, 4]
(xs, ys) = sim_power_spectrum(freq_range, ap_params, pe_params)
assert np.all(xs)
assert np.all(ys)
assert (len(xs) == len(ys))
f_rotation = 20
(xs, ys) = sim_power_spectrum(f... |
def test_charge_perturbation():
perturb = td.LinearChargePerturbation(electron_coeff=1e-21, electron_ref=0, electron_range=(0, 1e+20), hole_coeff=2e-21, hole_ref=0, hole_range=(0, 5e+19))
assert (perturb.perturbation_range == (0, ((1e+20 * 1e-21) + (2e+20 * 5e-22))))
assert (not perturb.is_complex)
with... |
class EncoderDecoder(object):
def __init__(self):
self.encmap = {'Str': (lambda x: self._fromCharCodeEncode(x)), 'Hex': (lambda x: self._hexEncode(x)), 'Hes': (lambda x: self._hexSemiEncode(x)), 'Une': (lambda x: self._unEscape(x)), 'Dec': (lambda x: self._decEncode(x)), 'Mix': (lambda x: self._unEscape(sel... |
class OptionSeriesStreamgraphSonificationTracksMappingVolume(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
sel... |
class _Counter(object):
def __init__(self, init_value=0):
self.lock = threading.Lock()
self.counter = init_value
def __str__(self):
return ('Counter(%s)' % self.counter)
def get_and_increment(self):
with self.lock:
counter = self.counter
self.counter +... |
.django_db
def test_obtain_recipient_totals_parent(monkeypatch, elasticsearch_transaction_index):
parent_id = 'a52a7544-829b-c925-e1ba-d04d3171c09a-P'
for (recipient_id, recipient_profile) in TEST_RECIPIENT_PROFILES.items():
baker.make('recipient.RecipientProfile', **recipient_profile)
create_transa... |
def to_bool(value: Any) -> bool:
if (not isinstance(value, (int, float, bool, bytes, str))):
raise TypeError(f"Cannot convert {type(value).__name__} '{value}' to bool")
if isinstance(value, bytes):
value = HexBytes(value).hex()
if (isinstance(value, str) and value.startswith('0x')):
... |
def run_test(x, degree, quadrilateral, parameters={}, test_mode=False):
m = UnitSquareMesh(3, 3, quadrilateral=quadrilateral)
layers = 10
mesh = ExtrudedMesh(m, layers, layer_height=(1.0 / layers))
V = FunctionSpace(mesh, 'CG', degree)
u = Function(V)
bcs = [DirichletBC(V, 10, 1), DirichletBC(V,... |
def match_files(fname1: str, fname2: str) -> Tuple[(bool, str)]:
with open(fname1, 'r') as f1, open(fname2, 'r') as f2:
difference = set(f1).difference(f2)
are_identical = (difference == set())
diff = ''
if (not are_identical):
diff = find_difference(fname1, fname2)
return (are_ident... |
.parametrize(('family', 'degree', 'vfamily', 'vdegree'), [('CG', 3, 'DG', 2), ('DG', 3, 'CG', 2)])
def test_prism(mesh_prism, family, degree, vfamily, vdegree):
(x, y, z) = SpatialCoordinate(mesh_prism)
V = FunctionSpace(mesh_prism, family, degree, vfamily=vfamily, vdegree=vdegree)
f = Function(V).interpola... |
def get_project_url_id():
project_info = make_sp_call('platform project:info', capture_output=True).stdout.decode()
project_id = re.search('\\| id \\| ([a-z0-9]{13})', project_info).group(1)
print(f' Found project id: {project_id}')
project_url = make_sp_call('platform url --yes', capture_o... |
class SoftwareVersion(Message):
msgtype = 204
def __init__(self, swv=''):
super().__init__(self.msgtype)
self.version = swv
def _data(self):
s = self.version.encode('ascii')
return (s + (b'\x00' * (32 - len(s))))
def _setdata(self, data):
self.version = bytearray(... |
def get_git_version(abbrev=7):
release_version = read_release_version()
version = call_git_describe(abbrev)
if (version is None):
version = release_version
if (version is None):
raise ValueError('Cannot find the version number!')
if (version != release_version):
write_release... |
def main():
module_spec = schema_to_module_spec(versioned_schema)
mkeyname = None
fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {... |
.parametrize('ttl, expected', [(600, 10), (155, 2), (33, 0)])
def test_get_verification_code_ttl_minutes_calc(ttl, expected):
    """Check that a TTL given in seconds is reported as whole minutes.

    Parametrized (see decorator above this block in the file):
    600s -> 10min, 155s -> 2min, 33s -> 0min, i.e. floor division by 60.
    """
    params = SubjectIdentityVerificationBodyParams(
        verification_code='123123',
        verification_code_ttl_seconds=ttl,
    )
    assert params.get_verification_code_ttl_minutes() == expected
class OptionPlotoptionsNetworkgraphAccessibilityPoint(Options):
def dateFormat(self):
return self._config_get(None)
def dateFormat(self, text: str):
self._config(text, js_type=False)
def dateFormatter(self):
return self._config_get(None)
def dateFormatter(self, value: Any):
... |
class Plugin(plugin.PluginProto):
PLUGIN_ID = 15
PLUGIN_NAME = 'Environment - TSL2561 Lux sensor (TESTING)'
PLUGIN_VALUENAME1 = 'Lux'
PLUGIN_VALUENAME2 = 'Infrared'
PLUGIN_VALUENAME3 = 'Fullspectrum'
DELAY_TIME = [0.015, 0.12, 0.45]
def __init__(self, taskindex):
plugin.PluginProto._... |
class PermissionRequiredTests(DecoratorTestCase):
def test_permission_required(self):
perm = Permission.objects.get(codename='add_user')
self.user.user_permissions.add(perm)
result = decorators.permission_required('auth.add_user')((lambda info: None))(self.info(self.user))
self.asser... |
def test_correct_response_of_empty_list(client, monkeypatch, elasticsearch_transaction_index, awards_and_transactions):
setup_elasticsearch_test(monkeypatch, elasticsearch_transaction_index)
resp = client.post('/api/v2/search/spending_by_category/country', content_type='application/json', data=json.dumps({'filt... |
class TestInvalid(util.TestCase):
def test_user_invalid(self):
markup = '\n <form id="form">\n <input id="1" type="text">\n </form>\n '
self.assert_selector(markup, 'input:user-invalid', [], flags=util.HTML)
self.assert_selector(markup, 'input:not(:user-invalid)... |
class BackendDispatcher(Dispatcher):
def __init__(self, opts):
super().__init__(opts)
logger_name = 'backend.{}_dispatcher'.format(self.task_type)
logger_redis_who = '{}_dispatcher'.format(self.task_type)
self.log = get_redis_logger(self.opts, logger_name, logger_redis_who)
s... |
def get_graphql(parsed_list: js_data) -> list:
reg_graphql = 'e\\.graphQL\\({func}\\(\\),$'.format(func='([a-zA-Z_\\$]{1,2})')
graphql_list = search_js_reg(parsed_list, reg_graphql)
graphql_output = []
for graphql in tqdm(graphql_list):
reg_func = '{func}=t.n\\({arg}\\)'.format(func=re.escape(gr... |
def pq_st2084_oetf(values: VectorLike, c1: float=C1, c2: float=C2, c3: float=C3, m1: float=M1, m2: float=M2) -> Vector:
    """Encode each component with the PQ (SMPTE ST 2084) OETF.

    Each input component is first scaled by 1/10000 (apparently absolute
    luminance in cd/m^2 normalized to the 10000-nit PQ peak — TODO confirm
    against the callers), then passed through the standard PQ rational
    transfer curve using the supplied constants.
    """

    def _encode(component: float) -> float:
        # y = (component / 10000) ** m1, sign-preserving via alg.npow.
        y = alg.npow(component / 10000, m1)
        # Rational PQ core, raised to m2.
        return alg.npow((c1 + c2 * y) / (1 + c3 * y), m2)

    return [_encode(component) for component in values]
def extractLickymeeTranslations(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol or frag)) or ('preview' in item['title'].lower())):
return None
if ('Medusa' in item['tags']):
return buildReleaseMessageWithType(item, 'Regarding the Story... |
class OptionPlotoptionsErrorbarSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(sel... |
def upgrade():
op.create_table('verification', sa.Column('verification_id', sa.String(length=32), nullable=False), sa.Column('ip4', sa.BigInteger(), nullable=False), sa.Column('expires', sa.BigInteger(), nullable=False), sa.Column('data', postgresql.JSON(), nullable=False), sa.PrimaryKeyConstraint('verification_id'... |
class InputTreeWidget(MyTreeWidget):
def __init__(self, parent: QWidget, main_window: 'ElectrumWindow') -> None:
MyTreeWidget.__init__(self, parent, main_window, self._create_menu, [_('Index'), _('Account'), _('Source'), _('Amount')], InputColumns.SOURCE, [])
def on_doubleclick(self, item: QTreeWidgetIt... |
class TestResourceMiddleware(TestMiddleware):
.parametrize('independent_middleware', [True, False])
def test_can_access_resource_params(self, asgi, independent_middleware):
global context
class Resource():
def on_get(self, req, resp, **params):
resp.text = json.dumps(... |
def extractLightNovelsWorld(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ('Tsuki ga Michibiku Isekai Douchuu (POV)' in item['tags']):
if ((not postfix) and ('-' in item['title'])):
postfix = item['title'].split('-')[(- 1)].strip()
return build... |
class UART():
def __init__(self, chl, start, stop, val, err):
self.chl = chl
self.start = start
self.stop = stop
self.value = val
self.error = err
def __str__(self):
s_start = _format_time(self.start)
s_chl = ('CH1', 'CH2')[self.chl]
if self.error:... |
def get_is_private() -> bool:
    """Return True if any git remote URL contains the substring 'private'.

    Queries every configured remote name, then every URL attached to each
    remote (``git remote get-url --all``). Note this is a plain substring
    test, so e.g. a host or path merely containing 'private' also matches.
    """
    names = run_txtcapture(['git', 'remote']).split()
    urls: List[str] = [
        url
        for name in names
        for url in run_txtcapture(['git', 'remote', 'get-url', '--all', name]).split()
    ]
    # 'private' contains no newline, so joining with '\n' and substring-testing
    # (the original formulation) is equivalent to testing each URL.
    return any('private' in url for url in urls)
.parametrize('current_data, reference_data, metric, expected_json', ((pd.DataFrame({'col': [1.4, 2.3, 3.4], 'test': ['a', 'b', 'c'], 'test2': ['a', 'b', 'c']}), None, ColumnCorrelationsMetric(column_name='col'), {'column_name': 'col', 'current': {}, 'reference': None}), (pd.DataFrame({'col': ['a', 'b', 'c'], 'test': [1... |
class OptionSeriesColumnrangeSonificationDefaultspeechoptionsPointgrouping(Options):
def algorithm(self):
return self._config_get('last')
def algorithm(self, text: str):
self._config(text, js_type=False)
def enabled(self):
return self._config_get(True)
def enabled(self, flag: boo... |
def test_init_identity_positive():
assert Identity('some_name', address='some_address', public_key='some_public_key')
assert Identity('some_name', address='some_address', public_key='some_public_key', default_address_key=DEFAULT_LEDGER)
assert Identity('some_name', addresses={DEFAULT_LEDGER: 'some_address',... |
class TestHelpers(BaseSkillTestCase):
path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'tac_negotiation')
def setup(cls):
super().setup()
def test_build_goods_datamodel_supply(self):
good_ids = ['1', '2']
is_supply = True
attributes = [Attribute('1', int, True,... |
def extractFifteensecondstranslationWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname... |
.skip
.parametrize('opt_cls', [pytest.param(QuickMin, marks=using('pyscf')), pytest.param(LBFGS, marks=using('pyscf'))])
def test_diels_alder_neb(opt_cls):
geoms = geom_loader('diels_alder_interpolated.trj')
for (i, geom) in enumerate(geoms):
calc_kwargs = {'basis': 'sto3g', 'pal': 2, 'calc_number': i}
... |
class OptionsGridAxis(Options):
def show(self):
return self._config_get(None)
def show(self, val):
self._config(val)
def add_lines(self, value, css_class=None, text=None, position=None) -> OptionLines:
line = self._config_sub_data_enum('lines', OptionLines)
line.value = value... |
def extractCoopskillerserialsBlogspotCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, ... |
def quadrupole3d_00(ax, da, A, bx, db, B, R):
result = numpy.zeros((6, 1, 1), dtype=float)
x0 = ((ax + bx) ** (- 1.0))
x1 = (((- x0) * ((ax * A[0]) + (bx * B[0]))) + R[0])
x2 = (0.5 * x0)
x3 = ((ax * bx) * x0)
x4 = ((((5. * da) * db) * (x0 ** 1.5)) * numpy.exp(((- x3) * ((((A[0] - B[0]) ** 2) + ... |
def main():
segmk = Segmaker('design.bits')
fuz_dir = os.getenv('FUZDIR', None)
assert fuz_dir
with open(os.path.join(fuz_dir, 'attrs.json'), 'r') as attr_file:
attrs = json.load(attr_file)
print('Loading tags')
with open('params.json') as f:
primitives_list = json.load(f)
fo... |
class CosmTradeSerializer(Serializer):
def encode(msg: Message) -> bytes:
msg = cast(CosmTradeMessage, msg)
message_pb = ProtobufMessage()
dialogue_message_pb = DialogueMessage()
cosm_trade_msg = cosm_trade_pb2.CosmTradeMessage()
dialogue_message_pb.message_id = msg.message_i... |
class CalculateCostSavingsTests(TestCase):
fixtures = ['lowpriority_measures']
def test_calculate_cost_savings(self):
measure_id = 'lpglucosamine'
month = '2017-10-01'
target_costs = MeasureGlobal.objects.filter(measure_id=measure_id, month=month).get().percentiles['ccg']
cost_sa... |
def write_fasta(sequences, outfile=None, seqwidth=80):
wrapper = textwrap.TextWrapper()
wrapper.break_on_hyphens = False
wrapper.replace_whitespace = False
wrapper.expand_tabs = False
wrapper.break_long_words = True
wrapper.width = 80
text = '\n'.join([('>%s\n%s\n' % ('\t'.join(([name] + com... |
def get_class_separation_plot_data_agg(current_plot: Dict[(Union[(int, str)], pd.DataFrame)], reference_plot: Optional[Dict[(Union[(int, str)], pd.DataFrame)]], target_name: str, color_options: ColorOptions) -> List[Tuple[(str, BaseWidgetInfo)]]:
additional_plots = []
cols = 1
subplot_titles = ['']
if (... |
def files_in_dir(path, ext=None, keyword=None, sort=False, sample_mode=None, sample_num=None, keywords_exclude=[]):
files = []
for (r, d, f) in os.walk(path):
for file in f:
add = True
if ((ext is not None) and (not file.endswith(ext))):
add = False
if... |
class CustomComplexCriticNet(nn.Module, CustomComplexLatentNet):
def __init__(self, obs_shapes: Dict[(str, Sequence[int])], non_lin: Union[(str, type(nn.Module))], hidden_units: List[int]):
nn.Module.__init__(self)
CustomComplexLatentNet.__init__(self, obs_shapes, non_lin, hidden_units)
self... |
class OptionSeriesDumbbellStatesSelect(Options):
def animation(self) -> 'OptionSeriesDumbbellStatesSelectAnimation':
return self._config_sub_data('animation', OptionSeriesDumbbellStatesSelectAnimation)
def enabled(self):
return self._config_get(True)
def enabled(self, flag: bool):
se... |
class TupleTestCase(TupleTestMixin, unittest.TestCase):
def setUp(self):
self.trait = Tuple
def test_unexpected_validation_exceptions_are_propagated(self):
class A(HasTraits):
foo = Tuple(BadInt(), BadInt())
bar = Either(Int, Tuple(BadInt(), BadInt()))
a = A()
... |
class ActorCritic(Trainer, ABC):
def __init__(self, algorithm_config: Union[(A2CAlgorithmConfig, PPOAlgorithmConfig, ImpalaAlgorithmConfig)], rollout_generator: Union[(RolloutGenerator, DistributedActors)], evaluator: Optional[RolloutEvaluator], model: TorchActorCritic, model_selection: Optional[BestModelSelection]... |
def extractLulunovel520BlogspotCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_typ... |
def test_attach_destroy_resource_no_sentinel_by_config(fc_node_builder):
node0 = OperatorNode({}, {'name': 'test0', 'type': 'none'})
node1 = OperatorNode({}, {'name': 'test1', 'type': 'none'})
node2 = OperatorNode({}, {'name': 'test2', 'type': 'none', 'upstream_dependencies': ['test0']})
node3 = Operato... |
class InviteData(object):
def __init__(self, server_peer_id: PeerID, client_peer_id: PeerID):
self.context = 2
self.invite_id = 0
self.message_id = 1
self.server_peer_id = server_peer_id
self.client_peer_id = client_peer_id
def to_dict(self) -> dict:
encoded_sende... |
def chunker(big_lst) -> dict:
    """Split ``big_lst`` into 27-element chunks and build a dict from them.

    Each chunk maps its first element to the remaining elements
    (``chunk[0] -> chunk[1:]``). Chunks whose elements are ALL falsy are
    discarded before the dict is built.

    Returns an empty dict for empty input.

    BUG FIX: the original removed empty chunks with ``chunks.remove(chunk)``
    while iterating ``chunks``, which skips the element after each removal —
    consecutive all-falsy chunks were only partially dropped. Filtering into
    a new list removes them all.
    """
    chunks = [big_lst[i:i + 27] for i in range(0, len(big_lst), 27)]
    # Keep only chunks containing at least one truthy element.
    kept = [chunk for chunk in chunks if any(chunk)]
    return {words[0]: words[1:] for words in kept}
def extractLndesuWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_type) in... |
def _generate_certificate(ip_address: str) -> typing.Tuple[(str, bytes, bytes)]:
key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend())
cert_name = get_machine_name()
subject = issuer = x509.Name([x509.NameAttribute(NameOID.COUNTRY_NAME, 'NO'), x509.NameAttribute(Nam... |
class LearnableLoader(nn.Module):
def __init__(self, z_dim, n_repeat_batch, dataset, batch_size, device, size=1):
super(LearnableLoader, self).__init__()
self.dataset = dataset
self.batch_size = batch_size
self.n_repeat_batch = n_repeat_batch
self.z_dim = z_dim
self.g... |
def _get_extracted_csv_table(relevant_subnets, tablename, input_path, sep=';'):
csv_table = read_csv_data(input_path, sep=sep, tablename=tablename)
if (tablename == 'Switch'):
node_table = read_csv_data(input_path, sep=sep, tablename='Node')
bus_bus_switches = set(get_bus_bus_switch_indices_from... |
def convert_utc_time_to_timezone(utc_time: datetime, timezone: Optional[str]=None) -> datetime:
    """Interpret ``utc_time`` as UTC and convert it to another timezone.

    ``utc_time`` gets UTC tzinfo attached via ``replace`` (any existing
    tzinfo is overwritten, the clock value is not shifted). If ``timezone``
    is falsy (None or empty string) the local timezone is used; otherwise
    the named zone is looked up with ``dateutil.tz.gettz``.
    """
    if timezone:
        target_zone = tz.gettz(timezone)
    else:
        target_zone = tz.tzlocal()
    return utc_time.replace(tzinfo=tz.tzutc()).astimezone(target_zone)
def get_func_signatures(typesystem, f):
sig = inspect.signature(f)
typesigs = []
typesig = []
for p in sig.parameters.values():
t = p.annotation
if (t is sig.empty):
t = typesystem.default_type
else:
t = typesystem.canonize_type(t)
if (p.default is... |
class OptionPlotoptionsSeriesSonificationTracksPointgrouping(Options):
def algorithm(self):
return self._config_get('minmax')
def algorithm(self, text: str):
self._config(text, js_type=False)
def enabled(self):
return self._config_get(True)
def enabled(self, flag: bool):
... |
def extractHolyfrosticeWordpressCom(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
for (tagname, name, tl_ty... |
class Bar(CommConfigs):
def maxBarThickness(self):
return self._get_commons()
def maxBarThickness(self, num: int):
self._set_commons(num)
def barThickness(self):
return self._get_commons()
def barThickness(self, num: int):
self._set_commons(num)
def minBarLength(self)... |
def create_pytorch_funcs():
import math
import torch
def torch_relu(x):
return torch.nn.functional.relu(x)
def torch_relu_k(x):
return torch.nn.functional.relu6(x)
def torch_hard_sigmoid(x):
return torch.clip(((x * 0.2) + 0.5), 0, 1)
def torch_hard_tanh(x):
return... |
class SampleData():
m_values = []
m_offset = 0
m_min = 0.0
m_max = 0.0
m_avg = 0.0
def __init__(self):
self.reset()
def reset(self):
self.m_values = ([0.0] * 100)
self.m_offset = 0
self.m_min = 0.0
self.m_max = 0.0
self.m_avg = 0.0
def push... |
(scope='function')
def reindex_setup(sync_client):
bulk = []
for x in range(100):
bulk.append({'index': {'_index': 'test_index', '_id': x}})
bulk.append({'answer': x, 'correct': (x == 42), 'type': ('answers' if ((x % 2) == 0) else 'questions')})
sync_client.bulk(operations=bulk, refresh=True... |
class OptionChartOptions3d(Options):
def alpha(self):
return self._config_get(0)
def alpha(self, num: float):
self._config(num, js_type=False)
def axisLabelPosition(self):
return self._config_get(None)
def axisLabelPosition(self, value: Any):
self._config(value, js_type=F... |
class ForkID(rlp.Serializable):
fields = [('hash', rlp.sedes.binary.fixed_length(4)), ('next', rlp.sedes.big_endian_int)]
def __init__(self, hash: bytes, next: BlockNumber) -> None:
if (len(hash) != 4):
raise ValueError('Hash {hash!r} length is not 4')
super().__init__(hash, next)
... |
class OptionPlotoptionsScatterSonificationDefaultinstrumentoptionsMappingTremolo(Options):
def depth(self) -> 'OptionPlotoptionsScatterSonificationDefaultinstrumentoptionsMappingTremoloDepth':
return self._config_sub_data('depth', OptionPlotoptionsScatterSonificationDefaultinstrumentoptionsMappingTremoloDep... |
def create_disassembler_instance(platform: str):
    """Initialize the module-level disassembler singleton for ``platform``.

    Supported platforms ('n64', 'psx', 'ps2') all map to
    ``SpimdisasmDisassembler``; anything else raises.

    Raises:
        NotImplementedError: for an unsupported platform string.
    """
    global __instance
    global __initialized
    # Guard clause: reject unknown platforms up front.
    if platform not in ('n64', 'psx', 'ps2'):
        raise NotImplementedError('No disassembler for requested platform')
    __instance = SpimdisasmDisassembler()
    __initialized = True
class OptionSeriesPolygonDataDragdropDraghandle(Options):
def className(self):
return self._config_get('highcharts-drag-handle')
def className(self, text: str):
self._config(text, js_type=False)
def color(self):
return self._config_get('#fff')
def color(self, text: str):
... |
class Tree(object):
def __init__(self, _name, _data, *_children, **_attributes):
self.name = _name
if ((_data is not None) and (_data.strip() == '')):
_data = None
self.data = _data
self.children = list(_children)
self.attributes = _attributes
def _load_tags(_... |
def process(config, audio_path: Path, idx: int=0, key_shift: float=0, time_stretch: float=1.0, loudness: Optional[float]=None):
if (model_caches is None):
init(config)
(text_features_extractor, pitch_extractor, energy_extractor, vocoder, device) = model_caches
save_path = audio_path.with_suffix(f'.{... |
class TlsSubscriptionResponse(ModelNormal):
allowed_values = {}
validations = {}
_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type)
_nullable = False
_property
def openapi_types():
lazy_impor... |
class EnumDict(dict):
def __init__(self, cls_name, settings, start, constructor_init, constructor_start, constructor_boundary):
super(EnumDict, self).__init__()
self._cls_name = cls_name
self._constructor_init = constructor_init
self._constructor_start = constructor_start
sel... |
def overlapLoop(pDataFrameLoop, pDataFrameProtein):
loop_bedtool_x = BedTool.from_dataframe(pDataFrameLoop[[0, 1, 2]])
loop_bedtool_y = BedTool.from_dataframe(pDataFrameLoop[[3, 4, 5]])
protein_bedtool = BedTool.from_dataframe(pDataFrameProtein)
x = loop_bedtool_x.intersect(protein_bedtool, c=True).to_d... |
class TestNameSemanticExtractor():
def test_should_extract_single_affiliation_address(self):
semantic_content_list = list(AffiliationAddressSemanticExtractor().iter_semantic_content_for_entity_blocks([('<marker>', LayoutBlock.for_text('1')), ('<institution>', LayoutBlock.for_text('Institution 1')), ('<depar... |
def compat(backend):
compat = Compat()
if (backend.__name__ == 'firedrake'):
compat.FunctionSpaceType = (backend.functionspaceimpl.FunctionSpace, backend.functionspaceimpl.WithGeometry, backend.functionspaceimpl.MixedFunctionSpace)
compat.FunctionSpace = backend.FunctionSpace
compat.Mesh... |
class AutoGenerator(AutoModel[GeneratorWrapper]):
def from_hf_hub_to_cache(cls, *, name: str, revision: str='main'):
generator_cls = _resolve_generator_class(name)
generator_cls.from_hf_hub_to_cache(name=name, revision=revision)
def from_hf_hub(cls, *, name: str, revision: str='main', device: Op... |
def test_mia_distinguisher_raises_exceptions_if_bins_number_is_incorrect():
with pytest.raises(TypeError):
scared.MIADistinguisher(bins_number='foo')
with pytest.raises(TypeError):
scared.MIADistinguisher(bins_number={})
with pytest.raises(TypeError):
scared.MIADistinguisher(bins_num... |
class ExternalPluginMenu(GObject.Object):
toolbar_pos = GObject.property(type=str, default=TopToolbar.name)
def __init__(self, plugin):
super(ExternalPluginMenu, self).__init__()
self.plugin = plugin
self.shell = plugin.shell
self.source = plugin.source
self.app_id = None... |
.parametrize('undelimited_file_data', ['PROP\n 1 2 3 4 \n', 'PROP\n 1 2 3 4 ECHO', 'ECHO\nPROP\n 1 2 3 4', 'PROP\n 1 2 3 4 -- a comment', 'NOECHO\nPROP\n 1 2 3 4 -- a comment'])
def test_read_prop_raises_error_when_no_forwardslash(undelimited_file_data):
with patch('builtins.open', mock_open(read_data=undelimited_f... |
(scope='module')
def env_c_a_b(resources):
c_tn = c_traversal_node()
a_tn = a_traversal_node()
b_tn = b_traversal_node()
root_node = artificial_traversal_node(ROOT_COLLECTION_ADDRESS)
root_node.add_child(c_tn, Edge(FieldAddress(ROOT_COLLECTION_ADDRESS.dataset, ROOT_COLLECTION_ADDRESS.collection, 'em... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.