code stringlengths 281 23.7M |
|---|
def dumbbell_topology(m1, m2):
if ((not isinstance(m1, int)) or (not isinstance(m2, int))):
raise TypeError('m1 and m2 arguments must be of int type')
if (m1 < 2):
raise ValueError('Invalid graph description, m1 should be >= 2')
if (m2 < 1):
raise ValueError('Invalid graph descriptio... |
('rocm.squeeze.func_decl')
('rocm.unsqueeze.func_decl')
def squeeze_gen_function_decl(func_attrs):
func_name = func_attrs['name']
input_ndim = len(func_attrs['inputs'][0]._attrs['shape'])
output_ndim = len(func_attrs['outputs'][0]._attrs['shape'])
return FUNC_DECL_TEMPLATE.render(func_name=func_name, in... |
class FaucetUntaggedOutputOnlyTest(FaucetUntaggedTest):
CONFIG = '\n interfaces:\n %(port_1)d:\n output_only: True\n %(port_2)d:\n native_vlan: 100\n %(port_3)d:\n native_vlan: 100\n %(port_4)d:\n native_v... |
def extractNepustation(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol or frag)) or ('preview' in item['title'].lower())):
return None
if ('Cheat Majutsu' in item['tags']):
return buildReleaseMessageWithType(item, 'Cheat Majutsu De Unme... |
(scope='function')
def privacy_experience_france_tcf_overlay(db: Session, experience_config_tcf_overlay) -> Generator:
privacy_experience = PrivacyExperience.create(db=db, data={'component': ComponentType.tcf_overlay, 'region': PrivacyNoticeRegion.fr, 'experience_config_id': experience_config_tcf_overlay.id})
(... |
class WebSocketRPCServer(RPCServer):
def __init__(self, ws, rpc_callback):
dispatcher = RPCDispatcher()
dispatcher.register_instance(rpc_callback)
super(WebSocketRPCServer, self).__init__(WebSocketServerTransport(ws), JSONRPCProtocol(), dispatcher)
def serve_forever(self):
try:
... |
class TbnStrandsBinDataset(th_data.Dataset):
def __init__(self, tbn_strands, is_resampled=True, num_strds_points=100):
self.num_strands = len(tbn_strands)
self.tbn_strands = tbn_strands
self.batch_size = 300
self.num_workers = 12
self.num_strds_points = num_strds_points
... |
def write_toc(toc, title, root=False, opened=False):
print('TOC', toc)
s = ''
if isinstance(toc, list):
if root:
s += '<ul id="sidebarUL">\n'
else:
s += '<ul class="nested">\n'
for item in toc:
s += write_toc(item, title, opened=opened)
s +... |
class EyeLogicTracker(BaseEyeTracker):
def __init__(self, display, logfile=settings.LOGFILE, eventdetection=settings.EVENTDETECTION, saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, blink_threshold=settings.BLINKTHRESH, **args):
try:
copy_docstr(BaseEyeTracker, EyeLogicTracker... |
def test_get_match_counts_complete(o_dir, e_dir, conf_dir, request):
output_config = os.path.join(o_dir, 'taxon-set.conf')
cmd = get_match_count_cmd(o_dir, e_dir, conf_dir, output_config, request)
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout, stderr) = proc.commun... |
def filter_dlp_profile_data(json):
option_list = ['comment', 'dlp_log', 'extended_log', 'feature_set', 'full_archive_proto', 'nac_quar_log', 'name', 'replacemsg_group', 'rule', 'summary_proto']
json = remove_invalid_fields(json)
dictionary = {}
for attribute in option_list:
if ((attribute in jso... |
def test_missing_other_ttlr(tmpdir, merge_lis_prs, assert_info):
fpath = os.path.join(str(tmpdir), 'missing_other_tt.lis')
content = ['data/lis/records/RHLR-1.lis.part', 'data/lis/records/THLR-1.lis.part', 'data/lis/records/FHLR-1.lis.part', 'data/lis/records/FTLR-1.lis.part', 'data/lis/records/RTLR-1.lis.part'... |
class OptionPopup(Options):
component_properties = ('z_index', 'draggable', 'background', 'escape')
def background(self):
return self.get(True)
def background(self, flag: bool):
self.set(flag)
def draggable(self):
return self.get(False)
def draggable(self, flag: bool):
... |
class LogstashClient(NamespacedClient):
_rewrite_parameters()
async def delete_pipeline(self, *, id: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
if (id in... |
def _migrate_gen_data(ensemble: EnsembleAccessor, data_file: DataFile) -> None:
realizations = defaultdict((lambda : defaultdict(list)))
for block in data_file.blocks(Kind.GEN_DATA):
realizations[block.realization_index][block.name].append({'values': data_file.load(block, 0), 'report_step': block.report... |
class TestValidateFidesopsMeta():
def test_fidesops_meta_on_dataset(self):
dataset = Dataset(fides_key='test_dataset', fidesops_meta={'after': ['other_dataset']}, collections=[])
assert (not hasattr(dataset, 'fidesops_meta'))
assert (dataset.fides_meta == DatasetMetadata(after=['other_datase... |
class TextSimilarityInferenceOptions(InferenceConfig):
def __init__(self, *, tokenization: NlpTokenizationConfig, results_field: t.Optional[str]=None, text: t.Optional[str]=None):
super().__init__(configuration_type='text_similarity')
self.tokenization = tokenization
self.results_field = res... |
class HPPrinterOptionsFlowHandler(config_entries.OptionsFlow):
def __init__(self, config_entry: ConfigEntry):
super().__init__()
self._config_flow = ConfigFlowManager(config_entry)
async def async_step_init(self, user_input=None):
return (await self.async_step_hp_printer_additional_setti... |
def pieceConstructor(clr, type):
if (type == 'none'):
pc = pieces.Pawn('none', 'none')
elif (type == 'pawn'):
pc = pieces.Pawn(clr, type)
elif (type == 'rook'):
pc = pieces.Rook(clr, type)
elif (type == 'bishop'):
pc = pieces.Bishop(clr, type)
elif (type == 'queen'):
... |
def refresh_redirected_techniques_map(threads: int=50):
replacement_map = build_redirected_techniques_map(threads)
mapping = {'saved_date': time.asctime(), 'mapping': replacement_map}
TECHNIQUES_REDIRECT_FILE.write_text(json.dumps(mapping, sort_keys=True, indent=2))
clear_caches()
print(f'refreshed ... |
def try_get_balance(agent_config: AgentConfig, wallet: Wallet, type_: str) -> int:
try:
if (not LedgerApis.has_ledger(type_)):
raise ValueError('No ledger api config for {} available.'.format(type_))
address = wallet.addresses.get(type_)
if (address is None):
raise Va... |
class CachingDatabase(Database):
_resource_lock = None
_cache = None
def __init__(self, concurrency_limit=):
_logger.debug('Initialising database with a maximum of {} concurrent connections'.format(concurrency_limit))
self._resource_lock = threading.BoundedSemaphore(concurrency_limit)
... |
def _assert_mock_expectations(test_target):
if ('mock_patches' in test_target):
for (mock_target, config) in six.iteritems(test_target['mock_patches']):
if ('magic_mock' in config):
for (path, expectations) in six.iteritems(config['expectations']):
obj = confi... |
def test_pluralize():
assert (pluralize('build', [2, 1, 3], be_suffix=True) == 'builds 2, 1, and 3 are')
assert (pluralize('build', [2, 1, 'others'], be_suffix=True) == 'builds 2, 1, and others are')
assert (pluralize('action', [1], be_suffix=True) == 'action 1 is')
assert (pluralize('sth', [1, 2], be_s... |
class OrdnerTablePage(Webpage):
def __init__(self, ordner, title, dirname, num=10000, sort=None, integers=True, between=None, index_splits=None, max_expressions=4, max_entries=10):
self.num = num
self.ordner = ordner
self.integers = integers
self.between = between
self.filepa... |
def count_elements(elems: Dict[(int, str)], filter_fn, parse_fn) -> Dict:
    """Tally elements by group and return the hierarchical counts.

    Elements are first narrowed with ``filter(filter_fn, ...)``; each
    surviving element is mapped to a group via ``parse_fn(elem.tags)``.
    Elements whose group resolves to ``None`` are dropped.  The raw
    tallies are then post-processed by ``recover_hierarchy`` before
    being returned.
    """
    # Generator pipeline: filter -> parse -> drop None -> count.
    parsed_groups = (parse_fn(item.tags) for item in filter(filter_fn, elems.values()))
    tally = Counter(grp for grp in parsed_groups if grp is not None)
    return recover_hierarchy(tally)
def test_internal_jumps(accounts, testproject, tester):
tx = tester.makeInternalCalls(False, True)
assert (max([i['depth'] for i in tx.trace]) == 0)
assert (max([i['jumpDepth'] for i in tx.trace]) == 1)
tx = tester.makeInternalCalls(True, False)
assert (max([i['depth'] for i in tx.trace]) == 0)
... |
def compute_ma(data: SERIES_OR_DATAFRAME, fun: Callable[([SERIES_OR_DATAFRAME, int], pd.Series)], spans: List[int], plot: bool=True) -> pd.DataFrame:
type_validation(data=data, fun=fun, spans=spans, plot=plot)
m_a = data.copy(deep=True)
if isinstance(m_a, pd.Series):
m_a = m_a.to_frame()
for spa... |
def test_mqtt_broker_default_config():
from feeder.util.mqtt.broker import FeederBroker
from feeder import settings
broker = FeederBroker()
assert (broker.config['listeners']['tcp-1'] == {'bind': f'0.0.0.0:{settings.mqtt_port}'})
assert (broker.config['listeners']['tcp-ssl-1'] == {'bind': f'0.0.0.0:... |
class OptionSeriesWordcloudSonificationTracksMappingHighpassResonance(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
... |
('/callers/blocked')
def callers_blocked():
total = get_row_count('Blacklist')
(page, per_page, offset) = get_page_args(page_parameter='page', per_page_parameter='per_page')
sql = 'SELECT * FROM Blacklist ORDER BY datetime(SystemDateTime) DESC LIMIT {}, {}'.format(offset, per_page)
g.cur.execute(sql)
... |
def test_android_sha_certificates(android_app):
for cert in android_app.get_sha_certificates():
android_app.delete_sha_certificate(cert)
android_app.add_sha_certificate(project_management.SHACertificate(SHA_256_HASH_1))
android_app.add_sha_certificate(project_management.SHACertificate(SHA_256_HASH_2... |
class MockClassFit(BaseDiscretiser):
def fit(self, X):
california_dataset = fetch_california_housing()
data = pd.DataFrame(california_dataset.data, columns=california_dataset.feature_names)
self.variables_ = ['HouseAge']
self.binner_dict_ = {'HouseAge': [0, 20, 40, 60, np.Inf]}
... |
class TestMrtlibMrtPeer(unittest.TestCase):
def test_parse_two_octet_as(self):
bgp_id = '1.1.1.1'
ip_addr = '10.0.0.1'
buf = (((b'\x00' + addrconv.ipv4.text_to_bin(bgp_id)) + addrconv.ipv4.text_to_bin(ip_addr)) + b'\xfd\xe8')
(peer, rest) = mrtlib.MrtPeer.parse(buf)
eq_(0, pe... |
class CEMOptimizerActor(OptimizerBasedActor):
def __init__(self, spec: specs.EnvironmentSpec, model_env: models.ModelEnv, replay: replay_lib.ReplayBuffer, variable_client: core.VariableSource, planning_horizon: int=25, n_iterations: int=5, population_size: int=400, elite_frac: float=0.1, alpha: float=0.1, return_me... |
def buildUsingBuck(dst, platform, buck_target):
_setUpTempDirectory(dst)
final_command = f'{buck_target} --out {dst}'
(result, _) = processRun(final_command.split())
getLogger().info('\n'.join(result))
if _isBuildSuccessful(dst, platform, final_command):
os.chmod(dst, 320)
return Tru... |
class OptionSeriesGaugeDataEvents(Options):
def click(self):
return self._config_get(None)
def click(self, value: Any):
self._config(value, js_type=False)
def drag(self):
return self._config_get(None)
def drag(self, value: Any):
self._config(value, js_type=False)
def ... |
class RNG():
def __init__(self, backend, seed=None, torch_save_state=False):
if (backend == 'numpy'):
import numpy as np
self._rng = np.random.default_rng(seed)
self.uniform = (lambda size, dtype: self._rng.random(size=size, dtype=dtype))
elif (backend == 'torch')... |
class TestAddConnectionFailsWhenConnectionAlreadyExists():
def setup_class(cls):
cls.runner = CliRunner()
cls.agent_name = 'myagent'
cls.cwd = os.getcwd()
cls.t = tempfile.mkdtemp()
cls.connection_name = '
cls.connection_author = 'fetchai'
cls.connection_versi... |
def _create_defaults_list(repo: IConfigRepository, config_name: Optional[str], overrides: Overrides, prepend_hydra: bool, skip_missing: bool) -> Tuple[(List[ResultDefault], DefaultsTreeNode)]:
root = _create_root(config_name=config_name, with_hydra=prepend_hydra)
defaults_tree = _create_defaults_tree(repo=repo,... |
def test_requirements():
assert (set(UnaryOperation(neg, [BinaryOperation(add, [a, b])]).requirements) == {a, b})
assert (UnaryOperation(neg, [UnaryOperation(neg, [a])]).requirements == [a])
assert (set(BinaryOperation(add, [a, BinaryOperation(add, [a, b])]).requirements) == {a, b})
assert (set(BinaryOp... |
class ParameterDialog(QDialog):
def __init__(self, params=dict(), buttons=(QDialogButtonBox.Cancel | QDialogButtonBox.Ok), sidebar_var='', parent=None, store_geometry=''):
QDialog.__init__(self, parent=parent)
self.setObjectName(('ParameterDialog - %s' % utf8(params)))
self.__current_path = ... |
def setup_test_data(db):
sub = baker.make('submissions.SubmissionAttributes', submission_id=1, toptier_code='123', quarter_format_flag=False, reporting_fiscal_year=2019, reporting_fiscal_period=6, published_date='2019-07-03')
sub2 = baker.make('submissions.SubmissionAttributes', submission_id=2, toptier_code='1... |
class LocalOptimizerAdam(LocalOptimizer, torch.optim.Adam):
def __init__(self, *, model: Model, **kwargs) -> None:
init_self_cfg(self, component_class=__class__, config_class=LocalOptimizerAdamConfig, **kwargs)
super().__init__(model=model, **kwargs)
torch.optim.Adam.__init__(self, params=se... |
def extract_genesis_params(genesis_config: RawEIP1085Dict) -> GenesisParams:
raw_params = genesis_config['genesis']
return GenesisParams(nonce=decode_hex(raw_params['nonce']), difficulty=to_int(hexstr=raw_params['difficulty']), extra_data=Hash32(decode_hex(raw_params['extraData'])), gas_limit=to_int(hexstr=raw_... |
def test_dependencies_not_include(dep_project):
package_config = ETHPM_CONFIG.copy()
package_config['settings']['include_dependencies'] = False
(manifest, _) = ethpm.create_manifest(dep_project._path, package_config)
assert (manifest['build_dependencies'] == {'utils': 'ipfs://testipfs-utils'})
asser... |
def create_list_of_file(file, binSizeList):
with open(file) as f:
newList = [line.rstrip() for line in f]
splittedList = []
binSize =
pos = 0
for line in newList:
x = line.split('\t')
splittedList.append(x)
if (pos < 20):
pos += 1
(zeros, num)... |
def _check_stypes_names_category(roxutils, stype, name, category):
stype = stype.lower()
if (stype not in VALID_STYPES):
raise ValueError(f'Given stype {stype} is not supported, legal stypes are: {VALID_STYPES}')
if (not name):
raise ValueError('The name is missing or empty.')
if ((stype... |
def startAoeServer(dut):
sshProc = getSshProc(dut)
(ipAddr, err) = sshProc.communicate('killall fio -q; hostname -i')
fioSvrCmd = (('nohup fio --server=ip6:%s ' % ipAddr.rstrip()) + '> /tmp/fio.log 2> /tmp/fio.err &\n')
sshProc = getSshProc(dut)
sshProc.stdin.write(fioSvrCmd)
sshProc.stdin.close... |
def predict(model, video_or_folder_path, preprocessor_config: dict, labels: List[str], mode: str='first_clip', device: str=None):
labeled_video_paths = LabeledVideoPaths.from_path(video_or_folder_path)
labeled_video_paths.path_prefix = ''
video_sampler = torch.utils.data.SequentialSampler
device = (torc... |
class TestCSSEscapes(util.PluginTestCase):
def setup_fs(self):
template = self.dedent('\n <!DOCTYPE html>\n <html>\n <head>\n <meta content="text/html; charset=UTF-8">\n </head>\n <body>\n <h1>A contrived example</h1>\n ... |
class DirectCycle(Params, luigi.Task):
pka_exp = luigi.FloatParameter()
def output(self):
return luigi.LocalTarget(self.get_path('summary.yaml'))
def requires(self):
return (Minimization(self.id_, self.name, self.h_ind, is_base=False, charge=self.charge), Minimization(self.id_, self.name, se... |
def construct_trace_access_sets(wstates):
raise Exception('Broken')
trace_to_read_set = defaultdict(set)
trace_to_write_set = defaultdict(set)
for wstate in wstates:
for address_index in wstate.sstore_address_index:
trace_to_read_set[wstate.trace].add(address_index)
for addre... |
class OptionSeriesTimelineSonificationDefaultspeechoptionsMappingTime(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
... |
.parametrize('sub', (True, False), ids=['Z.sub(0)', 'V'])
.skipcomplexnoslate
def test_transfer_manager_inside_coarsen(sub, mesh):
V = FunctionSpace(mesh, 'N1curl', 2)
Q = FunctionSpace(mesh, 'P', 1)
Z = (V * Q)
(x, y) = SpatialCoordinate(mesh)
if sub:
bc_space = Z.sub(0)
else:
b... |
class OptionSeriesSolidgaugeSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, ... |
class ZISIntegrationResponseHandler(ResponseHandler):
def applies_to(api, response):
result = urlparse(response.request.url)
return result.path.startswith('/api/services/zis/registry/')
def deserialize(self, response_json):
return self.object_mapping.object_from_json('integration', respo... |
class Query():
__slots__ = ('constraints', 'model')
def __init__(self, constraints: List[ConstraintExpr], model: Optional[DataModel]=None) -> None:
self.constraints = constraints
self.model = model
self.check_validity()
def check(self, description: Description) -> bool:
retur... |
def test_extract_frames_with_trim_start_and_trim_end() -> None:
facefusion.globals.trim_frame_start = 124
facefusion.globals.trim_frame_end = 224
data_provider = [('.assets/examples/target-240p-25fps.mp4', 120), ('.assets/examples/target-240p-30fps.mp4', 100), ('.assets/examples/target-240p-60fps.mp4', 50)]... |
def test_sign_and_recover_message_deprecated(ethereum_private_key_file):
account = EthereumCrypto(ethereum_private_key_file)
message = b'hello'
message_hash = hashlib.sha256(message).digest()
sign_bytes = account.sign_message(message=message_hash, is_deprecated_mode=True)
assert (len(sign_bytes) > 0... |
class SessionProc(multiprocessing.Process):
def __init__(self, id, sleep_interval):
self.id = id
self.sleep_interval = sleep_interval
super().__init__()
def run(self):
C = pyexasol.connect(dsn=config.dsn, user=config.user, password=config.password, schema=config.schema)
p... |
class UdpTransportTarget(AbstractTransportTarget):
TRANSPORT_DOMAIN = udp.DOMAIN_NAME
PROTO_TRANSPORT = udp.UdpSocketTransport
def _resolveAddr(self, transportAddr):
try:
return socket.getaddrinfo(transportAddr[0], transportAddr[1], socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)[... |
def deprecated_api(replace_with: str) -> Callable[([Callable[(..., RT)]], Callable[(..., RT)])]:
def wrapper(f: Callable[(..., RT)]) -> Callable[(..., RT)]:
(f)
def wrapped(*args: Any, **kwargs: Any) -> RT:
warnings.warn(f'{f.__name__} is deprecated, use {replace_with} instead', Deprecat... |
class OptionSeriesArcdiagramSonificationContexttracksMappingVolume(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(self, text: str):
... |
class SubawardSearch(models.Model):
broker_created_at = models.DateTimeField(null=True, blank=True, db_index=True)
broker_updated_at = models.DateTimeField(null=True, blank=True, db_index=True)
broker_subaward_id = models.BigIntegerField(primary_key=True, db_index=True, unique=True)
unique_award_key = m... |
class StateUpdateMessage(Message):
protocol_id = PublicId.from_str('fetchai/state_update:1.1.7')
protocol_specification_id = PublicId.from_str('fetchai/state_update:1.0.0')
class Performative(Message.Performative):
APPLY = 'apply'
END = 'end'
INITIALIZE = 'initialize'
def __s... |
class OptionPlotoptionsVariablepieSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
def mapFunction(self):
return self._config_get(None)
def mapFunction(self, value: Any):
self._config(value, js_type=False)
def mapTo(self):
return self._config_get(None)
def mapTo(... |
def task_log_exception(request, exc, task_id=None, **kwargs):
if (not task_id):
task_id = task_id_from_request(request, tt=TT_ERROR, dummy=True)
(task_result, task_status) = get_task_exception(exc)
task_log_error(task_id, LOG_API_FAILURE, user=request.user, task_result=task_result, task_status=task_... |
class TestFilmAdvanceMechanism(object):
def test_advance_film(self):
f = FilmAdvanceMechanism()
assert (f.advanced == False)
f.advance()
assert (f.advanced == True)
def test_advance_film_twice(self):
f = FilmAdvanceMechanism()
f.advance()
with pytest.raise... |
class DataAccess(object):
def delete(cls, session, inventory_index_id):
try:
result = cls.get(session, inventory_index_id)
session.query(Inventory).filter((Inventory.inventory_index_id == inventory_index_id)).delete()
session.query(InventoryWarnings).filter((InventoryWarn... |
class TeachTargetReforgeAction(UserAction):
card_usage = 'reforge'
def apply_action(self):
g = self.game
tgt = self.target
cl = user_choose_cards(self, tgt, ('cards', 'showncards', 'equips'))
if cl:
c = cl[0]
else:
c = random_choose_card(g, [tgt.ca... |
((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
class ReduceTestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(ReduceTestCase, self).__init__(*args, **kwargs)
self.test_count = 0
def _run_reduce(self, *, test_name, reduce_op, torch_reduce_op, dim, input_shape,... |
class OptionSeriesStreamgraphDataMarker(Options):
def enabled(self):
return self._config_get(None)
def enabled(self, flag: bool):
self._config(flag, js_type=False)
def enabledThreshold(self):
return self._config_get(2)
def enabledThreshold(self, num: float):
self._config(... |
def create_default_tree(db_session):
root = add_resource(db_session, (- 1), 'root a', ordering=1)
res_a = add_resource(db_session, 1, 'a', parent_id=root.resource_id, ordering=1)
add_resource(db_session, 5, 'aa', parent_id=res_a.resource_id, ordering=1)
add_resource(db_session, 6, 'ab', parent_id=res_a.... |
class TestFileWithGoodExtension():
def test_raises_exception_on_unsupported_extension(self):
file = BytesIO(b'fake audio file')
file.name = 'fake.txt'
file_info = FileInfo(123456, 'machintruc/txt', ['txt'], mediainfo(file.name).get('sample_rate', '44100'), mediainfo(file.name).get('channels'... |
def _fetch_agent_deps(ctx: Context) -> None:
    """Register every package listed as a dependency in the agent config.

    Walks the four package categories (protocols, contracts, connections,
    skills), reads the pluralised attribute of the same name off
    ``ctx.agent_config``, and hands each declared item id to ``add_item``.
    """
    for category in (PROTOCOL, CONTRACT, CONNECTION, SKILL):
        # Config attribute is the plural of the category name, e.g. "skills".
        declared_ids = getattr(ctx.agent_config, '{}s'.format(category))
        for dependency_id in declared_ids:
            add_item(ctx, category, dependency_id)
def get_bert_feature_with_token(tokens, word2ph, device=config.bert_gen_config.device):
if ((sys.platform == 'darwin') and torch.backends.mps.is_available() and (device == 'cpu')):
device = 'mps'
if (not device):
device = 'cuda'
if (device not in models.keys()):
models[device] = Auto... |
.flaky(reruns=MAX_FLAKY_RERUNS)
.integration
.ledger
def test_format_default():
account = FetchAICrypto()
cc2 = FetchAICrypto()
cosmos_api = FetchAIApi(**FETCHAI_TESTNET_CONFIG)
amount = 10000
transfer_transaction = cosmos_api.get_transfer_transaction(sender_address=account.address, destination_addr... |
class OptionsLegend(DataClass):
def enabled(self):
return self._attrs['enabled']
def enabled(self, val):
self._attrs['enabled'] = val
def icons(self):
return self._attrs['icons']
def icons(self, val):
self._attrs['icons'] = val
def left(self):
return self.has_... |
def test_metrics_multithreaded(elasticapm_client):
metricset = MetricSet(MetricsRegistry(elasticapm_client))
pool = Pool(5)
def target():
for i in range(500):
metricset.counter('x').inc((i + 1))
time.sleep(1e-07)
[pool.apply_async(target, ()) for i in range(10)]
pool.... |
def test_client_run_task(client):
client.run_task(cluster=u'test-cluster', task_definition=u'test-task', count=2, started_by='test', overrides=dict(foo='bar'))
client.boto.run_task.assert_called_once_with(cluster=u'test-cluster', taskDefinition=u'test-task', count=2, startedBy='test', overrides=dict(foo='bar')) |
def test_seed_per_observation_with_only_1_variable_as_seed(df_na):
df_na = df_na.copy()
df_na['Age'] = df_na['Age'].fillna(1)
imputer = RandomSampleImputer(variables=['City', 'Studies'], random_state='Age', seed='observation')
X_transformed = imputer.fit_transform(df_na)
ref = {'Name': ['tom', 'nick... |
class TestComposerThread_remove_state(ComposerThreadBaseTestCase):
def test_remove_state(self):
t = ComposerThread(self.semmock, self._make_task()['composes'][0], 'bowlofeggs', self.Session, self.tempdir)
t.compose = self.db.query(Compose).one()
t.db = self.db
t.remove_state()
... |
class TestUsers():
def test_patch_id_status_codes(self, openapi_dict):
route = openapi_dict['paths']['/{id}']['patch']
assert (list(route['responses'].keys()) == ['200', '401', '403', '404', '400', '422'])
def test_delete_id_status_codes(self, openapi_dict):
route = openapi_dict['paths']... |
class TestUtilities():
def test_regressor_coefficients(self, daily_univariate_ts, backend):
m = Prophet(stan_backend=backend)
df = daily_univariate_ts.copy()
np.random.seed(123)
df['regr1'] = np.random.normal(size=df.shape[0])
df['regr2'] = np.random.normal(size=df.shape[0])
... |
def setup_california_gps_map(ax, region=(235.2, 245.3, 31.9, 42.3), coastlines=True, **kwargs):
if kwargs:
warnings.warn(('All kwargs are being ignored. They are accepted to ' + 'guarantee backward compatibility.'), stacklevel=2)
_setup_map(ax, xticks=np.arange((- 124), (- 115), 4), yticks=np.arange(33,... |
class OptionPlotoptionsAreasplinerangeSonificationDefaultspeechoptionsActivewhen(Options):
def crossingDown(self):
return self._config_get(None)
def crossingDown(self, num: float):
self._config(num, js_type=False)
def crossingUp(self):
return self._config_get(None)
def crossingUp... |
def extractIAmABananaFreshieTranslation(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol or frag)) or ('preview' in item['title'].lower())):
return None
if item['title'].startswith('(ON)The Yin Emperor or The Yang Empress'):
return build... |
.integration_saas
.integration_hubspot
('fides.api.models.privacy_request.PrivacyRequest.trigger_policy_webhook')
def test_create_and_process_access_request_saas_hubspot(trigger_webhook_mock, connection_config_hubspot, dataset_config_hubspot, db, cache, policy, policy_pre_execution_webhooks, policy_post_execution_webho... |
class Class_File(Compilation_Unit):
def __init__(self, name, dirname, loc, file_length, n_classdef, l_functions, l_pragmas):
super().__init__(name, dirname, loc, file_length)
assert isinstance(n_classdef, Class_Definition)
assert isinstance(l_functions, list)
for n_function in l_func... |
class PoissonAsyncTrainingStartTimeDistr(IAsyncTrainingStartTimeDistr):
def __init__(self, **kwargs) -> None:
init_self_cfg(self, component_class=__class__, config_class=PoissonAsyncTrainingStartTimeDistrConfig, **kwargs)
super().__init__(**kwargs)
def _set_defaults_in_cfg(cls, cfg):
pas... |
class Decoder(object):
def __init__(self):
self._clear_codes()
self.remainder = []
def code_size(self):
return len(self._codepoints)
def decode(self, codepoints):
codepoints = [cp for cp in codepoints]
for cp in codepoints:
decoded = self._decode_codepoint... |
def extractEvtranslationHomeBlog(item):
(vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
if ((not (chp or vol)) or ('preview' in item['title'].lower())):
return None
tagmap = [('Quan Qiu Gao Wu', 'Quan Qiu Gao Wu', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous',... |
class OEFConnection(Connection):
connection_id = PUBLIC_ID
def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)
addr = cast(str, self.configuration.config.get('addr'))
port = cast(int, self.configuration.config.get('port'))
if ((addr is None) or (port is None)):
... |
def test_instructions_with_tags_at_start():
cfg = ControlFlowGraph()
var_eax = Variable('eax', Integer.int32_t())
var_ecx = Variable('ecx', Integer.int32_t())
const_1 = Constant(1, Integer.int32_t())
const_10 = Constant(10, Integer.int32_t())
const_0 = Constant(0, Integer.int32_t())
tags = [... |
class BaseOperator(DAGNode, ABC, Generic[OUT], metaclass=BaseOperatorMeta):
def __init__(self, task_id: Optional[str]=None, task_name: Optional[str]=None, dag: Optional[DAG]=None, runner: WorkflowRunner=None, **kwargs) -> None:
super().__init__(node_id=task_id, node_name=task_name, dag=dag, **kwargs)
... |
def _minilyrics(artist, song):
service_name = 'Mini Lyrics'
url = ''
timed = False
try:
data = minilyrics.MiniLyrics(artist, song)
for item in data:
if item['url'].endswith('.lrc'):
url = item['url']
break
lyrics = requests.get(url, pro... |
class TestSLCReader(DataReaderTestBase):
def setup_reader(self):
r = PolyDataReader()
r.initialize(get_example_data('nut.slc'))
self.e.add_source(r)
self.bounds = (0.0, 67.0, 0.0, 40.0, 0.0, 58.0)
def test_slc_data_reader(self):
self.check(self.scene, self.bounds)
def... |
def unpack_and_flatten_and_dedup_list_of_strings(list_maybe_jsoned: Optional[Union[(List[str], str)]]) -> List[str]:
if (not list_maybe_jsoned):
return []
ret = []
if isinstance(list_maybe_jsoned, str):
ret = unpack_and_flatten_str_to_list(list_maybe_jsoned)
elif isinstance(list_maybe_js... |
def symtable_snapshot(obj):
if isinstance(obj, str):
obj = symtable.symtable(obj, UNKNOWN_FILE, 'exec')
if isinstance(obj, symtable.SymbolTable):
snapshot = types.SimpleNamespace(__obj__=obj, id=obj.get_id(), type=obj.get_type(), name=obj.get_name(), nested=obj.is_nested(), optimized=obj.is_opti... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.