code stringlengths 101 5.91M |
|---|
def get_init_file():
    """Return a fresh init-file path inside the shared folder.

    Ensures the shared folder exists, then builds a randomly named
    ``<hex>_init`` path and removes any stale file at that location.
    """
    os.makedirs(str(get_shared_folder()), exist_ok=True)
    candidate = get_shared_folder() / f'{uuid.uuid4().hex}_init'
    if candidate.exists():
        os.remove(str(candidate))
    return candidate
def main():
args = get_args()
if (args.num_exp == 1):
score = run(args)
score_str = ''.join([f'{s: .4f} ' for s in score])
elif (args.num_exp > 1):
(score_mean, score_std) = repeat_run(args)
score_str = ''.join(([f'{s: .4f} ' for s in score_mean] + [f'{s: .4f} ' for s in scor... |
def fundamental_group_arrangement(flist, simplified=True, projective=False, puiseux=False):
if (len(flist) > 0):
f = prod(flist)
R = f.parent()
else:
R = PolynomialRing(QQ, ('x', 'y'))
f = R(1)
(x, y) = R.gens()
F = R.base_ring()
flist1 = [_ for _ in flist]
d = f.... |
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None, noactivation=False):
super(Bottleneck, self).__init__()
self.bottleneck_sub = BottleneckSub(inplanes, planes, stride, noactivation)
self.downsample = downsample
self.stride ... |
def test_constructor_mutate_parameters_args(test_case_mock, constructor_mock, variable_reference_mock):
signature = MagicMock(original_parameters={'a': float, 'b': int})
const = stmt.ConstructorStatement(test_case_mock, MagicMock(inferred_signature=signature), {'a': variable_reference_mock, 'b': variable_refere... |
def reproducible_repr(val):
def sorted_pairs(iterable, pairs=False):
res = sorted(((reproducible_repr(item), item) for item in iterable))
if (not pairs):
res = [r for (r, i) in res]
return res
if isinstance(val, frozenset):
itms = sorted_pairs(val)
return 'fro... |
def test_get_option_reward():
goal = ['grey', 'XL', 'pack of 12']
purchased = ['pack of 12', 'grey', 'XL']
(r_option, matches) = get_option_reward(purchased, goal)
assert (matches == len(goal))
assert (r_option == 1)
goal = ['grey', 'XL', 'pack of 12']
purchased = ['pack of 12', 'blue', 'XL'... |
def inference_small_config(x, c):
c['bottleneck'] = False
c['ksize'] = 3
c['stride'] = 1
with tf.variable_scope('scale1'):
c['conv_filters_out'] = 16
c['block_filters_internal'] = 16
c['stack_stride'] = 1
x = conv(x, c)
x = bn(x, c)
x = activation(x)
... |
class Analyser():
def __init__(self, cfg, model, param_details=False):
self.cfg = cfg
if isinstance(model, (nn.parallel.distributed.DistributedDataParallel, nn.DataParallel)):
self.model = model.module
else:
self.model = model
self.param_details = param_detail... |
class _ROIPool(Function):
_fwd(cast_inputs=torch.float32)
def forward(ctx, input, roi, output_size, spatial_scale):
ctx.output_size = _pair(output_size)
ctx.spatial_scale = spatial_scale
ctx.input_shape = input.size()
(output, argmax) = _C.roi_pool_forward(input, roi, spatial_sca... |
def _seg_58():
return [(92768, 'V'), (92778, 'X'), (92782, 'V'), (92784, 'X'), (92880, 'V'), (92910, 'X'), (92912, 'V'), (92918, 'X'), (92928, 'V'), (92998, 'X'), (93008, 'V'), (93018, 'X'), (93019, 'V'), (93026, 'X'), (93027, 'V'), (93048, 'X'), (93053, 'V'), (93072, 'X'), (93760, 'M', u''), (93761, 'M', u''), (93... |
class TestQuicGraphicalLasso(object):
.parametrize('params_in, expected', [({}, [3., 3., 9., 3.e-11]), ({'lam': 1.0, 'max_iter': 100}, [3., 3., 10.0, 0.0]), ({'lam': 0.5, 'mode': 'trace'}, [3., 3., 32., 0.]), ({'lam': 0.5, 'mode': 'path', 'path': np.array([1.0, 0.9, 0.8, 0.7, 0.6, 0.5])}, [8., 9., 22., 1.e-08]), ({... |
def get_number_of_jobs(alidir):
    """Read the number of alignment jobs from ``<alidir>/num_jobs``.

    Parameters
    ----------
    alidir : str
        Alignment directory containing a ``num_jobs`` file whose first
        line is an integer.

    Returns
    -------
    int
        The parsed job count.

    Exits the process via ``SystemExit(1)`` (after logging) if the file
    is missing or its first line is not an integer.
    """
    try:
        # Context manager closes the handle even on a parse error; the
        # original left the file object to the garbage collector.
        with open('{0}/num_jobs'.format(alidir)) as f:
            num_jobs = int(f.readline().strip())
    except (IOError, ValueError):
        logger.error('Exception while reading the number of alignment jobs: ', exc_info=True)
        raise SystemExit(1)
    return num_jobs
class TwistedAffineLieAlgebra(AffineLieAlgebra):
def __init__(self, R, cartan_type, kac_moody):
if (cartan_type.type() == 'BC'):
classical = cartan_type.classical().dual()
n = classical.rank()
classical = classical.relabel({(n - i): i for i in range(n)})
else:
... |
def description_print():
    """Print pyrgg's links and wrapped description, framed by rules."""
    links_text = pyrgg.params.PYRGG_LINKS
    wrapped_description = fill(pyrgg.params.PYRGG_DESCRIPTION, width=100)
    print(links_text)
    line(40)
    print('\n')
    print(wrapped_description)
    print('\n')
    line(40)
def _record_to_complex(layout, complex_record_fields):
if (complex_record_fields is None):
return layout
elif (isinstance(complex_record_fields, Sized) and isinstance(complex_record_fields, Iterable) and (len(complex_record_fields) == 2) and isinstance(complex_record_fields[0], str) and isinstance(compl... |
def test_dict():
    """compute_estimate must give the same result for a dict view of X."""
    as_mapping = dict(enumerate(X))
    assert (compute_estimate(X) == compute_estimate(as_mapping))
def detach(sgv, control_inputs=False, control_outputs=None, control_ios=None):
(control_inputs, control_outputs) = select.check_cios(control_inputs, control_outputs, control_ios)
(_, detached_inputs) = detach_inputs(sgv, control_inputs)
(_, detached_outputs) = detach_outputs(sgv, control_outputs)
return... |
def min_max_quantize(input, bits):
assert (bits >= 1), bits
if (bits == 1):
return (torch.sign(input) - 1)
(min_val, max_val) = (input.min(), input.max())
if isinstance(min_val, Variable):
max_val = float(max_val.data.cpu().numpy()[0])
min_val = float(min_val.data.cpu().numpy()[0... |
def ljspeech_example_cacher(text, n_timesteps, mel_temp, length_scale, spk=(- 1)):
global CURRENTLY_LOADED_MODEL
if (CURRENTLY_LOADED_MODEL != 'matcha_ljspeech'):
global model, vocoder, denoiser
(model, vocoder, denoiser) = load_model('matcha_ljspeech', 'hifigan_T2_v1')
CURRENTLY_LOADED_... |
class DiscreteIQNQFunctionForwarder(DiscreteQFunctionForwarder):
_q_func: DiscreteIQNQFunction
_n_quantiles: int
def __init__(self, q_func: DiscreteIQNQFunction, n_quantiles: int):
self._q_func = q_func
self._n_quantiles = n_quantiles
def compute_expected_q(self, x: TorchObservation) -> ... |
class BottomRightPool(nn.Module):
    """Module wrapper around the BottomRightPool custom autograd function."""

    def forward(self, x, guide):
        # The custom op requires contiguous tensors; the guide is first
        # broadcast to the input's shape.
        pooled_input = x.contiguous()
        expanded_guide = guide.expand_as(pooled_input).contiguous()
        return BottomRightPoolFunction.apply(pooled_input, expanded_guide)
class TBBProcessPool27(multiprocessing.pool.Pool):
def _repopulate_pool(self):
from multiprocessing.util import debug
for i in range((self._processes - len(self._pool))):
w = self.Process(target=tbb_process_pool_worker27, args=(self._inqueue, self._outqueue, self._initializer, self._init... |
.parametrize('num_threshold, expected_predictions', [((- np.inf), [0, 1, 1, 1]), (10, [0, 0, 1, 1]), (20, [0, 0, 0, 1]), (ALMOST_INF, [0, 0, 0, 1]), (np.inf, [0, 0, 0, 0])])
def test_infinite_values_and_thresholds(num_threshold, expected_predictions):
X = np.array([(- np.inf), 10, 20, np.inf]).reshape((- 1), 1)
... |
def tarball(snakemake_args=(), cores=1, conda_frontend='conda'):
    """Run the showyourwork build workflow to produce the arXiv tarball."""
    build_smk = paths.showyourwork().workflow / 'build.smk'
    extra = list(snakemake_args) + ['syw__arxiv_entrypoint']
    run_snakemake(build_smk.as_posix(), run_type='tarball', cores=cores,
                  conda_frontend=conda_frontend, extra_args=extra, check=True)
class ToMaskConverter(BaseConverter):
registry = {}
dst_type = BitMasks
def convert(cls, densepose_predictor_outputs: Any, boxes: Boxes, image_size_hw: ImageSizeType, *args, **kwargs) -> BitMasks:
return super(ToMaskConverter, cls).convert(densepose_predictor_outputs, boxes, image_size_hw, *args, **... |
class AbsoluteValue(OptimizationFunction):
def __init__(self, objective):
super().__init__(objective)
def eval(self, input_vals: List[np.ndarray]) -> np.ndarray:
return np.abs(input_vals[0])
def grad(self, input_vals: List[np.ndarray], grad_val: np.ndarray) -> List[np.ndarray]:
grad ... |
def get_trans_func(name):
    """Look up a transformation class by its registry name.

    Parameters
    ----------
    name : str
        One of ``'bottleneck_transform'``, ``'basic_transform'``,
        ``'x3d_transform'``.

    Returns
    -------
    The transformation class registered under ``name``.

    Raises ``AssertionError`` for unknown names.
    """
    trans_funcs = {
        'bottleneck_transform': BottleneckTransform,
        'basic_transform': BasicTransform,
        'x3d_transform': X3DTransform,
    }
    # Membership test on the dict directly; `.keys()` was redundant.
    assert name in trans_funcs, "Transformation function '{}' not supported".format(name)
    return trans_funcs[name]
def _create_int_feature(values):
    """Wrap an iterable of ints into a tf.train.Feature int64 list."""
    int64_list = tf.train.Int64List(value=list(values))
    return tf.train.Feature(int64_list=int64_list)
def stack_backward(grad_inputs, inputs, input_shapes, outputs, output_shapes, axis=0):
dy = grad_inputs[0]
yshape = dy.shape
if (yshape[axis] == 1):
reshape = (yshape[:axis] + yshape[(axis + 1):])
return F.reshape(dy, reshape, inplace=False)
dx_list = F.split(dy, axis=axis)
return dx... |
class Saver(object):
def __init__(self, model, optimizer, keep_every_n=None):
self._model = model
self._optimizer = optimizer
self._keep_every_n = keep_every_n
def restore(self, model_dir, map_location=None, step=None):
last_step = load_checkpoint(self._model, self._optimizer, mo... |
def center_stim3_fenics(I_s, t):
V = I_s.function_space()
mesh = V.mesh()
frequency = 30
start = 3.0
length = 1.0
threshold = ufl.cos(((ufl.pi / frequency) * length))
timer = ufl.cos((((2 * ufl.pi) / frequency) * ((t - start) - (length / 2))))
(x, y) = SpatialCoordinate(mesh)
zero = ... |
class FullyConnectedQFunction(nn.Module):
def __init__(self, observation_dim, action_dim, arch='256-256', orthogonal_init=False):
super().__init__()
self.observation_dim = observation_dim
self.action_dim = action_dim
self.arch = arch
self.orthogonal_init = orthogonal_init
... |
(0.1)
_service.route('/inch_2_cm', methods=['POST'])
def funcInch2Cm():
    """Endpoint: convert the 'inch' entity in the request JSON to centimeters."""
    entities = request.json['entities']
    inches = float(entities['inch'])
    centimeters = 2.54 * inches
    return json_resp(True, '{} inch equals to {} centimeter'.format(inches, centimeters))
def eval_step(eval_len=args.seq_len, ood=False, n_evals=100):
model.eval()
total_loss = 0.0
with torch.no_grad():
for _ in range(n_evals):
(data, label, op) = rules(args.batch_size, eval_len, args.gt_rules, 2, args.search_version, args.data_seed, ood)
data = torch.Tensor(data... |
def get_ydist(nlabels, device=None):
    """Uniform categorical distribution over ``nlabels`` classes.

    The label count is stashed on the distribution object for callers'
    convenience.
    """
    uniform_logits = torch.zeros(nlabels, device=device)
    label_dist = distributions.categorical.Categorical(logits=uniform_logits)
    label_dist.nlabels = nlabels
    return label_dist
def understand_file(file_name, things_to_look_for, work_dir='.', **kwargs):
lines = read_file(file_name, work_dir=work_dir, **kwargs).split('\n')
counter = 0
blocks = []
while (counter < len(lines)):
block = []
start_line_number = (counter + 1)
while ((counter < len(lines)) and (... |
class TestCausal(unittest.TestCase):
def test_1(self):
graph = pd.DataFrame([[0, 1, 0, 0], [0, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0]], columns=['a', 'b', 'c', 'd'], index=['a', 'b', 'c', 'd'])
(levels, cycles) = CausalDiscovery.causal_order(graph)
self.assertEqual(levels, None)
self.a... |
def unet3_l2(base_n_filt, x, y, deconv=False, kernel_conv=[3, 3, 3], kernel_deconv=[1, 3, 3], is_training=True, is_pad=False, varlist=None):
conv_0_1 = conv_batch_relu3d_layer(x, base_n_filt, kernel=kernel_conv, is_training=is_training, is_pad=is_pad)
conv_0_2 = conv_batch_relu3d_layer(conv_0_1, (base_n_filt * ... |
def get_columns(data):
    """Column list with LATITUDE, LONGITUDE, DATETIME first, the rest after.

    NOTE(review): the trailing columns come from a set difference, so
    their order is not deterministic — matching the original behavior.
    """
    leading = [constants.LATITUDE, constants.LONGITUDE, constants.DATETIME]
    trailing = list(set(data.columns) - set(leading))
    return leading + trailing
class AlgebraIdeals(Category_ideal):
def __init__(self, A):
try:
base_ring = A.base_ring()
except AttributeError:
raise TypeError(f'A (={A}) must be an algebra')
else:
if ((base_ring not in Rings()) or (A not in Algebras(base_ring.category()))):
... |
def make_mm_config(data_args):
    """Collect multimodal/audio settings from ``data_args`` into a dict.

    Optional attributes fall back to safe defaults via ``getattr``.
    """
    return {
        'is_multimodal': data_args.is_multimodal,
        'sep_audio_conv_front': data_args.sep_audio_conv_front,
        'audio_folder': data_args.audio_folder,
        'use_audio_start_end': getattr(data_args, 'mm_use_audio_start_end', False),
        'audio_processor': getattr(data_args, 'audio_encoding_processor', None),
    }
def get_checkpoint_shard_files(pretrained_model_name_or_path, index_filename, cache_dir=None, force_download=False, proxies=None, resume_download=False, local_files_only=False, use_auth_token=None, user_agent=None, revision=None, subfolder='', _commit_hash=None):
import json
if (not os.path.isfile(index_filenam... |
class RejoinRequestPayload(Payload):
_OFFSET_RJTYPE = 0
_LEN_RJTYPE = 1
_OFFSET_NETID = (_OFFSET_RJTYPE + _LEN_RJTYPE)
_LEN_NETID = 3
_OFFSET_DEVEUI = (_OFFSET_NETID + _LEN_NETID)
_LEN_DEVUI = 8
_OFFSET_RJCOUNT = (_OFFSET_DEVEUI + _LEN_DEVUI)
_LEN_RJCOUNT = 2
def __init__(self, msg):... |
def to_Brauer_partition(l, k=None):
L = to_set_partition(l, k=k)
L2 = []
paired = []
not_paired = []
for i in L:
L2.append(list(i))
for i in L2:
if (len(i) > 2):
raise ValueError('blocks must have size at most 2, but {} has {}'.format(i, len(i)))
if (len(i) ==... |
def createResolutionCallbackFromFrame(frames_up: int=0):
frame = inspect.currentframe()
i = 0
while (i < (frames_up + 1)):
assert (frame is not None)
frame = frame.f_back
i += 1
assert (frame is not None)
f_locals = frame.f_locals
f_globals = frame.f_globals
class env... |
class BatchNormPreprocessor(object):
def __call__(self, graph):
for node in graph.nodes:
if (node.kind != NodeKind.BatchNorm):
continue
assert (node.data is not None)
assert (len(node.data) == 3)
(mean, variance, scale) = node.data
... |
class STDCNet(nn.Module):
def __init__(self, subtype='stdc1', out_channels=[32, 64, 256, 512, 1024], layers=[2, 2, 2], block_num=4, out_stages=[2, 3, 4], output_stride=32, classifier=False, num_classes=1000, backbone_path=None, pretrained=False):
super(STDCNet, self).__init__()
self.subtype = subtyp... |
def log_norm_cdf_prime(x):
    """Derivative of log Phi(x), i.e. the ratio phi(x) / Phi(x).

    Uses the scaled complementary error function ``erfcx`` so the result
    stays finite for very negative ``x``; scipy warnings are suppressed.
    """
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        denom = np.sqrt(2 * np.pi) * 0.5 * erfcx((-x) / np.sqrt(2))
    return 1.0 / denom
class Saturation(BaseTimeRx):
def __call__(self, U, simulation):
P = self.getP(simulation.mesh, simulation.time_mesh)
usat = np.concatenate([simulation.water_retention(ui) for ui in U])
return (P * usat)
def deriv(self, U, simulation, du_dm_v=None, v=None, adjoint=False):
P = sel... |
def clean_fr_nir(df: Union[(pd.DataFrame, dd.DataFrame)], column: str, output_format: str='standard', inplace: bool=False, errors: str='coerce', progress: bool=True) -> pd.DataFrame:
if (output_format not in {'compact', 'standard'}):
raise ValueError(f'output_format {output_format} is invalid. It needs to b... |
def not_number_date_field_table(identifier):
return ((identifier != '*') and (not re.fullmatch(number_pattern, identifier)) and (not re.fullmatch(datetime_pattern, identifier)) and (not re.fullmatch(field_pattern, identifier)) and (not re.fullmatch(table_pattern, identifier)) and (not re.fullmatch(alias_pattern, id... |
def reduce_tensor(inp):
    """Reduce a tensor onto rank 0 across the process group.

    In single-process runs the input is returned untouched.
    NOTE(review): assumes ``dist`` is torch.distributed, whose default
    reduce op is SUM — confirm against the module's imports.
    """
    if get_world_size() < 2:
        return inp
    with torch.no_grad():
        reduced = inp
        dist.reduce(reduced, dst=0)
    return reduced
def right_mark_index(pinyin_no_number):
for c in ['a', 'o', 'e']:
if (c in pinyin_no_number):
return pinyin_no_number.index(c)
for c in ['iu', 'ui']:
if (c in pinyin_no_number):
return (pinyin_no_number.index(c) + 1)
for c in ['i', 'u', 'v', 'u']:
if (c in pin... |
def register_datasets(datasets, cfg):
if (not isinstance(datasets, (tuple, list))):
datasets = [datasets]
for seq_name in datasets:
print('Registering dataset ', seq_name)
if (seq_name not in __image_datasets):
seq_class = get_sequence_class(seq_name, cfg)
torchre... |
def main():
colorama.init()
steps = [ConfirmGitStatus(branch='main'), MakeClean(), UpdateVersion(), CheckVersionNumber(), UpdateReadme(), UpdateChangelog(), MakeDocs(), CheckLocalDocs(), MakeDist(), UploadToTestPyPI(), InstallFromTestPyPI(), PushToGitHub(), CheckCIStatus(), GitTagRelease(), PushTagToGitHub(), C... |
class OrProver(Prover):
def __init__(self, stmt, subprover):
self.subprover = subprover
self.stmt = stmt
self.true_prover_idx = self.stmt.chosen_idx
self.setup_simulations()
def setup_simulations(self):
self.simulations = []
for (index, subproof) in enumerate(self... |
_checkable
class Serializer(Protocol):
    """Protocol for serializing a payload for the two supported transports.

    Implementations turn ``payload`` into a ``dict[str, Any]`` suitable for
    either a ``requests`` call or a werkzeug-based client call.
    """

    def as_requests(self, context: SerializerContext, payload: Any) -> dict[(str, Any)]:
        # Abstract: build the dict for a `requests`-based transport.
        raise NotImplementedError

    def as_werkzeug(self, context: SerializerContext, payload: Any) -> dict[(str, Any)]:
        # Abstract: build the dict for a werkzeug-based transport.
        raise NotImplementedError
def timeit(method):
def timed(*args, **kw):
ts = time.time()
result = method(*args, **kw)
te = time.time()
if ('log_time' in kw):
name = kw.get('log_name', method.__name__.upper())
kw['log_time'][name] = int(((te - ts) * 1000))
else:
print(... |
def test_property_executor(mosa_strategy):
    """Setting the executor property must round-trip through the getter."""
    tracer_executor = TestCaseExecutor(MagicMock(ExecutionTracer))
    mosa_strategy.executor = tracer_executor
    assert (mosa_strategy.executor == tracer_executor)
class EvaluationTest(absltest.TestCase):
def test_reduce_permutations(self):
b = 8
n = 16
pred = jnp.stack([jax.random.permutation(jax.random.PRNGKey(i), n) for i in range(b)])
heads = jax.random.randint(jax.random.PRNGKey(42), (b,), 0, n)
perm = probing.DataPoint(name='test'... |
class LabelledRootedTree(AbstractLabelledClonableTree, RootedTree):
def __classcall_private__(cls, *args, **opts):
return cls._auto_parent.element_class(cls._auto_parent, *args, **opts)
_class_attribute
def _auto_parent(cls):
return LabelledRootedTrees()
def sort_key(self):
l = l... |
def Lipschitz_W1(X, corrupted_rate, gamma, z):
term_1 = (gamma * X.dot(np.transpose(X)))
term_2 = ((((1 - corrupted_rate) * (1 - corrupted_rate)) * (np.ones([z, z]) - np.diag(np.ones([z])))) * np.dot(X, np.transpose(X)))
term_2 += (((1 - corrupted_rate) * np.diag(np.ones([z]))) * np.dot(X, np.transpose(X)))... |
def mminfo(source):
    """Return Matrix Market header information for ``source``.

    Parameters
    ----------
    source
        Path or open stream accepted by ``_get_read_cursor``.

    Returns
    -------
    tuple
        ``(nrows, ncols, nnz, format, field, symmetry)`` from the header.
    """
    (cursor, stream_to_close) = _get_read_cursor(source, 1)
    try:
        h = cursor.header
        return (h.nrows, h.ncols, h.nnz, h.format, h.field, h.symmetry)
    finally:
        # Release the cursor and any stream we opened even if reading
        # the header raises (the original leaked both on error).
        cursor.close()
        if stream_to_close:
            stream_to_close.close()
class OmniglotClassDataset(ClassDataset):
folder = 'omniglot'
download_url_prefix = '
zips_md5 = {'images_background': '68d2efa1b9178cc56df9314c21c6e718', 'images_evaluation': '6b91aef0f799c5bb55b94e3f2daec811'}
filename = 'data.hdf5'
filename_labels = '{0}{1}_labels.json'
def __init__(self, roo... |
class TCrossNetAIntI(object):
thisown = _swig_property((lambda x: x.this.own()), (lambda x, v: x.this.own(v)), doc='The membership flag')
__repr__ = _swig_repr
def __init__(self, *args):
_snap.TCrossNetAIntI_swiginit(self, _snap.new_TCrossNetAIntI(*args))
def Next(self):
return _snap.TCr... |
def get_bucket_sizes(size, direction: 'down', min_size):
    """Bucket ``size`` against the 64 and 128 multipliers.

    NOTE(review): ``direction: 'down'`` is an annotation, not a default
    value — callers must always pass ``direction``; it was presumably
    meant to be ``direction='down'``. Signature kept as-is.
    """
    buckets = [64, 128]
    for index, multiplier in enumerate(buckets):
        bucketed = up_down_bucket(multiplier, size, direction)
        buckets[index] = min_res(bucketed, min_size=min_size)
    return buckets
def load_word_embedding(embedding_path, word_idx):
with codecs.open(embedding_path, 'r', 'utf-8') as f:
vecs = []
for line in f:
line = line.strip()
if (len(line.split(' ')) == 2):
continue
info = line.split(' ')
word = info[0]
... |
class Gone(HTTPException):
    """HTTP exception for *410 Gone*: the resource is permanently unavailable."""

    # Status code sent with the error response.
    code = 410
    # Default body text shown to the client.
    description = 'The requested URL is no longer available on this server and there is no forwarding address. If you followed a link from a foreign page, please contact the author of this page.'
class TrecProcessor(Sst2Processor):
def get_train_examples(self, data_dir):
return self._create_examples(data_dir, 'train')
def get_dev_examples(self, data_dir):
return self._create_examples(data_dir, 'dev')
def get_test_examples(self, data_dir):
return []
def no_label_for_test(s... |
def unique_hook(testdir):
return testdir.make_importable_pyfile(hook='\n import schemathesis\n\n \n def unique_test_cases(response, case):\n if not hasattr(case.operation.schema, "seen"):\n case.operation.schema.seen = set()\n command = case.as_curl_command(... |
def is_tool_test(test_case):
    """Mark or skip ``test_case`` depending on whether tool tests run.

    When tool tests are disabled, the case is wrapped in a unittest skip.
    When enabled, the pytest ``is_tool_test`` marker is applied if pytest
    is importable; otherwise the case is returned unchanged.
    """
    if not _run_tool_tests:
        return unittest.skip('test is a tool test')(test_case)
    try:
        import pytest
    except ImportError:
        return test_case
    return pytest.mark.is_tool_test()(test_case)
def validate(ann_items, questions, answers, collections):
v_dataset = V_dataset(ann_items, questions, answers, collections)
v_dataloader = DataLoader(v_dataset, batch_size=128, shuffle=False, num_workers=24, collate_fn=DataCollator())
final_scores = []
for (k, scores) in enumerate(tqdm(v_dataloader, tot... |
.parametrize('method,inputs', [(ExecutionTracer.executed_code_object.__name__, (None,)), (ExecutionTracer.executed_compare_predicate.__name__, (None, None, None, None)), (ExecutionTracer.executed_bool_predicate.__name__, (None, None)), (ExecutionTracer.executed_exception_match.__name__, (None, None, None)), (ExecutionT... |
def padic_field():
    """Random p-adic field Qp with a random prime and precision in [10, 100]."""
    from sage.rings.integer_ring import ZZ
    from sage.rings.padics.factory import Qp
    precision = ZZ.random_element(x=10, y=100)
    prime = ZZ.random_element(x=2, y=(10 ** 4) - 30).next_prime()
    return Qp(prime, precision)
def get_audio_paths(dataset_root_path, lst_name):
audio_paths = []
with open(((dataset_root_path / 'scoring') / lst_name)) as f:
for line in f:
(audio_path, lang) = tuple(line.strip().split())
if (lang != 'nnenglish'):
continue
audio_path = re.sub('^.*... |
_method
class pAdicLseries(SageObject):
def __init__(self, E, p, implementation='eclib', normalize='L_ratio'):
self._E = E
self._p = ZZ(p)
self._normalize = normalize
if (implementation not in ['eclib', 'sage', 'num']):
raise ValueError("Implementation should be one of 'e... |
def test_Reals():
    """The real line absorbs intervals under union and contains 0."""
    reals = Reals()
    assert (reals.union(Interval(2, 4)) == reals)
    assert (reals.contains(0) == true)
def sent2action(sent):
if (sent == '\n'):
return sent
sent = sent.split()
verb = sent[0]
if (verb in ['stand', 'wake']):
verb = (verb[0].upper() + verb[1:])
action = f'[{verb}Up]'
elif (verb == 'sleep'):
action = '[Sleep]'
elif (verb in ['put', 'take']):
i... |
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--model_type', default=None, type=str, required=True)
parser.add_argument('--base_model', default=None, type=str, required=True)
parser.add_argument('--lora_model', default='', type=str, help='If None, perform inference on the base mode... |
('set_location_header')
def set_location_header():
    """Echo the ``target`` query parameter back as a Location header.

    NOTE(review): the header value comes straight from user input; this
    looks like a deliberate test-service endpoint — confirm it is never
    exposed in production.
    """
    redirect_target = request.args.get('target')
    resp = Response('')
    resp.headers['Location'] = redirect_target
    return resp
class GraphClasses(UniqueRepresentation):
def get_class(self, id):
classes = self.classes()
if (id in classes):
c = classes[id]
if c.get('name', ''):
name = c['name']
else:
name = ('class ' + str(id))
return GraphClass(n... |
def get_root_and_nouns(text: str, lazy=True) -> Tuple[(str, str, List[Tuple[(int, int)]], List[Tuple[(int, int)]])]:
sents = nlp(text)
negative_text = []
if (len([x for x in sents if (x.tag_ in ['NN', 'NNS', 'NNP', 'NNPS', 'PRP'])]) <= 1):
if (lazy or (len([x for x in sents if (x.tag_ in ['NN', 'NNS... |
def getPath(node_c, previous):
    """Collect the chain of predecessors of ``node_c``.

    Parameters
    ----------
    node_c : hashable
        Start node; must be a key of ``previous`` (raises ``KeyError``
        otherwise, matching the original behavior).
    previous : dict
        Maps each node to its predecessor, or ``None`` at the chain end.

    Returns
    -------
    list
        Predecessors in walk order, excluding ``node_c`` itself; empty
        if ``node_c`` has no predecessor.
    """
    path = []
    # First hop indexes directly (KeyError for unknown start node, as
    # before); subsequent hops use .get() and stop at None/missing.
    node = previous[node_c]
    while node is not None:
        path.append(node)
        node = previous.get(node)
    return path
class Value():
def __init__(self, ptr):
self.ptr = ptr
def __del__(self):
if self.ptr:
check(lib.tract_value_destroy(byref(self.ptr)))
def _valid(self):
if (self.ptr == None):
raise TractError('invalid value (maybe already consumed ?)')
def from_numpy(arra... |
class Anyscale(HFModel):
def __init__(self, model, **kwargs):
super().__init__(model=model, is_client=True)
self.session = requests.Session()
self.api_base = os.getenv('OPENAI_API_BASE')
self.token = os.getenv('OPENAI_API_KEY')
self.model = model
self.kwargs = {'tempe... |
(message='scipy.misc.indentcount_lines is deprecated in Scipy 1.3.0')
def indentcount_lines(lines):
    """Deprecated shim: delegate to ``_ld.indentcount_lines``.

    Kept only for backward compatibility with callers of the old
    ``scipy.misc`` location; see the deprecation decorator above.
    """
    return _ld.indentcount_lines(lines)
class CrossNet(nn.Module):
def __init__(self, in_features, layer_num=2, parameterization='vector', seed=1024, device='cpu'):
super(CrossNet, self).__init__()
self.layer_num = layer_num
self.parameterization = parameterization
if (self.parameterization == 'vector'):
self.k... |
def preserver_loss(logits, targets):
probs = logits.sigmoid()
(batch_size, num_classes) = probs.size()
num_object_classes_in_batch = 0
loss = 0.0
for i in range(batch_size):
for j in range(num_classes):
if (targets[i][j] == 1.0):
num_object_classes_in_batch += 1
... |
def create_resnet_32x32(model, data, num_input_channels, num_groups, num_labels, is_test=False):
brew.conv(model, data, 'conv1', num_input_channels, 16, kernel=3, stride=1)
brew.spatial_bn(model, 'conv1', 'conv1_spatbn', 16, epsilon=0.001, is_test=is_test)
brew.relu(model, 'conv1_spatbn', 'relu1')
filte... |
class GaussianMLPBaseline(Baseline):
def __init__(self, env_spec, subsample_factor=1.0, num_seq_inputs=1, regressor_args=None, name='GaussianMLPBaseline'):
super().__init__(env_spec)
if (regressor_args is None):
regressor_args = dict()
self._regressor = GaussianMLPRegressor(input... |
def insert_open_import_namespaces(import_files: List[str], lines: List[str]) -> List[str]:
after_imports = skip_imports(lines, skip_initial_comment(lines))
if ((after_imports == 0) or ((len(lines[(after_imports - 1)]) > 0) and (not lines[(after_imports - 1)].isspace()))):
lines = ((lines[:after_imports]... |
class phase_net(nn.Module):
def __init__(self, input_dim, hidden_dim=300, num_layers=3, embedding_dim=20, dropout=0.3, num_speaker=2):
super(phase_net, self).__init__()
chimera_net = chimera(input_dim, hidden_dim, num_layers, embedding_dim, dropout, num_speaker)
rnn = nn.LSTM((input_dim * 3)... |
def weighted_resampling(scores, k=1.0, num_samples=None):
num_rows = scores.shape[0]
scores = scores.reshape(num_rows, (- 1))
ranks = rankdata(scores, method='dense', axis=0)
ranks = ranks.max(axis=(- 1))
weights = softmax(((- np.log(ranks)) / k))
num_samples = (num_rows if (num_samples is None)... |
def test_smfish_dataset(save_path: str):
    """Smoke test: load the smFISH dataset and train unsupervised for one epoch."""
    dataset = scvi.data.smfish(save_path=save_path)
    unsupervised_training_one_epoch(dataset)
class HarmonicEmbedding(torch.nn.Module):
def __init__(self, n_harmonic_functions: int=6, omega0: float=1.0, logspace: bool=True):
super().__init__()
if logspace:
frequencies = (2.0 ** torch.arange(n_harmonic_functions, dtype=torch.float32))
else:
frequencies = torch.... |
def make_index(data_path):
subsets = ['development']
split = ['train', 'test']
rec_sites = ['sony', 'tau']
annotations = {'development': 'metadata_dev'}
formats = ['foa', 'mic']
index = {'version': '1.0.0', 'clips': {}, 'metadata': {}}
for subset in subsets:
for formt in formats:
... |
class BasisAbstract(CombinatorialFreeModule, BindableClass):
    """Common base for basis realizations indexed by the parent's lattice."""

    def __getitem__(self, x):
        # Coerce the index into the underlying lattice before building
        # the corresponding monomial.
        lattice = self.realization_of()._lattice
        return self.monomial(lattice(x))
_exceptions
def convert_ignore_expections(name, in_dir, out_dir, resolution, skip_existing):
    """Thin wrapper around ``convert``; exceptions are handled by the decorator above.

    NOTE: the 'expections' typo in the name is kept for caller compatibility.
    """
    return convert(name, in_dir, out_dir, resolution, skip_existing)
class MFSeriesConstructor(SageObject, UniqueRepresentation):
def __classcall__(cls, group=HeckeTriangleGroup(3), prec=ZZ(10)):
if (group == infinity):
group = HeckeTriangleGroup(infinity)
else:
try:
group = HeckeTriangleGroup(ZZ(group))
except Type... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.