code stringlengths 101 5.91M |
|---|
class SegmentHead(nn.Module):
def __init__(self, num_classes=2):
super(SegmentHead, self).__init__()
self.segHead = nn.Sequential(Conv(256, 128, 3, 1), nn.Upsample(scale_factor=2, mode='nearest'), BottleneckCSP(128, 64, n=1, shortcut=False), Conv(64, 32, 3, 1), nn.Upsample(scale_factor=2, mode='near... |
def _ground_formula(match_parse, text_formula):
basic_ontology = text_formula.basic_ontolgy
current = text_formula.current
packs = []
if (isinstance(current, Constant) and basic_ontology.isinstance(current.type, basic_ontology.types['number'])):
assert isinstance(current.content, str)
nu... |
class LieConformalAlgebraWithStructureCoefficients(FinitelyFreelyGeneratedLCA):
def _standardize_s_coeff(s_coeff, index_set, ce, parity=None):
if (parity is None):
parity = ((0,) * index_set.cardinality())
index_to_parity = dict(zip(index_set, parity))
sc = {}
for mypair ... |
class LabeledFewShot(Teleprompter):
def __init__(self, k=16):
self.k = k
def compile(self, student, *, trainset, sample=True):
self.student = student.reset_copy()
self.trainset = trainset
if (len(self.trainset) == 0):
return self.student
rng = random.Random(0)... |
def main():
get_info_about_data_len_distribution([yahoo_output_full_data_filename], '/Users/sofias6/Downloads/')
make_class_balanced_train_dev_test_sets(yahoo_output_full_data_filename, 50, 350, 5000, 140000, 0.1, yahoo_output_train_filename, yahoo_output_dev_filename, yahoo_output_test_filename)
split_file... |
def test_NumpyArray():
v2_array = ak.contents.numpyarray.NumpyArray(np.array([0.0, 1.1, 2.2, 3.3], dtype=np.float64))
assert (to_list(ak._do.combinations(v2_array, 2, axis=0)) == [(0.0, 1.1), (0.0, 2.2), (0.0, 3.3), (1.1, 2.2), (1.1, 3.3), (2.2, 3.3)])
assert (ak._do.combinations(v2_array.to_typetracer(), 2... |
class MNISTNet(nn.Module):
def __init__(self):
super().__init__()
self.conv_layers = nn.Sequential(nn.Conv2d(1, 10, kernel_size=5), nn.MaxPool2d(2), nn.ReLU(), nn.Conv2d(10, 20, kernel_size=5), nn.Dropout(), nn.MaxPool2d(2), nn.ReLU())
self.fc_layers = nn.Sequential(nn.Linear(320, 50), nn.Re... |
def hsvThreshold():
os.chdir('./medirl-master/Code/')
VideoDir = './medirl-master/videos/crash-video'
videos = glob.glob((VideoDir + '/*.mp4'))
for v in videos:
cap = cv2.VideoCapture(v)
def nothing(x):
pass
useCamera = False
cv2.namedWindow('image')
c... |
class TriLinear(Layer):
def __init__(self, name='tri_linear', bias=False):
super(TriLinear, self).__init__(name)
self.projecting_layers = [tf.keras.layers.Dense(1, activation=None, use_bias=False) for _ in range(2)]
self.dot_w = None
self.bias = bias
def __call__(self, t0, t1):
... |
class LReLU_VGG(nn.Module):
def __init__(self, vgg_name):
super(LReLU_VGG, self).__init__()
self.features = self._make_layers(cfg[vgg_name])
self.classifier = nn.Linear(512, 10)
def forward(self, x):
out = self.features(x)
out = out.view(out.size(0), (- 1))
out = ... |
def test_search(tensor_db):
    """_search over 'tensor_name'/'agg' should equal the fixture frame minus row index 1."""
    # NOTE(review): the positional args (0, False, ('col1',)) are defined by
    # _search elsewhere — not visible here; confirm their meaning before editing.
    ret = _search(tensor_db.tensor_db, 'tensor_name', 'agg', 0, False, ('col1',))
    assert_frame_equal(ret, tensor_db.tensor_db.drop([1]))
def _normalize_sequence(input, rank):
is_str = isinstance(input, str)
if ((not is_str) and isinstance(input, Iterable)):
normalized = list(input)
if (len(normalized) != rank):
err = 'sequence argument must have length equal to input rank'
raise RuntimeError(err)
else:... |
def generate_event_change(inter_prob, intra_prob, alpha, increment):
cps = [15, 30, 60, 75, 90, 105, 135]
fname = (((((('eventCP_' + str(inter_prob)) + '_') + str(intra_prob)) + '_') + str(alpha)) + '.txt')
cps_sizes = []
cps_probs = []
sizes_1 = [250, 250]
probs_1 = construct_SBM_block(sizes_1,... |
def _flatten_helper(g, input, start_dim, end_dim, dim):
input_size = g.op('Shape', input)
slice1 = _slice_helper(g, input_size, axes=[0], starts=[0], ends=[start_dim])
slices = [slice1, g.op('Constant', value_t=torch.tensor([(- 1)], dtype=torch.long))]
if (end_dim < (dim - 1)):
slice3 = _slice_h... |
class IdentityConnector(nn.Module):
    """Pass-through connector: returns its input hidden state unchanged."""

    def __init__(self):
        # Fix: the original defined `__init` (missing trailing underscores),
        # so this constructor was never invoked as the initializer.
        super(IdentityConnector, self).__init__()

    def forward(self, hidden_state):
        """Return ``hidden_state`` unchanged."""
        return hidden_state
def main():
text = args.text_root
text_len = text.apply((lambda x: len(text.split(' '))), axis=1)
text_len.to_csv('ogbn-arxiv_len.txt', sep='\t', header=None, index=False)
text_stat = pd.DataFrame(text_len.describe())
text_stat.index.rename('Statics', inplace=True)
text_stat.columns = ['Length']... |
def _scale_numerical(df1: pd.DataFrame, df2: pd.DataFrame) -> Tuple[(pd.DataFrame, pd.DataFrame)]:
(df1_min, df1_max) = (df1.min(), df1.max())
(df2_min, df2_max) = (df2.min(), df2.max())
mins = df1_min.where((df1_min < df2_min), df2_min)
maxs = df1_max.where((df1_max > df2_max), df2_max)
ranges = (m... |
def PercentDegree_PUndirNet(Graph, Threshold=0):
    """Delegate to the SWIG-generated ``_snap`` binding of the same name.

    NOTE(review): the degree/threshold semantics live in the native SNAP
    implementation, which is not visible here — confirm against SNAP docs.
    """
    return _snap.PercentDegree_PUndirNet(Graph, Threshold)
def test_estimate_competence_Q():
x = np.array([0, 1, 2, 3, 4, 5, 6]).reshape((- 1), 1)
y = np.array([0, 0, 0, 0, 1, 1, 1])
clf1 = create_base_classifier(np.array([1, 0, 1, 0, 0, 0, 0]))
clf2 = create_base_classifier(np.array([1, 0, 0, 0, 1, 0, 0]))
clf3 = create_base_classifier(np.array([0, 0, 1, 0... |
def ShearY(img, v, max_v, bias=0):
    """Shear *img* along the y-axis; magnitude derived from v, sign randomized."""
    magnitude = _float_parameter(v, max_v) + bias
    # Flip the shear direction half of the time.
    if random.random() < 0.5:
        magnitude = -magnitude
    # Affine coefficients (a, b, c, d, e, f): the 'd' slot shears in y.
    return img.transform(img.size, PIL.Image.AFFINE, (1, 0, 0, magnitude, 1, 0))
.parametrize('knn_methods', knn_methods)
def test_desp_proba(knn_methods):
(pool_classifiers, X_dsel, y_dsel, X_test, y_test) = setup_classifiers()
desp = DESP(pool_classifiers, knn_classifier=knn_methods, voting='soft')
desp.fit(X_dsel, y_dsel)
probas = desp.predict_proba(X_test)
expected = np.load... |
def test_nnef_register():
    """Smoke test: chaining every tract NNEF registry extension must not raise."""
    tract.nnef().with_tract_core().with_onnx().with_pulse().with_tract_extra()
def test_pipeline_none_classifier():
(X, y) = make_classification(n_classes=2, class_sep=2, weights=[0.1, 0.9], n_informative=3, n_redundant=1, flip_y=0, n_features=20, n_clusters_per_class=1, n_samples=5000, random_state=0)
clf = LogisticRegression(solver='lbfgs', random_state=0)
pipe = make_pipeline(None,... |
def transpose(A: dace.float32[(M, N)], B: dace.float32[(N, M)]):
    """DaCe program writing the transpose of A (M x N) into B (N x M).

    NOTE(review): likely missing a ``@dace.program`` decorator, stripped
    during extraction — confirm against the original file.
    """
    # Tasklet replicated over the full 2-D index space [0:M, 0:N].
    def mytasklet(i: _[0:M], j: _[0:N]):
        (a << A[(i, j)])  # DaCe memlet: read A[i, j] into local 'a'
        (b >> B[(j, i)])  # DaCe memlet: write local 'b' out to B[j, i]
        b = a
_level_function()
def nanmax(array, axis=None, *, keepdims=False, initial=None, mask_identity=True, highlevel=True, behavior=None, attrs=None):
    """Awkward-Array nanmax: maximum along *axis* with NaN treated as missing.

    NOTE(review): the bare ``_level_function()`` above plus the ``yield`` only
    make sense if this is a decorated high-level op (the '@' was lost in
    extraction) — confirm against the original awkward-array source.
    """
    # The yield hands the raw argument back to the decorator's dispatch machinery.
    (yield (array,))
    # NaNs are first converted to missing values, then the regular max kernel runs.
    return _impl(ak.operations.ak_nan_to_none._impl(array, False, None), axis, keepdims, initial, mask_identity, highlevel, behavior, attrs)
class WeightedLoss(_Loss):
    """Wraps a loss callable and scales its result by a constant factor."""

    def __init__(self, loss, weight=1.0):
        """
        Args:
            loss: callable computing the base loss.
            weight: multiplicative factor applied to the base loss value.
        """
        super().__init__()
        self.weight = weight
        self.loss = loss

    def forward(self, *input):
        # Delegate to the wrapped loss, then apply the configured scale.
        base = self.loss(*input)
        return base * self.weight
def make_code_builder(data: tp.List[tp.List[str]]) -> HuffmanCodeBuilder:
    """Create a HuffmanCodeBuilder seeded with the symbols of every sentence.

    Args:
        data: list of sentences, each a list of symbol strings.

    Returns:
        A builder that has seen every symbol occurrence in *data*.
    """
    result = HuffmanCodeBuilder()
    # Feed each sentence's symbols to the builder one sentence at a time.
    for tokens in data:
        result.add_symbols(*tokens)
    return result
def get_parser():
parser = argparse.ArgumentParser(description='Quantize using K-means clustering over acoustic features.')
parser.add_argument('--feature_type', type=str, choices=['logmel', 'hubert', 'w2v2', 'cpc'], default=None, required=True, help='Acoustic feature type')
parser.add_argument('--kmeans_mo... |
def test_signature_kwonly():
    """dosig must render keyword-only params after the bare '*' marker."""
    tree = ast.parse('\ndef func(x, *, y, z=None):\n    ...\n')
    func_node = tree.body[0]
    expected = 'x, *, y, z=None'
    assert dosig(func_node) == expected
def translate_strips_conditions_aux(conditions, dictionary, ranges):
condition = {}
for fact in conditions:
if fact.negated:
continue
for (var, val) in dictionary.get(fact, ()):
if ((condition.get(var) is not None) and (val not in condition.get(var))):
ret... |
class TensorPipeRpcAgentTestFixture(RpcAgentTestFixture):
def rpc_backend(self):
return rpc.backend_registry.BackendType['TENSORPIPE']
def rpc_backend_options(self):
return rpc.backend_registry.construct_rpc_backend_options(self.rpc_backend, init_method=self.init_method)
def get_shutdown_err... |
def _parse_dataset_id(url: str):
match = re.search('/e/(.+)', url)
if match:
return match.group(1).split('.')[0]
else:
raise ValueError(f'Could not parse dataset id from url {url}') |
def op2d_add_const_collapsing_node_matchers() -> Tuple[(NodeOperationMatcher, NodeOperationMatcher)]:
first_node = (((NodeOperationMatcher(DepthwiseConv2D) | NodeOperationMatcher(Conv2D)) | NodeOperationMatcher(Conv2DTranspose)) | NodeOperationMatcher(Dense))
second_node = NodeOperationMatcher(tf.math.add)
... |
def remove_nones(dict_data: Dict[(str, Any)]) -> Dict[(str, Any)]:
    """Return a copy of *dict_data* without its None-valued entries.

    Falsy-but-not-None values (0, '', [], False) are kept.
    """
    cleaned = {}
    for key, value in dict_data.items():
        if value is not None:
            cleaned[key] = value
    return cleaned
def AffineSpace(n, R=None, names=None, ambient_projective_space=None, default_embedding_index=None):
if ((is_MPolynomialRing(n) or is_PolynomialRing(n)) and (R is None)):
R = n
if (names is not None):
names = normalize_names(R.ngens(), names)
if (n.variable_names() != names):... |
def test_inout_connector_validation_fail():
sdfg = dace.SDFG('test_inout_connector_validation_fail')
sdfg.add_array('A', [1], dace.int32)
sdfg.add_array('B', [1], dace.int32)
nsdfg = dace.SDFG('nested_sdfg')
nsdfg.add_array('C', [1], dace.int32)
nstate = nsdfg.add_state()
read_c = nstate.add... |
def test_multiple_path_variables(testdir):
testdir.make_test('\(endpoint="/users/{user_id}/{event_id}")\(max_examples=3, deadline=None)\ndef test_(case):\n assert_int(case.path_parameters["user_id"])\n assert_int(case.path_parameters["event_id"])\n assert_requests_call(case)\n ', paths={'/users/{use... |
class SawyerStickPullV1Policy(Policy):
_fully_parsed
def _parse_obs(obs):
return {'hand_pos': obs[:3], 'stick_pos': obs[3:6], 'obj_pos': obs[6:(- 3)], 'goal_pos': obs[(- 3):]}
def get_action(self, obs):
o_d = self._parse_obs(obs)
action = Action({'delta_pos': np.arange(3), 'grab_pow'... |
class IntegerVectorsModPermutationGroup_All(UniqueRepresentation, RecursivelyEnumeratedSet_forest):
def __init__(self, G, sgs=None):
RecursivelyEnumeratedSet_forest.__init__(self, algorithm='breadth', category=InfiniteEnumeratedSets().Quotients())
self._permgroup = G
self.n = G.degree()
... |
class SubsetVisDial():
def __init__(self, config):
super().__init__()
self.data_dir = config.data_dir
self.save_data_dir = config.save_data_dir
self.image_id_list_path = config.image_id_list_path
self.subset_image_ids = self.read_file_as_list(self.image_id_list_path)
... |
def intmd_retrain_models(args, old_networks, aligned_wts, train_loader, test_loader, config, tensorboard_obj=None, initial_acc=None):
accuracies = []
retrained_networks = []
for i in range(len(old_networks)):
nick = ('intmd_retrain_model_' + str(i))
print('Retraining model : ', nick)
... |
class DataCollatorCTCWithPadding():
processor: AutoProcessor
padding: Union[(bool, str)] = 'longest'
pad_to_multiple_of: Optional[int] = None
pad_to_multiple_of_labels: Optional[int] = None
def __call__(self, features: List[Dict[(str, Union[(List[int], torch.Tensor)])]]) -> Dict[(str, torch.Tensor)]... |
def get_parser():
    """Lazily build and return the shared module-level parser singleton."""
    global _parser
    parser = _parser
    if parser is None:
        # First call: construct the parser once and cache it at module level.
        parser = _init_parser()
        _parser = parser
    return parser
def add_sync_op_only_between(worker_id, local_worker_id, machine_id, num_local_workers, num_worker_machines, master_var_op_to_mirror_vars, ps_device, worker_device, average_sparse, tensor_or_op_name_to_replica_names, only_sparse, local_aggregation):
def _get_accum_apply_and_agg_grad(var, grad, indices, dense_shape)... |
.parametrize('ctx, func_name', ctxs)
.parametrize('seed', [314])
.parametrize('num_inputs', [2, 3, 5])
def test_mul_n_forward_backward(num_inputs, seed, ctx, func_name):
rng = np.random.RandomState(seed)
shape0 = [2, 3, 4]
inputs = []
for i in range(num_inputs):
inputs.append(rng.randn(*shape0).... |
class Evaluator(object):
def __init__(self, data: ds.DataSet, params: SimpleNamespace):
self.logger = logging.get_logger(self.__class__.__name__, (pylog.CRITICAL if data.config.config_test else pylog.DEBUG))
self._data = data
self._params = params
self._k = getattr(data.config.evalua... |
def clean_de_stnr(df: Union[(pd.DataFrame, dd.DataFrame)], column: str, output_format: str='standard', inplace: bool=False, errors: str='coerce', progress: bool=True) -> pd.DataFrame:
if (output_format not in {'compact', 'standard'}):
raise ValueError(f'output_format {output_format} is invalid. It needs to ... |
def heatmap(points, filename='heatmap.png'):
(x, y) = np.rollaxis(points, 1)
axis_lim = 3
axis_bins = np.linspace((- axis_lim), axis_lim, 100)
plt.gca().set_aspect('equal')
plt.hist2d(x, y, bins=[axis_bins, axis_bins])
plt.axis('off')
plt.savefig(filename, bbox_inches='tight', transparent=Tr... |
def create_dataset_protocols(dataset, data_dir):
assert (dataset in VALID_DATASET)
if (dataset == 'iPER'):
from .iPER import IPERProtocol
return IPERProtocol(data_dir)
elif (dataset == 'iPER_ICCV'):
from .iPER import ICCVIPERProtocol
return ICCVIPERProtocol(data_dir)
elif... |
def main():
args = ArgParser().parse_args()
prepare_save_path(args)
assert (args.dataset == 'wikikg90m')
args.neg_sample_size_eval = 1000
set_global_seed(args.seed)
init_time_start = time.time()
dataset = get_dataset(args.data_path, args.dataset, args.format, args.delimiter, args.data_files,... |
class BaseTextProcessFunc():
    """Abstract callable interface for conversation text preprocessing.

    Subclasses implement __call__; per the signature it maps a Conversation
    (plus preprocessor config, mode, and tokenizer kwargs) to a feature dict.
    """

    def __call__(self, conv: Conversation, preprocessor: Dict[(str, Any)], mode: str, **tokenize_kwargs) -> Dict[(str, Any)]:
        # Abstract: subclasses must override.
        raise NotImplementedError
def Trainer(model, model_optimizer, train_dl, val_dl, test_dl, device, logger, config, experiment_log_dir, idx):
save_path = ('./best_network/' + config.dataset)
os.makedirs(save_path, exist_ok=True)
early_stopping = EarlyStopping(save_path, idx)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(mo... |
class ResNet(nn.Module):
def __init__(self, block, layers, num_classes=1000, zero_init_residual=False, groups=1, width_per_group=64, norm_layer=None, filter_size=1, pool_only=True):
super(ResNet, self).__init__()
if (norm_layer is None):
norm_layer = nn.BatchNorm2d
planes = [int(... |
def is_planar(sp):
to_consider = [x for x in map(list, sp) if (len(x) > 1)]
n = len(to_consider)
for i in range(n):
ap = [x for x in to_consider[i] if (x > 0)]
an = [(- x) for x in to_consider[i] if (x < 0)]
if (ap and an):
for j in range(n):
if (i == j):
... |
class ModelOutputTest(ModelOutput):
    """ModelOutput subclass used in tests: one required float field, two optional."""

    a: float
    # Optional fields default to None so they may be omitted from the output.
    b: Optional[float] = None
    c: Optional[float] = None
class EvaluationTap(Tap):
    """Typed CLI arguments (Tap) for an evaluation run.

    Field semantics are inferred from names only — the paths presumably point
    at gold datasets and prediction files for the test/dev splits; confirm
    against the script that consumes this Tap.
    """

    dataset_test_path: str
    dataset_dev_path: str
    prediction_test_path: str
    prediction_dev_path: str
    evaluate_bootstrap: bool = False
    filtering: Optional[str] = None
    document_level: bool = False
class SchemeMorphism_fan_toric_variety(SchemeMorphism, Morphism):
def __init__(self, parent, fan_morphism, check=True):
SchemeMorphism.__init__(self, parent)
if (check and (self.domain().fan() != fan_morphism.domain_fan())):
raise ValueError('the fan morphism domain must be the fan of th... |
def convert_range(relevant_scope, featureScope, meta_types, conditions, inverted_features):
parameters = (relevant_scope, featureScope)
for (idx, condition) in enumerate(conditions):
if (meta_types[idx] == MetaType.DISCRETE):
parameters += _convert_categorical(condition)
elif (meta_t... |
def load_examples_sst5(path):
data = []
with open(path) as f:
for line in f:
(l, s) = line.strip().split('\t')
label = int(l[(- 1)])
d = {}
d['correct_hypothesis'] = (label - 1)
d['sentence'] = s
data.append(d)
examples = []
... |
def load(filename):
    """Load a JSON config file and inject its entries into module globals.

    Keys that already exist in this module's globals are left untouched and
    logged as conflicts rather than overwritten.

    Args:
        filename: path to a JSON file mapping config keys to values.
    """
    logger.info('Load config from "{}"'.format(filename))
    # Use a context manager so the file handle is closed even if json.load
    # raises (the original opened the file and never closed it).
    with open(filename) as f:
        data = json.load(f)
    for (key, value) in data.items():
        if (key in globals()):
            logger.error('Conflict in config with key "{}"'.format(key))
        else:
            globals()[key] = value
def test_local_standard_deviation():
mean = 100
std = 5
shape = (30, 30, 30, 3)
for N in [1, 4, 8, 12]:
noise = 0
for _ in range(N):
noise += ((np.random.normal(mean, std, shape) ** 2) + (np.random.normal(mean, std, shape) ** 2))
noise = np.sqrt(noise)
correct... |
class Material(object):
def __init__(self, ambient, diffuse, specular, shininess):
self.__ambient = np.array(ambient, dtype=np.float32)
self.__diffuse = np.array(diffuse, dtype=np.float32)
self.__specular = np.array(specular, dtype=np.float32)
self.__shininess = np.array([shininess],... |
def register_Ns3CallbackImplBase_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
cls.add_method('GetTypeid', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
cls.add_method('IsEqual', 'bool', [param('ns3::Pt... |
class ImgObsWrapper(gym.core.ObservationWrapper):
    """Observation wrapper keeping only the 'image' part of a dict observation."""

    def __init__(self, env):
        super().__init__(env)
        # Narrow the advertised observation space to the image sub-space.
        self.observation_space = env.observation_space.spaces['image']

    def observation(self, obs):
        """Project the full observation dict down to its image entry."""
        return obs['image']
def rank_GF(n=500, p=16411, system='sage'):
if (system == 'sage'):
A = random_matrix(GF(p), n, (n + 10))
t = cputime()
v = A.rank()
return cputime(t)
elif (system == 'magma'):
code = ('\nn := %s;\nA := Random(MatrixAlgebra(GF(%s), n));\nt := Cputime();\nK := Rank(A);\ns :... |
_model
def mpvit_tiny(**kwargs):
    """Build the 'tiny' MPViT configuration (4 stages, 224x224 input).

    NOTE(review): the bare ``_model`` above looks like a mangled
    ``@register_model`` decorator (the '@' was lost in extraction) — confirm.
    """
    model = MPViT(img_size=224, num_stages=4, num_path=[2, 3, 3, 3], num_layers=[1, 2, 4, 1], embed_dims=[64, 96, 176, 216], mlp_ratios=[2, 2, 2, 2], num_heads=[8, 8, 8, 8], **kwargs)
    # Attach the default pretrained-config metadata expected by timm-style APIs.
    model.default_cfg = _cfg_mpvit()
    return model
class StatefulTest():
    """Abstract base for stateful API tests: parse responses, then derive an operation."""

    # Identifying name of the stateful test (set by subclasses).
    name: str

    def parse(self, case: Case, response: GenericResponse) -> ParsedData:
        """Extract reusable data from a response. Abstract — override in subclasses."""
        raise NotImplementedError

    def make_operation(self, collected: list[ParsedData]) -> APIOperation:
        """Build a follow-up API operation from previously parsed data. Abstract."""
        raise NotImplementedError
class HTTPSConnectionPool(HTTPConnectionPool):
scheme = '
ConnectionCls = HTTPSConnection
def __init__(self, host, port=None, strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False, headers=None, retries=None, _proxy=None, _proxy_headers=None, key_file=None, cert_file=None, cert_reqs=None, ca... |
def gen_data_set_sdm(data, seq_short_max_len=5, seq_prefer_max_len=50):
data.sort_values('timestamp', inplace=True)
train_set = []
test_set = []
for (reviewerID, hist) in tqdm(data.groupby('user_id')):
pos_list = hist['movie_id'].tolist()
genres_list = hist['genres'].tolist()
rat... |
def run_PTI(run_name='', use_wandb=False, use_multi_id_training=False):
os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
os.environ['CUDA_VISIBLE_DEVICES'] = global_config.cuda_visible_devices
if (run_name == ''):
global_config.run_name = ''.join((choice(ascii_uppercase) for i in range(12)))
else:... |
def parse_args():
argv = sys.argv
if ('--' in argv):
argv = argv[(argv.index('--') + 1):]
else:
argv = []
OBJ_IDS = dict(table='', chair='', mug='', bench='', lamp='', bowl='')
parser = argparse.ArgumentParser()
parser.add_argument('--out_dir', required=True, help='Where to write... |
def nasnet_cifar_arg_scope(weight_decay=0.0005, batch_norm_decay=0.9, batch_norm_epsilon=1e-05):
batch_norm_params = {'decay': batch_norm_decay, 'epsilon': batch_norm_epsilon, 'scale': True, 'fused': True}
weights_regularizer = tf.contrib.layers.l2_regularizer(weight_decay)
weights_initializer = tf.contrib.... |
class ModelVarType(enum.Enum):
    """How the model parameterizes its output variance.

    NOTE(review): member names match the improved-DDPM convention (learned
    vs. fixed posterior variance), but the semantics are defined by the
    consuming code — confirm against usage.
    """

    LEARNED = enum.auto()
    FIXED_SMALL = enum.auto()
    FIXED_LARGE = enum.auto()
    LEARNED_RANGE = enum.auto()
def test_count_axis_None():
array = ak.highlevel.Array([[[np.datetime64('2022'), np.datetime64('2023'), np.datetime64('2025')], [], [np.datetime64('2027'), np.datetime64('2011')], [np.datetime64('2013')]], [], [[np.datetime64('2017'), np.datetime64('2019')], [np.datetime64('2023')]]], check_valid=True)
assert (... |
def check_rbf3d_interpolation(function):
x = ((random.rand(50, 1) * 4) - 2)
y = ((random.rand(50, 1) * 4) - 2)
z = ((random.rand(50, 1) * 4) - 2)
d = (x * exp(((- (x ** 2)) - (y ** 2))))
rbf = Rbf(x, y, z, d, epsilon=2, function=function)
di = rbf(x, y, z)
di.shape = x.shape
assert_array... |
class YahooProcessor(DataProcessor):
def get_train_examples(self, data_dir):
logger.info('LOOKING AT {}'.format(os.path.join(data_dir, 'train.tsv')))
return self._create_examples(self._read_tsv(os.path.join(data_dir, 'yahoo_train.tsv')), 'train')
def get_dev_examples(self, data_dir):
ret... |
def do_input_map(fn, input):
    """Apply *fn* to every torch.Tensor found inside the (possibly nested) *input*."""
    def is_tensor(t):
        # Predicate selecting which leaves _nested_map should transform.
        return isinstance(t, torch.Tensor)
    mapper = _nested_map(is_tensor, fn)
    return mapper(input)
def get_dataset(args, is_train=True):
(shards_path, rest) = get_shards_path(args, suffix='.pkl', f=get_shards_size, is_train=is_train)
data = FeatureDataset(args, shards_path, rest['all_shards_path'], is_train=is_train)
if isinstance(args.computation.num_gpus, int):
world_size = min(du.get_world_siz... |
def configure_her(params):
env = cached_make_env(params['make_env'])
env.reset()
def reward_fun(ag_2, g, info):
return env.compute_reward(achieved_goal=ag_2, desired_goal=g, info=info)
her_params = {'reward_fun': reward_fun}
for name in ['replay_strategy', 'replay_k', 'et_w_schedule']:
... |
class Err(Generic[E]):
    """Error variant of a Result-style type, holding a single error value."""

    # Keep instances lightweight: a single slot, no per-instance __dict__.
    __slots__ = ('_error',)

    def __init__(self, error: E):
        self._error = error

    def err(self) -> E:
        """Return the contained error value."""
        return self._error
def parse_args():
    """Build the CLI parser (shared selftrain args + input file) and parse argv."""
    arg_parser = argparse.ArgumentParser(description='Script that converts a single file of text to silver standard trees')
    # Shared self-training options are registered by the common helper.
    selftrain.common_args(arg_parser)
    arg_parser.add_argument('--input_file', default='vi_part_1.aa', help='Path to the file to read')
    return arg_parser.parse_args()
.parametrize('n_rounds, n_actions, dim_context, base_model_for_iw_estimator, base_model_for_reg_model, base_model_for_pscore_estimator', offline_experiment_configurations)
def test_offline_estimation_performance(n_rounds: int, n_actions: int, dim_context: int, base_model_for_iw_estimator: str, base_model_for_reg_model:... |
_module()
class WrapFieldsToLists(object):
def __call__(self, results):
for (key, val) in results.items():
results[key] = [val]
return results
def __repr__(self):
return f'{self.__class__.__name__}()' |
_task('vqa_gen', dataclass=VqaGenConfig)
class VqaGenTask(OFATask):
def __init__(self, cfg: VqaGenConfig, src_dict, tgt_dict):
super().__init__(cfg, src_dict, tgt_dict)
if (not self.cfg.unconstrained_training):
self.ans2label_dict = None
if (self.cfg.ans2label_file is not Non... |
class Figure():
_default_label_alias = dict(yhat='Forecast', anom='Anomaly Score')
def __init__(self, y: UnivariateTimeSeries=None, anom: UnivariateTimeSeries=None, yhat: UnivariateTimeSeries=None, yhat_lb: UnivariateTimeSeries=None, yhat_ub: UnivariateTimeSeries=None, y_prev: UnivariateTimeSeries=None, yhat_pr... |
class WebBrowserClickElement(VirtualFunctionTool):
name = 'WebBrowserClickElement'
summary = 'Clicks an element in the current web page.'
parameters: List[ArgParameter] = [{'name': 'element_id', 'type': 'string', 'description': 'The id of the element to click.', 'required': True}]
returns: List[ArgRetur... |
.parametrize('X', [X, X_sparse])
.parametrize('Datafit, Penalty', [(Quadratic, L1), (Logistic, L1), (QuadraticSVC, IndicatorBox)])
def test_fista_solver(X, Datafit, Penalty):
_y = (y if isinstance(Datafit, Quadratic) else y_classif)
datafit = compiled_clone(Datafit())
_init = ((y X.T) if isinstance(Datafit... |
def infer_init_method(cfg: DistributedTrainingConfig, force_distributed=False):
if ((cfg.distributed_init_method is not None) or cfg.tpu):
return
num_pipelines_per_node = None
if cfg.pipeline_model_parallel:
(num_pipeline_devices, num_pipelines_per_node) = _pipeline_parallel_pre_init(cfg)
... |
class BinaryCrossEntropyLoss(torch.nn.BCEWithLogitsLoss):
def __init__(self, label_name='labels', logits_name='logits', pos_weight=None):
super().__init__(reduction='mean', pos_weight=(torch.Tensor([pos_weight]).cuda() if pos_weight else None))
self.label_name = label_name
self.logits_name =... |
class RGCNConv(torch.nn.Module):
def __init__(self, in_channels, out_channels, node_types, edge_types):
super(RGCNConv, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.rel_lins = ModuleDict({f'{key[0]}_{key[1]}_{key[2]}': Linear(in_channels, out_... |
def get_model_parameters_count(model):
    """Return the total number of scalar elements across all model parameters."""
    per_param_sizes = []
    for p in model.parameters():
        # Element count of one parameter tensor: product of its dimensions.
        dims = [int(e) for e in p.shape]
        per_param_sizes.append(np.prod(dims))
    return np.sum(per_param_sizes)
_utils.test(arch=archs_support_ndarray_ad, default_fp=ti.f64, require=ti.extension.adstack)
def test_integer_stack():
N = 5
a = ti.ndarray(ti.f32, shape=N, needs_grad=True)
b = ti.ndarray(ti.f32, shape=N, needs_grad=True)
c = ti.ndarray(ti.i32, shape=N)
f = ti.ndarray(ti.f32, shape=N, needs_grad=Tru... |
def add_mask_transformer(self, temperature=0.66, hard_sigmoid=((- 0.1), 1.1)):
self.temperature = temperature
self.hard_sigmoid = hard_sigmoid
if (hard_sigmoid is False):
self.transform = (lambda x: torch.sigmoid((x / temperature)))
elif (hard_sigmoid is True):
self.transform = (lambda x... |
class BalancedPositiveNegativeSampler(object):
def __init__(self, batch_size_per_image, positive_fraction):
self.batch_size_per_image = batch_size_per_image
self.positive_fraction = positive_fraction
def __call__(self, matched_idxs):
pos_idx = []
neg_idx = []
for matched_... |
class NoiseModel():
def whiten(self, unwhitened_residual: sf.Matrix.MatrixT) -> sf.Matrix.MatrixT:
pass
def reduce(whitened_residual: sf.Matrix.MatrixT) -> sf.Scalar:
return (whitened_residual.squared_norm() / 2)
def error(self, unwhitened_residual: sf.Matrix.MatrixT) -> sf.Scalar:
r... |
def add_prefix(inputs, prefix):
    """Return a new dict whose keys are the input keys prefixed with ``<prefix>_``.

    Args:
        inputs: mapping of names to values.
        prefix: string prepended (with an underscore) to every key.
    """
    return {f'{prefix}_{name}': value for (name, value) in inputs.items()}
def mesh_to_pointcloud(vertices, faces, npoints):
areas = []
for face in faces:
p1 = vertices[face[0]]
p2 = vertices[face[1]]
p3 = vertices[face[2]]
v1 = (p2 - p1)
v2 = (p3 - p1)
areas.append(compute_area(v1, v2))
areas = np.asarray(areas)
probabilities = ... |
def load_tf2_weights_in_pytorch_model(pt_model, tf_weights, allow_missing_keys=False):
try:
import tensorflow as tf
import torch
except ImportError:
logger.error('Loading a TensorFlow model in PyTorch, requires both PyTorch and TensorFlow to be installed. Please see and for installatio... |
class Optimizer(object):
def __init__(self, model, solver, local_rank=0):
self.model = model
self.solver = solver
self.local_rank = local_rank
self.bias_params_list = []
self.gn_params_list = []
self.nonbias_params_list = []
self.params = []
self.gn_pa... |
def bootstrap():
    """Load the config, seed RNGs, and route DEBUG logging to the configured file.

    Returns:
        The loaded config object.
    """
    config = get_config()
    set_seed(seed=config[GENERAL][SEED])
    log_path = config[LOG][FILE_PATH]
    print('Writing logs to file name: {}'.format(log_path))
    # Overwrite ('w') any previous log; emit bare messages at DEBUG level.
    logging.basicConfig(filename=log_path, format='%(message)s', filemode='w', level=logging.DEBUG)
    return config
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.