code stringlengths 101 5.91M |
|---|
class FunnelForMultipleChoice():
    """Import-time placeholder for the real FunnelForMultipleChoice model.

    Both the constructor and ``from_pretrained`` simply invoke
    ``requires_pytorch`` (defined elsewhere in this module), which
    presumably signals that PyTorch must be installed — confirm upstream.
    """

    def __init__(self, *args, **kwargs):
        requires_pytorch(self)

    def from_pretrained(self, *args, **kwargs):
        requires_pytorch(self)
def checkDefaultsMatArray(arr):
    """Filter *arr* down to the entries whose material name is a known default.

    Each entry is expected to look like ``'prefix:material'``; the part after
    the first ``':'`` is checked against the module-level
    ``default_material_names`` collection, and the *original* string is kept
    when it matches.
    """
    # NOTE: an entry without a ':' raises IndexError, same as before.
    return [entry for entry in arr
            if entry.split(':', 1)[1] in default_material_names]
def get_data_loader(args, batch_size):
if (args.mask == 'indep'):
data = IndepMaskedCelebA(data_dir=args.data_dir, obs_prob=args.obs_prob, obs_prob_high=args.obs_prob_high)
elif (args.mask == 'block'):
data = BlockMaskedCelebA(data_dir=args.data_dir, block_len=args.block_len)
data_size = len... |
def test_abi_language_decoder():
decoder = ABILanguageDecoder(max_seq_len=25)
logits = torch.randn(2, 25, 90)
result = decoder(feat=None, out_enc=logits, targets_dict=None, img_metas=None)
assert (result['feature'].shape == torch.Size([2, 25, 512]))
assert (result['logits'].shape == torch.Size([2, 2... |
class SelfNormalizedSlateIndependentIPS(SlateIndependentIPS, BaseSlateSelfNormalizedInverseProbabilityWeighting):
    """Self-normalized variant of the slate Independent-IPS estimator.

    Behavior comes entirely from the two base classes; this subclass only
    overrides the reported estimator name.
    """

    # Identifier used when reporting results for this estimator.
    estimator_name: str = 'sniips'
class Prefetcher(object):
def __init__(self, dataloader):
self.loader = iter(dataloader)
self.stream = torch.cuda.Stream()
self.preload()
def preload(self):
try:
self.next_input = next(self.loader)
except StopIteration:
self.next_input = None
... |
class DayOfMonth(TimeFeature):
    """Time feature encoding the day of the month, scaled to roughly [-0.5, 0.5]."""

    def __call__(self, index: pd.DatetimeIndex) -> np.ndarray:
        # Days 1..31 are shifted to 0..30, normalized by 30, then centred.
        zero_based = index.day - 1
        return zero_based / 30.0 - 0.5
.parametrize('w_dim', [1, 5])
def test_latent_variable_layer_losses(mocker, w_dim):
(num_data, x_dim, y_dim) = (43, 3, 1)
prior_shape = (w_dim,)
posteriors_shape = (num_data, w_dim)
prior = tfp.distributions.MultivariateNormalDiag(loc=np.random.randn(*prior_shape), scale_diag=(np.random.randn(*prior_sha... |
def register_Ns3LteUeRrcSapProviderCompleteSetupParameters_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::LteUeRrcSapProvider::CompleteSetupParameters const &', 'arg0')])
cls.add_instance_attribute('srb0SapUser', 'ns3::LteRlcSapUser *', is_const=False)
cls.add_instan... |
def getMotifResult(motifFname, motifRankedFname):
f = open(motifFname, 'rb')
motifs = pickle.load(f)
f = open(motifRankedFname, 'rb')
motifRanked = pickle.load(f)
motifResult = [0 for _ in range(TOTAL_N)]
if ONLY_PICK_BEST:
top = motifRanked[0][0]
motifs = {top: motifs[top]}
... |
_fl_task(model='model', data_loader='val_loader', device='device')
def validate(model, val_loader, device):
model.eval()
model.to(device)
val_loader = tqdm.tqdm(val_loader, desc='validate')
with torch.no_grad():
epoch_val_accuracy = 0
epoch_val_loss = 0
for (data, target) in val_... |
class reused_model(torch.nn.Module):
def __init__(self):
super(reused_model, self).__init__()
self.conv1 = Conv2d(3, 3, kernel_size=1, stride=1)
self.bn1 = BatchNorm2d(3)
self.relu = ReLU()
def forward(self, inp):
x = self.conv1(inp)
x1 = self.bn1(x)
x1 = ... |
class BrauerDiagrams(AbstractPartitionDiagrams):
Element = BrauerDiagram
options = BrauerDiagram.options
_name = 'Brauer'
_diagram_func = brauer_diagrams
def __contains__(self, obj):
if (self.order in ZZ):
r = ZZ(self.order)
else:
r = ZZ((self.order + (ZZ(1) /... |
class SuperGELU(SuperModule):
def __init__(self) -> None:
super(SuperGELU, self).__init__()
def abstract_search_space(self):
return spaces.VirtualNode(id(self))
def forward_candidate(self, input: torch.Tensor) -> torch.Tensor:
return self.forward_raw(input)
def forward_raw(self, ... |
def test_dynamic_constant_pool_max_size(rpool):
    """Only values within the provider's max length (5) should end up in the pool."""
    provider = DynamicConstantProvider(rpool, EmptyConstantProvider(), 0, 5)
    # 'abcdef' exceeds the configured maximum and must be dropped.
    for value in ('abcd', 'abcde', 'abcdef'):
        provider.add_value(value)
    assert rpool.get_all_constants_for(str) == OrderedSet(['abcd', 'abcde'])
_with_default_init(frozen=True)
class HFCheckpointConverter(Generic[LevConfig]):
LevConfigClass: Type[LevConfig]
reference_checkpoint: Optional[RepoRef]
HfConfigClass: Type
tokenizer: (PreTrainedTokenizerFast | PreTrainedTokenizer)
config_overrides: Optional[dict] = None
trust_remote_code: bool ... |
class ModelTemplate(metaclass=ABCMeta):
def __init__(self, token_emb_mat, glove_emb_mat, tds, cds, tl, scope):
self.scope = scope
self.global_step = tf.get_variable('global_step', shape=[], dtype=tf.int32, initializer=tf.constant_initializer(0), trainable=False)
(self.token_emb_mat, self.glo... |
class LarkOptions(Serialize):
OPTIONS_DOC = '\n **=== General Options ===**\n\n start\n The start symbol. Either a string, or a list of strings for multiple possible starts (Default: "start")\n debug\n Display debug information and extra warnings. Use only when debugging (default: F... |
def test_listener():
    """Start an anonymous ROS node that listens on the foot-force topic."""
    rospy.init_node('listener', anonymous=True)
    # String messages on this topic are dispatched to force_sensor_callback.
    rospy.Subscriber('/nico_feet_forces', String, force_sensor_callback)
    # Block here, processing callbacks until shutdown.
    rospy.spin()
class LSUN(data.Dataset):
def __init__(self, root, classes='train', transform=None, target_transform=None):
categories = ['bedroom', 'bridge', 'church_outdoor', 'classroom', 'conference_room', 'dining_room', 'kitchen', 'living_room', 'restaurant', 'tower']
dset_opts = ['train', 'val', 'test']
... |
def LF_severe(span):
    """Labeling function: vote SEVERE when severity wording appears in the six
    tokens to the left of *span*; otherwise ABSTAIN."""
    severity_pattern = '(sharp|knife-like|significant|extensive|extreme|(marked|severe)(ly)*|severity)'
    left_context = get_left_span(span, span.sentence, window=6).text
    if re.search(severity_pattern, left_context, re.I):
        return SEVERE
    return ABSTAIN
class IBertForSequenceClassification(metaclass=DummyObject):
    """Import-time stand-in for IBertForSequenceClassification.

    Exists so the name is importable without its backend; construction goes
    through ``requires_backends``, which checks for the listed backends.
    """

    # Backend(s) the real implementation needs.
    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['torch'])
def eval_(model, device, valid_loader, query_length, feat_norm=True, remove_junk=True, max_rank=50, output_dir='', rerank=False, lambda_=0.5, split=0, output_html_path=''):
metric = Clck_R1_mAP(query_length, max_rank=max_rank, rerank=rerank, remove_junk=remove_junk, feat_norm=feat_norm, output_path=output_dir, lamb... |
def recovery_hook(pb, ncoors, region, ts, naming_scheme='step_iel', recovery_file_tag=''):
from sfepy.base.ioutils import get_print_info
from sfepy.homogenization.recovery import get_output_suffix
import os.path as op
for (ii, icell) in enumerate(region.cells):
out = {}
pb.set_mesh_coors... |
def conv_relation_model_test():
    """Smoke-test ConvRelationModel on dummy prototype/query feature maps."""
    # 5 class prototypes and 10 query embeddings, each a 64x21x21 feature map.
    prototypes = paddle.ones(shape=(5, 64, 21, 21), dtype='float32')
    query_embeddings = paddle.ones(shape=(10, 64, 21, 21), dtype='float32')
    model = ConvRelationModel()
    print(model(prototypes, query_embeddings))
_criterion('bert_loss')
class BertLoss(FairseqCriterion):
def __init__(self, args, task):
super().__init__(args, task)
def forward(self, model, sample, reduce=True):
net_output = model(**sample['net_input'])
sentence_targets = sample['sentence_target'].view((- 1))
lm_targets = sa... |
def resume_from_ckpt(ckpt_path, model, optimizer=None, lr_scheduler=None):
ckpt = torch.load(ckpt_path)
model.load_state_dict(ckpt['model'], strict=False)
if (optimizer is not None):
optimizer.load_state_dict(ckpt['optimizer'])
if (lr_scheduler is not None):
lr_scheduler.load_state_dict(... |
def list_repos(user, token):
    """List ``owner/name`` strings for all repositories returned by an HTTP API.

    NOTE(review): the request URL literal on the first line appears to have
    been scrubbed/mangled during extraction (the string is unterminated) —
    restore the original endpoint before using this code. ``token`` is passed
    directly as the request headers dict.
    """
    r = requests.get((' + user), headers=token)
    r.raise_for_status()
    # Sort "user/name" strings built from the JSON 'results' payload.
    ret = sorted((((repo['user'] + '/') + repo['name']) for repo in r.json().get('results', [])))
    if ret:
        print('repos found:')
        print(''.join((('\n\t' + r) for r in ret)))
    return ret
class SpaceMappingProblem():
def __init__(self, fine_model: FineModel, coarse_model: CoarseModel, parameter_extraction: ParameterExtraction, method: Literal[('broyden', 'bfgs', 'lbfgs', 'sd', 'steepest_descent', 'ncg')]='broyden', max_iter: int=25, tol: float=0.01, use_backtracking_line_search: bool=False, broyden_... |
def _GroupByDevice(model, devices, params, non_data_params):
grouped = OrderedDict()
params = params[len(non_data_params):]
for (_i, p) in enumerate(params):
assert (isinstance(p, core.BlobReference) or isinstance(p, core.GradientSlice)), 'Param {} is not BlobReference or GradientSlice'.format(p)
... |
def _consolidate_dictionary_terms(d):
dnew = defaultdict(int)
for (kz, val) in d.items():
(k, z) = kz
k = np.array(k, dtype=np.int64)
if (k[2] != 0):
k *= np.sign(k[2])
elif (k[4] != 0):
k *= np.sign(k[4])
elif (k[3] != 0):
k *= np.sign... |
def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs):
    """Import a package or module by dotted name, raising a custom error on failure.

    Parameters:
        pkg_or_module_string: dotted import path, e.g. ``'os.path'``.
        ExceptionType: exception class to raise when the import fails.
        *args, **kwargs: forwarded to ``ExceptionType``.

    Returns:
        The imported module object. ``importlib.import_module`` is used
        instead of ``__import__`` because the latter returns the *top-level*
        package for dotted paths (e.g. ``scipy`` for ``'scipy.stats'``)
        rather than the requested submodule.

    Raises:
        ExceptionType: when the import fails for any reason.
    """
    import importlib
    try:
        return importlib.import_module(pkg_or_module_string)
    except ImportError:
        raise ExceptionType(*args, **kwargs)
class ComplexExpr():
def __init__(self, r, i):
self.r = r
self.i = i
def __add__(self, other):
other = _to_complex(other)
return ComplexExpr((self.r + other.r), (self.i + other.i))
def __radd__(self, other):
other = _to_complex(other)
return ComplexExpr((other... |
def commit_changes(filenames, contents, repo, commit_message='Commit'):
if (not isinstance(filenames, list)):
filenames = [filenames]
if (not isinstance(contents, list)):
contents = [contents]
folder = Path(repo.working_dir)
for (filename, content) in zip(filenames, contents):
wi... |
def get100(batch_size, data_root='/tmp/public_dataset/pytorch', train=True, val=True, **kwargs):
data_root = os.path.expanduser(os.path.join(data_root, 'cifar100-data'))
num_workers = kwargs.setdefault('num_workers', 1)
kwargs.pop('input_size', None)
print('Building CIFAR-100 data loader with {} workers... |
def main():
parser = argparse.ArgumentParser(description='OGBN-MAG (MLP)')
parser.add_argument('--device', type=int, default=0)
parser.add_argument('--log_steps', type=int, default=1)
parser.add_argument('--use_node_embedding', action='store_true')
parser.add_argument('--num_layers', type=int, defau... |
(Output('anomaly-select-label', 'options'), Input('anomaly-select-label-parent', 'n_clicks'), [State('anomaly-select-file', 'value'), State('anomaly-select-test-file', 'value'), State('anomaly-select-features', 'value')])
def select_label(n_clicks, train_file, test_file, features):
options = []
ctx = dash.callb... |
def download_and_extract_archive(url, path, md5=None):
path = Path(path)
extract_path = path
if (not path.exists()):
path.mkdir(parents=True, exist_ok=True)
file_path = (path / Path(url).name)
if ((not file_path.exists()) or (not check_integrity(file_path, md5))):
print(f... |
def run():
parser = argparse.ArgumentParser(description='Merge original and new assembly', usage='circlator merge [options] <original.fasta> <new.fasta> <outprefix>')
parser.add_argument('--diagdiff', type=int, help='Nucmer diagdiff option [%(default)s]', metavar='INT', default=25)
parser.add_argument('--mi... |
class TransformTuple(object):
    """Apply one transform per mode to the same input and return all results."""

    def __init__(self, mean, std, modes):
        # Build one transform per requested mode up front.
        self.transforms = [get_transform(mean, std, mode) for mode in modes]

    def __call__(self, x):
        results = []
        for transform in self.transforms:
            results.append(transform(x))
        return tuple(results)
class NNAnomalyDetectionAlgo(abc.ABC):
    """Interface for neural-network based anomaly detection algorithms.

    Concrete algorithms are expected to override :meth:`fit` and
    :meth:`predict`; the base implementations are no-ops (note they are not
    marked ``@abstractmethod``, so subclasses may omit either one).
    """

    def fit(self, train_data, dev_data: LogRecordObject):
        """Train on *train_data*, optionally using *dev_data* for validation. No-op here."""
        pass

    def predict(self, test_data: LogRecordObject):
        """Score *test_data* for anomalies. No-op here."""
        pass
# NOTE(review): the decorator below lost its '@pytest.mark' prefix during
# extraction; it is restored here.
@pytest.mark.parametrize('sink', [(- 1), 2, 3])
def test_raises_when_sink_is_out_of_bounds(sink):
    """maximum_flow must reject sink indices outside the graph's node range."""
    # Two-node graph with a single edge 0 -> 1; valid node ids are 0 and 1.
    graph = csr_matrix([[0, 1], [0, 0]])
    with pytest.raises(ValueError):
        maximum_flow(graph, 0, sink)
class TestClustering():
def setup_method(self):
self.point_1 = (43.8430139, 10.507994)
self.point_2 = (43.54427, 10.32615)
self.decimal = 43.8430139
self.DMS = (43, 50, 34.85)
def test_get_distance(self):
output = gislib.getDistance(self.point_1, self.point_2)
ass... |
def register_Ns3FfMacCschedSapProviderCschedLcReleaseReqParameters_methods(root_module, cls):
cls.add_constructor([])
cls.add_constructor([param('ns3::FfMacCschedSapProvider::CschedLcReleaseReqParameters const &', 'arg0')])
cls.add_instance_attribute('m_logicalChannelIdentity', 'std::vector< unsigned char >... |
.expansion
class ExpandBlockCyclicScatterMKL(ExpandTransformation):
environments = [environments.intel_mkl_mpich.IntelMKLScaLAPACKMPICH]
def expansion(node, parent_state, parent_sdfg, n=None, **kwargs):
(rows, cols) = node.validate(parent_sdfg, parent_state)
code = f'''
const double ... |
def test_case67():
url = (brokerIp + '/ngsi-ld/v1/entityOperations/upsert')
headers = {'Content-Type': 'application/json', 'Link': '<{{link}}>; rel=" type="application/ld+json"'}
r = requests.post(url, data=json.dumps(ld_data.subdata57), headers=headers)
print(r.content)
assert (r.status_code == 207... |
def OzaBagging(base_estimator=KNNADWINClassifier(), n_estimators=10, random_state=None):
warnings.warn("'OzaBagging' has been renamed to 'OzaBaggingClassifier' in v0.5.0.\nThe old name will be removed in v0.7.0", category=FutureWarning)
return OzaBaggingClassifier(base_estimator=base_estimator, n_estimators=n_e... |
def prepare_data(args, field, logger):
if (field is None):
logger.info(f'Constructing field')
FIELD = torchtext.data.ReversibleField(batch_first=True, init_token='<init>', eos_token='<eos>', lower=args.lower, include_lengths=True)
else:
FIELD = field
(train_sets, val_sets, vocab_sets... |
def register_Ns3MmWavePhyMacCommon_methods(root_module, cls):
cls.add_constructor([param('ns3::MmWavePhyMacCommon const &', 'arg0')])
cls.add_constructor([])
cls.add_method('GetCenterFrequency', 'double', [])
cls.add_method('GetChunkWidth', 'double', [])
cls.add_method('GetCtrlSymbols', 'uint32_t', ... |
def query_environment() -> dict[(str, int)]:
ws = os.environ.get('WORLD_SIZE', None)
r = os.environ.get('RANK', None)
lr = os.environ.get('LOCAL_RANK', None)
if ((ws is not None) and (r is not None) and (lr is not None)):
return {'world_size': int(ws), 'rank': int(r), 'local_rank': int(lr)}
... |
class Texture(object):
def __init__(self, texture_id: int):
self._texture_id = texture_id
def __eq__(self, other: object):
if (not isinstance(other, Texture)):
raise NotImplementedError
return (self.get_texture_id() == other.get_texture_id())
def get_texture_id(self) -> i... |
# NOTE(review): the decorator below lost its '@add' prefix during extraction;
# it should read '@add_start_docstrings(...)'.
_start_docstrings('CamemBERT Model with a multiple choice classification head on top (a linear layer on top of\n    the pooled output and a softmax) e.g. for RocStories/SWAG tasks. ', CAMEMBERT_START_DOCSTRING)
class TFCamembertForMultipleChoice(TFRobertaForMultipleChoice):
    """TF CamemBERT multiple-choice model.

    Reuses the RoBERTa multiple-choice implementation wholesale and only
    swaps in the CamemBERT configuration class.
    """

    config_class = CamembertConfig
.parametrize('dt', supported_floating_types)
_utils.test(arch=supported_archs_cgraph)
def test_matrix_float(dt):
if ((ti.lang.impl.current_cfg().arch == ti.opengl) and (dt not in [ti.f32])):
return
n = 4
A = ti.Matrix(([4.2, 5.7] * n), dt)
res = ti.ndarray(dt, shape=(1,))
graph = build_graph... |
class MouseDataGen():
def __init__(self):
self.prev_mouse = None
self.prev_color = None
def __call__(self, window):
mouse_data = np.zeros(8, dtype=np.float32)
if window.is_pressed(ti.ui.LMB):
mxy = (np.array(window.get_cursor_pos(), dtype=np.float32) * res)
... |
def clean_mem(mem):
    """Flatten a memory dict into formatted marker strings.

    Entries with no relations are dropped. Each remaining entry renders its
    key, relations and stringified scores between '<|...|>', '<|r|>' and
    '<|s|>' markers, preserving the dict's iteration order.
    """
    lines = []
    for key, entry in mem.items():
        relations = entry['relations']
        if not relations:
            continue
        rels = ' <|r|> '.join(relations)
        scores = ' <|s|> '.join(str(score) for score in entry['scores'])
        lines.append(' <|' + key + '|> ' + ' <|r|> ' + rels + ' <|r|> ' + ' <|s|> ' + scores)
    return lines
def build_dataset(is_train, config):
transform = build_transform(is_train, config)
if (config.DATA.DATASET == 'imagenet'):
prefix = ('train' if is_train else 'val')
if config.DATA.ZIP_MODE:
ann_file = (prefix + '_map.txt')
prefix = (prefix + './')
dataset = Ca... |
class ArrayBinTests(unittest.TestCase):
def testNormalList(self):
inputs = [[list(range(10)), [4.2]], [[5, 6, 7, 8, 9, 10, 11, 12], [7, 11]], [[5, 6, 7, 8, 9, 10, 11, 12], [7, 11, 11.5, 13]]]
outputs = [[[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], [[0, 1], [2, 3, 4, 5], [6, 7]], [[0, 1], [2, 3, 4, 5], [6], [... |
def _find_match(str_list, key_str, postfix):
split_str = key_str.split('.')
if (split_str[(- 1)] == postfix):
match_string = ''.join(key_str.split('.')[0:(- 1)])
for s2 in str_list:
pattern1 = ''.join(s2.split('.')[0:(- 1)])
pattern2 = ''.join(s2.split('.')[0:(- 2)])
... |
def count_frames_and_secs(path):
if isinstance(path, pathlib.PurePath):
path = str(path)
if (not isinstance(path, str)):
raise TypeError('Video path must be a string or pathlib.Path.')
cmd = [_get_exe(), '-i', path, '-map', '0:v:0', '-c', 'copy', '-f', 'null', '-']
try:
out = sub... |
class Multi_Scale_Fearue_Aggregation(nn.Module):
def __init__(self, num_img_channel, point_size, p_stride, num_map=2):
super().__init__()
self.num_img_channel = num_img_channel
self.point_x = point_size[1]
self.point_y = point_size[0]
self.tf_ratio = 4
self.conv = Enc... |
('data.dsprites', 'class')
class DSpritesData(base.ImageTfdsData):
def __init__(self, predicted_attribute, num_classes=None, data_dir=None):
dataset_builder = tfds.builder('dsprites:2.*.*', data_dir=data_dir)
dataset_builder.download_and_prepare()
info = dataset_builder.info
if (pred... |
def copy_without_dropout(hparams):
    """Return a copy of *hparams* with every dropout-related rate set to 1.0."""
    new_hparams = {}
    for key, value in hparams.values().items():
        # Any hyperparameter whose name mentions 'dropout' is neutralized.
        new_hparams[key] = 1.0 if 'dropout' in key else value
    return tf.contrib.training.HParams(**new_hparams)
def load_dataset(args, training_num=None, use_fixed_validation=False, no_binarization=False, **kwargs):
if (training_num is not None):
args.training_set_size = training_num
if (args.dataset_name == 'static_mnist'):
args.input_size = [1, 28, 28]
args.input_type = 'binary'
(train_l... |
# NOTE(review): the line below lost its '@' (and likely part of the module
# name) during extraction; it was presumably the decorator
# '@test_utils.test(print_preprocessed_ir=True)'.
_utils.test(print_preprocessed_ir=True)
def test_ifexp():
    """The ternary expression must lower correctly inside a kernel."""
    def foo(x: ti.i32) -> ti.i32:
        # 1 for a truthy x, 0 otherwise.
        return 1 if x else 0
    assert foo(1) == 1
    assert foo(0) == 0
def aps10_fp(x, n):
    """Evaluate exp(-n*x) * (1 - n*(x - 1)) + n * x**(n - 1).

    This is the derivative form used for APS test problem #10.
    """
    exp_term = np.exp(-n * x) * (1.0 - n * (x - 1))
    poly_term = n * x ** (n - 1)
    return exp_term + poly_term
.parametrize('slate_id, reward, pscore, position, evaluation_policy_pscore, description', valid_input_of_slate_estimators)
def test_slate_estimators_using_valid_input_data(slate_id, reward, pscore, position, evaluation_policy_pscore, description) -> None:
_ = sips.estimate_policy_value(slate_id=slate_id, reward=rew... |
def bidirectional_dynamic_rnn(cell_fw, cell_bw, inputs, sequence_length=None, initial_state_fw=None, initial_state_bw=None, dtype=None, parallel_iterations=None, swap_memory=False, time_major=False, scope=None):
assert (not time_major)
flat_inputs = flatten(inputs, 2)
flat_len = (None if (sequence_length is... |
class Elliott_SENet(nn.Module):
def __init__(self, block, num_blocks, num_classes=10):
super(Elliott_SENet, self).__init__()
self.in_planes = 64
self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.layer1 = self._mak... |
def contextual_precision(expected, observed, data=None, start=None, end=None, weighted=True):
    """Precision computed from the contextual confusion matrix.

    Thin wrapper around ``_precision`` that binds the *weighted* flag into
    the confusion-matrix callback it passes along.
    """
    def _weighted_cm(expected_, observed_, data_, start_, end_):
        # Forward the weighted flag captured from the enclosing call.
        return contextual_confusion_matrix(expected_, observed_, data_, start_, end_, weighted)
    return _precision(expected, observed, data, start, end, _weighted_cm)
def accuracy_tf(predictions, targets, mask):
    """Masked accuracy metric built under an 'accuracy' name scope."""
    with tf.name_scope('accuracy'):
        # mask acts as per-element weights so padded positions are ignored.
        metric = tf.metrics.accuracy(labels=targets, predictions=predictions, weights=mask)
    return metric
def basic_check_build():
    """Compile a trivial C program to verify the build toolchain works.

    Inside a Pyodide package build (signalled by the PYODIDE_PACKAGE_ABI
    environment variable) the check is skipped entirely.
    """
    if 'PYODIDE_PACKAGE_ABI' in os.environ:
        return
    source = textwrap.dedent(' #include <stdio.h>\n int main(void) {\n return 0;\n }\n ')
    compile_test_program(source)
def get_from_translation_cache(source_language: str, entity: str):
    """Look up a cached translation of *entity* from *source_language*.

    Returns the cached value, or None when either the language or the entity
    has no cache entry.
    """
    # Chained .get() replaces the original two-step membership test; the
    # module-level cache is only read, so no `global` declaration is needed.
    return global_translation_cache.get(source_language, {}).get(entity)
_spec_function('twitter_aae')
def get_twitter_aae_spec(demographic: str) -> RunSpec:
scenario_spec = ScenarioSpec(class_name='helm.benchmark.scenarios.twitter_aae_scenario.TwitterAAEScenario', args={'demographic': demographic})
return RunSpec(name=f'twitter_aae:demographic={demographic}', scenario_spec=scenario... |
def _format(val: Any, output_format: str='standard', split: bool=False, errors: str='coarse') -> Any:
val = str(val)
result: Any = []
if (val in NULL_VALUES):
return [np.nan]
if (not validate_meid(val)):
if (errors == 'raise'):
raise ValueError(f'Unable to parse value {val}')... |
class RteProcessor(DataProcessor):
def get_train_examples(self, data_dir):
return self._create_examples(self._read_tsv(os.path.join(data_dir, 'train.tsv')), 'train')
def get_dev_examples(self, data_dir):
return self._create_examples(self._read_tsv(os.path.join(data_dir, 'dev.tsv')), 'dev')
d... |
class Lighting(object):
def __init__(self, alphastd, eigval=imagenet_pca['eigval'], eigvec=imagenet_pca['eigvec']):
self.alphastd = alphastd
assert (eigval.shape == (3,))
assert (eigvec.shape == (3, 3))
self.eigval = eigval
self.eigvec = eigvec
def __call__(self, img):
... |
def strong_transforms(img_size=224, scale=(0.08, 1.0), ratio=(0.75, 1.), hflip=0.5, vflip=0.0, color_jitter=0.4, auto_augment='rand-m9-mstd0.5-inc1', interpolation='random', use_prefetcher=True, mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD, re_prob=0.25, re_mode='pixel', re_count=1, re_num_splits=0, color_aug=F... |
class List():
def __init__(self, PRIMITIVE, content, parameters):
self.starts_ = GrowableBuffer(PRIMITIVE)
self.stops_ = GrowableBuffer(PRIMITIVE)
self.content_ = content
self.parameters_ = parameters
self.set_id(Ref(0))
def content(self):
return self.content_
... |
def evaluate(args, config, eval_dataset, model, prefix=''):
eval_output_dir = args.output_dir
if ((not os.path.exists(eval_output_dir)) and (args.local_rank in [(- 1), 0])):
os.makedirs(eval_output_dir)
if (args.n_gpu > 1):
model = torch.nn.DataParallel(model)
model.eval()
args.eval_... |
def _build_synset_lookup(imagenet_metadata_file):
lines = tf.gfile.FastGFile(imagenet_metadata_file, 'r').readlines()
synset_to_human = {}
for l in lines:
if l:
parts = l.strip().split('\t')
assert (len(parts) == 2)
synset = parts[0]
human = parts[1]
... |
class ValueCritic(nn.Module):
    """Value network: an MLP ending in a single scalar value per observation."""

    # Hidden layer widths; a final width-1 output layer is appended below.
    hidden_dims: Sequence[int]

    def __call__(self, observations: jnp.ndarray) -> jnp.ndarray:
        # NOTE(review): this looks like a flax.linen module whose
        # '@nn.compact' decorator may have been lost during extraction —
        # confirm against the original source.
        critic = MLP((*self.hidden_dims, 1))(observations)
        # Drop the trailing singleton dimension so the output matches the
        # batch shape of the observations.
        return jnp.squeeze(critic, (- 1))
class ClickDiv(Div):
def __init__(self, innerHTML='', **kwargs):
super().__init__(innertHTML, **kwargs)
self.click = Trigger()
def widget_js(self):
return (super().widget_js() + minify("\n element.addEventListener('click', (ev) => {\n var target = ev.target;\n ... |
def weights_init_orthogonal(m):
classname = m.__class__.__name__
if (classname.find('Conv') != (- 1)):
init.orthogonal(m.weight.data, gain=1)
elif (classname.find('Linear') != (- 1)):
init.orthogonal(m.weight.data, gain=1)
elif (classname.find('BatchNorm2d') != (- 1)):
init.norma... |
class Uniform(Distribution):
arg_constraints = {'low': constraints.dependent, 'high': constraints.dependent}
has_rsample = True
def mean(self):
return ((self.high + self.low) / 2)
def stddev(self):
return ((self.high - self.low) / (12 ** 0.5))
def variance(self):
return ((sel... |
def main(device='cpu'):
experiment_dir = pathlib.Path(__file__).resolve().parent
hparams_file = (experiment_dir / 'hyperparams.yaml')
data_folder = '../../samples/ASR/'
data_folder = (experiment_dir / data_folder).resolve()
with open(hparams_file) as fin:
hparams = load_hyperpyyaml(fin)
... |
class PIDStepSizeController():
def __init__(self, h, pcoeff, icoeff, dcoeff, order=1, accept_safety=0.81, eps=1e-08):
self.h = h
self.b1 = (((pcoeff + icoeff) + dcoeff) / order)
self.b2 = ((- (pcoeff + (2 * dcoeff))) / order)
self.b3 = (dcoeff / order)
self.accept_safety = ac... |
def preactresnet50(num_classes=10, dropout=False, stride=1):
    """Build a PreActResNet-50: PreActBottleneck blocks, layout [3, 4, 6, 3], base width 64.

    NOTE(review): the `dropout` argument is accepted but never forwarded to
    PreActResNet — confirm whether it should be passed through or removed.
    """
    return PreActResNet(PreActBottleneck, [3, 4, 6, 3], 64, num_classes, stride=stride)
def iden_level(pred_list, gold_list):
tn = 0
tp = 0
tp_fp = 0
tp_fn = 0
total = 0
for (pred, gold) in zip(pred_list, gold_list):
pred_id = set()
gold_id = set()
for id in pred:
if (id == 'correct'):
pred_id.add(id)
else:
... |
def cauchy_conj(v, z, w, num=2, denom=2):
if (num == 1):
expr_num = 'z * ComplexReal(v) - Real2Complex(ComplexReal(v)*ComplexReal(w) + ComplexImag(v)*ComplexImag(w))'
elif (num == 2):
expr_num = 'z * ComplexReal(v) - Real2Complex(Sum(v * w))'
else:
raise NotImplementedError
if (d... |
class LargeScaleJitter(T.Augmentation):
def __init__(self, cfg):
super().__init__()
image_size = cfg.INPUT.LSJ.IMAGE_SIZE
min_scale = cfg.INPUT.LSJ.MIN_SCALE
max_scale = cfg.INPUT.LSJ.MAX_SCALE
pad_value = ((1.0 * sum(cfg.MODEL.PIXEL_MEAN)) / len(cfg.MODEL.PIXEL_MEAN))
... |
def save_pickle(path_, data):
    """Pickle *data* to the file at *path_*.

    The handle returned by ``get_file_handle`` is now closed in a ``finally``
    block, so it no longer leaks when ``pickle.dump`` raises (the original
    only closed it on the success path).
    """
    fhand = get_file_handle(path_, 'wb+')
    try:
        pickle.dump(data, fhand)
    finally:
        fhand.close()
def get_mujoco_py_mjlib():
class MjlibDelegate():
def __init__(self, lib):
self._lib = lib
def __getattr__(self, name: str):
if name.startswith('mj'):
return getattr(self._lib, ('_' + name))
raise AttributeError(name)
return MjlibDelegate(get_m... |
class Integral(nn.Module):
def __init__(self, reg_max=16):
super(Integral, self).__init__()
self.reg_max = reg_max
self.register_buffer('project', torch.linspace(0, self.reg_max, (self.reg_max + 1)))
def forward(self, x):
x = F.softmax(x.reshape((- 1), (self.reg_max + 1)), dim=1)... |
_function
def _least_semi_primitive(p):
if (((p % 2) == 0) or (not p.is_prime_power())):
raise ValueError('{} is not an odd prime power'.format(p))
from sage.arith.misc import euler_phi
from sage.rings.finite_rings.integer_mod_ring import Integers
phip = euler_phi(p)
ord = (phip if ((p % 4) ... |
# NOTE(review): the three decorators below lost their '@pytest.mark' prefix
# during extraction; they are restored here.
@pytest.mark.parametrize('fdf', [fdf])
@pytest.mark.parametrize('min_flow', [0, 2])
@pytest.mark.parametrize('flow_popup', [False, True])
def test_plot_flows(fdf, min_flow, flow_popup):
    """plot_flows should return a folium map for every parameter combination."""
    flow_map = plot.plot_flows(fdf, min_flow=min_flow, flow_popup=flow_popup)
    assert isinstance(flow_map, folium.folium.Map)
class DmBenchEnv():
def __init__(self, name, action_repeat=1, size=(64, 64), camera=None):
(domain, task) = name.split('_', 1)
if (domain == 'cup'):
domain = 'ball_in_cup'
if isinstance(domain, str):
from dm_control import suite
self._env = suite.load(doma... |
def _overlap_segment(expected, observed, start=None, end=None):
(tp, fp, fn) = (0, 0, 0)
observed_copy = observed.copy()
for expected_seq in expected:
found = False
for observed_seq in observed:
if _overlap(expected_seq, observed_seq):
if (not found):
... |
def load_ppr_csr(input_dir='datasets/ppr/papers100M', dataset='ogbn-papers100M', alpha=0.1, eps=0.001, topk=64, ppr_normalization='row'):
batch_id = 0
csrs = []
while True:
start_batch = datetime.now()
logging.info(f'Read batch {batch_id}')
dump_suffix = f'{dataset}_alpha{int((alpha ... |
def conv(x, channels, kernel=4, stride=2, pad=0, pad_type='zero', use_bias=True, sn=False, scope='conv'):
with tf.variable_scope(scope):
if (pad > 0):
if (((kernel - stride) % 2) == 0):
pad_top = pad
pad_bottom = pad
pad_left = pad
... |
class IdentityLayer(My2DLayer):
def __init__(self, in_channels, out_channels, use_bn=False, act_func=None, dropout_rate=0, ops_order='weight_bn_act'):
super(IdentityLayer, self).__init__(in_channels, out_channels, use_bn, act_func, dropout_rate, ops_order)
def weight_op(self):
return None
de... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.