code
stringlengths
101
5.91M
def getblockname(block, unknown='unknown'):
    """Return the block's 'name' entry, or ``unknown`` when absent.

    Args:
        block: mapping that may contain a 'name' key.
        unknown: fallback value returned when 'name' is missing.
    """
    # dict.get replaces the manual `in` test + second lookup.
    return block.get('name', unknown)
def train(P, its, model, criterion, optimizer, scheduler, data_loader, data_iter, logger=None, simclr_aug=None): assert (simclr_aug is not None) try: (ims, lbls, path) = next(data_iter) except StopIteration: my_sampler = BalancedMultiSourceRandomSampler(data_loader.dataset, P.batch_p, P.loca...
class QuotientFields(Category_singleton): def super_categories(self): return [Fields()] class ParentMethods(): pass class ElementMethods(): _method def numerator(self): pass _method def denominator(self): pass _binop def...
def test_admission_discharge_placeholder(tmp_path: pathlib.Path): ontology = DummyAdmissionDischargeOntology() labeler = DummyAdmissionDischargeLabeler(ontology) events_with_labels: EventsWithLabels = [(event((2000, 1, 1), 'Visit/IP', end=datetime.datetime(2000, 1, 2), omop_table='visit_occurrence'), True),...
class VariablesTest(tf.test.TestCase): def testCreateVariable(self): with self.test_session(): with tf.variable_scope('A'): a = variables.variable('a', [5]) self.assertEquals(a.op.name, 'A/a') self.assertListEqual(a.get_shape().as_list(), [5]) ...
class TestAutoLogSummarization(): def setup(self): self.config = WorkFlowConfig(data_loader_config=DataLoaderConfig(filepath=TEST_LOG_PATH, log_type='csv', dimensions=dict({'attributes': ['Action', 'ID'], 'body': ['Details']}), reader_args={'header': None, 'sep': '|', 'on_bad_lines': 'skip', 'names': ['Time...
.parametrize('ctx, func_name', ctxs) .parametrize('beta', [1.0, 0.5, 0.1]) .parametrize('seed', [313]) def test_softplus_double_backward(seed, beta, ctx, func_name): from nbla_test_utils import cap_ignore_region, backward_function_tester rng = np.random.RandomState(seed) inputs = [(rng.randn(2, 3, 4).astype...
def init_glove_data(glove_fname, glove_outname): print(f'Constructing glove dictionary from {glove_fname} ...... ') word2coef_dict = {} running_sum = np.zeros((300,)) with open(os.path.join(glove_fname), 'r') as f: for (idx, line) in enumerate(tqdm(list(f))): values = line.split() ...
_task('multilingual_translation') class MultilingualTranslationTask(LegacyFairseqTask): def add_args(parser): parser.add_argument('data', metavar='DIR', help='path to data directory') parser.add_argument('--lang-pairs', default=None, metavar='PAIRS', help='comma-separated list of language pairs (in ...
def tsne_and_tests(states, labels): (states, labels) = (np.array(states), np.array(labels)) projected = tsne_by_gender(states, labels, title='tsne of hidden state by gender') NUM_CLUSTERS = [2, 10, 25, 100, 300] print('CALCULATING BIAS BY CLUSTERING (WITH K-MENAS)') for num_clusters in NUM_CLUSTERS:...
class LineLoader(Loader):
    """YAML loader that records the source line of every mapping it builds."""

    def construct_mapping(self, node, deep=False):
        """Build the mapping as usual, then tag it with its 1-based line."""
        result = super().construct_mapping(node, deep=deep)
        # node.start_mark.line is 0-based; store it 1-based for humans.
        result['__line__'] = node.start_mark.line + 1
        return result
class HeckeModuleElement(ModuleElement): def __init__(self, parent, x=None): ModuleElement.__init__(self, parent) if (x is not None): self.__element = x def _repr_(self): return self.element()._repr_() def _compute_element(self): raise NotImplementedError('_comput...
class ToLogarithmTransformer(BaseEstimator, TransformerMixin): def __init__(self, column): self.column = column def fit(self, X, *args): return self def transform(self, X): X[self.column] = np.log1p(X[self.column]) return X def inverse_transform(self, X): X[self.c...
class NasnetMobileEncoder(RetinaNetEncoder): def __init__(self, **kwargs): super().__init__() self.encoder = nasnet_mobile.NASNetAMobile(**kwargs) self.fpn_sizes = [44, 264, 528, 1056] print(self.fpn_sizes) def forward(self, inputs): x = torch.cat([inputs, inputs, inputs]...
def maskiou_loss_evaluator():
    """Build the Mask-IoU loss evaluator using the configured loss weight."""
    # Weight comes from the global detection config.
    return MaskIoULossComputation(cfg.MRCNN.MASKIOU.LOSS_WEIGHT)
class Lenet5Model(model.Model): def __init__(self): super(Lenet5Model, self).__init__('lenet5', 28, 32, 0.005) def add_inference(self, cnn): cnn.conv(32, 5, 5) cnn.mpool(2, 2) cnn.conv(64, 5, 5) cnn.mpool(2, 2) cnn.reshape([(- 1), ((64 * 7) * 7)]) cnn.affi...
def hsv_to_rgb(h, s, v): if (s == 0.0): return (v, v, v) i = int((h * 6.0)) f = ((h * 6.0) - i) p = (v * (1.0 - s)) q = (v * (1.0 - (s * f))) t = (v * (1.0 - (s * (1.0 - f)))) i = (i % 6) if (i == 0): return (v, t, p) if (i == 1): return (q, v, p) if (i ==...
class AugmentMelSTFT(nn.Module): def __init__(self, n_mels=128, sr=32000, win_length=800, hopsize=320, n_fft=1024, freqm=48, timem=192, htk=False, fmin=0.0, fmax=None, norm=1, fmin_aug_range=1, fmax_aug_range=1000): torch.nn.Module.__init__(self) self.win_length = win_length self.n_mels = n_...
def get_val_str_from_dict(val_dict):
    """Format a metrics dict as 'key: value' pairs, sorted by key.

    Args:
        val_dict: mapping of metric name -> numeric value.

    Returns:
        Comma-separated string with each value rendered to 4 decimals,
        e.g. 'acc: 0.9000, loss: 1.2345'. Empty dict yields ''.
    """
    # join over a generator replaces the quadratic `+=` string build.
    return ', '.join('%s: %.4f' % (key, val)
                     for key, val in sorted(val_dict.items()))
def _conv2d_gradfix(transpose, weight_shape, stride, padding, output_padding, dilation, groups): ndim = 2 weight_shape = tuple(weight_shape) stride = _tuple_of_ints(stride, ndim) padding = _tuple_of_ints(padding, ndim) output_padding = _tuple_of_ints(output_padding, ndim) dilation = _tuple_of_in...
def binary_stdout():
    """Return a byte-oriented stdout stream, forcing binary mode on Windows."""
    out = sys.stdout.buffer
    if sys.platform == 'win32':
        # Windows translates \n <-> \r\n on text streams; switch the
        # underlying fd to binary so raw bytes pass through untouched.
        import msvcrt
        import os
        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
    return out
class WordModel(object): def __init__(self, embed_size=None, filename=None, embed_type='glove', top_n=None, additional_vocab=Counter()): if (filename is None): if (embed_size is None): raise Exception('Either embed_file or embed_size needs to be specified.') self.embe...
class InferenceFact(): def __init__(self, ptr): self.ptr = ptr def __del__(self): if self.ptr: check(lib.tract_inference_fact_destroy(byref(self.ptr))) def __str__(self): return self.dump() def _valid(self): if (self.ptr == None): raise TractError(...
def fullyQualifiedTemplateTitle(templateTitle): if templateTitle.startswith(':'): return ucfirst(templateTitle[1:]) else: m = re.match('([^:]*)(:.*)', templateTitle) if m: prefix = normalizeNamespace(m.group(1)) if (prefix in knownNamespaces): retu...
class BlurFunctionBackward(Function): def forward(ctx, grad_output, kernel, kernel_flip): ctx.save_for_backward(kernel, kernel_flip) grad_input = F.conv2d(grad_output, kernel_flip, padding=1, groups=grad_output.shape[1]) return grad_input def backward(ctx, gradgrad_output): (kern...
def build_stamp(pyxes, include_dirs=()): pyx_defs = {} from Cython.Compiler.Main import compile from Cython.Compiler.CmdLine import parse_command_line includes = sum([['--include-dir', d] for d in include_dirs], []) for source in pyxes: (base, ext) = splitext(source) pyx_hash = sha1(...
class SplittingAlgebraElement(PolynomialQuotientRingElement): def __invert__(self): inv_elements = self.parent()._invertible_elements if (self in inv_elements): return inv_elements[self] return super().__invert__() def is_unit(self): inv_elements = self.parent()._inve...
class TrainDAVIS(torch.utils.data.Dataset): def __init__(self, root, year, split, clip_l, clip_n): self.root = root with open(os.path.join(root, 'ImageSets', '{}/{}.txt'.format(year, split)), 'r') as f: self.video_list = f.read().splitlines() self.clip_l = clip_l self.cli...
class AllGather(torch.autograd.Function): def forward(ctx, tensor, n_gpu, args): output = [torch.empty_like(tensor) for _ in range(n_gpu)] dist.all_gather(output, tensor) ctx.local_rank = args.local_rank ctx.batch_size = tensor.shape[0] return torch.cat(output, 0) def bac...
class asign_index(torch.autograd.Function): def forward(ctx, kernel, guide_feature): ctx.save_for_backward(kernel, guide_feature) guide_mask = torch.zeros_like(guide_feature).scatter_(1, guide_feature.argmax(dim=1, keepdim=True), 1).unsqueeze(2) return torch.sum((kernel * guide_mask), dim=1)...
_task('cross_lingual_lm') class CrossLingualLMTask(FairseqTask): def add_args(parser): parser.add_argument('data', help='colon separated path to data directories list, will be iterated upon during epochs in round-robin manner') parser.add_argument('--tokens-per-sample', d...
def _get_trunk_cfg(arch_def):
    """Extract the trunk (backbone) block configs from an architecture def."""
    stage_count = mbuilder.get_num_stages(arch_def)
    # When 'backbone' is absent, default to every stage except the last.
    trunk_stages = arch_def.get('backbone', range(stage_count - 1))
    return mbuilder.get_blocks(arch_def, stage_indices=trunk_stages)
def get_loader_for_app(app: Any) -> Callable:
    """Select the request loader matching the app's interface (ASGI vs WSGI)."""
    # Imported lazily so starlette stays an optional dependency.
    from starlette.applications import Starlette
    return from_asgi if isinstance(app, Starlette) else from_wsgi
# NOTE(review): the line below looks like a decorator that lost its leading
# '@' (and possibly a name prefix, e.g. '@test_utils.test') during
# extraction — confirm against the original repository.
_utils.test(arch=[ti.cpu, ti.opengl], require=ti.extension.sparse)
def test_require_extensions_2():
    """Check which arch is active when sparse support is required."""
    # Presumably opengl lacks the sparse extension, so cpu is expected
    # to be the selected arch here — TODO confirm.
    assert (ti.lang.impl.current_cfg().arch in [ti.cpu])
class ConvGenerator():
    """Placeholder test suite for the conv generator; cases not yet written."""

    def test_init(self):
        """TODO: exercise construction of the generator."""
        pass

    def test_forward(self):
        """TODO: exercise a forward pass of the generator."""
        pass
def worker2(remote, parent_remote, env_fn_wrapper): parent_remote.close() env = env_fn_wrapper.x() while True: (cmd, data) = remote.recv() if (cmd == 'step'): (ob, ob_c, reward, done, info) = env.step(data) if all(done): (ob, ob_c) = env.reset() ...
def test_egreedy_update_params(): policy = EpsilonGreedy(n_actions=2, epsilon=1.0) policy.action_counts_temp = np.array([4, 3]) policy.action_counts = np.copy(policy.action_counts_temp) policy.reward_counts_temp = np.array([2.0, 0.0]) policy.reward_counts = np.copy(policy.reward_counts_temp) act...
def set_mcast(num_groups=1, num_sequencers=1): all_ports = [p for p in range(8)] mcast_ports = [0, 1, 2] bfrt.pre.node.entry(MULTICAST_NODE_ID=0, MULTICAST_RID=0, MULTICAST_LAG_ID=[], DEV_PORT=all_ports).push() bfrt.pre.mgid.entry(MGID=1, MULTICAST_NODE_ID=[0], MULTICAST_NODE_L1_XID_VALID=[False], MULTI...
class TentClf(TentFull): def configure_model_optimizer(self, algorithm, alpha): adapted_algorithm = copy.deepcopy(algorithm) optimizer = torch.optim.Adam(adapted_algorithm.classifier.parameters(), lr=(algorithm.hparams['lr'] * alpha), weight_decay=algorithm.hparams['weight_decay']) adapted_a...
def test_multiple_modes_prewitt():
    """Prewitt filter should accept a per-axis sequence of boundary modes."""
    image = numpy.array([[1.0, 0.0, 0.0],
                         [1.0, 1.0, 0.0],
                         [0.0, 0.0, 0.0]])
    expected = numpy.array([[1.0, -3.0, 2.0],
                            [1.0, -2.0, 1.0],
                            [1.0, -1.0, 0.0]])
    # One mode per axis: 'reflect' along axis 0, 'wrap' along axis 1.
    assert_equal(expected, ndimage.prewitt(image, mode=['reflect', 'wrap']))
def load_train_data(csv_file, n_items=None):
    """Load (uid, sid) interaction rows into a binary user-item CSR matrix.

    Args:
        csv_file: path or file-like with integer 'uid' and 'sid' columns.
        n_items: number of item columns. Defaults to the module-level
            ``n_items`` global, preserving the original call signature.

    Returns:
        scipy.sparse.csr_matrix of shape (n_users, n_items) with 1.0 at
        every observed (uid, sid) pair; n_users is max(uid) + 1.
    """
    if n_items is None:
        # Backward-compatible fallback to the legacy module-level constant
        # (the parameter shadows the global name, hence globals()).
        n_items = globals()['n_items']
    tp = pd.read_csv(csv_file)
    n_users = tp['uid'].max() + 1
    rows, cols = tp['uid'], tp['sid']
    data = sparse.csr_matrix((np.ones_like(rows), (rows, cols)),
                             dtype='float64', shape=(n_users, n_items))
    return data
class ImagenetHierarchihcalDataset(Dataset): def __init__(self, hierarchy_file, root_dir, transform=None): self.transform = transform self.augment_transform = transforms.RandomChoice([transforms.RandomResizedCrop(size=(256, 256), scale=(0.7, 1.0)), transforms.RandomHorizontalFlip(1), transforms.Colo...
def do_solve(**kw): count[0] = 0 with suppress_warnings() as sup: sup.filter(DeprecationWarning, '.*called without specifying.*') (x0, flag) = lgmres(A, b, x0=zeros(A.shape[0]), inner_m=6, rtol=1e-14, **kw) count_0 = count[0] assert_(allclose((A x0), b, rtol=1e-12, atol=1e-12), norm(((A...
def test_logpprob(backend, model_setup):
    """Smoke test: logpdf evaluates without error on the fixture model."""
    pdf, observations, parameters = model_setup
    # Only checks that evaluation completes; the value is not asserted.
    pdf.logpdf(parameters, observations)
def url_to_file_path(url, filecache):
    """Map a URL to the on-disk path used by the given file cache."""
    # The cache key is the canonicalized URL; _fn turns it into a path.
    cache_key = CacheController.cache_url(url)
    return filecache._fn(cache_key)
def seed_test_case0():
    """Seed case: Simple.do_something reports a non-empty input list."""
    simple = module0.Simple(10)
    outcome = simple.do_something([1, 2, 3])
    assert outcome == 'not empty!'
def CurrentDeviceScope():
    """Return the current thread's device scope (None until one is set).

    Lazily creates the thread-local slot so each new thread starts out
    with no active device scope.
    """
    if not hasattr(_threadlocal_scope, 'devicescope'):
        _threadlocal_scope.devicescope = None
    return _threadlocal_scope.devicescope
.parametrize('jitted', (False, True)) def test_diffable_backend(jitted): pyhf.set_backend('jax', default=True) def example_op(x): y = pyhf.default_backend.astensor(x) return (2 * y) if jitted: assert (jax.jacrev(jax.jit(example_op))([1.0]) == [2.0]) else: assert (jax.jacr...
def _sympysage_rational(self):
    """Convert this SymPy rational (self.p / self.q) to a Sage Rational."""
    from sage.rings.integer import Integer
    from sage.rings.rational import Rational
    # Rational accepts a (numerator, denominator) pair of Sage Integers.
    numerator = Integer(self.p)
    denominator = Integer(self.q)
    return Rational((numerator, denominator))
def add_ResNet_roi_conv5_head_for_keypoints(model, blob_in, dim_in, spatial_scale): model.RoIFeatureTransform(blob_in, '_[pose]_pool5', blob_rois='keypoint_rois', method=cfg.KRCNN.ROI_XFORM_METHOD, resolution=cfg.KRCNN.ROI_XFORM_RESOLUTION, sampling_ratio=cfg.KRCNN.ROI_XFORM_SAMPLING_RATIO, spatial_scale=spatial_sc...
def MeetSemilattice(data=None, *args, **options): if (isinstance(data, FiniteMeetSemilattice) and (not args) and (not options)): return data if ('check' in options): check = options.pop('check') else: check = True P = Poset(data, *args, **options) if check: try: ...
_inherit(core.Dataset) class Dataset(core.Dataset): def __init__(self, data_home=None): super().__init__(data_home, name='tut2017se', clip_class=Clip, bibtex=BIBTEX, remotes=REMOTES, license_info=LICENSE_INFO) _docs(load_audio) def load_audio(self, *args, **kwargs): return load_audio(*args, ...
def gh_role(name, rawtext, pr_number, lineno, inliner, options={}, content=[]): ref = f' set_classes(options) node = reference(rawtext, ('#' + pr_number), refuri=ref, **options) return ([node], [])
def export_onnx_model(model, inputs): assert isinstance(model, torch.nn.Module) def _check_eval(module): assert (not module.training) model.apply(_check_eval) logger.info('Beginning ONNX file converting') with torch.no_grad(): with io.BytesIO() as f: torch.onnx.export(mod...
class pyparsing_unicode(unicode_set): _ranges = [(32, sys.maxunicode)] class Latin1(unicode_set): _ranges = [(32, 126), (160, 255)] class LatinA(unicode_set): _ranges = [(256, 383)] class LatinB(unicode_set): _ranges = [(384, 591)] class Greek(unicode_set): _ranges = ...
def test_call_guard(): assert (m.unguarded_call() == 'unguarded') assert (m.guarded_call() == 'guarded') assert (m.multiple_guards_correct_order() == 'guarded & guarded') assert (m.multiple_guards_wrong_order() == 'unguarded & guarded') if hasattr(m, 'with_gil'): assert (m.with_gil() == 'GIL...
def add_contrastive_loss(hidden, hidden_norm=True, temperature=1.0, tpu_context=None, weights=1.0): if hidden_norm: hidden = tf.math.l2_normalize(hidden, (- 1)) (hidden1, hidden2) = tf.split(hidden, 2, 0) batch_size = tf.shape(hidden1)[0] if (tpu_context is not None): hidden1_large = tpu...
# NOTE(review): the line below appears to have lost a leading '@' (and
# possibly a name prefix) during extraction — presumably a decorator such
# as '@properties.make_properties'; confirm against the original repo.
_properties
class PatternApplyOnceEverywhere(PatternMatchAndApplyRepeated):
    """Pass that applies a pattern transformation once per match site.

    Delegates to the repeated-apply machinery with ``apply_once=True``.
    """
    CATEGORY: str = 'Helper'

    def apply_pass(self, sdfg: SDFG, pipeline_results: Dict[(str, Any)]) -> Dict[(str, List[Any])]:
        # Same machinery as the repeated variant, limited to one pass.
        return self._apply_pass(sdfg, pipeline_results, apply_once=True)
def do_validate(prefix):
    """Build an epoch-end callback that runs validation for ``prefix``."""
    def _callback(iter_no, sym, arg, aux):
        # sym/arg/aux are required by the callback signature but unused.
        validate(prefix, iter_no)
    return _callback
def read_data_physionet_4(path, window_size=1000, stride=500): with open(os.path.join(path, 'challenge2017.pkl'), 'rb') as fin: res = pickle.load(fin) all_data = res['data'] for i in range(len(all_data)): tmp_data = all_data[i] tmp_std = np.std(tmp_data) tmp_mean = np.mean(tm...
def creat_data_loader(cfg, root_dir): print('==> Preparing data...') if (cfg.DATA.DATASET == 'CUB'): train_loader = torch.utils.data.DataLoader(CUBDataset(root=root_dir, cfg=cfg, is_train=True), batch_size=cfg.TRAIN.BATCH_SIZE, shuffle=True, num_workers=cfg.BASIC.NUM_WORKERS, pin_memory=True) va...
def get_preprocess_args(): parser = argparse.ArgumentParser() parser.add_argument('--npy_path', default='../../data/mosei/mel160', type=str, help='Path to MOSEI segmented NPY files') parser.add_argument('--csv_path', default='../../data/mosei/mosei_no_semi.csv', type=str, help='Path to mosei_no_semi.csv', r...
def compute_cpp_args_construction_stmts_and_forward_arg_symbols(test_params): device = test_params.device cpp_forward_args_symbols = [] def add_cpp_forward_args(args): args_stmts = [] for (arg_name, _) in args: args_stmts.append('auto {} = arg_dict.at("{}")'.format(arg_name, arg_...
class TestJediTyper(TransformTest): def _test(self, code): return _test_typing(code)[0] def test_typing_global_int_loop(self): code = ' for i in range(10):\n a = i + 1\n ' types = self._test(code) self.assertIn((None, (1, 0)), types) variables = t...
class FastCrystal(UniqueRepresentation, Parent): def __classcall__(cls, cartan_type, shape, format='string'): cartan_type = CartanType(cartan_type) shape = tuple(shape) if (len(shape) > 2): raise ValueError('The shape must have length <=2') shape = (shape + ((0,) * (2 - l...
class StringStrip(): def __init__(self, strip=True, strip_pos='both', strip_str=None): assert isinstance(strip, bool) assert (strip_pos in ('both', 'left', 'right')) assert ((strip_str is None) or isinstance(strip_str, str)) self.strip = strip self.strip_pos = strip_pos ...
def conv_bn(in_channels, out_channels, kernel_size, stride, padding, groups=1): result = nn.Sequential() result.add_module('conv', nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding, groups=groups, bias=False)) result.add_module('bn', nn.Bat...
class PeriodicWriter(HookBase): def __init__(self, writers, period=20): self._writers = writers self._period = period def after_step(self): if ((((self.trainer.iter + 1) % self._period) == 0) or (self.trainer.iter == (self.trainer.max_iter - 1))): for writer in self._writers:...
def _impl(array, highlevel, behavior, attrs): with HighLevelContext(behavior=behavior, attrs=attrs) as ctx: layout = ctx.unwrap(array, allow_record=False, primitive_policy='error') out = ak._do.remove_structure(layout, function_name='ak.ravel', drop_nones=False) assert (isinstance(out, tuple) and al...
class OSNet(BaseNet): def __init__(self, num_classes, blocks, layers, channels, feature_dim=512, loss='softmax', pool='avg', **kwargs): super(OSNet, self).__init__() num_blocks = len(blocks) assert (num_blocks == len(layers)) assert (num_blocks == (len(channels) - 1)) self.lo...
def r_input(val):
    """Prompt for input with Python 2 ``input``-style semantics.

    WARNING: on Python 3 this evaluates whatever the user types via
    ``eval`` — never use it on untrusted input.
    """
    if sys.version_info[0] >= 3:
        # Python 2's input() evaluated the line; reproduce that here.
        return eval(input(val))
    return raw_input(val)
def check_items(all_dict, names, deprecated, others, module_name, dots=True): num_all = len(all_dict) num_ref = len(names) output = '' output += ('Non-deprecated objects in __all__: %i\n' % num_all) output += ('Objects in refguide: %i\n\n' % num_ref) (only_all, only_ref, missing) = compare(all_d...
def define_student(depth, width): definitions = {18: [2, 2, 2, 2], 34: [3, 4, 6, 5]} assert (depth in list(definitions.keys())) widths = [int((w * width)) for w in (64, 128, 256, 512)] blocks = definitions[depth] def gen_block_params(ni, no): return {'conv0': utils.conv_params(ni, no, 3), 'c...
def train_val_test(): torch.backends.cudnn.benchmark = True if getattr(FLAGS, 'distributed', False): gpu_id = init_dist(init_method=getattr(FLAGS, 'dist_url', None)) else: gpu_id = None if (getattr(FLAGS, 'use_diff_seed', False) and (not getattr(FLAGS, 'stoch_valid', False))): mp...
def rand(*args, torch_device=None, **kwargs):
    """torch.rand wrapper defaulting to the module-level ``device``.

    Args:
        *args: shape arguments forwarded to torch.rand.
        torch_device: target device; falls back to the global ``device``.
        **kwargs: extra keyword arguments forwarded to torch.rand.
    """
    target = device if torch_device is None else torch_device
    return torch.rand(*args, **kwargs, device=target)
def extract_features(model, data_loader, print_freq=10): model.eval() batch_time = AverageMeter() data_time = AverageMeter() features = OrderedDict() labels = OrderedDict() end = time.time() for (i, (imgs, fnames, pids, _)) in enumerate(data_loader): data_time.update((time.time() - e...
class InternalServerError(HTTPException): code = 500 description = 'The server encountered an internal error and was unable to complete your request. Either the server is overloaded or there is an error in the application.' def __init__(self, description=None, response=None, original_exception=None): ...
class guaranteed_datapipes_determinism(object): prev: bool def __init__(self) -> None: global _determinism self.prev = _determinism _determinism = True def __enter__(self) -> None: pass def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: glo...
def set_seed(args):
    """Seed the python, NumPy and TensorFlow RNGs for reproducibility."""
    seed = args.seed
    random.seed(seed)
    np.random.seed(seed)
    tf.random.set_seed(seed)
def create_safe_directory(directory, logger=None): if os.path.exists(directory): if (logger is not None): warn = 'Directory {} already exists. Archiving it to {}.zip' logger.warning(warn.format(directory, directory)) shutil.make_archive(directory, 'zip', directory) sh...
def do_analyse_snli(file_path, dev=True, use_loss=False, stop=None): results = [] with open(file_path, 'r', encoding='utf-8') as file: find_entry = False output = [0, 0.0, 0.0, 0.0, 0.0] for line in file: if (not find_entry): if line.startswith('data round'): ...
class ModelB(optplan.ProblemGraphNode.Schema):
    """Problem-graph node schema named 'ModelB' with one integer field."""
    # Type discriminator used when (de)serializing this node.
    type = types.StringType(default='ModelB')
    # Sole payload field; semantics defined by the consumer of the schema.
    int_field = types.IntType()
class ResidualGroup(nn.Module): def __init__(self, conv, n_feat, kernel_size, reduction, depth): super(ResidualGroup, self).__init__() rcab = (lambda : RCAB(conv, n_feat, kernel_size, reduction)) m = [rcab() for _ in range(depth)] m.append(conv(n_feat, n_feat, kernel_size)) s...
def register_Ns3LteRrcSapRrcConnectionReconfiguration_methods(root_module, cls): cls.add_constructor([]) cls.add_constructor([param('ns3::LteRrcSap::RrcConnectionReconfiguration const &', 'arg0')]) cls.add_instance_attribute('haveMeasConfig', 'bool', is_const=False) cls.add_instance_attribute('haveMobil...
def test_regular_numpy_1():
    """from_datashape('5 * int64') yields a RegularType that round-trips."""
    source = '5 * int64'
    parsed = ak.types.from_datashape(source, highlevel=False)
    assert isinstance(parsed, ak.types.RegularType)
    # str() must reproduce the original datashape text exactly.
    assert str(parsed) == source
def serialize_resource_info(resource_info): def serialize_machine(m): return ('%s:%s:%s' % (m['hostname'], ','.join([str(port) for port in m['port']]), ','.join([str(gpu) for gpu in m['gpus']]))) def serialize_machines(machines): return '+'.join([serialize_machine(m) for m in machines]) retu...
def test_original_problem():
    """Regression test for the originally-reported nested mean/var problem."""
    nested = ak.Array([[[1, 2], [1]], [[1], [1], [1]]])
    assert ak.mean(nested) == pytest.approx(1.0)
    assert ak.var(nested) == pytest.approx(0.0)
def split_string_at_last_occurence_of_certain_char(string, char):
    """Split ``string`` around the final occurrence of ``char``.

    Returns:
        (left, right): the text before and after the last ``char``;
        the separator itself is dropped.

    Raises:
        ValueError: if ``char`` never occurs in ``string``.
    """
    head, tail = string.rsplit(char, 1)
    return head, tail
class QUESST14Testset(Dataset): def __init__(self, split, **kwargs): assert (split in ['dev', 'eval']) dataset_root = Path(kwargs['quesst2014_root']) doc_paths = get_audio_paths(dataset_root, 'language_key_utterances.lst') query_paths = get_audio_paths(dataset_root, f'language_key_{s...
def _create_gpu_usage(n_gpus: int):
    """Hold a tiny tensor on each of the first ``n_gpus`` GPUs.

    References are kept in the module-level ``gpu_fake_usage`` list so the
    allocations persist and every device reports some memory usage.
    """
    global gpu_fake_usage
    for idx in range(n_gpus):
        tensor = torch.FloatTensor([0.0])
        # Bug fix: Tensor.cuda() returns a *copy* on the device; the
        # original discarded it and appended the CPU tensor, so nothing
        # actually stayed resident on the GPU.
        tensor = tensor.cuda(idx)
        gpu_fake_usage.append(tensor)
def center_crop(image, height, width, crop_proportion): shape = tf.shape(image) image_height = shape[0] image_width = shape[1] (crop_height, crop_width) = _compute_crop_shape(image_height, image_width, (height / width), crop_proportion) offset_height = (((image_height - crop_height) + 1) // 2) o...
def _parse_letter_version(letter, number): if letter: if (number is None): number = 0 letter = letter.lower() if (letter == 'alpha'): letter = 'a' elif (letter == 'beta'): letter = 'b' elif (letter in ['c', 'pre', 'preview']): l...
def mark_as_saver(method):
    """Decorator marking ``method`` as a checkpoint saver.

    The method must be callable as (instance, path); this is verified at
    decoration time so misuse fails fast.

    Raises:
        TypeError: if the method's signature cannot bind (instance, path).
    """
    sig = inspect.signature(method)
    try:
        sig.bind(object(), pathlib.Path('testpath'))
    except TypeError as err:
        MSG = 'Checkpoint saver must match signature (instance, path)'
        # Chain the original binding error for easier debugging.
        raise TypeError(MSG) from err
    method._speechbrain_saver = True
    return method
class SpatialGate(nn.Module): def __init__(self): super(SpatialGate, self).__init__() kernel_size = 5 self.compress = ChannelPool() self.spatial = BasicConv(2, 1, kernel_size, stride=1, padding=((kernel_size - 1) // 2), relu=False) def forward(self, x): x_compress = self....
_SAMPLERS.register_module() class OHEMPixelSampler(BasePixelSampler): def __init__(self, context, thresh=None, min_kept=100000): super(OHEMPixelSampler, self).__init__() self.context = context assert (min_kept > 1) self.thresh = thresh self.min_kept = min_kept def sample(...
def main(): if (len(sys.argv) < 3): print('Not enough arguments\n') sys.exit() filename_A = sys.argv[1] filename_B = sys.argv[2] alpha = float(sys.argv[3]) with open(filename_A) as f: data_A = f.read().splitlines() with open(filename_B) as f: data_B = f.read().spl...
class RegressionModelConfig(PretrainedConfig):
    """Configuration holding scalar parameters ``a`` and ``b`` plus a
    ``double_output`` flag; ``hidden_size`` is fixed at 1."""

    def __init__(self, a=0, b=0, double_output=False, **kwargs):
        super().__init__(**kwargs)
        self.a = a
        self.b = b
        self.double_output = double_output
        # Fixed at 1 — presumably a scalar regression head; confirm.
        self.hidden_size = 1
.parametrize('inshape, kernel, multiplier, outshape', [((2, 2, 10, 10), (3, 2), 1, (2, 2, 8, 9)), ((2, 2, 10, 10), (3, 2), 2, (2, 4, 8, 9))]) def test_parametric_function_2d(inshape, kernel, multiplier, outshape): base_axis = (len(inshape) - 3) sample_channels = inshape[base_axis] outmap_channels = (sample_...
def radam(params: List[Tensor], grads: List[Tensor], exp_avg: List[Tensor], exp_avg_sq: List[Tensor], states: List[Dict], *, beta1: float, beta2: float, lr: float, weight_decay: float, eps: float): rho_inf = ((2 / (1 - beta2)) - 1) rho_t_list = [(rho_inf - (((2 * state['step']) * (beta2 ** state['step'])) / (1 ...
def main(_):
    """Entry point: download PDB files with the configured thread count."""
    thread_count = get_effective_download_threads(FLAGS.max_threads)
    download_pdb_files(thread_count)