code
stringlengths
101
5.91M
def exc_iter(*args): value = [None] def iterate(): for v in itertools.product(*args): value[0] = v (yield v) try: (yield iterate()) except Exception: import traceback msg = ('At: %r\n%s' % (repr(value[0]), traceback.format_exc())) raise Ass...
def _make_encoder(backbone, features, use_pretrained, groups=1, expand=False, exportable=True, hooks=None, use_vit_only=False, use_readout='ignore'): if (backbone == 'vitl16_384'): pretrained = _make_pretrained_vitl16_384(use_pretrained, hooks=hooks, use_readout=use_readout) scratch = _make_scratch(...
class FakeNode(Node): def __init__(self, name, tl, **kwargs): Node.__init__(self, name, tl) self.msg_log = [] self.resource_manager = FakeResourceManager(self) def receive_message(self, src: str, msg: 'Message'): self.msg_log.append((self.timeline.now(), src, msg)) for pr...
class RandomListenerLearner(Learner): def train(self, training_instances, validation_instances='ignored', metrics='ignored'): self.num_params = 0 def predict_and_score(self, eval_instances): predict = ([(128, 128, 128)] * len(eval_instances)) score = ([((- 3.0) * np.log(256.0))] * len(ev...
class Registry(dict): def __init__(self) -> None: super().__init__() self._information: MutableMapping[(str, Mapping)] = {} def register(self, obj: Any, info: Optional[Mapping[(Any, Any)]]=None) -> Any: name = obj.__name__ if (name in self): warnings.warn(f'Encountere...
class ImageDataManager(DataManager): data_type = 'image' def __init__(self, root='', sources=None, targets=None, height=256, width=128, transforms='random_flip', k_tfm=1, norm_mean=None, norm_std=None, use_gpu=True, split_id=0, combineall=False, load_train_targets=False, batch_size_train=32, batch_size_test=32,...
class SetKernelSpecPreprocessor(preprocessors.Preprocessor): def preprocess(self, nb, resources): if (('kernelspec' in nb.metadata) and (nb.metadata['kernelspec']['name'] != 'python3')): print('Incorrect kernelspec:', nb.metadata['kernelspec']) nb.metadata['kernelspec'] = {'display_name'...
def make_divisible(v, divisor=8, min_value=None):
    """Round *v* to the nearest multiple of *divisor*, never below *min_value*.

    If rounding would shrink the value by more than 10%, one extra *divisor*
    step is added back (common channel-count rounding rule).
    """
    floor = min_value or divisor
    rounded = int(v + divisor / 2) // divisor * divisor
    result = max(floor, rounded)
    # Don't let rounding drop the value by more than 10%.
    return result + divisor if result < 0.9 * v else result
class Simulator(): def __init__(self, env, agent): self.env = env self.agent = agent def play_one_episode(self, epsilon, training=True): self.agent.epsilon = epsilon (state, valid_actions) = self.env.reset() done = False prev_cum_reward = 0 while (not done...
class ConcatDataset(_ConcatDataset): def __init__(self, datasets): super(ConcatDataset, self).__init__(datasets) if hasattr(datasets[0], 'flag'): flags = [] for i in range(0, len(datasets)): flags.append(datasets[i].flag) self.flag = np.concatenate...
def get_correlations(model, dataloaders, tier=None, device='cpu', as_dict=False, per_neuron=True, **kwargs): correlations = {} dl = (dataloaders[tier] if (tier is not None) else dataloaders) for (k, v) in dl.items(): (target, output) = model_predictions(dataloader=v, model=model, data_key=k, device=...
def camera_data() -> T.Dict[str, T.Any]:
    """Build the class-template data documenting sf.Camera's projection API."""
    documented_functions = [
        sf.Camera.pixel_from_camera_point,
        sf.Camera.camera_ray_from_pixel,
        sf.Camera.maybe_check_in_view,
        sf.Camera.in_view,
    ]
    return class_template_data(sf.Camera, documented_functions)
_properties class PipelineScope(Map): init_size = SymbolicProperty(default=0, desc='Number of initialization iterations.') init_overlap = Property(dtype=bool, default=True, desc='Whether to increment regular map indices during initialization.') drain_size = SymbolicProperty(default=1, desc='Number of drain ...
class Text(Action):
    """Action that returns the matched text unchanged (identity action)."""

    def perform(self, token_stream, text):
        # The raw matched text is kept as-is; token_stream is ignored.
        return text

    def __repr__(self):
        return 'TEXT'
class BinL1SegmLoss(_SegLoss): def __init__(self, smooth=1.0): super(BinL1SegmLoss, self).__init__() def forward(self, pred_m, true_m, gate=None): assert (pred_m.ndim == 2), "'pred_m.ndim' = {}. must be {}.".format(pred_m.ndim, 2) assert (true_m.ndim == 2), "'true_m.ndim' = {}. must be {...
def get_fname(order):
    """Return the derivative label for *order*: 'f', 'df', 'd2f', 'd3f', ...

    Orders below 1 fall back to the plain function name 'f'.
    """
    if order > 1:
        return 'd{}f'.format(order)
    return 'df' if order == 1 else 'f'
class NearestNDInterpolator(NDInterpolatorBase): def __init__(self, x, y, rescale=False, tree_options=None): NDInterpolatorBase.__init__(self, x, y, rescale=rescale, need_contiguous=False, need_values=False) if (tree_options is None): tree_options = dict() self.tree = cKDTree(sel...
def preprocess_for_train(image, height, width, random_mirror, fast_mode=False, scope=None): with tf.name_scope(scope, 'distort_image', [image, height, width]): if (image.dtype != tf.float32): image = tf.image.convert_image_dtype(image, dtype=tf.float32) else: cast = tf.cast(i...
def add_tabular_output(file_name, relative_to_snapshot_dir=False):
    """Register *file_name* as a tabular log sink (opened in write mode).

    When *relative_to_snapshot_dir* is true, the path is resolved against the
    module-level snapshot directory before registration.
    """
    target = (
        osp.join(_snapshot_dir, file_name)
        if relative_to_snapshot_dir
        else file_name
    )
    _add_output(target, _tabular_outputs, _tabular_fds, mode='w')
def apply_adjustment_delete_closeby_nodes(graph, adjustment): thr = 9.5 for (k, v) in adjustment.items(): if (len(v) >= 4): ds = [] for vv in v: ds.append(((1.0 - distance(vv, (0, 0))) * thr)) sorted(ds) gap = (sum(ds[0:4]) / 2.0) ...
def gen_split(root_dir, stackSize): DatasetX = [] DatasetY = [] DatasetF = [] Labels = [] NumFrames = [] root_dir = os.path.join(root_dir, 'flow_x') for dir_user in sorted(os.listdir(root_dir)): class_id = 0 dir = os.path.join(root_dir, dir_user) for target in sorted(...
def get_config():
    """Return the library's default config with the batch size overridden to 500."""
    cfg = default_lib.get_config()
    cfg.batch_size = 500
    return cfg
_start_docstrings('\n SegFormer Model transformer with an image classification head on top (a linear layer on top of the final hidden\n states) e.g. for ImageNet.\n ', SEGFORMER_START_DOCSTRING) class SegformerForImageClassification(SegformerPreTrainedModel): def __init__(self, config): super().__i...
class GVRT_Config(): def __init__(self, ste_flag): self.align_loss = True self.expl_loss = True self.align_loss_lambda = 1.0 self.expl_loss_lambda = 1.0 self.ste_flag = ste_flag self.proj_size = 128 self.lstm_size = 128 self.embed_size = (512 if (not s...
def check_backward_validity(inputs):
    """Warn when no checkpointed input requires grad (all gradients will be None)."""
    # `not any(p)` is rewritten as `all(not p)` — identical, including for
    # empty `inputs` (both warn).
    if all(not inp.requires_grad for inp in inputs):
        warnings.warn('None of the inputs have requires_grad=True. Gradients will be None')
class LyndonWords_evaluation(UniqueRepresentation, Parent): def __init__(self, e): self._e = e self._words = FiniteWords(len(e)) from sage.categories.enumerated_sets import EnumeratedSets Parent.__init__(self, category=EnumeratedSets().Finite(), facade=(self._words,)) def __repr_...
class TamariIntervalPosets_all(DisjointUnionEnumeratedSets, TamariIntervalPosets): def __init__(self): DisjointUnionEnumeratedSets.__init__(self, Family(NonNegativeIntegers(), TamariIntervalPosets_size), facade=True, keepkey=False, category=(Posets(), EnumeratedSets(), Monoids())) def _repr_(self) -> st...
class BaseRecognizer(BaseModule, metaclass=ABCMeta): def __init__(self, init_cfg=None): super().__init__(init_cfg=init_cfg) self.fp16_enabled = False def extract_feat(self, imgs): pass def forward_train(self, imgs, img_metas, **kwargs): pass def simple_test(self, img, img...
class TestRemez(object): def test_bad_args(self): assert_raises(ValueError, remez, 11, [0.1, 0.4], [1], type='pooka') def test_hilbert(self): N = 11 a = 0.1 h = remez(11, [a, (0.5 - a)], [1], type='hilbert') assert_((len(h) == N), 'Number of Taps') assert_array_al...
def _prod(a, axis=None, dtype=None, out=None, keepdims=False, initial=_NoValue, where=True):
    """Compute the product of array elements over the given axis via umr_prod."""
    # Thin wrapper around the C-level reduction (umr_prod, i.e.
    # np.multiply.reduce); arguments are passed positionally because the
    # reduction's C signature is positional — do not convert to keywords.
    return umr_prod(a, axis, dtype, out, keepdims, initial, where)
def nll_loss(input, target, weight=None, size_average=None, ignore_index=(- 100), reduce=None, reduction='elementwise_mean'): if ((size_average is not None) or (reduce is not None)): reduction = _Reduction.legacy_get_string(size_average, reduce) dim = input.dim() if (dim < 2): raise ValueErr...
def CloseExpression(clean_lines, linenum, pos): line = clean_lines.elided[linenum] if ((line[pos] not in '({[<') or Match('<[<=]', line[pos:])): return (line, clean_lines.NumLines(), (- 1)) (end_pos, stack) = FindEndOfExpressionInLine(line, pos, []) if (end_pos > (- 1)): return (line, li...
def nested_symbol(A: dace.float64[N], B: dace.float64[N]):
    """DaCe program invoking the `nested` program on two halves of A and B."""
    # First call covers elements [0, 5) with factor 0.5 (positional);
    # second covers [5, N) with factor 2.0 (keyword form).
    # NOTE(review): presumably assumes N >= 5 so both slices are valid — confirm.
    nested(A[0:5], B[0:5], 0.5)
    nested(A=A[5:N], B=B[5:N], factor=2.0)
def test_fetch_asframe(fetch_covtype_fxt): pytest.importorskip('pandas') bunch = fetch_covtype_fxt(as_frame=True) assert hasattr(bunch, 'frame') frame = bunch.frame assert (frame.shape == (581012, 55)) assert (bunch.data.shape == (581012, 54)) assert (bunch.target.shape == (581012,)) col...
class StringAttribute(Attribute): def __init__(self, name): super().__init__(name) self.type_name = 'string' def parse_attribute(cls, name, attr_string): attr_string = attr_string.lower().strip() if (attr_string[:len('string')] == 'string'): return cls(name) e...
.gpu def test_types(): with change_default(blas, 'cuBLAS'): _test_matmul('cuBLAS double', dace.float64, 'cuBLAS', dace.StorageType.GPU_Global, eps=1e-06) _test_matmul('cuBLAS half', dace.float16, 'cuBLAS', dace.StorageType.GPU_Global, eps=1) _test_matmul('cuBLAS scmplx', dace.complex64, 'cuB...
def test__sort_leaderboard_rank(): rank = 'f1' metrics = METRICS score = pd.DataFrame({'pipeline': range(5), 'f1': range(5)}) expected_return = pd.DataFrame({'pipeline': range(5)[::(- 1)], 'rank': range(1, 6), 'f1': range(5)[::(- 1)]}) returned = benchmark._sort_leaderboard(score, rank, metrics) ...
# NOTE(review): the call below looks like a decorator whose leading '@' (and
# possibly a module prefix) was lost during extraction — likely
# `@test_utils.test(debug=True, short_circuit_operators=True)`. The inner
# helpers were presumably `@ti.func`/`@ti.kernel` as well — confirm against
# the original repository. Code left byte-identical.
_utils.test(debug=True, short_circuit_operators=True)
def test_and_shorted():
    """Check that `and` short-circuits: explode() must never be evaluated."""
    a = ti.field(ti.i32, shape=10)

    def explode() -> ti.u1:
        # Out-of-bounds access; only safe because it is never called.
        return ti.u1(a[(- 1)])

    def func() -> ti.u1:
        # False and X short-circuits, so explode() is not invoked.
        return (False and explode())

    assert (func() == False)
def test_eval_api() -> None:
    """Exercise the eval-mode API while both submodules are in training mode."""
    impl = DummyImpl()
    # Switch both linear layers to training mode first.
    for layer in (impl.fc1, impl.fc2):
        layer.train()
    impl.eval_api_func()
def main_lab_lab(tsv_dir, lab_dir, lab_name, lab_sets, ref_dir, ref_name, pad_len=0, upsample=1, verbose=False): tsv_dir = (lab_dir if (tsv_dir is None) else tsv_dir) uid2refs = {} for s in lab_sets: uid2refs.update(read_lab(f'{tsv_dir}/{s}.tsv', f'{ref_dir}/{s}.{ref_name}')) uid2hyps = {} f...
(python=ALL_PYTHONS, reuse_venv=True) def tests(session): session.install('--upgrade', '--editable', '.[all,test]') session.install('--upgrade', 'pytest') if ('coverage' in session.posargs): runner_commands = ['coverage', 'run', '--append', '--module', 'pytest'] session.posargs.pop(session.p...
class DrinfeldModule_charzero(DrinfeldModule): _method def _compute_coefficient_exp(self, k): k = ZZ(k) if k.is_zero(): return self._base.one() q = self._Fq.cardinality() c = self._base.zero() for i in range(k): j = (k - i) c += (self._...
_model def caformer_m364_in21k(pretrained=False, **kwargs): model = MetaFormer(depths=[3, 12, 18, 3], dims=[96, 192, 384, 576], token_mixers=[SepConv, SepConv, Attention, Attention], head_fn=MlpHead, **kwargs) model.default_cfg = default_cfgs['caformer_m364_in21k'] if pretrained: state_dict = torch....
def adaptive_avg_pool3d(input: Tensor, output_size: BroadcastingList3[int]) -> Tensor:
    r"""Apply 3D adaptive average pooling over a quantized input.

    The underlying ``torch.nn.functional.adaptive_avg_pool3d`` already
    dispatches correctly for quantized tensors, so this wrapper only
    validates that the input is quantized.

    Fix: the annotation used ``BroadcastingList2`` for a 3-D pooling op;
    it is corrected to ``BroadcastingList3`` (an int broadcasts to D x H x W).

    Args:
        input: quantized tensor (e.g. ``torch.quint8``).
        output_size: target output size D x H x W (int or 3-sequence).

    Returns:
        The adaptively pooled quantized tensor.

    Raises:
        ValueError: if ``input`` is not quantized.
    """
    if not input.is_quantized:
        raise ValueError("Input to 'quantized.functional.adaptive_avg_pool3d' must be quantized!")
    return torch.nn.functional.adaptive_avg_pool3d(input, output_size)
def main_test():
    """Evaluate the saved DialogModel on the held-out test split with NLL loss."""
    _, X_test, _, y_test = load_dialogs_and_labels('data/dialogs_and_labels.pickle')
    model = DialogModel.load()
    criterion = nn.NLLLoss()
    measure_model_quality(model, criterion, X_test, y_test, 0, False)
def test(): layout = ak.to_layout({'x': 1, 'y': 1}) assert (ak.backend(layout) == 'cpu') assert (layout.backend is NumpyBackend.instance()) layout_tt = layout.to_backend('typetracer') assert (ak.backend(layout_tt) == 'typetracer') assert (layout_tt.backend is TypeTracerBackend.instance()) wi...
def test_clip(): default_clipid = 'development/airport-barcelona-0-0-a' dataset = tau2019uas.Dataset(TEST_DATA_HOME) clip = dataset.clip(default_clipid) expected_attributes = {'audio_path': os.path.join(os.path.normpath('tests/resources/sound_datasets/tau2019uas/'), 'TAU-urban-acoustic-scenes-2019-devel...
def unpickle_extension(code): from copyreg import _inverted_registry, _extension_cache nil = [] obj = _extension_cache.get(code, nil) if (obj is not nil): return obj key = _inverted_registry.get(code) if (not key): raise ValueError(('unregistered extension code %d' % code)) o...
def skip_member(app, what, name, obj, skip, options):
    """autodoc-skip-member hook: force-skip members that have no docstring.

    Returns True to skip an undocumented object, and None to defer to
    autodoc's default decision for documented ones.
    """
    return True if obj.__doc__ is None else None
def load_from_yml(filename: str) -> MonitorDescriptionList:
    """Parse *filename* as YAML and wrap the result in a MonitorDescriptionList.

    Uses ``yaml.safe_load`` instead of a bare ``yaml.load``: calling ``load``
    without an explicit ``Loader`` can construct arbitrary Python objects from
    the file (a code-execution risk) and is an error in modern PyYAML. Plain
    monitor-description documents only need the safe loader; if the config
    files rely on custom ``!!python`` tags this would need a custom Loader
    instead — confirm against the actual config files.
    """
    with open(filename) as fp:
        return MonitorDescriptionList(yaml.safe_load(fp))
def pytest_addoption(parser): parser.addoption('--matrixdesign', default=False, help='sub-sample estimators in tests by os/version matrix partition design') parser.addoption('--only_cython_estimators', default=False, help='test only cython estimators, with tag requires_cython=True') parser.addoption('--only...
class TFConvNextPreTrainedModel(metaclass=DummyObject):
    """Import-time stand-in used when the TensorFlow backend is unavailable."""

    _backends = ['tf']

    def __init__(self, *args, **kwargs):
        # Any attempt to instantiate raises a clear "requires tf" error.
        requires_backends(self, self._backends)
def convert_dtypes(dtype_template, order_code):
    """Return a copy of *dtype_template* with every dtype in *order_code* byte order.

    The template maps keys to anything ``np.dtype`` accepts; the input mapping
    itself is not modified, and the copy keeps the template's mapping type.
    """
    converted = dtype_template.copy()
    for key, spec in converted.items():
        converted[key] = np.dtype(spec).newbyteorder(order_code)
    return converted
def yield_chunks(data, size):
    """Yield successive *size*-element list slices of *data* (last may be shorter).

    *data* is materialized into a list first, so arbitrary iterables are accepted
    and every chunk is a list.
    """
    buffered = list(data)
    for offset in range(0, len(buffered), size):
        yield buffered[offset:offset + size]
# NOTE(review): the call below is almost certainly a decorator whose leading
# '@' was lost during extraction (cf. numpy.lib.scimath's
# `@array_function_dispatch(_unary_dispatcher)`); left byte-identical here.
_function_dispatch(_unary_dispatcher)
def arccos(x):
    """Return the inverse cosine of x, scimath-style.

    Presumably `_fix_real_abs_gt_1` promotes real inputs with |x| > 1 to
    complex so the result stays defined (the name suggests this — confirm),
    then the computation is delegated to nx.arccos.
    """
    x = _fix_real_abs_gt_1(x)
    return nx.arccos(x)
def calculate_fid_for_all_tasks(args, domains, step, mode): print('Calculating FID for all tasks...') fid_values = OrderedDict() for trg_domain in domains: src_domains = [x for x in domains if (x != trg_domain)] for src_domain in src_domains: task = ('%s2%s' % (src_domain, trg_do...
def feedInput(features, weights=None, segmentName=None):
    """Convenience wrapper: forward *features* (plus optional weights and
    segment name) to feedInputAndTarget without supplying a target."""
    forwarded = dict(features=features, weights=weights, segmentName=segmentName)
    feedInputAndTarget(**forwarded)
class ActiveTaskCounterPosix(): def __init__(self, task_number): self._active_tasks = mp.Semaphore(task_number) def __repr__(self): return ('ActiveTaskCounter(value=%s)' % self._active_tasks.get_value()) def task_start(self): logger.debug('_signal_task_start called') if self....
def _quadratic_coeff(signal): zi = ((- 3) + (2 * sqrt(2.0))) K = len(signal) yplus = zeros((K,), signal.dtype.char) powers = (zi ** arange(K)) yplus[0] = (signal[0] + (zi * add.reduce((powers * signal)))) for k in range(1, K): yplus[k] = (signal[k] + (zi * yplus[(k - 1)])) output = z...
def _extract_version_from_fragment(fragment, canonical_name):
    """Parse the version string out of a filename *fragment*.

    Returns the text after the name/version separator, or None when the
    separator cannot be located or nothing follows it.
    """
    try:
        sep_index = _find_name_version_sep(fragment, canonical_name)
    except ValueError:
        # No separator found for this project name.
        return None
    remainder = fragment[sep_index + 1:]
    # Empty remainder means the fragment ends at the separator.
    return remainder or None
class ResNetBottle3d(nn.Module): def __init__(self, in_dim, mid_dim, out_dim, resolution=64): super(ResNetBottle3d, self).__init__() (ksize, stride, padding) = (4, 2, 1) self.down_sampler0 = nn.Sequential(nn.Conv3d(in_channels=in_dim, out_channels=mid_dim, kernel_size=4, stride=2, padding=1)...
def _compare_owner_value(context_id, rref, grad):
    """Return True if the autograd context holds exactly *grad* for the
    tensor owned by *rref* (element-wise comparison via torch.equal)."""
    grads = dist_autograd.get_gradients(context_id)
    stored = grads[rref.local_value()]
    return torch.equal(stored, grad)
def test_broadcast_gh7933_regression():
    """Regression test for gh-7933: truncnorm.logpdf must broadcast array-valued
    `a`/`loc` against scalar `b`/`scale` without raising."""
    x = np.array([3.0, 2.0, 1.0])
    loc = np.array([6.0, 5.0, 4.0])
    lower = (1.5 - loc) / 3.0
    stats.truncnorm.logpdf(x, a=lower, b=np.inf, loc=loc, scale=3.0)
def register_Ns3SequenceNumber32_methods(root_module, cls): cls.add_binary_comparison_operator('!=') cls.add_binary_numeric_operator('+', root_module['ns3::SequenceNumber32'], root_module['ns3::SequenceNumber32'], param('ns3::SequenceNumber< unsigned int, int > const &', u'right')) cls.add_binary_numeric_op...
def eval_complex(vec, conn, geo, mode, shape, bf=None): (n_el, n_qp, dim, n_en, n_comp) = shape if (mode == 'val'): function = terms.dq_state_in_qp rout = nm.empty((n_el, n_qp, n_comp, 1), dtype=nm.float64) iout = nm.empty((n_el, n_qp, n_comp, 1), dtype=nm.float64) if (bf is not ...
def penalty_builder(penalty_config): if (penalty_config == ''): return (lambda x, y: y) (pen_type, alpha) = penalty_config.split('_') alpha = float(alpha) if (pen_type == 'wu'): return (lambda x, y: length_wu(x, y, alpha)) if (pen_type == 'avg'): return (lambda x, y: length_a...
class Partition19(nn.Module): LAYER_SCOPES = ['T5ForConditionalGeneration/T5Stack[encoder]/T5Block[22]/T5LayerFF[1]/T5DenseReluDense[DenseReluDense]/Linear[wi]', 'T5ForConditionalGeneration/T5Stack[encoder]/T5Block[22]/T5LayerFF[1]/T5DenseReluDense[DenseReluDense]/Dropout[dropout]', 'T5ForConditionalGeneration/T5St...
def config_cl(): parser = argparse.ArgumentParser(description='Configure the experiment from command line') parser.add_argument('-agent', '--num_agent', default='3', type=int, help='Number of agents') parser.add_argument('-item', '--num_item', default='1', type=int, help='Number of items') parser.add_ar...
.parametrize('attr', simulation_state_scalar_attrs) def test_hdf_simulation_state_scalars(hdf_file_path, simulation_verysimple, attr): path = 'simulation_state/scalars' expected = pd.read_hdf(hdf_file_path, path)[attr] actual = getattr(simulation_verysimple.simulation_state, attr) if hasattr(actual, 'cg...
def register_Ns3FfMacSchedSapProviderSchedDlMacBufferReqParameters_methods(root_module, cls): cls.add_constructor([]) cls.add_constructor([param('ns3::FfMacSchedSapProvider::SchedDlMacBufferReqParameters const &', 'arg0')]) cls.add_instance_attribute('m_ceBitmap', 'ns3::CeBitmap_e', is_const=False) cls....
def check_dir_and_mkdir(path):
    """Ensure the directory for *path* exists, creating it if needed.

    *path* is treated as a directory when its basename contains no dot or it
    ends with '/'; otherwise it is assumed to be a file path and its parent
    directory is used. (Heuristic: dotted basenames are presumed filenames.)

    Fixes over the original:
    - ``exist_ok=True`` closes the check-then-create race when several
      processes hit the same path concurrently.
    - A bare filename (e.g. 'file.txt') yields an empty dirname; the original
      crashed on ``os.makedirs('')`` — now it is a no-op.
    """
    if '.' not in os.path.basename(path) or path.endswith('/'):
        dirname = path
    else:
        dirname = os.path.dirname(path)
    if dirname and not os.path.exists(dirname):
        print('make dirs:', dirname)
        os.makedirs(dirname, exist_ok=True)
    return
class TestEqual(TestArrayEqual): def setup(self): self._assert_func = assert_equal def test_nan_items(self): self._assert_func(np.nan, np.nan) self._assert_func([np.nan], [np.nan]) self._test_not_equal(np.nan, [np.nan]) self._test_not_equal(np.nan, 1) def test_inf_ite...
def test_recursive_cprop(): sdfg = dace.SDFG('program') a = sdfg.add_state() b = sdfg.add_state() sdfg.add_edge(a, b, dace.InterstateEdge(assignments=dict(i=1))) nsdfg = dace.SDFG('nested') b.add_nested_sdfg(nsdfg, None, {}, {}, symbol_mapping={'i': 'i + 1'}) nstate = nsdfg.add_state() t...
class CNNDecoder(nn.Module): class Decoder(nn.Module): def __init__(self, in_channel, out_channel, block_num, norm=nn.InstanceNorm3d, use_skip=False, bias=True): super(CNNDecoder.Decoder, self).__init__() self.__input = nn.Sequential(nn.Conv3d(in_channel, out_channel, 3, 1, 1, bias=b...
def convert_rules(prog): RULE_TYPES = {'join': JoinRule, 'product': ProductRule, 'project': ProjectRule} result = [] for rule in prog.rules: RuleType = RULE_TYPES[rule.type] (new_effect, new_conditions) = variables_to_numbers(rule.effect, rule.conditions) rule = RuleType(new_effect, ...
class GradientNoiseScale(): def __init__(self, beta=0.9998, eps=1e-08): self.beta = beta self.eps = eps self.ema_sq_norm = 0.0 self.ema_var = 0.0 self.beta_cumprod = 1.0 self.gradient_noise_scale = float('nan') def state_dict(self): return dict(self.__dict...
def merge_sparse_track(p_midi, CANDI_THRES=50, MIN_THRES=5): good_instruments = [] bad_instruments = [] good_instruments_idx = [] for instrument in p_midi.instruments: if (len(instrument.notes) < CANDI_THRES): bad_instruments.append(instrument) else: good_instrume...
def resnet50(nc, pretrained=False, progress=True, **kwargs):
    """Construct a ResNet-50 model with *nc* output classes.

    Delegates to the generic ``_resnet`` builder using the Bottleneck block
    and the [3, 4, 6, 3] stage layout that defines ResNet-50.

    Args:
        nc: number of output classes.
        pretrained: presumably makes ``_resnet`` load pretrained weights —
            confirm against ``_resnet``'s contract.
        progress: forwarded to ``_resnet`` (likely a download progress flag).
        **kwargs: forwarded to the underlying model constructor.
    """
    return _resnet('resnet50', nc, Bottleneck, [3, 4, 6, 3], pretrained, progress, **kwargs)
def uncrustify_config_file(level): level2 = '\nnl_collapse_empty_body=False\nnl_if_brace=Add\nnl_brace_else=Add\nnl_elseif_brace=Add\nnl_else_brace=Add\nnl_while_brace=Add\nnl_do_brace=Add\nnl_for_brace=Add\nnl_brace_while=Add\nnl_switch_brace=Add\nnl_after_case=True\nnl_namespace_brace=Remove\nnl_after_brace_open=...
class ResNet(nn.Module): def __init__(self, block, num_blocks, num_classes=10): super(ResNet, self).__init__() self.in_planes = 64 self.conv1 = conv3x3(3, 64) self.bn1 = nn.BatchNorm2d(64) self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1) self.layer2 ...
def setup_dist(): if dist.is_initialized(): return comm = MPI.COMM_WORLD backend = ('gloo' if (not th.cuda.is_available()) else 'nccl') if (backend == 'gloo'): hostname = 'localhost' else: hostname = socket.gethostbyname(socket.getfqdn()) os.environ['MASTER_ADDR'] = comm....
class MarianMTModel(metaclass=DummyObject):
    """Import-time stand-in used when the torch backend is not installed."""

    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        # Instantiation fails with a clear "requires torch" error.
        requires_backends(self, self._backends)
def test_ListOffsetArray_RecordArray_NumpyArray(): v2a = ak.contents.listoffsetarray.ListOffsetArray(ak.index.Index(np.array([1, 4, 4, 6], np.int64)), ak.contents.recordarray.RecordArray([ak.contents.numpyarray.NumpyArray(np.array([6.6, 1.1, 2.2, 3.3, 4.4, 5.5, 7.7]))], ['nest'])) roundtrip(v2a) array = ak....
class AudioEmbeddingModel(): def __init__(self, from_id, to_id, audio_shape=(598, 257, 2)): self.from_id = from_id self.to_id = to_id def build_model(audio_shape): ip = tf.keras.layers.Input(shape=audio_shape) x = tf.keras.layers.Conv2D(filters=64, kernel_size=(4, 4),...
class ResNet(nn.Module): def __init__(self, block: Type[Union[(BasicBlock, Bottleneck)]], layers: List[int], num_classes: int=1000, zero_init_residual: bool=False, groups: int=1, width_per_group: int=64, replace_stride_with_dilation: Optional[List[bool]]=None, norm_layer: Optional[Callable[(..., nn.Module)]]=None) ...
# NOTE(review): `_utils.test(...)` below looks like a decorator that lost its
# leading '@' during extraction (likely `@test_utils.test(arch=...)`);
# code kept byte-identical.
_utils.test(arch=supported_archs_taichi_ndarray)
def test_ndarray_numpy_io():
    """Run the shared ndarray<->numpy round-trip checker on supported archs."""
    _test_ndarray_numpy_io()
def has_batchnorms(model):
    """Return True if *model* contains any batch-norm layer (1d/2d/3d/sync)."""
    norm_types = (nn.BatchNorm1d, nn.BatchNorm2d, nn.BatchNorm3d, nn.SyncBatchNorm)
    # modules() visits the same module set as named_modules(); names unused.
    return any(isinstance(mod, norm_types) for mod in model.modules())
def check_if_row_is_locked(row: int, modified_rows: list, redundant_rows: list): if (row in map(get_index, modified_rows)): conflict = [x for x in modified_rows if (x.index == row)][0] return Information(True, True, conflict.presolver, ((('DETECTED CONFLICT for row ' + row.__str__()) + ' p ') + conf...
class MetaNeXt(nn.Module): def __init__(self, in_chans=3, num_classes=1000, depths=(3, 3, 9, 3), dims=(96, 192, 384, 768), token_mixers=nn.Identity, norm_layer=nn.BatchNorm2d, act_layer=nn.GELU, mlp_ratios=(4, 4, 4, 3), head_fn=MlpHead, drop_rate=0.0, drop_path_rate=0.0, ls_init_value=1e-06, **kwargs): supe...
class SokobanCtrlProblem(SokobanProblem): def __init__(self, cfg: Config): super(SokobanCtrlProblem, self).__init__(cfg=cfg) self._max_path_length = (np.ceil(((self._width / 2) + 1)) * self._height) self._reward_weights = {'player': 3, 'crate': 1, 'regions': 5, 'ratio': 2, 'dist-win': 0.0, '...
class KRTableauxSpinElement(KirillovReshetikhinTableauxElement): def e(self, i): if (i == self.parent()._cartan_type.special_node()): return KirillovReshetikhinTableauxElement.e(self, i) half = KirillovReshetikhinTableauxElement.e(self, i) if (half is None): return No...
class PretrainVideoTransformerTeacher(nn.Module): def __init__(self, img_size=224, patch_size=16, encoder_in_chans=3, encoder_num_classes=0, encoder_embed_dim=768, encoder_depth=12, encoder_num_heads=12, mlp_ratio=4.0, qkv_bias=False, qk_scale=None, drop_rate=0.0, attn_drop_rate=0.0, drop_path_rate=0.0, norm_layer=...
class Node(object): __slots__ = ['ntype', 'features', '_neighbors', 'rdkit_ix'] def __init__(self, ntype, features, rdkit_ix): self.ntype = ntype self.features = features self._neighbors = [] self.rdkit_ix = rdkit_ix def add_neighbors(self, neighbor_list): for neighbo...
def register_node_encoder(key: str, module: Any=None):
    """Register *module* under *key* in the global node-encoder registry.

    Thin wrapper over ``register_base``. When *module* is None,
    ``register_base`` presumably returns a decorator for deferred
    registration — confirm against ``register_base``'s contract.
    """
    return register_base(node_encoder_dict, key, module)
class MetricType(BenchmarkItem): name = 'get_metric' def __init__(self): self._items = {'kernel_elapsed_time_ms': kernel_executor, 'end2end_time_ms': end2end_executor} def init_taichi(arch: str, tag_list: list): if set(['kernel_elapsed_time_ms']).issubset(tag_list): ti.init(kerne...
class RansacLineHelper(object): def __init__(self): pass self.__complete_list_of_points: list = list() self.__max_iterations: float = 0 self.min_points_for_model: float = 0 self.threshold_error: float = 0 self.threshold_inlier_count: int = 0 def add_points(self, p...
class TestMakeNonUniformGrid(unittest.TestCase): def test_make_nonuniform_grid(self): SimBorders = [(- 1000), 1000, (- 1000), 1000, (- 1000), 1000] dx_default = [50, 50, 50] Box1 = {'pos': [(- 100), 0, 0], 'size': [100, 100, 100], 'meshsize': [10, 10, 10]} Box2 = {'pos': [0, 0, 0], '...
def count_continents(unique_countries): full_names = {'AF': 'Africa', 'AN': 'Antarctica', 'AS': 'Asia', 'EU': 'Europe', 'NA': 'North America', 'OC': 'Oceania', 'SA': 'South and Central America'} continents = {} for country in unique_countries.keys(): try: cntry_code = country_to_code(cou...
class DatasetLoader(): name = '' directory_name = '' url = '' url_archive_format: Optional[str] = None url_archive_contains_directory: bool = True expected_files: List[str] = [] description = '' source = '' data_subdirectory_name: Optional[str] = None def __init_subclass__(cls, *...
def visualize_rec(graph, value_map, name_prefix, pb_graph, executors_it=None): def inline_graph(subgraph, name, node): rec_value_map = {inp.unique(): value_map[val.unique()] for (inp, val) in zip(subgraph.inputs(), node.inputs())} visualize_rec(graph=subgraph, value_map=rec_value_map, name_prefix=na...