code
stringlengths
101
5.91M
def paramset_to_rootnames(paramset):
    """Translate a parameter set into ROOT-convention parameter name(s).

    Mapping:
      - the 'lumi' parameter        -> 'Lumi'
      - constrained scalar          -> 'alpha_<name>'
      - unconstrained scalar        -> '<name>'
      - non-scalar (vector) set     -> ['gamma_<name>_<i>', ...] one per element
    """
    name = paramset.name
    if name == 'lumi':
        return 'Lumi'
    if not paramset.is_scalar:
        return [f'gamma_{name}_{i}' for i in range(paramset.n_parameters)]
    return f'alpha_{name}' if paramset.constrained else f'{name}'
def target_update(critic: Model, target_critic: Model, tau: float) -> Model:
    """Soft (Polyak) update of the target critic.

    Leaf-wise: new_target = tau * critic + (1 - tau) * target_critic.
    Returns a new target critic; neither input is mutated.
    """
    def blend(param, target_param):
        return param * tau + target_param * (1 - tau)

    blended = jax.tree_util.tree_map(blend, critic.params, target_critic.params)
    return target_critic.replace(params=blended)
def create_session(config_dict: dict=None, force_as_default: bool=False) -> tf.compat.v1.Session: cfg = _sanitize_tf_config(config_dict) config_proto = tf.compat.v1.ConfigProto() for (key, value) in cfg.items(): fields = key.split('.') if (fields[0] not in ['rnd', 'env']): obj = ...
def sinc3(t):
    """Evaluate (t - sin t) / t**3 elementwise with a stable small-|t| branch.

    For |t| < 0.01 the direct formula loses precision, so a nested (Horner-style)
    Taylor expansion 1/6 * (1 - t^2/20 * (1 - t^2/42 * (1 - t^2/72))) is used.
    """
    eps = 0.01
    out = torch.zeros_like(t)
    small = torch.abs(t) < eps
    large = ~small
    tt = t[small] ** 2
    # Taylor series of (t - sin t)/t^3 around t = 0.
    out[small] = (1 / 6) * (1 - (tt / 20) * (1 - (tt / 42) * (1 - tt / 72)))
    out[large] = (t[large] - sin(t[large])) / t[large] ** 3
    return out
class T5PreTrainedModel(metaclass=DummyObject):
    """Import-time placeholder for T5PreTrainedModel when torch is unavailable.

    NOTE(review): standard transformers dummy-object pattern — `requires_backends`
    presumably raises an informative ImportError naming the missing backend;
    confirm against the helper's definition.
    """
    # Backends this real class needs; checked on instantiation.
    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['torch'])
@_utils.test()  # NOTE(review): restored '@' apparently lost in extraction — confirm upstream
def test_vdir():
    """vdir at pi/2 should point straight up: the unit vector [0, 1]."""
    def make_test():
        assert all(vdir(pi / 2) == [0, 1])
    make_test()
class Loss(Metric): def __init__(self, loss_fn, name=None): self.loss_fn = loss_fn if (name is None): name = 'loss' super().__init__(name=name) def _compute(self, y_pred, y_true): return self.loss_fn(y_pred, y_true) def worst(self, metrics): return maximum...
.parametrize('kernel_priors', [True, False]) .parametrize('likelihood_variance', [None, 1e-10, 10.0]) .parametrize('trainable_likelihood', [True, False]) def test_build_gpr_returns_correct_model(kernel_priors: bool, likelihood_variance: Optional[float], trainable_likelihood: bool) -> None: (qp, obs) = mock_data() ...
def get_free_gpu(min_mem=9000): try: with NamedTemporaryFile() as f: os.system(f'nvidia-smi -q -d Memory | grep -A5 GPU | grep Free > {f.name}') memory_available = [int(x.split()[2]) for x in open(f.name, 'r').readlines()] if (max(memory_available) < min_mem): war...
class Halo:
    """Plain container for halo properties, zero-initialised."""

    def __init__(self):
        # Three-vectors (each its own list, not shared).
        self.pos = [0.0, 0.0, 0.0]      # position
        self.pos_cm = [0.0, 0.0, 0.0]   # centre-of-mass position
        self.vel = [0.0, 0.0, 0.0]      # velocity
        self.l = [0.0, 0.0, 0.0]        # angular momentum
        # Scalar properties.
        self.vel_disp = 0.0             # velocity dispersion
        self.r = 0.0                    # radius
        self.m = 0.0                    # mass
        self.mp = 0                     # particle count (integer)
def BaggingEnsemble(*args, **kwargs):
    """Deprecated top-level alias for `ensemble.BaggingEnsemble`.

    Emits a deprecation warning pointing callers at the `ensemble` module,
    then forwards all arguments unchanged to the real implementation.
    """
    _top_level_deprecation_warning('BaggingEnsemble', 'ensemble')
    return ensemble.BaggingEnsemble(*args, **kwargs)
class MarkInfo:
    """Record for one profiler mark: an id, a [begin, end] time span in
    microseconds, and an optional free-form info string."""

    def __init__(self, mark_id, begin_usec, end_usec, info=''):
        self.mark_id = mark_id        # identifier of the mark
        self.begin_usec = begin_usec  # start time, microseconds
        self.end_usec = end_usec      # end time, microseconds
        self.info = info              # optional description
def _clean_and_truncate(text: str, max_num_words: Optional[int]=None) -> str: text = text.replace('\n', ' ') whitespace_tokens = text.split() if max_num_words: whitespace_tokens = whitespace_tokens[:max_num_words] return ' '.join(whitespace_tokens)
class GridAssigner(nn.Module):
    """Placeholder assigner module; `forward` is a no-op returning None."""

    def __init__(self):
        super(GridAssigner, self).__init__()

    def forward(self, x):
        # Intentionally unimplemented (stub).
        pass
class UnusedStatementsTestCaseVisitor(ModificationAwareTestCaseVisitor): _logger = logging.getLogger(__name__) def visit_default_test_case(self, test_case) -> None: self._deleted_statement_indexes.clear() primitive_remover = UnusedPrimitiveOrCollectionStatementVisitor() size_before = tes...
class SupCLIPBottleneckCAD(AbstractCLIPBottleneck):
    """Supervised CLIP bottleneck variant that plugs in the CAD bottleneck head."""

    def __init__(self, feature_dim, num_classes, num_domains, hparams, pretrained, idx2class):
        # Delegates entirely to the abstract base, selecting CADBottleneck as
        # the bottleneck class; all other arguments are forwarded unchanged.
        super(SupCLIPBottleneckCAD, self).__init__(feature_dim, num_classes, num_domains, hparams, pretrained, idx2class, CADBottleneck)
def fits_r(page, target, refutes):
    """Check whether this page's REFUTES claims fit within the remaining budget.

    Counts the page's claims that have at least one REFUTES-labelled evidence
    item (via the module-level `page_evidence` map) and returns True when that
    count plus the already-collected `refutes` does not exceed `target`.
    """
    claims = page_evidence[page]
    refuting = [
        claim_id
        for claim_id, evidence in claims.items()
        if any(ev['label'] == 'REFUTES' for ev in evidence)
    ]
    return len(refutes) + len(refuting) <= target
def setup_optimisers_and_schedulers(args, model): optimisers = get_optimisers(model=model, enc_optim_type='sgd', enc_lr=0.0006, enc_weight_decay=1e-05, enc_momentum=0.9, dec_optim_type='sgd', dec_lr=0.006, dec_weight_decay=1e-05, dec_momentum=0.9) schedulers = get_lr_schedulers(enc_optim=optimisers[0], dec_opti...
class TestRFFTFreq(): _if_array_api_backend('numpy.array_api') _if_array_api_backend('cupy') _api_compatible def test_definition(self, xp): device = SCIPY_DEVICE try: x = xp.asarray([0, 1, 2, 3, 4], dtype=xp.float64, device=device) x2 = xp.asarray([0, 1, 2, 3, 4, ...
def register_Ns3UanModesList_methods(root_module, cls): cls.add_output_stream_operator() cls.add_constructor([param('ns3::UanModesList const &', 'arg0')]) cls.add_constructor([]) cls.add_method('AppendMode', 'void', [param('ns3::UanTxMode', 'mode')]) cls.add_method('DeleteMode', 'void', [param('uint...
class SimpleObject(): def __init__(self, obj, name): self.obj = obj self.name = name def __str__(self): return f'<SimpleObject, name={self.name}, obj={self.obj}>' def __repr__(self): return f'<SimpleObject, name={self.name}, obj={self.obj}>' def load_state_dict(self, v): ...
def store_minimal(trial, model, working_dir='.', save_full_model=False, **kwargs): par_dir = os.path.join(working_dir, 'weights', ('trial_%s' % trial)) if os.path.isdir(par_dir): shutil.rmtree(par_dir) os.makedirs(par_dir) if save_full_model: model.save(os.path.join(working_dir, 'weights...
def load_config_from_yaml(path):
    """Load a YAML config file and wrap it in an EasyDict for attribute access.

    Args:
        path: Path to the YAML configuration file.

    Returns:
        EasyDict holding the parsed configuration.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original passed an open() result to yaml and leaked the handle).
    with open(path) as f:
        raw = yaml.safe_load(f)
    return EasyDict(raw)
class RepeatFactorInstanceTrainingSampler(Sampler): def __init__(self, dataset, config, num_replicas=None, rank=None, shuffle=True): self.shuffle = shuffle self.config = config if (num_replicas is None): if (not dist.is_available()): raise RuntimeError('Requires d...
def get_transformed_features(transformer, train, test):
    """Fit a transformer on training data, then transform the test data.

    NOTE(review): `transformer` is assumed to follow the sklearn fit/transform
    protocol and is mutated in place by `fit`; only the transformed test
    features are returned.
    """
    transformer.fit(train)
    return transformer.transform(test)
def test_one_cluster():
    """With n_clusters=1, every point gets label 0 and the single centre is the data mean."""
    X = np.array([[1, 2], [10, 2], [10, 8]])
    model = BisectingKMeans(n_clusters=1, random_state=0).fit(X)
    assert all(model.labels_ == 0)
    assert all(model.predict(X) == 0)
    assert_allclose(model.cluster_centers_, X.mean(axis=0).reshape(1, -1))
def process_e2e_mr_delex(s): items = s.split(', ') mr_data = ([None] * MR_KEY_NUM) lex = [None, None] for (idx, item) in enumerate(items): (key, raw_val) = item.split('[') key_idx = MR_KEYMAP[key] if (key == 'name'): mr_val = NAME_TOKEN lex[0] = raw_val[:(...
def test_real_integer(): a = ak.highlevel.ArrayBuilder() a.real(1.1) a.real(2.2) a.integer(3) a.real(4.4) a.real(5.5) assert (to_list(a.snapshot()) == [1.1, 2.2, 3.0, 4.4, 5.5]) assert (to_list(a) == [1.1, 2.2, 3.0, 4.4, 5.5]) assert (to_list(a.snapshot()[1:(- 1)]) == [2.2, 3.0, 4.4]...
def ResNet50RetinaNet(inputs, num_classes, **kwargs):
    """Deprecated alias for `resnet50_retinanet`.

    Warns about the deprecation, then forwards to the replacement.

    Bug fix: the original forwarded a non-existent `*args`, which raised a
    NameError on every call; only `**kwargs` exists in this signature.
    """
    warnings.warn('ResNet50RetinaNet is replaced by resnet50_retinanet and will be removed in a future release.')
    return resnet50_retinanet(num_classes, inputs, **kwargs)
class Label():
    """A single labelled observation: a timestamp plus a value of mixed type."""

    # NOTE(review): annotation-only fields with no decorator visible — this looks
    # like a `@dataclass` (or similar) whose decorator was lost in extraction;
    # confirm against the upstream source.
    # Moment the label applies to.
    time: datetime.datetime
    # Label payload; SurvivalValue is project-defined. May be None (missing).
    value: Union[(bool, int, float, SurvivalValue, str, None)]
class KR_type_E6(KirillovReshetikhinCrystalFromPromotion): def classical_decomposition(self): La = self.cartan_type().classical().root_system().weight_lattice().fundamental_weights() if (self.r() in [1, 6]): dw = [(self.s() * La[self.r()])] elif (self.r() == 2): dw = ...
def weight_boundary(graph, src, dst, n): default = {'weight': 0.0, 'count': 0} count_src = graph[src].get(n, default)['count'] count_dst = graph[dst].get(n, default)['count'] weight_src = graph[src].get(n, default)['weight'] weight_dst = graph[dst].get(n, default)['weight'] count = (count_src + ...
class ResNetV2(nn.Module): BLOCK_UNITS = {'r50': [3, 4, 6, 3], 'r101': [3, 4, 23, 3], 'r152': [3, 8, 36, 3]} def __init__(self, block_units, width_factor, head_size=21843, zero_head=False): super().__init__() wf = width_factor self.root = nn.Sequential(OrderedDict([('conv', StdConv2d(3, ...
class SEA(Benchmark): def __init__(self, p): self.__p = p self.repr_str = ('Do SEA on an elliptic curve over GF(%s)' % self.__p) def sage(self): E = EllipticCurve([1, 2, 3, 4, 5]) t = walltime() E.change_ring(GF(self.__p)).cardinality_pari() return (False, walltim...
def test_tf_idf_smoothing(): X = [[1, 1, 1], [1, 1, 0], [1, 0, 0]] tr = TfidfTransformer(smooth_idf=True, norm='l2') tfidf = tr.fit_transform(X).toarray() assert (tfidf >= 0).all() assert_array_almost_equal((tfidf ** 2).sum(axis=1), [1.0, 1.0, 1.0]) X = [[1, 1, 0], [1, 1, 0], [1, 0, 0]] tr =...
class QueueListenerHandler(QueueHandler): def __init__(self, handlers, respect_handler_level=False, auto_run=True, queue=Queue((- 1))): queue = _resolve_queue(queue) super().__init__(queue) handlers = _resolve_handlers(handlers) self._listener = QueueListener(self.queue, *handlers, r...
def synchronize():
    """Barrier across all distributed workers.

    No-op when torch.distributed is unavailable, not initialised, or when
    running single-process (world size 1).
    """
    if not (dist.is_available() and dist.is_initialized()):
        return
    if dist.get_world_size() > 1:
        dist.barrier()
class Heinrichs(CompositeBase): def __init__(self, N, quad='GC', bc=(0, 0), domain=((- 1), 1), dtype=float, scaled=False, padding_factor=1, dealias_direct=False, coordinates=None, **kw): CompositeBase.__init__(self, N, quad=quad, domain=domain, dtype=dtype, bc=bc, scaled=scaled, padding_factor=padding_facto...
class WikipediaNetwork(InMemoryDataset): url = ' def __init__(self, root, name, transform=None, pre_transform=None): self.name = name.lower() assert (self.name in ['chameleon', 'squirrel']) super(WikipediaNetwork, self).__init__(root, transform, pre_transform) (self.data, self.sl...
class ArrayField(Field[numpy.ndarray]): def __init__(self, array: numpy.ndarray, padding_value: int=0) -> None: self.array = array self.padding_value = padding_value def get_padding_lengths(self) -> Dict[(str, int)]: return {('dimension_' + str(i)): shape for (i, shape) in enumerate(self...
('characters') class TokenCharactersIndexer(TokenIndexer[List[int]]): def __init__(self, namespace: str='token_characters', character_tokenizer: CharacterTokenizer=CharacterTokenizer()) -> None: self._namespace = namespace self._character_tokenizer = character_tokenizer def count_vocab_items(sel...
def combine_results_from_alignments(pd_list, pd_cam, keys=None): if (keys is None): keys = range(len(pd_list)) assert (len(keys) == len(pd_list)) pd_all = pd.concat(pd_list, keys=keys, names=['alignment']) pd_best_min = pd_all.groupby(['noise', 'views', 'asin']).min() pd_best_max = pd_all.gr...
def noise_error_ps(nu_c, k, t, **kwargs): wavel = ((const.c / nu_c) * 0.001) t = ((t * 60.0) * 60.0) Rmax = kwargs.get('Rmax', 1500.0) Acore = ((Rmax ** 2) * np.pi) Aeff = kwargs.get('Aeff', (lambda nu: (526.0 * ((nu / 150.0) ** (- 2))))) if hasattr(Aeff, '__call__'): Aeff_val = Aeff(nu_...
def test_setup(plats): def items(): for plat in plats: (yield (plat, None)) (osname, arch) = plat.split('-') if (arch not in ('i686', 'arm64', '32')): (yield (plat, '64_')) if ((osname == 'linux') and (arch in ('i686', 'x86_64'))): ...
@HOOKS.register_module()  # NOTE(review): decorator reconstructed — source was garbled to a bare `_module`; confirm against upstream mmcv
class DistSamplerSeedHook(Hook):
    """Hook that re-seeds the distributed sampler each epoch so the shuffle
    order differs between epochs."""

    def before_epoch(self, runner):
        # Propagate the current epoch number into the sampler's RNG seed.
        runner.data_loader.sampler.set_epoch(runner.epoch)
class ExpandGerPure(ExpandTransformation): environments = [] def expansion(node, parent_state, parent_sdfg, **kwargs): node.validate(parent_sdfg, parent_state) inputs = ('_A', '_x', '_y') outputs = ('_res',) in_edges = [next(parent_state.in_edges_by_connector(node, conn)) for con...
def estimate_variance_sgd(input: T.Tensor, output: T.Tensor, bs: Optional[int]=None) -> T.Tensor: if (not bs): bs = input.shape[0] fst = (bs / (bs - 1)) snd = (1 / (bs - 1)) xs = (T.linalg.norm(input, dim=1) ** 2) ys = (T.linalg.norm(output, dim=1) ** 2) xy = (T.linalg.norm((input.T out...
class Err(Result[NoReturn]):
    """Error variant of a Result: wraps an exception; `unwrap` re-raises it."""

    def __init__(self, exc: Exception):
        self._exc = exc

    def __repr__(self) -> str:
        return f'Err({self._exc!r})'

    def is_ok(self) -> bool:
        # An Err never represents success.
        return False

    def unwrap(self) -> NoReturn:
        # Propagate the stored exception to the caller.
        raise self._exc
def asdict_as_float(obj: Any) -> Dict[(str, float)]: assert dataclasses.is_dataclass(obj) fields = dataclasses.fields(obj) ret: Dict[(str, float)] = {} for field in fields: value = getattr(obj, field.name) if isinstance(value, torch.Tensor): ret[field.name] = float(value.cpu(...
class DiscretePolicy(nn.Module): def __init__(self, state_dim, action_num, hidden_size=(100, 100), activation='tanh'): super().__init__() self.is_disc_action = True if (activation == 'tanh'): self.activation = F.tanh elif (activation == 'relu'): self.activatio...
def dot(u, v, with_mask=False, eps=1e-08):
    """Inner product of u and v along the last axis, clamped below at `eps`.

    Returns a (B, R, M, 1) result; with `with_mask=True` also returns a
    non-differentiable mask marking entries whose raw product exceeded `eps`.

    NOTE(review): assumes u has rank-4 shape (B, R, M, D) — confirm with callers.
    """
    B, R, M, _ = u.shape
    prod = F.sum(u * v, axis=-1, keepdims=True)
    prod = F.reshape(prod, (B, R, M, 1))
    # Mask is computed before clamping so it reflects the raw product.
    mask = F.greater_scalar(prod, eps).apply(need_grad=False)
    prod = F.maximum_scalar(prod, eps)
    return (prod, mask) if with_mask else prod
class ImageCaptionRetrievalEval(object): def __init__(self, task_path, seed=1111): logging.debug('***** Transfer task: Image Caption Retrieval *****\n\n') self.seed = seed (train, dev, test) = self.loadFile(task_path) self.coco_data = {'train': train, 'dev': dev, 'test': test} de...
class LabelsWithUnlabeledObsField(CategoricalObsField): UNLABELED_CATEGORY = 'unlabeled_category' def __init__(self, registry_key: str, obs_key: Optional[str], unlabeled_category: Union[(str, int, float)]) -> None: super().__init__(registry_key, obs_key) self._unlabeled_category = unlabeled_cate...
def doc_index(name):
    """Decorator factory: tag the wrapped callable with a `doc_index` attribute.

    The callable itself is returned unchanged (no wrapping).
    """
    def decorator(func):
        func.doc_index = name
        return func
    return decorator
def test_exponential_func_multi_class_batch():
    """exponential_func over a batch of 3-class supports matches hand-computed values."""
    supports = np.array([0.33, 0.0, 1.0])
    expected = [-0.01, -1.0, 1.0]
    result = exponential_func(3, supports)
    assert np.allclose(result, expected, atol=0.01)
class PlyHeaderParseError(PlyParseError): def __init__(self, message, line=None): self.message = message self.line = line s = '' if self.line: s += ('line %r: ' % self.line) s += self.message Exception.__init__(self, s) def __repr__(self): retu...
def create_basic_stream_logger(format):
    """Configure the root logger with a single INFO-level stream handler.

    Existing handlers are discarded, so repeated calls leave exactly one
    handler using the given format string. Returns the root logger.
    """
    logger = logging.getLogger('')
    logger.setLevel(logging.INFO)
    logger.handlers = []
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(format))
    logger.addHandler(handler)
    return logger
def interp_decomp(A, eps_or_k, rand=True): from scipy.sparse.linalg import LinearOperator real = _is_real(A) if isinstance(A, np.ndarray): if (eps_or_k < 1): eps = eps_or_k if rand: if real: (k, idx, proj) = _backend.iddp_aid(eps, A) ...
.parametrize('n_bins', [200, 256]) def test_regression_dataset(n_bins): (X, y) = make_regression(n_samples=500, n_features=10, n_informative=5, random_state=42) (X_train, X_test, y_train, y_test) = train_test_split(X, y, random_state=42) mapper = _BinMapper(n_bins=n_bins, random_state=42) X_train_binned...
class ClusterPooling(torch.nn.Module): def __init__(self, target, reduction='mean'): super(ClusterPooling, self).__init__() self.target = target self.reduction = reduction self.pos_cache = None def forward(self, data): data.x = scatter(data.x, data[(('scale' + str(self.ta...
def decorate_catlas_with_kmer_sizes(layer1_to_cdbg, dag, dag_levels, cdbg_kmer_sizes, cdbg_weighted_kmer_sizes): x = [] for (node_id, level) in dag_levels.items(): x.append((level, node_id)) x.sort() node_kmer_sizes = {} node_weighted_kmer_sizes = {} for (level, node_id) in x: if...
def _tensor_splits(draw, add_axis=False): tensor = draw(hu.tensor(min_value=4)) axis = draw(st.integers((- len(tensor.shape)), (len(tensor.shape) - 1))) if add_axis: return (axis, np.ones(tensor.shape[axis], dtype=np.int32), [np.array(tensor.take(i, axis=axis)) for i in range(tensor.shape[axis])]) ...
class ExposureSuite(): def setup(self): self.image_u8 = data.moon() self.image = img_as_float(self.image_u8) self.image = rescale(self.image, 2.0, anti_aliasing=False) (self.p2, self.p98) = np.percentile(self.image, (2, 98)) def time_equalize_hist(self): for i in range(10...
.experimental .parametrize('batch_size', BATCH_SIZES) def test_actor_forward(ddpg_actor_param, batch_size): (actor, param) = ddpg_actor_param memory_size = param['memory_size'] user_num = param['user_num'] item_num = param['item_num'] embedding_dim = param['embedding_dim'] user = torch.randint(h...
class conv1x1(nn.Module): def __init__(self, planes, out_planes=None, stride=1, adapt_method='series_adapters'): super(conv1x1, self).__init__() self.adapt_method = adapt_method if (adapt_method == 'series_adapters'): self.conv = nn.Sequential(nn.BatchNorm2d(planes), conv1x1_fonc...
def listdir_nohidden(path):
    """Return the entries of `path` whose names don't start with '.'.

    Order follows `os.listdir` (arbitrary, filesystem-dependent).
    """
    # Comprehension replaces the original manual append loop (same result).
    return [f for f in os.listdir(path) if not f.startswith('.')]
class Greater(AttributeFilter):
    """Attribute filter matching values strictly greater than a threshold."""

    def __init__(self, attr: str, value: Any):
        # operator.gt supplies the '>' comparison used by the base filter.
        super().__init__(attr=attr, value=value, op=operator.gt)

    def op_as_str(self):
        # Human-readable form of the comparison operator.
        return '>'
class XxxConfig(PretrainedConfig): pretrained_config_archive_map = XXX_PRETRAINED_CONFIG_ARCHIVE_MAP def __init__(self, vocab_size=50257, n_positions=1024, n_ctx=1024, n_embd=768, n_layer=12, n_head=12, resid_pdrop=0.1, embd_pdrop=0.1, attn_pdrop=0.1, layer_norm_epsilon=1e-05, initializer_range=0.02, summary_ty...
def write(dirname, name, template, env): env['generated_comment'] = GENERATED_COMMENT.substitute(filename=template.filename) path = os.path.join(dirname, name) try: with open(path, 'r') as f: old_val = f.read() except IOError: old_val = None new_val = template.substitute(...
def _disable_emit_hooks_decorator(_DecoratorContextManager) -> None:
    # NOTE(review): unusual construct kept verbatim — the "parameter" shadows
    # _DecoratorContextManager and the nested functions read like a class body;
    # this mirrors the odd upstream torch.jit helper. Confirm before refactoring.
    def __enter__(self) -> None:
        # Save the current TorchScript emit hooks, then disable them.
        self.hooks = torch._C._jit_get_emit_hooks()
        torch._C._jit_set_emit_hooks(None, None)

    def __exit__(self, *args) -> None:
        # Restore the previously saved emit hooks on exit.
        torch._C._jit_set_emit_hooks(self.hooks[0], self.hooks[1])
def get_sparse_lookup_predictor_version(version, blob_size=None, min_blob_size_4bits=None, embedding_dim=None, sparse_feature_name=None): assert (version in {'fp32', 'fp16', 'uint8rowwise', 'fused_uint8rowwise', 'fused_uint4rowwise'}), 'Unexpected version of sparse_lookup layer {0}'.format(version) if (version ...
class _DCNv2Pooling(Function): def forward(ctx, input, rois, offset, spatial_scale, pooled_size, output_dim, no_trans, group_size=1, part_size=None, sample_per_part=4, trans_std=0.0): ctx.spatial_scale = spatial_scale ctx.no_trans = int(no_trans) ctx.output_dim = output_dim ctx.group...
class GoogleHomeSetReminder(VirtualFunctionTool): name = 'GoogleHomeSetReminder' summary = 'Sets a reminder for a specified date and time.' parameters: List[ArgParameter] = [{'name': 'reminder_text', 'type': 'string', 'description': 'The text of the reminder.', 'required': True}, {'name': 'date_time', 'type...
def inject_into_urllib3():
    """Monkey-patch urllib3 to use PyOpenSSL for TLS.

    After validating that the PyOpenSSL dependencies are present, replaces the
    SSL context and SNI flags on both the top-level `util` module and its
    `ssl_` submodule, so every lookup path sees the PyOpenSSL implementations.
    """
    _validate_dependencies_met()
    util.SSLContext = PyOpenSSLContext
    util.ssl_.SSLContext = PyOpenSSLContext
    util.HAS_SNI = HAS_SNI
    util.ssl_.HAS_SNI = HAS_SNI
    util.IS_PYOPENSSL = True
    util.ssl_.IS_PYOPENSSL = True
class LT_Dataset(Dataset): train_txt = '' test_txt = '' def __init__(self, root, train=True, transform=None): self.img_path = [] self.labels = [] self.train = train self.transform = transform if train: self.txt = self.train_txt else: se...
class VCRDataset(BaseDataset): def __init__(self, *args, split='', **kwargs): assert (split in ['train', 'val', 'test']) self.split = split self.metadata = None self._load_metadata() if (split == 'train'): names = ['vcr_train'] elif (split == 'val'): ...
def register_Ns3LteDataRadioBearerInfo_methods(root_module, cls): cls.add_constructor([]) cls.add_constructor([param('ns3::LteDataRadioBearerInfo const &', 'arg0')]) cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True) cls.add_instance_attribute('m_drbIdentity', 'uint8_t', is_const=False) ...
def _patch_getitem_(gm: GraphModule, mapping: Union[(Dict[(Node, int)], Dict[(int, Node)])], lint_and_recompile: bool=True): graph = gm.graph for node in graph.nodes: if ((node.op == 'call_function') and (node.target == operator.getitem)): indices = node.args[1] if isinstance(ind...
('--project_path', type=str) def main(project_path): cam_info = read_json(f'{project_path}/intrinsics.json') H = cam_info['H'] W = cam_info['W'] n_sample = cam_info['n_sample'] multires = cam_info['multires'] near = cam_info['near'] far = cam_info['far'] focal = cam_info['focal'] wei...
def generate(config: CodegenConfig, output_dir: Path=None) -> Path: if (output_dir is None): output_dir = Path(tempfile.mkdtemp(prefix=f'sf_codegen_{type(config).__name__.lower()}_', dir='/tmp')) logger.debug(f'Creating temp directory: {output_dir}') cam_package_dir = (output_dir / 'sym') te...
class TestModStyleConv(): def setup_class(cls): cls.default_cfg = dict(in_channels=3, out_channels=1, kernel_size=3, style_channels=5, upsample=True) def test_mod_styleconv_cpu(self): conv = ModulatedStyleConv(**self.default_cfg) input_x = torch.randn((2, 3, 4, 4)) input_style = ...
def test_case68(): url = (brokerIp + '/ngsi-ld/v1/entityOperations/upsert') headers = {'Content-Type': 'application/json', 'Link': '<{{link}}>; rel=" type="application/ld+json"'} r = requests.post(url, data=json.dumps(ld_data.subdata58), headers=headers) print(r.content) assert (r.status_code == 404...
def binary_cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): if (pred.dim() != label.dim()): (label, weight) = _expand_binary_labels(label, weight, pred.size((- 1))) if (weight is not None): weight = weight.float() loss = F.binary_cross_entropy_with_logits(pred, lab...
def _coco_eval_to_body_uv_results(coco_eval): res = _empty_body_uv_results() if (coco_eval is not None): s = coco_eval.stats res['body_uv']['AP'] = s[COCO_AP] res['body_uv']['AP50'] = s[COCO_AP50] res['body_uv']['AP75'] = s[COCO_BODY_UV_AP75] res['body_uv']['APm'] = s[COC...
class GaussianStrategy(RawExplorationStrategy): def __init__(self, action_space, max_sigma=1.0, min_sigma=None, decay_period=1000000): assert (len(action_space.shape) == 1) self._max_sigma = max_sigma if (min_sigma is None): min_sigma = max_sigma self._min_sigma = min_sig...
class Few_Shot_CLI(LightningCLI): def __init__(self, **kwargs) -> None: super().__init__(**kwargs) def add_arguments_to_parser(self, parser: LightningArgumentParser) -> None: parser.add_argument('is_test', type=bool, default=False, help='whether in testing only mode') parser.add_argument...
class CbamModule(nn.Module): def __init__(self, channels, spatial_kernel_size=7): super(CbamModule, self).__init__() self.channel = ChannelAttn(channels) self.spatial = SpatialAttn(spatial_kernel_size) def forward(self, x): x = self.channel(x) x = self.spatial(x) ...
class ResNet18(tf.keras.Model): MEAN = (np.array([0.485, 0.456, 0.406], np.float32).reshape([1, 1, 1, 3]) * 255) STD = (np.array([0.229, 0.224, 0.225], np.float32).reshape([1, 1, 1, 3]) * 255) def __init__(self, name='ResNet18'): super(ResNet18, self).__init__(name=name) self.conv1 = tf.kera...
def augment_with_s_expr(split): dataset = load_json(f'outputs/WebQSP.{split}.json') dataset = dataset['Questions'] total_num = 0 hit_num = 0 for data in dataset: aug_parses = [] for parse in data['Parses']: total_num += 1 (instance, flag_success) = convert_par...
class FrameElement(object): def __init__(self, id): self.id = id def get_str(self, fedict): return fedict.getstr(self.id) def __hash__(self): return hash(self.id) def __eq__(self, other): return (self.id == other.id) def __ne__(self, other): return (not (self ...
_utils.test(require=ti.extension.adstack) def test_ad_global_ptr(): N = 5 a = ti.field(ti.f32, shape=N, needs_grad=True) b = ti.field(ti.f32, shape=N, needs_grad=True) f = ti.field(ti.f32, shape=(), needs_grad=True) def task(): for i in range(N): p = 0 for j in range(...
def cross_attn_g_c(attn, temp_attn, resnet, temp_conv, hidden_states, encoder_hidden_states, cross_attention_kwargs, temb, num_frames, inverse_temp=False): def ordered_g_c(idx): if (idx == 0): return g_c(custom_checkpoint(attn, mode='attn'), hidden_states, encoder_hidden_states, cross_attention_...
def to_integer(tokens, vocab_dict, encode=False, subtract_1=False, dtype=np.uint32):
    """Map tokens to integer ids via `vocab_dict`.

    With `encode=True` the tokens are first decoded from bytes to str
    (errors ignored). With `subtract_1=True` all ids are shifted down by one.

    NOTE(review): assumes every token is present in `vocab_dict` — a missing
    token yields None, which makes the np.array construction fail; confirm
    with callers.
    """
    if encode:
        tokens = [str(tok, errors='ignore') for tok in tokens]
    ids = np.array([vocab_dict.get(tok) for tok in tokens], dtype=dtype)
    if subtract_1:
        ids = ids - 1
    return ids
class Baseline(nn.Module): in_planes = 2048 def __init__(self, num_classes, last_stride, model_path, model_name, gem_pool, pretrain_choice): super(Baseline, self).__init__() if (model_name == 'resnet50'): self.base = ResNet(last_stride=last_stride, block=Bottleneck, layers=[3, 4, 6, ...
class SawyerPegUnplugSideEnv(SawyerXYZEnv): def __init__(self): liftThresh = 0.04 hand_low = ((- 0.5), 0.4, 0.05) hand_high = (0.5, 1, 0.5) obj_low = ((- 0.25), 0.6, 0.05) obj_high = ((- 0.15), 0.8, 0.05) goal_low = ((- 0.05), 0.6, 0.019) goal_high = (0.2, 0.8...
def test_isotonic_regression_ties_secondary_(): x = [8, 8, 8, 10, 10, 10, 12, 12, 12, 14, 14] y = [21, 23.5, 23, 24, 21, 25, 21.5, 22, 19, 23.5, 25] y_true = [22.22222, 22.22222, 22.22222, 22.22222, 22.22222, 22.22222, 22.22222, 22.22222, 22.22222, 24.25, 24.25] ir = IsotonicRegression() ir.fit(x, y...
def msvc9_find_vcvarsall(version): VC_BASE = 'Software\\%sMicrosoft\\DevDiv\\VCForPython\\%0.1f' key = (VC_BASE % ('', version)) try: productdir = Reg.get_value(key, 'installdir') except KeyError: try: key = (VC_BASE % ('Wow6432Node\\', version)) productdir = Reg....
class FileTrainLogger(TrainLogger): def __init__(self, save_file, precision=2): self.save_file = save_file self.precision = precision def _item_to_string(self, key, value, dataset=None): if (isinstance(value, float) and (1.0 < value < 100.0)): value = f'{value:.{self.precisio...
def changeEgoInThreeStar(G, A, i):
    """Number of in-degree 3-star configurations centred on node `i`: C(d, 3).

    Returns 0 (int) when the in-degree is 2 or less, otherwise
    d*(d-1)*(d-2)/6 as a float. `A` is accepted but unused, matching the
    original signature.
    """
    degree = G.indegree(i)
    if degree <= 2:
        return 0
    return degree * (degree - 1) * (degree - 2) / 6.0
def main(args=sys.argv[1:]): p = argparse.ArgumentParser() p.add_argument('cdbg_prefix', help='cdbg prefix') p.add_argument('catlas_prefix', help='catlas prefix') p.add_argument('query') p.add_argument('output') p.add_argument('--minsize', type=float, default=100) p.add_argument('--maxsize',...