code
stringlengths
101
5.91M
def read_in_samples_task1_random_neg_whole_doc(dict_paragraphs: dict, qrels: dict): dict_whole_docs = dict_para_to_whole_docs(dict_paragraphs) samples = [] for query_id in qrels.keys(): print('now we start with this query {}'.format(query_id)) query_text = dict_whole_docs.get(query_id) ...
def test_build_layer_ltc():
    """build_layer should construct an ncps LTC layer from a nested spec dict."""
    spec = {'class': 'ncps.tf.LTC', 'parameters': {'units': {'class': 'ncps.wirings.AutoNCP', 'parameters': {'units': 10, 'output_size': 1}}}}
    result = build_layer(spec, {})
    # The nested AutoNCP wiring spec must resolve into a concrete LTC instance.
    assert isinstance(result, ncps.tf.LTC)
class PyTorchTaskRunner(nn.Module, TaskRunner): def __init__(self, device: str=None, loss_fn=None, optimizer=None, **kwargs): super().__init__() TaskRunner.__init__(self, **kwargs) if device: self.device = device else: self.device = pt.device(('cuda' if pt.cud...
class GradedYangianNatural(GradedYangianBase): def __init__(self, Y): if (Y._filtration != 'natural'): raise ValueError('the Yangian must have the natural filtration') cat = GradedHopfAlgebrasWithBasis(Y.base_ring()).Connected().Commutative() GradedYangianBase.__init__(self, Y, c...
def fit_rbv2_glmnet(key='rbv2_glmnet', **kwargs): tfms = {} [tfms.update({k: ContTransformerClamp01Range}) for k in ['mmce', 'f1', 'auc']] [tfms.update({k: ContTransformerClamp0LogRange}) for k in ['timetrain', 'timepredict']] [tfms.update({k: ContTransformerLog2Range}) for k in ['s']] [tfms.update(...
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ca_certs=None, server_hostname=None, ssl_version=None, ciphers=None, ssl_context=None, ca_cert_dir=None, key_password=None, ca_cert_data=None): context = ssl_context if (context is None): context = create_urllib3_context(ssl_version,...
def SGD_U2(U1, sigma, U2, V2, V1, rating_matrix, beta, gamma, S1, X, W1, alpha, P1, n1, lamda): term_1 = np.transpose(U1).dot((sigma * (U1.dot(U2).dot(V2).dot(V1) - rating_matrix))).dot(np.transpose(V1)).dot(np.transpose(V2)) term_2 = (lamda * U2) term_3 = (gamma * np.transpose(U1).dot((U1.dot(U2).dot(np.tr...
def get_combos(llists, namekeys=None): cmds = [] for combo in itertools.product(*llists): partial = '' if (namekeys is not None): for pair in list(zip(namekeys, combo)): partial += '--{} {} '.format(pair[0], pair[1]) else: partial = ' '.join(combo)...
def get_truncated_expon(scale=100, low=0, upp=1000):
    """Return a frozen truncated-exponential distribution supported on [low, upp].

    The shape parameter ``b`` is expressed in units of ``scale`` so that the
    upper truncation point lands exactly at ``upp``.
    """
    shape_b = (upp - low) / scale
    return truncexpon(b=shape_b, loc=low, scale=scale)
class FilteredNodes(NodeEnumerator): def __init__(self, enumerator: NodeEnumerator, condition: 'LogicalValue'): self.enumerator = enumerator self.condition = condition def enumerate(self, state: EnvironmentState, **kwargs): for n in self.enumerator.enumerate(state): if self.c...
class PairwiseEncoder(torch.nn.Module): def __init__(self, config: Config): super().__init__() emb_size = config.embedding_size self.genre2int = {g: gi for (gi, g) in enumerate(['bc', 'bn', 'mz', 'nw', 'pt', 'tc', 'wb'])} self.genre_emb = torch.nn.Embedding(len(self.genre2int), emb_s...
def get_parser():
    """Build the CLI parser for the lexicon-filtering tool."""
    arg_parser = argparse.ArgumentParser(description='filters a lexicon given a unit dictionary')
    # The unit dictionary is the only (mandatory) input.
    arg_parser.add_argument('-d', '--unit-dict', help='unit dictionary', required=True)
    return arg_parser
def soft_distillation_loss(predictions, pseudo_labels, ground_truths, alpha=0.1): log_softmax = torch.nn.LogSoftmax(dim=1) criterion_dice = smp.losses.DiceLoss(mode='multiclass') if ((alpha > 0.0) & (ground_truths is not None)): kl_divergence = torch.nn.KLDivLoss(reduction='batchmean', log_target=Tr...
def load_model(model, model_weights_fn):
    """Load serialized weights from ``model_weights_fn`` into ``model`` in place.

    Fix: the original passed ``open(model_weights_fn, 'rb')`` to ``torch.load``
    and never closed it, leaking a file descriptor. ``torch.load`` accepts a
    path directly and manages the handle itself.
    """
    logger.info('Loading the model <-- %s', model_weights_fn)  # lazy %-args for logging
    model.load_state_dict(torch.load(model_weights_fn))
def _test_inv(implementation, dtype, id=0, size=4, in_shape=[4, 4], out_shape=[4, 4], in_offset=[0, 0], out_offset=[0, 0], in_dims=[0, 1], out_dims=[0, 1], overwrite=False, getri=True): assert np.all((np.array(in_shape)[in_dims] >= size)) assert np.all((np.array(out_shape)[out_dims] >= size)) assert np.all(...
def copy_image_u8_to_rgba8_np(src: ti.types.ndarray(), dst: ti.types.ndarray(), num_components: ti.template(), gray_scale: ti.template()): for I in ti.grouped(src): (i, j) = (I[0], I[1]) px = ti.Vector([0, 0, 0, 255], dt=u32) if ti.static(gray_scale): px[0] = px[1] = px[2] = ti.c...
class CLFTrainer(): def __init__(self, cf): self.cf = cf from transformers import logging as trfm_logging self.logger = cf.logger self.log = cf.logger.log trfm_logging.set_verbosity_error() _logger def train(self): self.d = d = Sequence((cf := self.cf)).init()...
_keyword(color='rgbcolor') (alpha=1, thickness=1, linestyle='solid', zorder=5, rgbcolor='blue', aspect_ratio=1.0) def arc(center, r1, r2=None, angle=0.0, sector=(0.0, (2 * pi)), **options): from sage.plot.all import Graphics scale = options.get('scale', None) if isinstance(scale, (list, tuple)): sca...
def read_audio(fname):
    """Read an audio file that must be sampled at 16 kHz.

    Returns
    -------
    (wav, sr) : the waveform and its sample rate (always 16000.0 on success).

    Fixes: validate the rate with an explicit exception instead of ``assert``
    (asserts are stripped under ``python -O``), and return the rate actually
    read from the file rather than a hard-coded literal.
    """
    wav, sr = sf.read(fname)
    if sr != 16000.0:
        raise ValueError(f'expected 16 kHz audio, got sample rate {sr}')
    return wav, sr
def test_regular_numpy_2():
    """A parametrized RegularType string should round-trip through deduce_type."""
    text = '5 * int64[parameters={"bar": "foo"}]'
    parsed = deduce_type(text)
    assert isinstance(parsed, ak.types.RegularType)
    # String form must be preserved exactly, including the parameters payload.
    assert str(parsed) == text
class RocAuc(Metric): def _get_metric_value_by_user(ks: List[int], ground_truth: List, pred: List) -> List[float]: if ((not ground_truth) or (not pred)): return [0.0 for _ in ks] set_gt = set(ground_truth) res = [] for k in ks: length = min(k, len(pred)) ...
def _make_players_dwg(dwg, state: MahjongState, i, color_set, BOARD_WIDTH, BOARD_HEIGHT, GRID_SIZE): players_g = dwg.g(style='stroke:#000000;stroke-width:0.01mm;fill:#000000', fill_rule='evenodd') x = 265 y = 435 fontsize = 22 players_g.add(dwg.text(text=wind[((i - state._oya) % 4)], insert=(x, y), ...
def search_for_tree(args, document_sents, summary_sent, oracle_sent_indices, compression_model, compression_tokenizer, fusion_model, fusion_tokenizer, paraphrase_model, paraphrase_tokenizer, saved_operations): queue = [] parent_child_score_map = {} intermediate_to_document_sents_map = {} best_programs =...
class WarmupLinearScheduler(torch.optim.lr_scheduler.LambdaLR): def __init__(self, optimizer, warmup_steps, scheduler_steps, min_ratio, fixed_lr, last_epoch=(- 1)): self.warmup_steps = warmup_steps self.scheduler_steps = scheduler_steps self.min_ratio = min_ratio self.fixed_lr = fixe...
class Timer(): def __init__(self): self.start_time = time.time() def tic(self): self.start() def show(self, prefix='', output=True): duration = (time.time() - self.start_time) if output: print((prefix + ('%fs' % duration))) return duration def toc(self...
def _calibrate_denoiser_search(image, denoise_function, denoise_parameters, *, stride=4, approximate_loss=True): image = img_as_float(image) parameters_tested = list(_product_from_dict(denoise_parameters)) losses = [] for denoiser_kwargs in parameters_tested: multichannel = (denoiser_kwargs.get(...
class distanceMap():
    """Pairs an RSSI reading with a Euclidean distance, plus optional metadata.

    Attributes: rssi, euclid, and the optional label/name identifiers.
    """

    def __init__(self, rssi, euclid, label=None, name=None):
        self.rssi = rssi
        self.euclid = euclid
        self.label = label
        self.name = name

    def print(self):
        # NOTE: the method name intentionally mirrors the builtin `print`.
        print('rssi: ', self.rssi, ' , euclid: ', self.euclid, ' , label: ', self.label)
class Feature(nn.Module): def __init__(self): super(Feature, self).__init__() self.conv1 = nn.Conv2d(3, 96, kernel_size=5, stride=1, padding=2) self.bn1 = nn.BatchNorm2d(96) self.conv2 = nn.Conv2d(96, 144, kernel_size=3, stride=1, padding=1) self.bn2 = nn.BatchNorm2d(144) ...
def test_jax_scvi(n_latent=5): adata = synthetic_iid() JaxSCVI.setup_anndata(adata, batch_key='batch') model = JaxSCVI(adata, n_latent=n_latent) model.train(2, train_size=0.5, check_val_every_n_epoch=1) model.get_latent_representation() model = JaxSCVI(adata, n_latent=n_latent, gene_likelihood='...
class RandomShortAcrobot(ModifiableAcrobotEnv): def __init__(self): super(RandomShortAcrobot, self).__init__() self.length = uniform_exclude_inner(self.np_random.uniform, self.EXTREME_LOWER_LENGTH, self.EXTREME_UPPER_LENGTH, self.RANDOM_LOWER_LENGTH, self.RANDOM_UPPER_LENGTH) def reset(self, new...
# NOTE(review): this record was flattened onto one line by the dataset export
# and appears mangled — the leading "_utils.test()" has lost its "@" (and
# likely part of its module name, e.g. "@test_utils.test()"), and any
# decorators on the inner B/A definitions (presumably "@ti.kernel") were
# stripped as well. Do not treat this line as runnable source; reconstruct it
# from the original taichi test suite before reuse.
_utils.test() def test_nested_kernel_error(): def B(): pass def A(): B() with pytest.raises(ti.TaichiCompilationError): A()
def je(template, **kwargs):
    """Render ``template`` through the module-level JINJA_ENV with ``kwargs``."""
    if template and template[0] == '\n':
        # Drop a single leading blank line (convenient for triple-quoted templates).
        template = template[1:]
    if template and template[(- 1)] == '\n':
        # NOTE(review): this appends a SECOND trailing newline when one is
        # already present — preserved verbatim; confirm it is intentional.
        template = template + '\n'
    compiled = JINJA_ENV.from_string(template)
    return compiled.render(kwargs)
def test_option_array_integer(): result = ak.operations.from_json(' [ [ 1 ,2,3 ],null,[ ], [4, 5]]', schema={'type': 'array', 'items': {'type': ['null', 'array'], 'items': {'type': 'integer'}}}) assert (result.to_list() == [[1, 2, 3], None, [], [4, 5]]) assert (str(result.type) == '4 * option[var * int64]')...
def get_just_x_or_y_train_dev_dataset(just, DATA_DIR, **kw): train_ds = load_and_cache_examples_just_x_or_y(just=just, DATA_DIR=DATA_DIR, evaluate=False, output_examples=False, **kw) print('squad', 'version_2_with_negative', kw['version_2_with_negative']) (dev_ds, examples, features) = load_and_cache_exampl...
def test_constructor_mutate_parameter_choose_none(constructor_mock, default_test_case): float0 = stmt.FloatPrimitiveStatement(default_test_case, 5.0) const = stmt.ConstructorStatement(default_test_case, constructor_mock, {'a': float0.ret_val}) default_test_case.add_statement(float0) default_test_case.ad...
def find_indices_loader(loader, n_shots, n_classes): per_label_indices = defaultdict(partial(deque, maxlen=n_shots)) for (ibatch, (indices, (images, labels))) in enumerate(AddIndexIter(loader)): for (idx, lbl) in zip(indices, labels): per_label_indices[lbl.item()].append(idx) fin...
class FlavaTextModel(metaclass=DummyObject):
    """Import-time placeholder; instantiating it raises unless torch is installed."""

    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        # Delegates the backend-availability check to the shared helper.
        requires_backends(self, ['torch'])
def test_arraytype_17():
    """A mixed option/list array's type string should round-trip via from_datashape."""
    original = str(ak.Array([1, 2, 3, None, [], [], []]).type)
    reparsed = ak.types.from_datashape(original, highlevel=False)
    assert str(reparsed) == original
def male_author_sources(args: Dict[(str, Any)]) -> List[object]: query = [{'$match': {'body': {'$ne': ''}, 'quotesUpdated': {'$exists': True}, 'outlet': {'$in': args['outlets']}, 'publishedAt': {'$gte': args['begin_date'], '$lt': (args['end_date'] + timedelta(days=1))}, 'authorsFemaleCount': 0, 'authorsMaleCount': ...
class LargeMatrixHelpRepr(ObjectReprABC): def __call__(self, obj, p, cycle): if (not p.toplevel()): return False from sage.structure.element import Matrix if (not isinstance(obj, Matrix)): return False from sage.matrix.constructor import options if ((o...
def _replsym(symlist, symrepl): if (symlist is None): return None if isinstance(symlist, (symbolic.SymExpr, symbolic.symbol, sp.Basic)): return _internal_replace(symlist, symrepl) for (i, dim) in enumerate(symlist): try: symlist[i] = tuple((_internal_replace(d, symrepl) f...
class Pix2PixPro(BaseModel): def name(self): return 'Pix2PixPro' def initialize(self, opt): BaseModel.initialize(self, opt) self.net_g = define_G(netG=opt.netG, gpu_id=opt.gpu_ids, param_rate=opt.param_rate) self.net_d = define_D(netD='multi_scale', gpu_id=opt.gpu_ids) se...
def patch_replication_callback(data_parallel): assert isinstance(data_parallel, DataParallel) old_replicate = data_parallel.replicate (old_replicate) def new_replicate(module, device_ids): modules = old_replicate(module, device_ids) execute_replication_callbacks(modules) return m...
class SlateIndependentIPS(BaseSlateInverseProbabilityWeighting): estimator_name: str = 'iips' def estimate_policy_value(self, slate_id: np.ndarray, reward: np.ndarray, position: np.ndarray, pscore_item_position: np.ndarray, evaluation_policy_pscore_item_position: np.ndarray, **kwargs) -> float: check_ii...
def find_checkpoint(dir, restore_epochs, epochs, rec, best=0): if best: for (r, d, f) in os.walk(dir): for file in f: if ('best-weights-'.format(restore_epochs) in file): return (dir + file.split('.')[0]) return '' if ((rec == 'apr') and (restore_e...
def get_basic_timestamp_embeddings(audio, model):
    """Compute timestamp embeddings for ``audio`` with ``model`` in inference mode."""
    model.eval()  # disable dropout / batch-norm updates
    with torch.no_grad():
        embeddings = model.get_timestamp_embeddings(audio)
    return embeddings
def _add_test(paramSetting, methodname): def test_method(self): if paramSetting: opt = copy.deepcopy(self.opt) for (param, setting) in paramSetting: setattr(opt, param, setting) else: opt = self.opt getattr(self, methodname)(opt) if par...
class ResNet(nn.Module): def __init__(self, block, layers, output_stride, BatchNorm, nInputChannels=3, pretrained=False): self.inplanes = 64 super(ResNet, self).__init__() blocks = [1, 2, 4] if (output_stride == 16): strides = [1, 2, 2, 1] dilations = [1, 1, 1...
def test_pretrained_tokenizer_gpt2():
    """Loading the pretrained 'gpt2' tokenizer yields the fast variant and its config."""
    tok, cfg = get_tokenizer(pretrained_tokenizer='gpt2', tokenizer_class=None, vocab_file=None, merges_file=None, special_tokens_dict=None)
    # Exact-type checks (not isinstance) preserved from the original contract.
    assert type(tok) == GPT2TokenizerFast
    assert type(cfg) == GPT2Config
class sage_build_ext(build_ext): def finalize_options(self): build_ext.finalize_options(self) self.check_flags() def run(self): self.run_command('build_cython') build_ext.run(self) def check_flags(self): forbidden = None if (os.environ.get('SAGE_FAT_BINARY') =...
def opencv_distortion(camera, x): x_sq = np.square(x) xy = np.prod(x, axis=(- 1), keepdims=True) r_sq = x_sq.sum(axis=(- 1), keepdims=True) return ((x * (1.0 + (r_sq * (camera.k1 + (camera.k2 * r_sq))))) + np.concatenate(((((2.0 * camera.p1) * xy) + (camera.p2 * (r_sq + (2.0 * x_sq)))), ((camera.p1 * (r...
class EarlyStopping(): def __init__(self, checkpoint_path, patience=7, verbose=False, delta=0): self.checkpoint_path = checkpoint_path self.patience = patience self.verbose = verbose self.counter = 0 self.best_score = None self.early_stop = False self.delta = ...
_BOX_FEATURE_EXTRACTORS.register('FPNXconv1fcFeatureExtractor') class FPNXconv1fcFeatureExtractor(nn.Module): def __init__(self, cfg, in_channels): super(FPNXconv1fcFeatureExtractor, self).__init__() resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION scales = cfg.MODEL.ROI_BOX_HEAD.POOLER...
def register_Ns3ConnectionManager_methods(root_module, cls): cls.add_constructor([param('ns3::ConnectionManager const &', 'arg0')]) cls.add_constructor([]) cls.add_method('AddConnection', 'void', [param('ns3::Ptr< ns3::WimaxConnection >', 'connection'), param('ns3::Cid::Type', 'type')]) cls.add_method('...
.parametrize('duplicate', [10000, 100000]) def test_balanced_weighted_sampler(duplicate: int): labels = ['a', 'a', 'b', 'a'] batch_size = 5 prev_diff_ratio = 1.0 sampler = BalancedWeightedSampler(labels, batch_size=batch_size, duplicate=duplicate, seed=0) indices = list(sampler) assert (len(indi...
def register_Ns3SixLowPanNhcExtension_methods(root_module, cls): cls.add_output_stream_operator() cls.add_constructor([param('ns3::SixLowPanNhcExtension const &', 'arg0')]) cls.add_constructor([]) cls.add_method('CopyBlob', 'uint32_t', [param('uint8_t *', 'blob'), param('uint32_t', 'size')], is_const=Tr...
class ATTFST(): def __init__(self, attfile, epsilon_symbol=u'', identity_symbol=u'_IDENTITY_SYMBOL_', unknown_symbol='_UNKNOWN_SYMBOL_'): self.epsilon_symbol = epsilon_symbol self.identity_symbol = identity_symbol self.unknown_symbol = unknown_symbol try: lines = [line.rs...
class AlmostAccuracy():
    """Metric: fraction of predictions within ``thresh`` of their labels."""

    def __init__(self, thresh=0.25):
        self.thresh = thresh  # absolute tolerance for a "correct" prediction

    def __call__(self, eval_pred):
        preds, labels = eval_pred
        within = np.abs(preds - labels) <= self.thresh
        # float32 mean, returned as a plain Python float in a metrics dict.
        return {'accuracy': within.astype(np.float32).mean().item()}
def compute_feature_stats_for_generator(opts, detector_url, detector_kwargs, rel_lo=0, rel_hi=1, batch_size=8, data_loader_kwargs=None, max_items=None, batch_gen=None, **stats_kwargs): dataset = dnnlib.util.construct_class_by_name(**opts.dataset_kwargs) if (data_loader_kwargs is None): data_loader_kwarg...
def test_psi_plus_phi_minus(): for i in range(200): (k1, k2, k3, k4, a3) = create_scenario(psi_plus, phi_minus, i) state = correct_order(k1.state, k1.keys) if (a3.msg_log[0][2].meas_res == [0, 0]): assert numpy.array_equal(state, [0, (- (0.5 ** 0.5)), (0.5 ** 0.5), 0]) el...
.parametrize('func, mock', zip(plan_funcs, plan_mocks)) def test_backend_plan(func, mock): x = np.arange(20).reshape((10, 2)) with pytest.raises(NotImplementedError, match='precomputed plan'): func(x, plan='foo') with set_backend(mock_backend, only=True): mock.number_calls = 0 y = fu...
def _add_image_summaries():
    """Attach a tf.summary.image for every tensor in the 'train_images' collection."""
    for image in tf.get_collection('train_images'):
        # Strip the per-tower prefix so summaries from all towers share one name.
        name = re.sub('tower_[0-9]*/', '', image.op.name)
        tf.summary.image(name, image)
def _decode_and_center_crop(image_bytes: tf.Tensor, image_size: int=224, jpeg_shape: Optional[tf.Tensor]=None) -> tf.Tensor: if (jpeg_shape is None): jpeg_shape = tf.image.extract_jpeg_shape(image_bytes) image_height = jpeg_shape[0] image_width = jpeg_shape[1] padded_center_crop_size = tf.cast((...
def print_compatibility_error(language): from snips_nlu.cli.utils import PrettyPrintLevel, pretty_print pretty_print("Language resources for '{lang}' could not be loaded.\nYou may have to download resources again using 'python -m snips_nlu download {lang}'".format(lang=language), 'This can happen when you updat...
class FromRCIsomorphism(Morphism): def _repr_type(self): return 'Crystal Isomorphism' def __invert__(self): return FromTableauIsomorphism(Hom(self.codomain(), self.domain())) def _call_(self, x): lam = [(sum(nu) + 1) for nu in x] ct = self.domain().cartan_type() I = c...
def resblock_up_condition(x_init, z, channels, use_bias=True, is_training=True, sn=False, scope='resblock_up'): with tf.variable_scope(scope): with tf.variable_scope('res1'): x = condition_batch_norm(x_init, z, is_training) x = relu(x) x = deconv(x, channels, kernel=3, st...
class FlaubertModel(metaclass=DummyObject):
    """Import-time placeholder; instantiating it raises unless torch is installed."""

    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        # Delegates the backend-availability check to the shared helper.
        requires_backends(self, ['torch'])
class KaldiRecognizer(object): thisown = property((lambda x: x.this.own()), (lambda x, v: x.this.own(v)), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): _vosk.KaldiRecognizer_swiginit(self, _vosk.new_KaldiRecognizer(*args)) __swig_destroy__ = _vosk.delete_KaldiRecogn...
class CausalLoraKbitModel(CausalLoraModel): def __init__(self, engine: str, weights_path: Optional[str]=None, model_name: Optional[str]=None, target_modules: Optional[List[str]]=None, **kwargs): assert_not_cpu_int8() super().__init__(engine, weights_path=weights_path, model_name=model_name, target_m...
class Normal(ExponentialFamily): arg_constraints = {'loc': constraints.real, 'scale': constraints.positive} support = constraints.real has_rsample = True _mean_carrier_measure = 0 def mean(self): return self.loc def stddev(self): return self.scale def variance(self): ...
_lr_scheduler('inverse_sqrt') class InverseSquareRootSchedule(FairseqLRScheduler): def __init__(self, args, optimizer): super().__init__(args, optimizer) if (len(args.lr) > 1): raise ValueError('Cannot use a fixed learning rate schedule with inverse_sqrt. Consider --lr-scheduler=fixed in...
def run_pool(poolsize, chunksize): db_client = utils.init_client(MONGO_ARGS) new_col = db_client[DB_NAME][NEW_COL] new_old_ids = list(new_col.find({}, {'_id': 1, 'currentId': 1})) print('Obtained ID list of length {}.'.format(len(new_old_ids))) pool = Pool(processes=poolsize) pool.map(parse_chun...
class SentenceRE(nn.Module):
    """Abstract base for sentence-level relation-extraction models."""

    def __init__(self):
        super().__init__()

    def infer(self, item):
        """Predict a relation for ``item``; concrete subclasses must override."""
        raise NotImplementedError
class TabRegrStrategy(BaseSupervisedLearningStrategy):
    """Supervised-learning strategy for the tabular-regression ('CSR') case."""

    def __init__(self, estimator, name=None, check_input=True):
        self._case = 'CSR'  # fixed problem case for this strategy
        self._name = name
        super().__init__(estimator=estimator, check_input=check_input)

    def save(self, path):
        # Serialize the whole strategy via the module-level `dump`
        # (presumably joblib.dump — verify against the file's imports).
        dump(self, path)
class ContextualLexer(Lexer): def __init__(self, conf, states, always_accept=()): terminals = list(conf.tokens) tokens_by_name = {} for t in terminals: assert (t.name not in tokens_by_name), t tokens_by_name[t.name] = t trad_conf = copy(conf) trad_conf...
def calc_blob_blob_forces_pycuda(r_vectors, *args, **kwargs): number_of_blobs = np.int32(len(r_vectors)) (threads_per_block, num_blocks) = set_number_of_threads_and_blocks(number_of_blobs) L = kwargs.get('periodic_length') eps = kwargs.get('repulsion_strength') b = kwargs.get('debye_length') blo...
def report_prf(tp, fp, fn, phase, logger=None, return_dict=False): precision = ((tp / (tp + fp)) if ((tp + fp) > 0) else 0) recall = ((tp / (tp + fn)) if ((tp + fn) > 0) else 0) if ((precision + recall) == 0): f1_score = 0 else: f1_score = ((2 * (precision * recall)) / (precision + recal...
def test_validate_lengths_equal(): annotations.validate_lengths_equal([np.array([0, 1])]) annotations.validate_lengths_equal([np.array([0, 1]), np.array([[0, 1, 2], [0, 2, 3]])]) annotations.validate_lengths_equal([np.array([]), None]) with pytest.raises(ValueError): annotations.validate_lengths...
def load_examples_rte(path): data = [] with open(path) as f: for line in f: data += [json.loads(line)] examples = [] label2synonym = {0: [' true'], 1: [' false']} hypotheses = [' true', ' false'] for d in data: premise = f''' {d['premise']} question: {d['hypothesis']...
def extract(): args = ArgumentParser().parse_args() if (not os.path.exists(args.spatial)): os.makedirs(args.spatial) device = torch.device(('cuda:0' if torch.cuda.is_available() else 'cpu')) transform = transforms.Compose([transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor()...
def test_count_complex(): content2 = ak.contents.NumpyArray(np.array([(1.1 + 0.1j), 2.2, 3.3, 0.0, 2.2, 0.0, 0.0, 2.2, 0.0, 4.4])) offsets3 = ak.index.Index64(np.array([0, 3, 6, 10], dtype=np.int64)) depth1 = ak.contents.ListOffsetArray(offsets3, content2) assert (to_list(depth1) == [[(1.1 + 0.1j), (2.2...
class InventoryManagementSystemSetupAlert(VirtualFunctionTool): name = 'InventoryManagementSystemSetupAlert' summary = 'Set up an alert for a specific item or category when the stock reaches a certain threshold.' parameters: List[ArgParameter] = [{'name': 'alert_id', 'type': 'string', 'description': 'The un...
def segments_to_sequence_example(segments, label): raw_segments = [np.array(segment, dtype=np.float32).reshape((- 1)).tostring() for segment in segments] raw_label = np.array(label, dtype=np.uint8).reshape((- 1)).tostring() sequence_example = tf.train.SequenceExample(context=tf.train.Features(feature={'labe...
def test_partial_examples(empty_open_api_3_schema): empty_open_api_3_schema['paths'] = {'/test/{foo}/{bar}/': {'post': {'parameters': [{'name': 'foo', 'in': 'path', 'required': True, 'schema': {'type': 'string', 'enum': ['A']}}, {'name': 'bar', 'in': 'path', 'required': True, 'schema': {'type': 'string', 'example':...
def synthetic_iid(batch_size: int=200, n_genes: int=100, n_proteins: int=100, n_regions: int=100, n_batches: int=2, n_labels: int=3, dropout_ratio: float=0.7, sparse_format: (str | None)=None, return_mudata: bool=False) -> AnnOrMuData: if (n_batches < 1): raise ValueError('`n_batches` must be greater than 0...
def _as_pred_data(x, nb_nodes, seed, batch_axis):
    """Return a seeded random permutation of ``x.data`` along ``batch_axis``.

    Pointer-typed features are additionally one-hot encoded over ``nb_nodes``,
    since their values index nodes.
    """
    rng_key = jax.random.PRNGKey(seed)
    shuffled = jax.random.permutation(rng_key, x.data, axis=batch_axis)
    if x.type_ == specs.Type.POINTER:
        return jax.nn.one_hot(shuffled, nb_nodes)
    return shuffled
class BaseObjectiveFunction(BaseSimPEG): map_class = IdentityMap def __init__(self, nP=None, mapping=None, has_fields=False, counter=None, debug=False): self._nP = nP if (mapping is None): self._mapping = mapping else: self.mapping = mapping self.counter =...
class JsonlDataset(Dataset): def __init__(self, data_dir): self._split_filenames = {} for filename in glob(f'{data_dir}/*.jsonl'): split = filename.split('/')[(- 1)][:(- len('.jsonl'))] self._split_filenames[split] = filename def __getitem__(self, split): examples...
def _align_output_features_output_indices(out_features: Optional[List[str]], out_indices: Optional[Union[(List[int], Tuple[int])]], stage_names: List[str]): if ((out_indices is None) and (out_features is None)): out_indices = [(len(stage_names) - 1)] out_features = [stage_names[(- 1)]] elif ((ou...
# NOTE(review): this record was flattened onto one line by the dataset export
# and appears mangled — the leading "_utils.test()" has lost its "@" (and
# likely part of its module name, e.g. "@test_utils.test()"), and the inner
# func definition presumably lost an "@ti.kernel" decorator. Do not treat this
# line as runnable source; reconstruct it from the original taichi test suite.
_utils.test() def test_assignment_in_nested_loops(): m = ti.field(ti.f32, 3) x = ti.field(ti.f32, ()) def func(): a = x[None] for i in m: b = a for j in range(1): b = b x[None] = b x[None] = 1 func() assert (x[None] == 1)
class HDC():
    """Thin wrapper around a raw handle value (presumably a GDI device context).

    ``int(hdc_obj)`` yields the underlying handle so instances can be passed
    wherever the raw value is expected.
    """

    def __init__(self, dc):
        self.dc = dc  # the raw handle value

    def __int__(self):
        return self.dc
class OptimRegime(object): def __init__(self, params, regime): self.optimizer = torch.optim.SGD(params, lr=0) self.regime = regime self.current_regime_phase = None self.setting = {} def update(self, epoch, train_steps): if (self.regime is None): return ...
(Output('whatif-explanation-state', 'data'), [Input('select-instance-whatif', 'value'), Input('first-instance-set-btn', 'n_clicks'), Input('first-instance-reset-btn', 'n_clicks'), Input('second-instance-set-btn', 'n_clicks'), Input('second-instance-reset-btn', 'n_clicks'), Input('whatif-run-btn', 'n_clicks')], [State('...
# NOTE(review): this record was flattened onto one line by the dataset export
# and appears mangled — the leading "_args('v', ...)" has lost its "@" and
# likely part of its name (presumably "@parse_args('v', 'v', 'v', 'is', 'is',
# 'is', 'i')" from torch.onnx symbolic helpers). Do not treat this line as
# runnable source; restore the decorator before reuse.
_args('v', 'v', 'v', 'is', 'is', 'is', 'i') def conv2d(g, input, weight, bias, stride, padding, dilation, groups): return _convolution(g, input, weight, bias, stride, padding, dilation, False, (), groups, None, None, None, None)
def save_figures(data, source, model_version, filter, suffix, k=10): results = data['results'] df = pd.DataFrame(results) indirect_by_head = np.stack(df['indirect_effect_head'].to_numpy()) direct_by_head = np.stack(df['direct_effect_head'].to_numpy()) mean_indirect_by_head = indirect_by_head.mean(ax...
def order_p_automorphisms(rational_function, pre_image): if rational_function.parent().is_field(): K = rational_function.parent() R = K.ring() else: R = rational_function.parent() K = R.fraction_field() z = R.gen(0) phi = K(rational_function) F = R.base_ring() q =...
def linprog(c, A_ub=None, b_ub=None, A_eq=None, b_eq=None, bounds=None, method='interior-point', callback=None, options=None, x0=None): meth = method.lower() if ((x0 is not None) and (meth != 'revised simplex')): warning_message = "x0 is used only when method is 'revised simplex'. " warn(warning...
_utils.test(arch=[ti.cuda]) def test_large_shared_array(): if (ti.lang.impl.get_cuda_compute_capability() < 86): pytest.skip('Skip the GPUs prior to Ampere') block_dim = 128 nBlocks = 64 N = (nBlocks * block_dim) v_arr = np.random.randn(N).astype(np.float32) d_arr = np.random.randn(N).as...
def train_one_epoch(model, optimizer, train_loader, model_func, lr_scheduler, accumulated_iter, optim_cfg, rank, tbar, total_it_each_epoch, dataloader_iter, tb_log=None, leave_pbar=False, logger=None, log_interval=100, loss_record=None): if (total_it_each_epoch == len(train_loader)): dataloader_iter = iter(...
def used_vars_set(l, bound=None): if (not l): return BooleConstant(1) s = set() for p in l: s.update(Polynomial(p).vars_as_monomial().variables()) if (bound and (len(s) > bound)): break sorted_s = sorted(list(s), key=top_index, reverse=True) m = Monomial(next(iter...