code
stringlengths
101
5.91M
def create_json(wav_lst, json_file, clean_folder, txt_folder, lexicon): logger.debug(f'Creating json lists in {json_file}') json_dict = {} for wav_file in wav_lst: (noisy_path, filename) = os.path.split(wav_file) (_, noisy_dir) = os.path.split(noisy_path) (_, clean_dir) = os.path.spl...
def decode_generate_ids(tokenizer: PreTrainedTokenizer, ids: torch.Tensor) -> Union[(List[str], str)]: assert (ids.ndim in [1, 2]) only_one_sentence = (ids.ndim == 1) if only_one_sentence: ids = ids.unsqueeze(0) ids = post_process_generate_ids(tokenizer, ids) res = tokenizer.batch_decode(ids...
def res_sparse_dim(a: Tensor, b: Tensor) -> Optional[Dim]: if (a.sparse_dim and (not b.sparse_dim)): return a.sparse_dim if (b.sparse_dim and (not a.sparse_dim)): return b.sparse_dim if (a.sparse_dim and b.sparse_dim and (a.sparse_dim == b.sparse_dim)): return a.sparse_dim return...
class JSONWriter(EventWriter): def __init__(self, json_file, window_size=20): self._file_handle = PathManager.open(json_file, 'a') self._window_size = window_size def write(self): storage = get_event_storage() to_save = {'iteration': storage.iter} to_save.update(storage.l...
def dict_append(d_out, d_in):
    """Merge d_in into d_out, accumulating values per key into lists.

    Missing keys start as an empty list; list values are concatenated
    (rebinding the key), scalar values are appended in place.
    """
    for key, value in d_in.items():
        bucket = d_out.setdefault(key, [])
        if isinstance(value, list):
            # Concatenate into a fresh list, as the original did.
            d_out[key] = bucket + value
        else:
            bucket.append(value)
def flash_feather_m0(port, fwname): fwdir = os.path.join('/opt/chirpotle/firmwares', fwname) cmd_reset = ['stty', '-F', port, 'raw', 'ispeed', '1200', 'ospeed', '1200', 'cs8', '-cstopb', 'ignpar', 'eol', '255', 'eof', '255'] cmd_flash = ['/opt/bossa/bin/bossac', '-p', port, '-o', '0x2000', '-e', '-i', '-w',...
class RansomwareService(Service): def __init__(self): super().__init__() self.addDependency('Base', False, False) def _createServer(self) -> Server: return RansomwareServer() def getName(self) -> str: return 'RansomwareService' def print(self, indent: int) -> str: ...
def connect_(pairs, n=1): if (len(pairs) == 0): return [] (start_, end_) = pairs[0] new_pairs = [] for (i, (next_item, cur_item)) in enumerate(zip(pairs[1:], pairs[0:])): end_ = next_item[1] if ((next_item[0] - cur_item[1]) <= n): pass else: new_pa...
class BoyerMooreSearch(): def __init__(self) -> None: super().__init__() def aux_get_suffix_prefix_length(self, i: int) -> int: j = (self.pattern_length - 1) while ((j >= i) and (self.pattern[j] == self.pattern[(j - i)])): j -= 1 return (self.pattern_length - (j - 1))...
def main(): input_shape = (3, 32, 32) visualisation_channels = [0, 1, 2] latent_dim = 1024 model = SimpleVAE(input_shape, latent_dim, visualisation_channels) print('Created model') from coversion_utils import save_model_json, load_model_json load_model_json(model, 'model_vae.json') model...
def tuples_to_lists(d): if isinstance(d, (tuple, list)): it = enumerate(d) elif isinstance(d, dict): it = d.items() for (k, v) in it: if isinstance(v, (dict, list)): tuples_to_lists(v) elif isinstance(v, tuple): d[k] = list(v) tuples_to_lis...
class ThroughputSumWithPerf(Policy): def __init__(self, solver, num_threads=None): self._name = 'ThroughputSumWithPerf' self._policy = ThroughputNormalizedByCostSumWithPerfSLOs(solver, num_threads=num_threads) def get_allocation(self, unflattened_throughputs, scale_factors, cluster_spec): ...
def _repo_remote_url(repo): p = repo.find('/') assert (p >= 0) (host, path) = (repo[:p], repo[p:]) return (' % (host, path))
def openai_chat_completion_with_backoff(**kwargs):
    """Call the OpenAI chat-completions API and accumulate the running cost.

    NOTE(review): despite the name, no retry/backoff is implemented here —
    confirm whether a retry wrapper is applied elsewhere.
    """
    global total_cost
    response = client.chat.completions.create(**kwargs)
    # Pricing is per 1000 tokens for the requested model.
    used_tokens = response.usage.total_tokens
    total_cost += (used_tokens / 1000) * _model_name_to_cost(kwargs['model'])
    print(response)
    return response.choices[0].message.content
def random_string(stringLength=6):
    """Return a random alphanumeric string of the given length (default 6)."""
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(stringLength))
class DHCPServer(Server): __node: Node __emulator: Emulator __name_servers: str __dhcp_start: int __dhcp_end: int __is_range_changed: bool def __init__(self): super().__init__() self.__name_servers = '#option domain-name-servers none;' self.__is_range_changed = False ...
def resnet1202_cifar(**kwargs):
    """Build a ResNet-1202 for CIFAR: 200 BasicBlocks in each of 3 stages."""
    return ResNet_Cifar(BasicBlock, [200, 200, 200], **kwargs)
def get_parser(): def str2bool(v): if isinstance(v, bool): return v if (v.lower() in ('yes', 'true', 't', 'y', '1')): return True elif (v.lower() in ('no', 'false', 'f', 'n', '0')): return False else: raise argparse.ArgumentTypeError('B...
class ClassificationDataset(Dataset): def before_load(self): self.max_len = min(self.max_len, self.tokenizer.max_len_sentences_pair) self.labels = self.get_labels() self.label2id = {label: idx for (idx, label) in enumerate(self.labels)} def nb_labels(cls) -> int: return len(cls.g...
def make_samples(x, augment): x_pos = torch.cat((x[0], x[1]), dim=1) x_neg = torch.cat((x[0], x[2]), dim=1) if augment: x_pos2 = torch.cat((x[1], x[0]), dim=1) x_neg2 = torch.cat((x[1], x[2]), dim=1) x_pos = torch.cat((x_pos, x_pos2), dim=0) x_neg = torch.cat((x_neg, x_neg2),...
def macro_call_gen(name, params={}): macro_call = (' <xacro:%s ' % name) endline = '/>\n' insert = [] for i in params: if (i[:3] == '/**'): endline = '>\n' insert.append(i[3:]) else: macro_call += ('%s="%s" ' % (i, str(params[i]))) macro_call += e...
class TagUUID(JSONTag):
    """JSON serializer tag that round-trips uuid.UUID values via their hex form."""

    __slots__ = ()
    key = ' u'

    def check(self, value):
        # Tag only applies to UUID instances.
        return isinstance(value, UUID)

    def to_json(self, value):
        # Serialize as the compact 32-character hex representation.
        return value.hex

    def to_python(self, value):
        # UUID() accepts the hex string produced by to_json.
        return UUID(value)
def glove_init(input, output, concept_file): embeddings_file = (output + '.npy') vocabulary_file = (output.split('.')[0] + '.vocab.txt') output_dir = '/'.join(output.split('/')[:(- 1)]) output_prefix = output.split('/')[(- 1)] words = [] vectors = [] vocab_exist = check_file(vocabulary_file)...
def render_blocks(blocks, base_pos=5):
    """Spawn the given blocks in the world, offset vertically by base_pos.

    Args:
        blocks: mapping of (i, j, k) coordinates to a block type.
        base_pos: vertical (y) offset applied to every block.

    Bug fix: ``base_pos`` was accepted but ignored — the y offset was
    hard-coded to 5. It is now applied; the default of 5 preserves the
    previous behavior for existing callers.
    """
    block_lst = [
        Block(position=Point(x=i, y=k + base_pos, z=j),
              type=block_type, orientation=NORTH)
        for (i, j, k), block_type in blocks.items()
    ]
    CLIENT.spawnBlocks(Blocks(blocks=block_lst))
_utils.test(arch=get_host_arch_list()) def test_hook(): _oriented class Solver(): def __init__(self, n, m, hook): self.val = ti.field(ti.f32, shape=(n, m)) self.hook = hook def run_hook(self): self.hook(self.val) def hook(x: ti.template()): for (i,...
def true_divide(g, self, other): if (sym_help._is_fp(self) and sym_help._is_fp(other)): return g.op('Div', self, other) if sym_help._is_fp(self): other = g.op('Cast', other, to_i=sym_help.cast_pytorch_to_onnx[self.type().scalarType()]) return g.op('Div', self, other) if sym_help._is_...
def GetTestCases(tests):
    """Return the unique test-case names (text before the first '.') in
    first-seen order.

    Replaces the O(n^2) list-membership loop with dict.fromkeys, which
    preserves insertion order and deduplicates in O(n).
    """
    return list(dict.fromkeys(test.split('.')[0] for test in tests))
class TFCvtForImageClassification(metaclass=DummyObject):
    """Placeholder class that raises unless the TensorFlow backend is installed."""

    _backends = ['tf']

    def __init__(self, *args, **kwargs):
        # Fails with an informative error when TF is unavailable.
        requires_backends(self, ['tf'])
def test_BitMaskedArray_RecordArray_NumpyArray(): a = ak.contents.bitmaskedarray.BitMaskedArray(ak.index.Index(np.packbits(np.array([True, True, True, True, False, False, False, False, True, False, True, False, True]))), ak.contents.recordarray.RecordArray([ak.contents.numpyarray.NumpyArray(np.array([0.0, 1.0, 2.0,...
_utils.test(require=ti.extension.quant, debug=True) def test_quant_array_struct_for(): block_size = 16 N = 64 cell = ti.root.pointer(ti.i, (N // block_size)) qi7 = ti.types.quant.int(7) x = ti.field(dtype=qi7) cell.dense(ti.i, (block_size // 4)).quant_array(ti.i, 4, max_num_bits=32).place(x) ...
def main():
    """Run the T5 corrector over a batch of sample sentences and print each result."""
    corrector = T5Corrector()
    error_sentences = ['', ',', '', ',', '', '', ':', '']
    # correct_batch processes all sentences in one call.
    for corrected in corrector.correct_batch(error_sentences):
        print(corrected)
        print()
def test_move_out_container():
    """Values moved out of the container must keep their order (0, 1, 2)."""
    container = m.MoveOutContainer()
    values = [item.value for item in container.move_list]
    assert values == [0, 1, 2]
def r_percept3(t):
    """Build a single 'percept' step that checks whether t's actor is present.

    Returns a one-element list of (label, fn) where fn follows the
    (world, n, ok, result) convention and aborts past the call budget.
    """
    actor = t[1]

    def fn(world, n):
        # Give up once the function-call budget is exhausted.
        if n > MAX_FUNC_CALL:
            return (world, n, False, False)
        present = world.is_there(actor())
        return (world, n, True, present)

    return [('percept', fn)]
def apply_regular_attention(model, encoder_output_dim, encoder_outputs_transposed, weighted_encoder_outputs, decoder_hidden_state_t, decoder_hidden_state_dim, scope, encoder_lengths=None): weighted_decoder_hidden_state = _apply_fc_weight_for_sum_match(model=model, input=decoder_hidden_state_t, dim_in=decoder_hidden...
def _tf_device(device_option): if (not device_option.HasField('device_type')): return '' if (device_option.device_type == caffe2_pb2.CPU): return '/cpu:*' if (device_option.device_type == caffe2_pb2.CUDA): return '/gpu:{}'.format(device_option.device_id) raise Exception('Unhandle...
class Dictionary(object): def __init__(self, pad='<pad>', eos='</s>', unk='<unk>', bos='<s>', extra_special_symbols=None): (self.unk_word, self.pad_word, self.eos_word) = (unk, pad, eos) self.symbols = [] self.count = [] self.indices = {} self.bos_index = self.add_symbol(bos)...
_processor(name=LEMMA) class LemmaProcessor(UDProcessor): PROVIDES_DEFAULT = set([LEMMA]) REQUIRES_DEFAULT = set([TOKENIZE]) DEFAULT_BATCH_SIZE = 5000 def __init__(self, config, pipeline, device): self._use_identity = None self._pretagged = None super().__init__(config, pipeline,...
def born_rec(model, src_coords, wavelet, rec_coords, space_order=8, ic='as', f0=0.015, illum=False, fw=True):
    """Linearized (Born) forward modeling, returning only receiver data.

    Returns:
        (rec.data, illumination data or None when illumination is disabled).
    """
    rec, _, I, _ = born(model, src_coords, rec_coords, wavelet, save=False,
                        space_order=space_order, ic=ic, f0=f0,
                        illum=illum, fw=fw)
    # I may be None when illum=False; getattr guards the .data access.
    return (rec.data, getattr(I, 'data', None))
def AUC_calc(item, TPR):
    """Return the midpoint of item and TPR, or the string 'None' when
    either operand is not numeric (e.g. a missing statistic)."""
    try:
        return (item + TPR) / 2
    except TypeError:
        # Undefined statistics are reported as the literal string 'None'.
        return 'None'
def compute_kernel(x, y):
    """Pairwise Gaussian-style kernel between rows of x (n, d) and y (m, d).

    Returns an (n, m) tensor: exp(-mean((x_i - y_j)^2) / d).
    """
    n, m = x.size(0), y.size(0)
    d = x.size(1)
    # Broadcast both inputs to (n, m, d) so every row pair is compared.
    xx = x.unsqueeze(1).expand(n, m, d)
    yy = y.unsqueeze(0).expand(n, m, d)
    scaled_sq_dist = (xx - yy).pow(2).mean(2) / float(d)
    return torch.exp(-scaled_sq_dist)
class _OGD(): def __init__(self, t, scale, alpha, yhat_0, g=8): self.scale = scale self.base_lr = (scale / np.sqrt(3)) self.alpha = alpha self.yhat = yhat_0 self.grad_norm = 0 u = 0 while ((t % 2) == 0): t /= 2 u += 1 self.lifet...
class EncoderDecoderConfig(PretrainedConfig): model_type = 'encoder-decoder' is_composition = True def __init__(self, **kwargs): super().__init__(**kwargs) assert (('encoder' in kwargs) and ('decoder' in kwargs)), 'Config has to be initialized with encoder and decoder config' encoder...
class LoadData(file_load.Load): def __call__(self, file_name: str, id_: str, category: str, subject_id: str) -> typing.Tuple[(np.ndarray, typing.Union[(conv.ImageProperties, None)])]: if (id_ == FileTypes.AGE.name): with open(file_name, 'r') as f: value = np.asarray([int(f.readli...
class TinyNetworkDarts(nn.Module): def __init__(self, C, N, max_nodes, num_classes, search_space, affine, track_running_stats): super(TinyNetworkDarts, self).__init__() self._C = C self._layerN = N self.max_nodes = max_nodes self.stem = nn.Sequential(nn.Conv2d(1, C, kernel_si...
def _giac_solver(f, x, solution_dict=False): from sage.libs.giac.giac import libgiac giac_f = libgiac(f) giac_vars = libgiac(x) ret = giac_f.solve(giac_vars) sols = ret.sage() if solution_dict: if (not sols): return [] if isinstance(sols[0], list): return ...
def test_get_init_msa_lowercase(msa_sampler):
    """Lowercase seed letters must be encoded consistently with uppercase ones."""
    seed = ['aaa', 'aCC', 'aCDE']
    result = msa_sampler.get_init_msa(seed, 5, 2)
    expected = [
        [0, 5, 5, 5, 32, 32],
        [0, 5, 23, 23, 32, 32],
        [0, 5, 23, 13, 9, 32],
    ]
    for row, want in enumerate(expected):
        assert result[0][row].tolist() == want
def input_fn(features, vocab_filename, norm_filename=None): def gen(): for item in features: (yield item) output_shapes = tf.TensorShape([None, features[0].shape[(- 1)]]) dataset = tf.data.Dataset.from_generator(gen, tf.float32, output_shapes) vocab_table = utils.create_vocab_table(v...
def restart_from_checkpoint(ckp_path, run_variables=None, **kwargs): if (not os.path.isfile(ckp_path)): return print('Found checkpoint at {}'.format(ckp_path)) checkpoint = torch.load(ckp_path, map_location='cpu') for (key, value) in kwargs.items(): if ((key in checkpoint) and (value is ...
def multiply_grads(params, c):
    """In-place multiply every existing gradient in params by c.

    Parameters without a gradient are skipped. A tensor multiplier is
    moved to each gradient's device before the multiply.
    """
    for param in params:
        grad = param.grad
        if grad is None:
            continue
        if torch.is_tensor(c):
            # Keep the multiplier on the same device as the gradient.
            c = c.to(grad.device)
        grad.data.mul_(c)
def test():
    """Smoke-test Elliott_GoogLeNet on a single CIFAR-sized input."""
    model = Elliott_GoogLeNet()
    batch = torch.randn(1, 3, 32, 32)
    out = model(batch)
    print(out.size())
def save_latents(H, train_latent_ids, val_latent_ids): save_dir = 'latents/' os.makedirs(save_dir, exist_ok=True) latents_fp_suffix = ('_flipped' if H.horizontal_flip else '') train_latents_fp = f'latents/{H.dataset}_{H.latent_shape[(- 1)]}_train_latents{latents_fp_suffix}' val_latents_fp = f'latent...
def flip_gradient(x, lam=1.0):
    """Gradient-reversal trick: the forward value equals x, while the
    backward pass scales the gradient by -lam.

    The stop_gradient term contributes x * (1 + lam) to the forward value
    but nothing to the gradient; the -x * lam term supplies the reversed
    gradient and subtracts x * lam from the forward value.
    """
    frozen = tf.stop_gradient(x * tf.cast(1 + lam, tf.float32))
    reversed_path = -x * tf.cast(lam, tf.float32)
    return frozen + reversed_path
class _TensorOpOverloadsMixin(_TensorMixinBase): def __eq__(self: Tensor, other: Union[(_rf_types.RawTensorTypes, Tensor)]) -> Union[(Tensor, bool)]: if (self.raw_tensor is None): return False import returnn.frontend as rf valid_types = ((rf.Tensor, self._raw_backend.RawTensorTyp...
def visualize_search_doc(doc, semgrex_queries, lang_code, start_match=0, end_match=10): matches_count = 0 with Semgrex(classpath='$CLASSPATH') as sem: edited_html_strings = [] semgrex_results = sem.process(doc, *semgrex_queries) unedited_html_strings = get_sentences_html(doc, lang_code) ...
class ClassCellNode(ExprNode): subexprs = [] is_temp = True is_generator = False type = py_object_type def analyse_types(self, env): return self def generate_result_code(self, code): if (not self.is_generator): code.putln(('%s = __Pyx_CyFunction_GetClassObj(%s);' % (s...
class CiderScorer(object): def copy(self): new = CiderScorer(n=self.n) new.ctest = copy.copy(self.ctest) new.crefs = copy.copy(self.crefs) return new def copy_empty(self): new = CiderScorer(df_mode='corpus', n=self.n, sigma=self.sigma) new.df_mode = self.df_mode ...
class Downloader(Dataset): def __len__(self): return len(self.data) def __getitem__(self, i): (image, label) = self.data[i] if (not isinstance(self.data, DatasetMixIn)): image = self.transform(image) return (image, label) def extract_tar(tar_path, extract_dir=None...
def get_path_mask(masks_path, dataset_name, model_name, method):
    """Return the masks directory for a (dataset, model, method) triple."""
    subdir = '{}_{}_{}/'.format(dataset_name, model_name, method)
    # Path() normalizes away the trailing slash in the formatted name.
    return masks_path / Path(subdir)
def pad_random_crop(input_size, scale_size=None, normalize=__imagenet_stats):
    """Training augmentation: symmetric pad + random crop + horizontal flip,
    then tensor conversion and normalization.

    NOTE(review): the default scale_size=None makes the padding arithmetic
    raise TypeError — callers appear to always pass scale_size; confirm
    before relying on the default.
    """
    padding = int((scale_size - input_size) / 2)
    return transforms.Compose([
        transforms.RandomCrop(input_size, padding=padding),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize(**normalize),
    ])
class TestModelFromArtisDensity(): (autouse=True) def setup(self, example_model_file_dir, atomic_dataset): self.config = Configuration.from_yaml((example_model_file_dir / 'tardis_configv1_artis_density.yml')) self.simulation_state = SimulationState.from_config(self.config, atom_data=atomic_datas...
def related_to_test_list(file_name: str, test_list: TestList) -> bool:
    """Return True if any test's name appears as a substring of file_name.

    Idiom fix: the manual loop-and-return is replaced by any() over a
    generator; behavior (including the False result for an empty list)
    is unchanged.
    """
    return any(test.name in file_name for test in test_list)
def _lookup_req_object(name):
    """Fetch attribute `name` from the active request context.

    Raises:
        RuntimeError: when called outside of a request context.
    """
    ctx = _request_ctx_stack.top
    if ctx is None:
        raise RuntimeError(_request_ctx_err_msg)
    return getattr(ctx, name)
def save_model(iter):
    """Persist the generator's weights to models/gen_model_<iter>.

    Creates the models/ directory on first use. NOTE(review): the
    parameter name `iter` shadows the builtin but is kept for caller
    compatibility.
    """
    if not os.path.exists('models/'):
        os.makedirs('models/')
    target = 'models/gen_model_{}'.format(iter)
    torch.save(genmodel.state_dict(), target)
def niggli_reduce(lattice, eps=1e-05): _set_no_error() niggli_lattice = np.array(np.transpose(lattice), dtype='double', order='C') result = spg.niggli_reduce(niggli_lattice, float(eps)) _set_error_message() if (result == 0): return None else: return np.array(np.transpose(niggli_l...
_criterion('masked_lm') class MaskedLmLoss(FairseqCriterion): def __init__(self, task, tpu=False): super().__init__(task) self.tpu = tpu def forward(self, model, sample, reduce=True): masked_tokens = sample['target'].ne(self.padding_idx) sample_size = masked_tokens.int().sum() ...
def init_logs(opt): log_dir = safe_path(os.path.join(opt.log_root, '{}_{}_data'.format(opt.domain_name, opt.task_name), 'exp_{}_{}_{}_{}'.format(opt.data_type1, opt.data_id1, opt.data_type2, opt.data_id2))) if opt.istrain: img_logs = safe_path(os.path.join(log_dir, 'train')) else: img_logs =...
def _defaultSuccessDebugAction(instring, startloc, endloc, expr, toks):
    """Default debug hook: announce a successful match and its tokens."""
    message = 'Matched ' + _ustr(expr) + ' -> ' + str(toks.asList())
    print(message)
def get(identifier): if (identifier is None): return linear if isinstance(identifier, six.string_types): identifier = str(identifier) return deserialize(identifier) elif callable(identifier): if isinstance(identifier, Layer): warnings.warn('Do not pass a layer ins...
class ModuleTransfer(): src: nn.Module dest: nn.Module verbose: int = 1 src_skip: List = field(default_factory=list) dest_skip: List = field(default_factory=list) raise_if_mismatch: bool = True def __call__(self, x: Tensor): dest_traced = Tracker(self.dest)(x).parametrized sr...
def test_misspelling_perturbation(): data_augmenter = DataAugmenter(perturbations=[MisspellingPerturbation(prob=1.0)]) instance: Instance = Instance(id='id0', input=Input(text='Already, the new product is not available.'), references=[]) instances: List[Instance] = data_augmenter.generate([instance], includ...
class ContextNetBlock(torch.nn.Module): def __init__(self, out_channels, kernel_size, num_layers, inner_dim, input_shape, stride=1, beta=1, dropout=0.15, activation=Swish, se_activation=torch.nn.Sigmoid, norm=BatchNorm1d, residual=True): super().__init__() self.residual = residual self.Convs...
def create_search_engine(config): set_random_seed(config['random_seed']) if (config['out_dir'] is not None): config['pretrained'] = config['out_dir'] config['out_dir'] = None model_handle = ModelHandlerExtend(config) se = search_engine(model_handle=model_handle, config=config) querie...
def dataio_prep(hparams): label_encoder = sb.dataio.encoder.CategoricalEncoder() .data_pipeline.takes('wav') .data_pipeline.provides('sig') def audio_pipeline(wav): sig = sb.dataio.dataio.read_audio(wav) return sig .data_pipeline.takes('spk_id') .data_pipeline.provides('spk_id', ...
def post_process_partition(graph: Graph, edge_weight_function=None, verbose_on_error=True, assert_output_types=False, verbose_check_outputs=False) -> Graph: re_assign_partition_indices(graph) if has_stage_cycles(graph): if os.environ.get('DEBUG', False): graph.save_as_pdf(f'{graph.model_name...
class hardswish(ActivationFunctor):
    """HardSwish activation: x * relu6(x + 3) / 6."""

    tag = 'cutlass::epilogue::thread::HardSwish'

    # NOTE(review): defined without self/@staticmethod in the original;
    # kept as-is since callers invoke it through the class object.
    def numpy(x: np.ndarray):
        # np.clip(x + 3, 0, 6) is relu6(x + 3).
        clipped = np.clip(x + 3.0, 0.0, 6.0)
        return x * clipped / 6.0
class Partition4(nn.Module): LAYER_SCOPES = ['BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/Linear[key]', 'BertForQuestionAnswering/BertModel[bert]/BertEncoder[encoder]/BertLayer[12]/BertAttention[attention]/BertSelfAttention[self]/Linea...
class CallableSymbolicExpressionRing_class(SymbolicRing, sage.rings.abc.CallableSymbolicExpressionRing): def __init__(self, arguments): self._arguments = arguments SymbolicRing.__init__(self, SR) self._populate_coercion_lists_(coerce_list=[SR]) self.symbols = SR.symbols def _coer...
def load_and_cache_examples(args, tokenizer, evaluate=False): if ((args.local_rank not in [(- 1), 0]) and (not evaluate)): torch.distributed.barrier() processor = PairProcessor() output_mode = 'classification' cached_features_file = os.path.join(args.data_dir, 'cached_{}_{}_{}_{}'.format(('dev' ...
def check_data_iterator_concat_result(di, batch_size, normalize, ds1, ds2, stop_exhausted): datalist = [] count = 0 for data in di: for i in range(batch_size): count += 1 if normalize: v1 = round((data[0][i].flatten()[0] * 256)) else: ...
('crf_tagger') class CrfTagger(Model): def __init__(self, vocab: Vocabulary, text_field_embedder: TextFieldEmbedder, encoder: Seq2SeqEncoder, label_namespace: str='labels', initializer: InitializerApplicator=InitializerApplicator(), regularizer: Optional[RegularizerApplicator]=None) -> None: super().__init_...
def perform_training(word2vec, TRAIN, DEV, out_size, num_epochs, batch_size): training_generator = torch.utils.data.DataLoader(TRAIN, batch_size=batch_size, drop_last=False, shuffle=True) dev_generator = torch.utils.data.DataLoader(DEV, batch_size=batch_size, drop_last=False, shuffle=True) model = BOWNetwor...
class Dataset(data.Dataset): def __init__(self, contentPath, stylePath, fineSize): super(Dataset, self).__init__() self.contentPath = contentPath self.image_list = [x for x in listdir(contentPath) if is_image_file(x)] self.stylePath = stylePath self.fineSize = fineSize ...
def register_Ns3TracedValue__Unsigned_char_methods(root_module, cls): cls.add_constructor([]) cls.add_constructor([param('ns3::TracedValue< unsigned char > const &', 'o')]) cls.add_constructor([param('unsigned char const &', 'v')]) cls.add_constructor([param('ns3::TracedValue< unsigned char > const &', ...
def MLP(order): model = cnn.CNNModelHelper() d = 256 depth = 20 width = 3 for i in range(depth): for j in range(width): current = ('fc_{}_{}'.format(i, j) if (i > 0) else 'data') next_ = 'fc_{}_{}'.format((i + 1), j) model.FC(current, next_, dim_in=d, dim_...
def test_integer_overlap_same_step_no_cover(): subset1 = Range.from_string('0:10:1') subset2 = Range.from_string('5:11:1') assert (subset1.covers_precise(subset2) is False) assert (subset2.covers_precise(subset1) is False) subset1 = Range.from_string('0:10:2') subset2 = Range.from_string('2:11:1...
class TFAutoModelForSequenceClassification(object): def __init__(self): raise EnvironmentError('TFAutoModelForSequenceClassification is designed to be instantiated using the `TFAutoModelForSequenceClassification.from_pretrained(pretrained_model_name_or_path)` or `TFAutoModelForSequenceClassification.from_co...
def unique_backward(grad_inputs, inputs, input_shapes, outputs, output_shapes, flatten=True, axis=None, sorted=True, with_index=False, with_inverse=False, with_counts=False):
    """Backward pass for `unique` — not supported.

    Raises:
        NotImplementedError: always; no gradient is defined for unique.
    """
    # Unpacked for future implementors; currently unused.
    dy = grad_inputs[0]
    x0 = inputs[0]
    raise NotImplementedError('unique_backward is not implemented.')
def create_training_data_loader(dataset, batch_size, shuffle, collate_fn=None, num_workers=0, elapsed_iters=0): is_distributed = dist_utils.is_distributed() if is_distributed: sampler = CustomDistributedSampler(dataset, dist_utils.get_world_size(), dist_utils.get_rank(), shuffle) elif shuffle: ...
class VAE(nn.Module): def __init__(self, img_channels, latent_size, m): super(VAE, self).__init__() self.encoder = Encoder(img_channels, latent_size, m) self.decoder = Decoder(img_channels, latent_size, m) def forward(self, x): (mu, logsigma) = self.encoder(x) sigma = log...
def test_nan_values() -> None:
    """check_array_nan must reject an array containing only NaN values."""
    all_nan = np.array([np.nan, np.nan, np.nan, np.nan])
    with pytest.raises(ValueError, match='Array contains only NaN*'):
        check_array_nan(all_nan)
class ColorJitter(object): def __init__(self, brightness=0.4, contrast=0.4, saturation=0.4): self.brightness = brightness self.contrast = contrast self.saturation = saturation def __call__(self, img): self.transforms = [] if (self.brightness != 0): self.transf...
def expert_sharding_degree(args: Arguments) -> int:
    """Number of ways experts are sharded across the expert-parallel group.

    The degree is the smaller of the world size and the expert count, and
    must divide the expert count evenly.

    Raises:
        ValueError: when the expert count is not divisible by the degree.
    """
    world_size = get_expert_parallel_world_size(args)
    degree = min(world_size, args.moe_num_experts)
    if args.moe_num_experts % degree != 0:
        raise ValueError(f'Cannot shard {args.moe_num_experts} experts {degree} ways.')
    return degree
class UniformInitializer(BaseInitializer): def __init__(self, lim=((- 1), 1), rng=None): if (rng is None): rng = random.prng self.rng = rng self.lim = lim def __repr__(self): return '{}({})'.format(self.__class__.__name__, repr(self.lim)) def __call__(self, shape)...
class MaskHead(nn.Module): def __init__(self, hidden_dim, fpn_dims, num_frames): super().__init__() self.num_frames = num_frames self.lay1 = torch.nn.Conv2d(hidden_dim, hidden_dim, 3, padding=1) self.gn1 = torch.nn.GroupNorm(32, hidden_dim) self.lay2 = torch.nn.Conv2d(hidden_...
class ExportTestCallback(Callback):
    """Collects every test-step output so it can be inspected after the run."""

    def __init__(self):
        # Accumulates raw outputs in batch order.
        self.test_outputs = []

    def on_test_batch_end(self, trainer: pl.Trainer, pl_module: AnomalyModule, outputs: (STEP_OUTPUT | None), batch: Any, batch_idx: int, dataloader_idx: int):
        """Record the outputs of one test batch."""
        self.test_outputs.append(outputs)
def is_ckpt_epoch(cur_epoch):
    """True when a checkpoint should be written after cur_epoch (0-based):
    at every ckpt_period boundary and at the final epoch."""
    epoch = cur_epoch + 1
    hits_period = epoch % cfg.train.ckpt_period == 0
    is_last = epoch == cfg.optim.max_epoch
    return hits_period or is_last
def pytest_runtest_logreport(report):
    """pytest hook: record a 'passed' mark for tests registered in MARKS."""
    # Only the 'call' phase of a passing, registered test counts.
    if report.when != 'call' or report.outcome != 'passed':
        return
    if report.nodeid not in MARKS:
        return
    _add_mark(MARKS[report.nodeid], 'passed')
class DDIMScheduler(SchedulerMixin, ConfigMixin): _to_config def __init__(self, num_train_timesteps: int=1000, beta_start: float=0.0001, beta_end: float=0.02, beta_schedule: str='linear', trained_betas: Optional[np.ndarray]=None, timestep_values: Optional[np.ndarray]=None, clip_sample: bool=True, set_alpha_to_o...
class MedianAbsoluteDeviation(BaseModel): def __init__(self, absolute=True, b=1.4826): self.b = b self.absolute = absolute self.median_meter = MedianMeter() self.mad_meter = MedianMeter() def fit_partial(self, X, y=None): assert (len(X) == 1) self.median_meter.upd...
_utils.test(arch=[ti.cpu, ti.cuda]) def test_missing_return_annotation(): with pytest.raises(ti.TaichiCompilationError, match='return value must be annotated'): _func def add(a: ti.i32, b: ti.i32): return (a + b) def run(): add(30, 2) run()