code
stringlengths
101
5.91M
class ProgressMeter(object): def __init__(self, num_batches, meters, prefix='', fp=None): self.batch_fmtstr = self._get_batch_fmtstr(num_batches) self.meters = meters self.prefix = prefix self.fp = fp def display(self, batch): entries = [(self.prefix + self.batch_fmtstr.f...
class MOTDataReader(): def __init__(self, image_folder, detection_file_name): self.image_folder = image_folder self.detection_file_name = detection_file_name self.image_format = os.path.join(self.image_folder, '{0:06d}.jpg') self.detection = pd.read_csv(self.detection_file_name, head...
def check_response(response, response_len=1, semgrex_len=1, source_index=1, target_index=3, reln='obj'): assert (len(response.result) == response_len) assert (len(response.result[0].result) == semgrex_len) for semgrex_result in response.result[0].result: assert (len(semgrex_result.match) == 1) ...
class fc(nn.Module):
    """A fully-connected layer followed by an in-place ReLU activation."""

    def __init__(self, in_ch, out_ch):
        super(fc, self).__init__()
        # Linear projection then ReLU, bundled so forward() is a single call.
        self.fullc = nn.Sequential(
            nn.Linear(in_ch, out_ch),
            nn.ReLU(inplace=True),
        )

    def forward(self, x):
        return self.fullc(x)
def message_lines(log_iterator):
    """Collect consecutive non-blank lines from *log_iterator* into one string.

    Consumes lines until the iterator is exhausted or a blank (whitespace-only)
    line is reached; the blank line itself is consumed but not included.

    Args:
        log_iterator: An iterator of strings (e.g. over a log file).

    Returns:
        The stripped lines joined by single spaces; '' if the first line
        is blank or the iterator is empty.
    """
    # Renamed from `message_lines` — the original local shadowed the
    # function's own name, which hurts readability and breaks recursion.
    collected = []
    while True:
        next_line = next(log_iterator, '').strip()
        if not next_line:
            break
        collected.append(next_line)
    return ' '.join(collected)
def bleu_eval(refs, cands):
    """Compute the BLEU-4 score of *cands* against reference captions *refs*."""
    print('calculating bleu_4 score...')
    # compute_score returns (score, per-instance details); only the score is needed.
    score_and_details = bleu_scorer.compute_score(refs, cands)
    return score_and_details[0]
class SummarizeJob(GenericJob): def __init__(self, problem): self.type = 'summarize' GenericJob.__init__(self, problem) self.resultsPerTipSubproblem = None def run(self): print(('Process [%s]: %s running %s with results from tips %s' % (os.getpid(), self.type, self.problem_name, ...
def _decode_block_str(block_str): assert isinstance(block_str, str) ops = block_str.split('_') block_type = ops[0] ops = ops[1:] options = {} noskip = False for op in ops: if (op == 'noskip'): noskip = True elif op.startswith('n'): key = op[0] ...
def parse_question(question_o, spans): spans = list(set(spans)) spans.sort(key=(lambda tup: tup[0])) cursor_idx = 0 (words, ye) = ([], []) for span in spans: (idx_start, idx_end, tag) = span space = text_to_word_sequence(question_o[cursor_idx:idx_start]) words.extend(space) ...
def test_dataset_warpper(): pipeline1 = [dict(type='LoadImageFromFile')] pipeline2 = [dict(type='LoadImageFromFile'), dict(type='ColorJitter')] img_prefix = 'tests/data/ocr_toy_dataset/imgs' ann_file = 'tests/data/ocr_toy_dataset/label.txt' train1 = dict(type='OCRDataset', img_prefix=img_prefix, ann...
class ImagenetData(Dataset): def __init__(self, data_dir=None): super(ImagenetData, self).__init__('imagenet', 300, 300, data_dir=data_dir) def num_examples_per_epoch(self, subset='train'): if (subset == 'train'): return IMAGENET_NUM_TRAIN_IMAGES elif (subset == 'validation')...
class CEM(RLAlgorithm, Serializable): def __init__(self, env, policy, n_itr=500, max_path_length=500, discount=0.99, init_std=1.0, n_samples=100, batch_size=None, best_frac=0.05, extra_std=1.0, extra_decay_time=100, plot=False, n_evals=1, **kwargs): Serializable.quick_init(self, locals()) self.env =...
def test():
    """Smoke-test PDELU_GoogLeNet on one random CIFAR-sized (3x32x32) input."""
    model = PDELU_GoogLeNet()
    sample = torch.randn(1, 3, 32, 32)
    output = model(sample)
    print(output.size())
class TestBNInfoCollection(unittest.TestCase): def test_conv2d_bn_info_collection(self): input_shape = (8, 8, 3) in_model = create_model_1(input_shape) transformed_graph = prepare_graph(in_model) self.assertTrue((len(transformed_graph.find_node_by_name('conv2d_bn')) == 1)) co...
class PartialResRecon(nn.Module): def __init__(self, n_inputs: int=64, n_feats: int=64, n_outputs: int=3, kernel_size: int=3, depth: int=4) -> None: super().__init__() self.body = PartialResSeq(n_inputs=n_feats, n_feats=n_feats, n_outputs=n_feats, kernel_size=kernel_size, depth=depth) self.r...
class LoopScopeAttribute():
    """Per-loop bookkeeping attributes tracked during scope analysis."""

    def __init__(self, is_static):
        # Closest enclosing non-static `if`; populated later by the analyzer.
        self.nearest_non_static_if = None
        # Every loop scope starts out in the Normal status.
        self.status = LoopStatus.Normal
        # Whether this loop scope was declared static.
        self.is_static = is_static
class EmptyDishwasher(Task): def init_task(self) -> None: success_detector = ProximitySensor('success') plate = Shape('dishwasher_plate') self.register_graspable_objects([plate]) self.register_success_conditions([DetectedCondition(plate, success_detector, negated=True)]) def init...
def standardise_blank_spaces(query): (in_squote, in_dquote) = (False, False) tmp_query = [] pos = 0 while (pos < len(query)): char = query[pos] pos += 1 if (char in ["'", '"']): if (not (in_squote or in_dquote)): tmp_query.append(' ') (in_s...
def filter_synthetic_queries(queries_dataset, document_index): total_filtered_questions = [] total_labels = [] queries_dataset = Dataset.from_pandas(queries_dataset) for i in tqdm(range(len(queries_dataset))): question = queries_dataset[i]['synthetic_query'] question_embedding = np.array...
def _update_adabelief(p, grad, m, s, s_max, t, alpha, beta1, beta2, eps, wd, amsgrad, weight_decouple, fixed_decay, rectify): beta1_t = (beta1 ** t) beta2_t = (beta2 ** t) bias_correction1 = (1.0 - beta1_t) bias_correction2 = np.sqrt((1.0 - beta2_t)) m[...] = ((beta1 * m) + ((1 - beta1) * grad)) ...
def trace(code, preparse=True): from IPython.core.debugger import Pdb pdb = Pdb() try: ipython = get_ipython() except NameError: raise NotImplementedError('the trace command can only be run from the Sage command-line') from sage.repl.preparse import preparse code = preparse(code)...
def train_eval_loop_trans(args, trans_fun, device, train_dataset, train_loader, optimizer_trans, old_embedding, new_embedding, time_train, time_val, writer, best_trans_fun_list): loss_fun = torch.nn.MSELoss() best_trans_fun = None best_loss = 1000 for epoch in range(1, (args.epochs + 1)): print(...
def run_decode(decoder_name_or_path: AnyPath, dataset_path='tatsu-lab/alpaca_farm', dataset_name: Optional[str]='alpaca_farm_evaluation', split='eval', prompt_dict_path=((pathlib.Path(__file__).parent / 'prompts') / 'v0_inputs_noinputs.json'), output_path: AnyPathOrNone=None, max_instances=sys.maxsize, per_device_batch...
class CategoricalLSTMPolicy(StochasticPolicy): def __init__(self, env_spec, name='CategoricalLSTMPolicy', hidden_dim=32, hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform(seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), recurrent_nonlinearity=tf.nn.sigmoid, ...
def convert(in_file, out_file): in_state_dict = torch.load(in_file, map_location='cpu') out_state_dict = OrderedDict() for (key, val) in in_state_dict.items(): new_key = key if (key[:5] == 'conv1'): new_key = ('stem' + key[5:]) if (key[:3] == 'bn1'): new_key =...
class MLP(nn.Module): def __init__(self, dim): super(MLP, self).__init__() self.l1 = nn.Linear(60, 256) self.l2 = nn.Linear(256, 256) self.l3 = nn.Linear(280, 256) self.l4 = nn.Linear(256, 128) self.l5 = nn.Linear(128, dim) self.ac = nn.ReLU() def forward(...
def _config_likelihood(forward_dict, scale_data): input_dict = {} input_dict['conditions'] = forward_dict['prior_draws'].astype(np.float32) input_dict['observables'] = (forward_dict['sim_data'].astype(np.float32) / scale_data) return input_dict
def test_array_to_Array(): class PointArray(ak.Array): def __getitem__(self, where): return ak.Array([1, 2, 3]) array = ak.Array([[{'rho': 1, 'phi': 1.0}], [], [{'rho': 2, 'phi': 2.0}]], with_name='point', behavior={('*', 'point'): PointArray}) assert (array.to_list() == [[1, 2, 3], [1, ...
.parametrize('opts,obj', [(['a=10'], {'a': 10}), (['b=test'], {'b': 'test'}), (['c=1.0e-8'], {'c': 1e-08}), (['d=3.14'], {'d': 3.14}), (['e=True'], {'e': True}), (['f=false'], {'f': False}), (['a=b', 'c=d'], {'a': 'b', 'c': 'd'}), (['g=h=i'], {'g': 'h=i'})]) def test_options_from_eqdelimstring(opts, obj): assert (p...
def create_sequence(dirs, fpvpb, no_frames): random.seed(35) count_real = 0 count_fake = 0 folders = [] for directory in dirs: folders += [i for i in sorted(glob.glob(os.path.join(directory, '*', '*')))] random.shuffle(folders) total_folders = len(folders) X = {} y = [] p...
class CityscapesDataset(SegmentationDataset): num_classes = 19 label_names = ['road', 'sidewalk', 'building', 'wall', 'fence', 'pole', 'traffic light', 'traffic sign', 'vegetation', 'terrain', 'sky', 'person', 'rider', 'car', 'truck', 'bus', 'train', 'motorcycle', 'bicycle'] color_map = np.array([[128, 64, ...
def resnet50(pretrained=False, **kwargs):
    """Construct a ResNet-50 model.

    Args:
        pretrained: If True, load pretrained weights from model_urls['resnet50'].
        **kwargs: Forwarded to the ResNet constructor.
    """
    stage_blocks = [3, 4, 6, 3]  # the canonical ResNet-50 layer counts
    model = ResNet(Bottleneck, stage_blocks, **kwargs)
    if not pretrained:
        return model
    state = torch.load(model_urls['resnet50'], map_location='cpu')
    model.load_state_dict(state)
    return model
def kernel(seq: dc.int32[N]): table = np.zeros((N, N), np.int32) for i in range((N - 1), (- 1), (- 1)): for j in range((i + 1), N): if ((j - 1) >= 0): table[(i, j)] = np.maximum(table[(i, j)], table[(i, (j - 1))]) if ((i + 1) < N): table[(i, j)] = ...
def init_process_group(backend, init_method='env://', **kwargs): world_size = kwargs.pop('world_size', (- 1)) group_name = kwargs.pop('group_name', '') rank = kwargs.pop('rank', (- 1)) assert (len(kwargs) == 0), ('got unexpected keyword arguments: %s' % ','.join(kwargs.keys())) if (not is_available(...
class VQAEval(): def __init__(self, vqa=None, vqaRes=None, n=2): self.n = n self.accuracy = {} self.evalQA = {} self.evalQuesType = {} self.evalAnsType = {} self.vqa = vqa self.vqaRes = vqaRes if (vqa is not None): self.params = {'question_...
def categorical_accuracy(y_true, y_pred):
    """Per-sample accuracy: 1.0 where the argmax classes of truth/prediction agree."""
    true_labels = K.argmax(y_true, axis=-1)
    pred_labels = K.argmax(y_pred, axis=-1)
    return K.cast(K.equal(true_labels, pred_labels), K.floatx())
def _to_probe(p, ctx=None):
    """Return *p* unchanged when it is already a probe, otherwise wrap it in Probe."""
    return p if is_probe(p) else Probe(p, ctx)
_end_docstrings(PIPELINE_INIT_ARGS) class SummarizationPipeline(Text2TextGenerationPipeline): return_name = 'summary' def __call__(self, *args, **kwargs): return super().__call__(*args, **kwargs) def check_inputs(self, input_length: int, min_length: int, max_length: int) -> bool: if (max_len...
class SlackDownloadFile(VirtualFunctionTool): name = 'SlackDownloadFile' summary = 'Download a file using its unique identifier.' parameters: List[ArgParameter] = [{'name': 'file_id', 'type': 'string', 'description': 'The unique identifier of the file.', 'required': True}, {'name': 'save_path', 'type': 'str...
class ZoomFFT(CZT): def __init__(self, n, fn, m=None, *, fs=2, endpoint=False): m = _validate_sizes(n, m) k = arange(max(m, n), dtype=np.min_scalar_type((- (max(m, n) ** 2)))) if (np.size(fn) == 2): (f1, f2) = fn elif (np.size(fn) == 1): (f1, f2) = (0.0, fn) ...
def test_axis0(): array = ak.Array([0.0, 1.1, 2.2, 3.3]) assert (to_list(ak.operations.combinations(array, 2, replacement=False, axis=0)) == [(0.0, 1.1), (0.0, 2.2), (0.0, 3.3), (1.1, 2.2), (1.1, 3.3), (2.2, 3.3)]) assert (to_list(ak.operations.combinations(array, 2, replacement=False, axis=0, fields=['x', ...
def load_mat_info(img_info, gt_file): assert isinstance(img_info, dict) assert isinstance(gt_file, str) (contours, words) = get_contours_mat(gt_file) anno_info = [] for (contour, word) in zip(contours, words): if (contour.shape[0] == 2): continue coordinates = np.array(co...
def changeGWContagion_LISTCOMP(alpha, G, A, i): delta = math.exp(((- alpha) * sum([(A[u] == 1) for u in G.outIterator(i)]))) for j in G.outIterator(i): if (A[j] == 1): djplus = sum([(A[u] == 1) for u in G.inIterator(j)]) delta += (math.exp(((- alpha) * (djplus + 1))) - math.exp((...
_class class FDM_EDMPrecond(torch.nn.Module): def __init__(self, img_resolution, img_channels, label_dim=0, use_fp16=False, sigma_min=0.002, sigma_max=80.0, sigma_data=0.5, model_type='SongUNet', fdm_beta_d=19.9, fdm_beta_min=0.1, fdm_multiplier=1.0, **model_kwargs): super().__init__() self.img_reso...
def log_device_names():
    """Log the current CUDA device name for this rank; no-op without CUDA."""
    if not torch.cuda.is_available():
        return
    name = torch.cuda.get_device_name()
    logger.info(f'CUDA Device {get_rank()} is: {name}')
def test_call_with_arguments(): module_block = BasicBlock([Instr('LOAD_CONST', arg='a'), Instr('LOAD_NAME', arg='int'), Instr('LOAD_CONST', arg='b'), Instr('LOAD_NAME', arg='int'), Instr('BUILD_TUPLE', arg=4), Instr('LOAD_CONST', arg=dummy_code_object), Instr('LOAD_CONST', arg='callee'), Instr('MAKE_FUNCTION', arg=...
class TestAuxiliary(): def test_vectorize(self): P = PersLandscapeExact(dgms=[np.array([[0, 5], [1, 4]])]) Q = vectorize(P, start=0, stop=5, num_steps=6) assert (Q.hom_deg == P.hom_deg) assert (Q.start == 0) assert (Q.stop == 5) np.testing.assert_array_equal(Q.values,...
def rollout_func(episode_dict: NewEpisodeDict, client: RayInferenceClient, servers: Dict[(str, RayInferenceWorkerSet)], rollout_config, server_runtime_config, evaluate): env_rets = client.env.reset(fragment_length=rollout_config['fragment_length'], max_step=rollout_config['max_step']) (processed_env_ret, datafr...
(repr=False, frozen=True) class Filter(): matchers: tuple[(Matcher, ...)] def __repr__(self) -> str: inner = ' && '.join((matcher.label for matcher in self.matchers)) return f'<{self.__class__.__name__}: [{inner}]>' def match(self, ctx: HasAPIOperation) -> bool: return all((matcher.m...
def server():
    """Yield-style fixture: start a PluginHTTPServer on an ephemeral port,
    hand it to the caller, and stop it on teardown if still running.

    NOTE(review): the original line read `def server = PluginHTTPServer(...)`,
    which is invalid Python — presumably a `@pytest.fixture` decorator was
    stripped when this snippet was extracted. Reconstructed as the generator
    fixture the body implies; confirm against the original test module.
    """
    server = PluginHTTPServer(host='127.0.0.1', port=0)
    server.start()
    yield server
    # Teardown: only stop if the test did not already shut it down.
    if server.is_running():
        server.stop()
def contrastive_collate_fn(data, tokenizer, num_sample, strategy='random'): dummy_inputs = tokenizer('', '', return_token_type_ids=True) batch_size = len(data) all_input_ids = [] all_token_type_ids = [] all_sample_masks = [] for feat in data: if (strategy == 'random'): (input...
def main(): args = docopt('\n Usage:\n counts2pmi.py <counts>\n ') counts_path = args['<counts>'] words = Counter() contexts = Counter() relations = Counter() with gzip.open(counts_path) as f: for line in f: split = line.decode('utf-8').strip().split() ...
def test_reload(): print('Reloadable DaCe program test') array_one = np.random.rand(10).astype(np.float64) array_two = np.random.rand(20).astype(np.float64) output_one = np.zeros(10, dtype=np.float64) output_two = np.zeros(20, dtype=np.float64) prog_one = program_generator(10, 2.0) prog_two ...
def register_Ns3Ipv4EndPointDemux_methods(root_module, cls): cls.add_constructor([param('ns3::Ipv4EndPointDemux const &', 'arg0')]) cls.add_constructor([]) cls.add_method('Allocate', 'ns3::Ipv4EndPoint *', []) cls.add_method('Allocate', 'ns3::Ipv4EndPoint *', [param('ns3::Ipv4Address', 'address')]) ...
class TextDFDatasetForGen(Dataset): def __init__(self, df, in_memory: bool=False, split: str=None, train_ratio: float=1, omitted_labels=None): if (omitted_labels is not None): df = df.loc[(~ df['truth'].isin(omitted_labels))] if ((train_ratio != 1) and (split != None)): shuff...
class VCRDataset(BaseDataset): def __init__(self, *args, split='', **kwargs): assert (split in ['train', 'val', 'test']) self.split = split self.metadata = None self._load_metadata() if (split == 'train'): names = ['vcr_train'] elif (split == 'val'): ...
class NumPromptTokensRunExpander(ScenarioSpecRunExpander):
    """Run expander that sweeps over the number of prompt tokens."""

    # Identifier used to select this expander by name.
    name = 'num_prompt_tokens'
    # Named sweeps of prompt-token counts; 'default_sweep' is the standard grid.
    values_dict = {'default_sweep': [1, 256, 512, 1024, 1536]}
def interpret_distro_name(location, basename, metadata, py_version=None, precedence=SOURCE_DIST, platform=None): parts = basename.split('-') if ((not py_version) and any((re.match('py\\d\\.\\d$', p) for p in parts[2:]))): return for p in range(1, (len(parts) + 1)): (yield Distribution(locati...
def db_to_float(db, using_amplitude=True):
    """Convert a decibel value to the equivalent linear ratio.

    Amplitude ratios use the 20 dB/decade convention; power ratios use
    10 dB/decade.
    """
    level = float(db)
    divisor = 20 if using_amplitude else 10
    return 10 ** (level / divisor)
class Interface(WithEqualityById, ParentWithBase): def __init__(self, name): self.__name = name self.__coerce_name = (('_' + name.lower()) + '_') self.__seq = (- 1) self._available_vars = [] self._seed = None ParentWithBase.__init__(self, self) def _repr_(self): ...
def encode(label):
    """Multi-hot encode *label* over the global CLASSES vocabulary.

    Each entry of *label* is mapped through C2I to an index that is set to 1.0.
    """
    vec = np.zeros(len(CLASSES), dtype=np.float32)
    hot_indices = [C2I[name] for name in label]
    vec[hot_indices] = 1.0
    return vec
class SpeechT5ForTextToSpeech(metaclass=DummyObject):
    """Placeholder for the real class when the torch backend is unavailable."""

    # Backends that must be installed before the real class can be used.
    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        # Delegates the backend check; presumably raises an informative error
        # when torch is missing (behavior of requires_backends — defined elsewhere).
        requires_backends(self, ['torch'])
def make_compute_state(sdfg): state = sdfg.add_state('histogram_fpga') a = state.add_array('A_in', (H, W), dtype, storage=dace.dtypes.StorageType.FPGA_Global) hist = state.add_array('hist_buffer', (num_bins,), dace.uint32, transient=True, storage=dace.dtypes.StorageType.FPGA_Local) (entry, exit) = state...
class QueryStub(object): def __init__(self, channel): self.QueryOnline = channel.unary_unary('/infaas.internal.Query/QueryOnline', request_serializer=query__pb2.QueryOnlineRequest.SerializeToString, response_deserializer=query__pb2.QueryOnlineResponse.FromString) self.QueryOffline = channel.unary_un...
def extract_ones_data(H, pivots): (onecol, onerow, non_onecol, non_onerow) = ones(H, pivots) verbose(('extract_ones -- got submatrix of size %s' % len(non_onecol))) if (len(non_onecol) in [1, 2]): C = H.matrix_from_rows_and_columns(onerow, non_onecol) D = H.matrix_from_rows_and_columns(non_o...
_utils.test() def test_struct(): class C(): i: int f: float n = 16 x = C.field(shape=n) y = C.field(shape=n) x[1].i = 2 x[2].i = 4 y[0].f = 1.0 y[2].i = 3 x.copy_from(y) assert (x[0].f == 1.0) assert (x[1].i == 0) assert (x[2].i == 3) assert (y[0].f ==...
def count_frames(directory):
    """Count consecutively numbered frame pickles in *directory*.

    Frames are named with a 12-digit zero-padded index ('000000000000.pickle',
    '000000000001.pickle', ...); counting stops at the first missing index.
    """
    index = 0
    while True:
        candidate = os.path.join(directory, f'{index:012d}.pickle')
        if not os.path.isfile(candidate):
            return index
        index += 1
def create_ARSC_test_episode(prefix: str='data/ARSC-Yu/raw', n_query: int=5, n_unlabeled=0, set_type: str='test'): assert (set_type in ('test', 'dev')) labels = [line.strip() for line in open(f'{prefix}/workspace.target.list', 'r').readlines()] label = random.choice(labels) binary_task = random.choice([...
class Cnn1DC3_DTanh(Convolution1DArchitectureBase, NeuralNetworkTrainingDefault): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.use_gpu = False def build_model(self, x_shape, y_shape): self.assert_shapes(x_shape, y_shape) assert (x_shape[1:] == (606,...
def parse_args(): parser = argparse.ArgumentParser(description='Calibrates network output') parser.add_argument('raw', type=str, help='Sample name, like "test".') parser.add_argument('calibration', type=str) parser.add_argument('--name', type=str, default='', help='File name appendix for results.') ...
class PartialFC(nn.Module): def __init__(self, embedding_size, num_classes, sample_rate, cls_type, scale, margin): super().__init__() self.embedding_size = embedding_size self.num_classes = num_classes self.sample_rate = sample_rate self.world_size = comm.get_world_size() ...
def vat_noise_backward(grad_inputs, inputs, input_shapes, outputs, output_shapes, base_axis=1, eps=1.0):
    """Backward pass for the VAT-noise function — deliberately unsupported."""
    # The unpacks below mirror the calling convention of sibling *_backward
    # helpers; they are never used because this path always raises.
    dy = grad_inputs[0]
    x0 = inputs[0]
    raise NotImplementedError('vat_noise_backward is not implemented.')
def declare(type=None, value=_Unspecified, **kwds):
    """Cython-compatibility shim: instantiate *type*, or pass *value* through.

    When *type* is a real callable (not None/object), return type(value) —
    or type() if no value was supplied. Otherwise return *value* as-is.
    """
    # Guard clause: nothing to construct for None/object or non-callables.
    if type in (None, object) or not hasattr(type, '__call__'):
        return value
    if value is _Unspecified:
        return type()
    return type(value)
def get_image_and_label(image_path, label_path): image = cv2.imread(image_path, cv2.IMREAD_COLOR) image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) image = np.float32(image) label = cv2.imread(label_path, cv2.IMREAD_GRAYSCALE) if ((image.shape[0] != label.shape[0]) or (image.shape[1] != label.shape[1])...
class ToricLattice_sublattice_with_basis(ToricLattice_generic, FreeModule_submodule_with_basis_pid): def _repr_(self): s = 'Sublattice ' s += '<' s += ', '.join(map(str, self.basis())) s += '>' return s def _latex_(self): s = '\\left\\langle' s += ', '.joi...
def run_tasks(config_path, cuda_devices):
    """Launch the multi-GPU task runner via the shell; raise on nonzero exit.

    NOTE(review): the command is built by string interpolation and run through
    the shell — confirm config_path/cuda_devices are never attacker-controlled.
    """
    cmd = (
        f'HYDRA_CONFIG_PATH={config_path} '
        f'python run_tasks_on_multiple_gpus.py cuda_devices={cuda_devices}'
    )
    log.info(f'Command: {cmd}')
    exit_code = os.system(cmd)
    if exit_code != 0:
        raise RuntimeError(exit_code)
    return exit_code
def save_to_ckpt(ckpt_file, epoch, model, optimizer, lr_scheduler): ckpt_file = (ckpt_file + '_{}'.format(epoch)) print(get_time(), 'save to ckpt {}'.format(ckpt_file)) torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(), 'optimizer_state_dict': optimizer.state_dict(), 'lr_scheduler': lr_sche...
def reslike_block(nf, num_groups=None, bottle_neck: bool=False, **conv_kwargs): nf_inner = ((nf / 2) if bottle_neck else nf) return SequentialEx(conv_block(num_groups=num_groups, c_in=nf, c_out=nf_inner, ks=3, stride=1, padding=1, **conv_kwargs), conv_block(num_groups=num_groups, c_in=nf_inner, c_out=nf, ks=3, ...
def test_number_protocol():
    """The bound number-protocol helper must match Python operator results."""
    for a, b in [(1, 1), (3, 5)]:
        expected = [
            a == b, a != b, a < b, a <= b, a > b, a >= b,
            a + b, a - b, a * b, a / b,
            a | b, a & b, a ^ b, a >> b, a << b,
        ]
        assert m.test_number_protocol(a, b) == expected
class CudnnModule(object): def __init__(self, m): self.__dict__ = m.__dict__ self.__old_mod = m enabled = ContextProp(torch._C._get_cudnn_enabled, torch._C._set_cudnn_enabled) deterministic = ContextProp(torch._C._get_cudnn_deterministic, torch._C._set_cudnn_deterministic) benchmark = Co...
_module() class BFP(nn.Module): def __init__(self, in_channels, num_levels, refine_level=2, refine_type=None, conv_cfg=None, norm_cfg=None): super(BFP, self).__init__() assert (refine_type in [None, 'conv', 'non_local']) self.in_channels = in_channels self.num_levels = num_levels ...
def run(): parser = argparse.ArgumentParser(description='Clean contigs', usage='circlator clean [options] <in.fasta> <outprefix>') parser.add_argument('--min_contig_length', type=int, help='Contigs shorter than this are discarded (unless specified using --keep) [%(default)s]', default=2000, metavar='INT') p...
def func_set_import_config(config): func_list = [] with open(config, 'r') as f: for func_decl in f.readlines(): func_decl = func_decl.strip() if func_decl.startswith(';'): continue else: func_list.append(func_decl.split(',')[0].strip())...
def test_OptionType_transformations(): expected = [1, 2, None, 4, 5, 6, 7, 8, 9, None, None, None, 123] indexedoptionarray = ak.from_iter(expected, highlevel=False) assert isinstance(indexedoptionarray, ak.contents.IndexedOptionArray) for valid_when in [False, True]: bytemaskedarray = indexedopt...
class SKUNet(nn.Module): def __init__(self, bilinear=True): super(SKUNet, self).__init__() self.bilinear = bilinear self.down1 = nn.Conv2d(kernel_size=9, padding=4, in_channels=3, out_channels=32) self.down2 = SKDown(3, 1, False, 16, 32, 64) self.down3 = SKDown(3, 1, False, 1...
_torch class TrainerCallbackTest(unittest.TestCase): def setUp(self): self.output_dir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.output_dir) def get_trainer(self, a=0, b=0, train_len=64, eval_len=64, callbacks=None, disable_tqdm=False, **kwargs): train_dataset = Regr...
class StableDiffusion(): config_name: str = 'stable_diffusion' def __init__(self, weights_path: str): pass def finetune(self, dataset: Text2ImageDataset, logger=True): pass def generate(self, texts: Optional[Union[(List[str], str)]]=None, dataset: Optional[Text2ImageDataset]=None): ...
def mk_gparams_register_modules_internal(h_files_full_path, path): assert isinstance(h_files_full_path, list) assert check_dir_exists(path) cmds = [] mod_cmds = [] mod_descrs = [] fullname = os.path.join(path, 'gparams_register_modules.cpp') fout = open(fullname, 'w') fout.write('// Auto...
def Edges(self):
    """Yield each edge-iterator position from BegEI up to (not including) EndEI.

    NOTE: EndEI() is re-evaluated every iteration, matching the original loop.
    """
    edge_iter = self.BegEI()
    while edge_iter < self.EndEI():
        yield edge_iter
        edge_iter.Next()
class TrivialValuationFactory(UniqueFactory): def __init__(self, clazz, parent, *args, **kwargs): UniqueFactory.__init__(self, *args, **kwargs) self._class = clazz self._parent = parent def create_key(self, domain): return (domain,) def create_object(self, version, key, **ext...
def get_args(): parser = argparse.ArgumentParser('*** TGB: EdgeBank ***') parser.add_argument('-d', '--data', type=str, help='Dataset name', default='tgbl-comment') parser.add_argument('--bs', type=int, help='Batch size', default=200) parser.add_argument('--k_value', type=int, help='k_value for computin...
def GetSequenceOrderCorrelationFactor(ProteinSequence, k=1, AAP=[]): LengthSequence = len(ProteinSequence) res = [] for i in range((LengthSequence - k)): AA1 = ProteinSequence[i] AA2 = ProteinSequence[(i + k)] res.append(GetCorrelationFunction(AA1, AA2, AAP)) result = round((sum(...
def DerivJvecTest(inputSetup, comp='All', freq=False, expMap=True): (m, simulation) = nsem.utils.test_utils.setupSimpegNSEM_PrimarySecondary(inputSetup, [freq], comp=comp, singleFreq=False) print('Using {0} solver for the simulation'.format(simulation.solver)) print('Derivative test of Jvec for eForm primar...
def test_bytestring_array(): array = ak.contents.ListOffsetArray(ak.index.Index64([0, 5, 10]), ak.contents.NumpyArray(np.frombuffer(b'hellothere', 'u1'), parameters={'__array__': 'byte'}), parameters={'__array__': 'bytestring'}) assert (ak.operations.to_json(array, convert_bytes=bytes.decode) == '["hello","ther...
def test_approx_fprime():
    """approx_fprime should reproduce the analytic gradient and Hessian."""
    grad_est = optimize.approx_fprime(himmelblau_x0, himmelblau)
    assert_allclose(grad_est, himmelblau_grad(himmelblau_x0), rtol=5e-06)
    # Differentiating the gradient numerically approximates the Hessian.
    hess_est = optimize.approx_fprime(himmelblau_x0, himmelblau_grad)
    assert_allclose(hess_est, himmelblau_hess(himmelblau_x0), rtol=5e-06)
def test_raise_build_error():
    """raise_build_error must surface an ImportError to the caller."""
    err = ImportError()
    with pytest.raises(ImportError):
        raise_build_error(err)
class Conv1x1Linear(nn.Module): def __init__(self, in_channels, out_channels, stride=1, bn=True): super(Conv1x1Linear, self).__init__() self.conv = nn.Conv2d(in_channels, out_channels, 1, stride=stride, padding=0, bias=False) self.bn = None if bn: self.bn = nn.BatchNorm2d...
class ResnetGenerator(nn.Module): def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False, n_blocks=6, padding_type='reflect'): assert (n_blocks >= 0) super(ResnetGenerator, self).__init__() self.input_nc = input_nc self.output_nc = output_nc ...
def try_record_real_gap_from_current(statistics: Stats, optimizer: Optimizer, real_theta, pre_computed_gap=None, gap_name='gap'): if statistics.has_statistic(gap_name): if (pre_computed_gap is None): if (real_theta is None): gap = 0 else: with torch.no...
class Dataset(torch.utils.data.Dataset): def __init__(self, inputs): (self.questions, self.functions, self.func_depends, self.func_inputs, self.choices, self.answers) = inputs self.is_test = (len(self.answers) == 0) def __getitem__(self, index): question = torch.LongTensor(self.questions...