code
stringlengths
101
5.91M
def get_optimizer(args, params_list): if (args.optim is None): if (options['dataset'] == 'tinyimagenet'): optimizer = torch.optim.Adam(params_list, lr=args.lr) else: optimizer = torch.optim.SGD(params_list, lr=args.lr, momentum=0.9, weight_decay=args.weight_decay) elif (a...
def convert(in_file, out_file): checkpoint = torch.load(in_file) in_state_dict = checkpoint.pop('state_dict') out_state_dict = OrderedDict() meta_info = checkpoint['meta'] parse_config(('#' + meta_info['config'])) for (key, value) in in_state_dict.items(): if ('extra' in key): ...
def single_post_process(key, chunk): file_graph_name = os.path.join(os.path.join(RAW_FILE, key), (('graph_' + key) + '_gnn.jsonl.gz')) count = 0 with gzip.GzipFile(file_graph_name, 'wb') as gnn_file: for file in tqdm(chunk): class_name = file.split('_')[1] if os.path.exists(o...
def get_original_lm(lm: np.ndarray, image_size_original: tuple, image_size_new: tuple) -> np.ndarray:
    """Map each landmark from the resized image back to original-image coordinates.

    Each row of ``lm`` is treated as an (x, y) pair and remapped via
    ``get_original_xy``; extra columns, if any, are dropped.
    Returns a new ndarray of remapped (x, y) pairs.
    """
    remapped = [
        get_original_xy((point[0], point[1]), image_size_original, image_size_new)
        for point in lm
    ]
    return np.array(remapped)
def get_neighbors(input_matrix: sparse.csr_matrix, node: int, transpose: bool=False) -> np.ndarray:
    """Return the column indices of the nonzero entries in row ``node``.

    With ``transpose=True`` the matrix is transposed (and re-converted to CSR)
    first, so the result is the incoming rather than outgoing neighbors.
    """
    matrix = sparse.csr_matrix(input_matrix.T) if transpose else input_matrix
    # CSR stores row i's column indices in indices[indptr[i]:indptr[i+1]].
    start, stop = matrix.indptr[node], matrix.indptr[node + 1]
    return matrix.indices[start:stop]
def _available_envs(): cmd = ['conda', 'env', 'list'] p = subprocess.run(cmd, check=True, capture_output=True, text=True) lines = p.stdout.splitlines() envs = {} for line in map(str.strip, lines): if ((not line) or line.startswith('#')): continue parts = line.split() ...
def blob_blob_force_new(r, *args, **kwargs): L = kwargs.get('periodic_length') eps = kwargs.get('repulsion_strength') b = kwargs.get('debye_length') project_to_periodic_image(r, L) r_norm = np.linalg.norm(r) return ((((- ((eps / b) + (eps / r_norm))) * np.exp(((- r_norm) / b))) * r) / (r_norm **...
def get_class_name_lineno(method): current_frame = inspect.currentframe() for i in range(2): assert (current_frame is not None) current_frame = current_frame.f_back assert (current_frame is not None) class_name = current_frame.f_code.co_name line_no = current_frame.f_code.co_firstlin...
class PolymakeElement(ExtraTabCompletion, InterfaceElement): def _repr_(self): (T1, T2) = self.typeof() P = self._check_valid() name = self._name if T1: Temp = self.typename() if Temp: T1 = Temp if (T1 in ['Matrix', 'Vector']): ...
def eval_performance(datadir, prefix=None, args=None, eval_every=200, out_file_prefix=None, sample_time=2, baselines={}): if (args is None): real_graphs_filename = [(datadir + f) for f in os.listdir(datadir) if re.match((prefix + '.*real.*\\.dat'), f)] pred_graphs_filename = [(datadir + f) for f in ...
def download_url_test(): background_url = ' background_md5 = '68d2efa1b9178cc56df9314c21c6e718' dir_name = '~/test' utils.download_url(dir_name, background_url)
class NERDataset(object): def __init__(self, dataset: list, w_pad: int, c_pad: int, token_per_batch: int): super(NERDataset, self).__init__() self.dataset = dataset self.w_pad = w_pad self.c_pad = c_pad self.token_per_batch = token_per_batch self.construct_index() ...
class LossCrossEntropy(LossFunction):
    """Cross-entropy loss wrapper around the framework's native core object.

    Looks up the core 'LossCrossEntropy' implementation for the requested
    dtype and hands it to the LossFunction base class.
    """

    def __init__(self, dtype=bb.DType.FP32):
        # Instantiate the dtype-specific native loss and delegate to the base wrapper.
        core_loss = bb.search_core_object('LossCrossEntropy', [dtype]).create()
        super(LossCrossEntropy, self).__init__(core_loss=core_loss)
def _enable_faulthandler(): try: import faulthandler faulthandler.enable() print('Faulthandler enabled') except Exception: print('Could not enable faulthandler')
def test_algo_count():
    """The database should expose exactly 60 distinct algorithm titles."""
    entries = rldb.find_all({})
    distinct_titles = {entry['algo-title'] for entry in entries}
    assert len(distinct_titles) == 60
def build_model(params, with_dis): (src_dico, _src_emb) = load_embeddings(params, source=True) params.src_dico = src_dico src_emb = nn.Embedding(len(src_dico), params.emb_dim, sparse=True) src_emb.weight.data.copy_(_src_emb) if params.tgt_lang: (tgt_dico, _tgt_emb) = load_embeddings(params, ...
def _augment_exception(exc, version, arch=''): message = exc.args[0] if (('vcvarsall' in message.lower()) or ('visual c' in message.lower())): tmpl = 'Microsoft Visual C++ {version:0.1f} is required.' message = tmpl.format(**locals()) msdownload = 'www.microsoft.com/download/details.aspx...
class LastNWindowCollector(ModelLayer): def __init__(self, model, input_record, num_to_collect, name='last_n_window_collector', **kwargs): super(LastNWindowCollector, self).__init__(model, name, input_record, **kwargs) assert (num_to_collect > 0) self.num_to_collect = num_to_collect ...
def test_get_target_index_in_order(esm_sampler_fixture): sampler = esm_sampler_fixture (last_i, target_indexes) = sampler.get_target_index_in_order(batch_size=2, indexes=[0, 1, 2, 3], next_i=1, num_positions=2) assert (len(target_indexes) == 2) assert (last_i == 3) assert (target_indexes == [[2, 3],...
def tanh_backward(grad_inputs, inputs, input_shapes, outputs, output_shapes):
    """Backward pass of tanh: dx = dy * (1 - y**2), using the cached forward output y.

    ``input_shapes`` and ``output_shapes`` are part of the op-backward signature
    but unused here.
    """
    upstream = grad_inputs[0]
    _forward_in = inputs[0]  # read for parity with the original (unused in the gradient)
    activation = outputs[0]
    return upstream * (1 - activation ** 2)
class Updater(object): def __init__(self, *, config, network, device, initial_learning_rate=1.0): self.config = config self.learning_rate = initial_learning_rate self._effective_learning_rate = self.learning_rate self.network = network self._device = device self._curr...
def span_masking(sentence, spans, tokens, pad, mask_id, pad_len, mask, replacement='word_piece', endpoints='external'): sentence = np.copy(sentence) sent_length = len(sentence) target = np.full(sent_length, pad) pair_targets = [] spans = merge_intervals(spans) assert (len(mask) == sum([((e - s) ...
def _parse_arg(value, desc): if (desc == 'none'): return value if ((desc == 'v') or (not _is_value(value))): return value if value.node().mustBeNone(): return None if (value.node().kind() == 'onnx::Constant'): tval = value.node()['value'] if (desc == 'i'): ...
class BetterDataset(Dataset): def match_span_segment(seg, string): cursor = 0 found = [] for (i, s) in enumerate(seg): if (s == string[cursor]): cursor += 1 if (cursor == len(string)): for x in reversed(range(len(string))): ...
_criterion('vocab_parallel_cross_entropy') class VocabParallelCrossEntropyCriterion(FairseqCriterion): def __init__(self, task, sentence_avg): super().__init__(task) self.sentence_avg = sentence_avg if (not has_megatron_submodule): raise ImportError('\n\nPlease install the megatr...
def gamma_pdf(x, a):
    """Density of the Gamma(a, scale=1) distribution at x.

    Computed in log space for numerical stability. For x <= 0 returns the
    limiting value: 0 when a >= 1, +inf otherwise (density diverges at 0).
    """
    if x > 0:
        log_density = (a - 1.0) * math.log(x) - x - math.lgamma(a)
        return math.exp(log_density)
    return 0 if a >= 1 else np.inf
def test_settings_first(testdir, plugin): parameters = {'parameters': [integer(name='id', required=True)]} testdir.make_test(f''' () {('.asyncio' if (plugin == 'pytest_asyncio') else '')} (max_examples=5) async def test_(request, case): request.config.HYPOTHESIS_CASES += 1 assert case.full_path == "/v1/...
class SplinterModel(metaclass=DummyObject):
    """Placeholder emitted when torch is unavailable.

    Instantiating it calls requires_backends, which raises an informative
    error telling the user to install the 'torch' backend.
    """
    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['torch'])
def eval(model, criterion, data): total_loss = 0 total_words = 0 total_num_correct = 0 model.eval() for i in range(len(data)): batch = data[i][:(- 1)] outputs = model(batch) targets = batch[1][1:] (loss, _, num_correct) = memoryEfficientLoss(outputs, targets, model.ge...
def _length_sum(partial_primitives): if (len(partial_primitives) == 0): return 0 total = 0 for primitive in partial_primitives.values(): if isinstance(primitive, instantiators['circle']): total += circumference(primitive) elif isinstance(primitive, instantiators['line']):...
def test_different_face_detectors(): for detector in detectors: res = DeepFace.verify('dataset/img1.jpg', 'dataset/img2.jpg', detector_backend=detector) assert isinstance(res, dict) assert ('verified' in res.keys()) assert (res['verified'] in [True, False]) assert ('distance'...
def main(seed=None): logger.info('Parsing Spec...') spec = S.parse(toy_spec_str) logger.info('Parsing succeeded') logger.info('Building synthesizer...') synthesizer = Synthesizer(enumerator=RandomEnumerator(spec, max_depth=4, seed=seed), decider=ExampleDecider(interpreter=ToyInterpreter(), examples=...
def test(): def reproduce(arrays): out = np.zeros(len(arrays), np.int64) i = 0 for values in arrays: for p in values: out[i] = p i += 1 break return out numpyarray = ak.contents.NumpyArray(np.arange(100, 200, 10)) in...
def rad_centered(n, cutoff):
    """Build n radial symmetry-function parameter dicts with centers evenly
    spaced from 0.5 to cutoff - 1.0.

    Each entry's eta is 0.5 / center**2 and mu is fixed at 0.0.
    Returns (list_of_param_dicts, n, 0). Requires n >= 2 (the spacing
    divides by n - 1).
    """
    first = 0.5
    last = cutoff - 1.0
    step = (last - first) / float(n - 1)
    params = []
    for i in range(n):
        center = first + i * step
        params.append({'rad': {'cutoff': cutoff, 'eta': 0.5 / center ** 2, 'mu': 0.0}})
    return (params, n, 0)
def main(base_config, config, seed): config_name = config.split('/')[(- 1)].split('.')[0] run_name = ((('FMNIST' + '_') + config_name) + f'_{seed}') model_path = (('models/' + run_name) + '_model.pt') delta_path = (('models/' + run_name) + '_delta.pt') cmd_train = f'python marglik_training/train_mar...
class CubexSolver(): def __call__(self, facets): return self.solve(facets) def solve(self, facets): s = self.format_cube(facets) cmd = ((shlex.quote(sage.features.rubiks.cubex().absolute_filename()) + ' ') + s) child = pexpect.spawn(cmd) ix = child.expect(['210.*?:', '^5\...
def main(params): imgs = json.load(open(params['input_json'], 'r')) imgs = imgs['images'] N = len(imgs) if (params['fc_input_dir'] is not None): print('processing fc') with h5py.File(params['fc_output']) as file_fc: for (i, img) in enumerate(tqdm(imgs)): npy_f...
def create_metadata(output_filename, n_sessions, configs, utterances_dict, words_dict, rir_list, impulsive_noises_list=None, background_noises_list=None): dataset_metadata = {} for n_sess in tqdm(range(n_sessions)): c_speakers = np.random.choice(list(utterances_dict.keys()), configs['n_speakers'], repla...
class NumpyVersion(): def __init__(self, vstring): self.vstring = vstring ver_main = re.match('\\d[.]\\d+[.]\\d+', vstring) if (not ver_main): raise ValueError('Not a valid numpy version string') self.version = ver_main.group() (self.major, self.minor, self.bugfix...
class OneBit(MultiBit):
    """One-bit specialization of MultiBit: identical behavior with the
    mode argument m pinned to 'max'."""

    def __init__(self, *args, **kwargs):
        # Delegate everything to MultiBit, forcing m='max'.
        super().__init__(*args, m='max', **kwargs)
def main(): result = dict() prior_appearance = get_prior_appearance() invalid_count = 0 total_count = 0 for qid in question.keys(): cur_que = question[qid]['question'] img_id = question[qid]['imageId'] cur_scene_graph = ori_scene_graph[img_id]['objects'] cur_bbox = np...
def calc_f1(y_true, y_pred):
    """Micro- and macro-averaged F1 after argmax over the class axis.

    Both inputs are (n_samples, n_classes) score/one-hot arrays.
    Returns (micro_f1, macro_f1).
    """
    true_labels = np.argmax(y_true, axis=1)
    pred_labels = np.argmax(y_pred, axis=1)
    micro = metrics.f1_score(true_labels, pred_labels, average='micro')
    macro = metrics.f1_score(true_labels, pred_labels, average='macro')
    return (micro, macro)
def construct_comp_exp_vec(rfv_to_ev_dict, q): comp_exp_vec_dict = {} for residue_field_vector in rfv_to_ev_dict: rf_vector_complement = tuple([((q + 1) - j) for j in residue_field_vector]) exponent_vector_list = rfv_to_ev_dict[residue_field_vector][:] exponent_vector_complement_list = r...
class AltCLIPConfig(PretrainedConfig): model_type = 'altclip' is_composition = True def __init__(self, text_config=None, vision_config=None, projection_dim=768, logit_scale_init_value=2.6592, **kwargs): text_config_dict = kwargs.pop('text_config_dict', None) vision_config_dict = kwargs.pop('...
def test_gh18123(tmp_path): lines = [' %%MatrixMarket matrix coordinate real general\n', '5 5 3\n', '2 3 1.0\n', '3 4 2.0\n', '3 5 3.0\n'] test_file = (tmp_path / 'test.mtx') with open(test_file, 'w') as f: f.writelines(lines) mmread(test_file)
def replace_line_in_file(line, line_num_to_replace, filename): with open(filename, 'r+', encoding='utf-8') as f: d = f.readlines() f.seek(0) for (idx, i) in enumerate(d): if (idx == line_num_to_replace): f.write(line) else: f.write(i) ...
def repeat(t: DATA_TYPE, args: Tuple):
    """Tile ``t`` according to ``args``: torch tensors use Tensor.repeat,
    numpy arrays use np.tile. Any other type falls through and returns None.
    """
    if is_tensor(t):
        return t.repeat(*args)
    if is_array(t):
        return np.tile(t, args)
def snode_deactivate_dynamic(b: template()):
    # Kernel body: deactivate cell b at every active index I of its parent snode.
    # NOTE(review): presumably a taichi kernel (the @kernel decorator is not
    # visible in this view) — confirm at the call site.
    for I in grouped(b.parent()):
        deactivate(b, I)
def hash_profile(data):
    """Fingerprint a profile record.

    Stacks data['profile'] (a sequence of bbox arrays) into one matrix,
    hashes its raw contiguous buffer with SHA-256, and returns
    (hex_digest, data['tmp_uid']).
    """
    stacked = np.vstack(data['profile'])
    # ascontiguousarray guarantees a stable memory layout for hashing.
    digest = sha256(np.ascontiguousarray(stacked).flatten()).hexdigest()
    return (digest, data['tmp_uid'])
def preprocess(text):
    """Tokenize ``text`` with the module-level ``nlp`` pipeline and return a
    string with one space-joined sentence per line."""
    parsed = nlp(text)
    sentence_lines = []
    for sentence in parsed.sentences:
        sentence_lines.append(' '.join(token.text for token in sentence.tokens))
    return '\n'.join(sentence_lines)
class TestF90ReturnInteger(TestReturnInteger): suffix = '.f90' code = '\nmodule f90_return_integer\n contains\n function t0(value)\n integer :: value\n integer :: t0\n t0 = value\n end function t0\n function t1(value)\n integer(kind=1) :: value\n intege...
class CNN(nn.Module): def __init__(self, in_channels, out_channels, n_len_seg, n_classes, device, verbose=False): super(CNN, self).__init__() self.n_len_seg = n_len_seg self.n_classes = n_classes self.in_channels = in_channels self.out_channels = out_channels self.dev...
def ebp_resnet50_128(): 'VGGFace2 resnet-50-128d: sys.path.append('../models/resnet50_128_pytorch') import resnet50_128 net = resnet50_128.resnet50_128('../models/resnet50_128_pytorch/resnet50_128.pth') wb = xfr.models.whitebox.Whitebox(xfr.models.whitebox.Whitebox_resnet50_128(net)) x_probe =...
class MaxTimeCriteria(StoppingCriteria): def __init__(self, max_time: float, initial_timestamp: Optional[float]=None): self.max_time = max_time self.initial_timestamp = (time.time() if (initial_timestamp is None) else initial_timestamp) _start_docstrings(STOPPING_CRITERIA_INPUTS_DOCSTRING) d...
.parametrize('inshape, outmaps, kernel, pad, stride, dilation, group, base_axis', [((1, 2, 1, 4, 4), 16, (3, 3), None, None, None, 1, 2), ((1, 2, 2, 2, 8), 8, (1, 1, 3), (0, 0, 1), (1, 1, 2), (1, 1, 2), 2, 1)]) .parametrize('w_init', [None, I.NormalInitializer(), True]) .parametrize('b_init', [None, I.ConstantInitializ...
def adjust_optimizer(optimizer, epoch, config): def modify_optimizer(optimizer, setting): if ('optimizer' in setting): optimizer = __optimizers[setting['optimizer']](optimizer.param_groups) logging.debug(('OPTIMIZER - setting method = %s' % setting['optimizer'])) for param_gr...
class TestMapping(unittest.TestCase): def test_bwa_index(self): ref = os.path.join(data_dir, 'mapping_test_bwa_index.fa') outprefix = 'tmp.mapping_test.bwa_index' mapping.bwa_index(ref, outprefix) expected_files = [((outprefix + '.') + x) for x in ['amb', 'ann', 'bwt', 'pac', 'sa']] ...
def get_source(file_name): interface_data = load_interface_files(file_name) f = io.StringIO() f.write('#include "clusol.h"\n') f.write('\n') f.write('// declarations for fortran function calls\n') for interface_func in interface_data: if (interface_func['format'] == 'f90'): f...
def abc_stats(design_file, abc_binary, stats): abc_command = (('read_verilog ' + design_file) + '; print_stats') try: proc = check_output([abc_binary, '-c', abc_command]) lines = proc.decode('utf-8').split('\n') for line in lines: if ('i/o' in line): ob = re.s...
class Progress(Infinite): def __init__(self, *args, **kwargs): super(Progress, self).__init__(*args, **kwargs) self.max = kwargs.get('max', 100) def eta(self): return int(ceil((self.avg * self.remaining))) def eta_td(self): return timedelta(seconds=self.eta) def percent(s...
def generator_loss(disc_outputs):
    """LSGAN-style generator loss.

    For each discriminator output d, the term is mean((1 - d)**2); returns
    (sum_of_terms, list_of_per_discriminator_terms).
    """
    per_disc = []
    total = 0
    for score in disc_outputs:
        term = torch.mean((1 - score.float()) ** 2)
        per_disc.append(term)
        total += term
    return (total, per_disc)
def main(): args = get_arg() random.seed(RAND_SEED) np.random.seed(RAND_SEED) torch.manual_seed(RAND_SEED) data = load_stage3_data(datatrack=args.datatrack, feat_type=args.feat_type, i_cv=args.i_cv) if (args.method == 'svgp'): model = SVGP(stage='stage2') elif (args.method == 'exactg...
_grad() def validate(model, val_loader, cfg): model.eval() n_itc_ex = 0 n_t2i_corrects = 0 n_i2t_corrects = 0 itc_loss = 0 st = time.time() val_log = {'valid/itc_loss': 0, 'valid/i2t_acc': 0, 'valid/t2i_acc': 0} debug_step = 5 val_loaders = (val_loader if isinstance(val_loader, dict)...
def test_divmod(Poly): c1 = list((random((4,)) + 0.5)) c2 = list((random((3,)) + 0.5)) c3 = list((random((2,)) + 0.5)) p1 = Poly(c1) p2 = Poly(c2) p3 = Poly(c3) p4 = ((p1 * p2) + p3) c4 = list(p4.coef) (quo, rem) = divmod(p4, p2) assert_poly_almost_equal(quo, p1) assert_poly_...
class ConvBNActivation(nn.Sequential): def __init__(self, in_planes: int, out_planes: int, kernel_size: int=3, stride: int=1, groups: int=1, bn_norm=None, activation_layer: Optional[Callable[(..., nn.Module)]]=None, dilation: int=1) -> None: padding = (((kernel_size - 1) // 2) * dilation) if (activa...
class ForecastBasedNN(nn.Module): def __init__(self, config: ForecastBasedNNParams): super(ForecastBasedNN, self).__init__() self.config = config self.device = set_device(self.config.gpu) self.topk = self.config.topk self.feature_type = self.config.feature_type self.l...
def load_by_class(loader, num_classes): train_set = loader.dataset subsets = {} if (len(train_set.__getitem__(0)) == 3): try: subsets = {target: torch.utils.data.Subset(train_set, [i for (i, (x, y, _)) in enumerate(train_set) if (y == target)]) for target in range(num_classes)} e...
def test_siblings_get_binary_examples_1d_2(digraph, features_1d, labels): policy = SiblingsPolicy(digraph, features_1d, labels) ground_truth_x = [3, 4, 5, 6, 7, 8, 1, 2] ground_truth_y = [1, 1, 1, 1, 1, 1, 0, 0] (x, y, weights) = policy.get_binary_examples('2') assert_array_equal(ground_truth_x, x) ...
def save_gold_mention_statistics(train_extracted_mentions, dev_extracted_mentions, test_extracted_mentions): logger.info('Calculate mention statistics...') all_data_mentions = ((train_extracted_mentions + dev_extracted_mentions) + test_extracted_mentions) filename = 'mention_stats.txt' calc_split_statis...
class Feedforward(nn.Sequential): def __init__(self, input_dim, hidden_dim=None, num_layers=2, output_dim=None, BatchNorm=BatchNorm, Activation=nn.ReLU, bias=True, **kwargs): super().__init__() hidden_dim = (hidden_dim or input_dim) output_dim = (output_dim or hidden_dim) for i in ra...
def label_object(item: Generated, name: str, explode: (bool | None)) -> None: if explode: new = make_delimited(item[name], '.') else: object_items = map(str, sum(force_dict((item[name] or {})).items(), ())) new = ','.join(object_items) if new: item[name] = f'.{new}' else:...
.gpu def test_persistent_fusion_interstate(): N = dace.symbol('N', dtype=dace.int64) (auto_optimize=False, device=dace.DeviceType.GPU) def func(A: dace.float64[N], B: dace.float64[N]): a = 10.2 for t in range(1, 10): if (t < N): A[:] = (((A + B) + a) / 2) ...
def default_detection_train_config(): h = OmegaConf.create() h.skip_crowd_during_training = True h.input_rand_hflip = True h.train_scale_min = 0.1 h.train_scale_max = 2.0 h.autoaugment_policy = None h.momentum = 0.9 h.learning_rate = 0.08 h.lr_warmup_init = 0.008 h.lr_warmup_epoc...
class ProgressiveScaling(): def __init__(self, progressive_scaling, num_scales=4): self.num_scales = num_scales if (progressive_scaling > 0.0): self.progressive_scaling = np.float32(([(progressive_scaling * (i + 1)) for i in range((num_scales - 1))] + [1.0])) else: se...
class UpliftIterator(): def __init__(self, treatment_col: np.ndarray, target: np.ndarray, mode: bool, task: Task, n_folds: int=5): self.task = task self.n_folds = n_folds self.mode = mode idx = np.arange(treatment_col.shape[0]) flg = (treatment_col.astype(np.bool) == self.mod...
def main() -> None: parser = argparse.ArgumentParser() parser.add_argument('--env', type=str, default='Pendulum-v1') parser.add_argument('--seed', type=int, default=1) parser.add_argument('--n-steps', type=int, default=1) parser.add_argument('--gpu', action='store_true') args = parser.parse_args...
class UnionArray(Content): def __init__(self, tags, index, contents): assert isinstance(tags, list) assert isinstance(index, list) assert isinstance(contents, list) assert (len(index) >= len(tags)) for x in tags: assert isinstance(x, int) assert (0 <= ...
def idx_parser(text):
    """Find the first 'digit, whitespace, word-chars' run in ``text`` and
    return int() of the whole matched span, or None when nothing matches.

    NOTE(review): because the full match (including any trailing word chars)
    is passed to int(), inputs like '1 abc' raise ValueError — confirm the
    expected input format only ever has whitespace/newline after the digit.
    """
    hit = re.search('\\d\\s\\w*', text)
    return int(hit.group(0)) if hit else None
def getVel(pos): vel = V for i in range(maxElements): uv = (pos - sources[i].pos) uv[0] *= (screen[1] / screen[0]) vel += ((uv * sources[i].q) / ((2 * tm.pi) * ((uv[0] ** 2) + (uv[1] ** 2)))) uv = (pos - vortexes[i].pos) uv = vec2((- uv[1]), uv[0]) uv[0] *= (scree...
def test_props_file():
    # Start a CoreNLP server configured from the test props file, annotate the
    # English sample document in text format, and compare (whitespace-trimmed)
    # against the stored gold output.
    with corenlp.CoreNLPClient(properties=SERVER_TEST_PROPS, server_id='test_server_start_props_file') as client:
        ann = client.annotate(EN_DOC, output_format='text')
        assert (ann.strip() == EN_PROPS_FILE_GOLD.strip())
def test_is_none(): array = ak.Array([None, [None], [{'x': None, 'y': None}], [{'x': [None], 'y': [None]}], [{'x': [1], 'y': [[None]]}], [{'x': [2], 'y': [[1, 2, 3]]}]]) assert (ak.is_none(array, axis=0).tolist() == [True, False, False, False, False, False]) assert (ak.is_none(array, axis=1).tolist() == [No...
def plot_curve(data, ylabel, smooth=False, save_path=''): keys = list(data.keys()) values = list(data.values()) if smooth: window_size = 9 assert ((window_size % 2) == 1) keys = keys[(window_size // 2):(- (window_size // 2))] values = np.convolve(values, (np.ones(window_size)...
def orderSpanList(tree, eduIds): queue = [tree] while queue: node = queue.pop(0) eduCovered = [] setEduCovered(node, eduIds, eduCovered) node.eduCovered = sortEdu(eduCovered, eduIds) node.eduspan = tuple([node.eduCovered[0], node.eduCovered[(- 1)]]) for m in node....
def run_command(cmd_fmt: str, *md5):
    """Format ``cmd_fmt`` with the paths looked up by each md5 key in the
    module-level ``files`` mapping, then execute it through the shell.

    Returns the CompletedProcess when every md5 key is truthy; when any key is
    falsy nothing runs and the function implicitly returns None.
    """
    if all(md5):
        # NOTE(review): shell=True with interpolated paths — acceptable for
        # trusted local files, but confirm ``files`` values can never contain
        # shell metacharacters.
        cmd_fmt = cmd_fmt.format(*(files[x] for x in md5))
        out = subprocess.run(cmd_fmt, shell=True, capture_output=True)
        return out
class _ModuleProviderAction(Enum):
    """Disposition of a module during packaging.

    The numeric values are part of the serialized format — do not renumber.
    Member semantics beyond the names are not visible here; presumably
    INTERN bundles the module, EXTERN/MOCK/DENY/SKIP control how imports of
    it are resolved — confirm against the exporter that consumes this enum.
    """
    INTERN = 1
    EXTERN = 2
    MOCK = 3
    DENY = 4
    # Marker seen on modules in repackaged archives that were already mocked.
    REPACKAGED_MOCK_MODULE = 5
    SKIP = 6
class Permutation(CombinatorialElement): _keyword(deprecation=35233, check_input='check') def __classcall_private__(cls, l, check=True): if isinstance(l, Permutation): return l elif isinstance(l, PermutationGroupElement): l = l.domain() elif isinstance(l, str): ...
def extract_files(datapath, type=None):
    """Collect .wav paths under ``datapath``.

    With ``type`` given (NOTE: parameter shadows the builtin, kept for
    interface compatibility), looks in the ``datapath/type`` subdirectory and
    orders files numerically by the integer after 'fileid_' in the filename.
    Without it, returns all .wav files directly under ``datapath`` in
    lexicographic order.
    """
    if not type:
        return sorted(glob.glob(datapath + '/*.wav'))
    subdir = os.path.join(datapath, type)
    matches = glob.glob(subdir + '/*.wav')
    matches.sort(key=lambda p: int(p.split('fileid_')[-1].strip('.wav')))
    return matches
def parse_win_kpoints(f):
    """Parse the 'begin kpoints' ... 'end kpoints' block from an open
    wannier90 .win file handle.

    Skips forward to the line containing 'begin kpoints', then converts each
    following line into a tuple of floats via parse_line_list until the
    'end kpoints' line. Loops forever if the begin marker is absent (same as
    the original behavior).
    """
    line = f.readline()
    while 'begin kpoints' not in line:
        line = f.readline()
    points = []
    for row in f.readlines():
        if 'end kpoints' in row:
            break
        points.append(tuple(parse_line_list(row, ' ', float)))
    return points
def register_Ns3CallbackImplBase_methods(root_module, cls): cls.add_constructor([]) cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')]) cls.add_method('GetTypeid', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True) cls.add_method('IsEqual', 'bool', [param('ns3::Pt...
class FileDataset(data.Dataset): def __init__(self, benchmark, data_file, transform=None, add_idx=False): super().__init__() self.root_dir = DATA_PATH self.benchmark = benchmark self.data_file = data_file self.transform = transform (self.names, self.labels) = _dataset...
def register_types(module): root_module = module.get_root() module.add_enum('EnvironmentType', ['UrbanEnvironment', 'SubUrbanEnvironment', 'OpenAreasEnvironment'], import_from_module='ns.propagation') module.add_enum('CitySize', ['SmallCity', 'MediumCity', 'LargeCity'], import_from_module='ns.propagation') ...
class ASR_Brain(sb.Brain): def compute_forward(self, batch, stage): batch = batch.to(self.device) (wavs, wav_lens) = batch.sig if (stage == sb.Stage.TRAIN): if hasattr(self.hparams, 'env_corrupt'): wavs_noise = self.hparams.env_corrupt(wavs, wav_lens) ...
def test_union_numpy_empty_1_parm():
    # Round-trip: a union type string carrying both a per-member parameter and
    # a union-level parameter should parse to an awkward UnionType and
    # stringify back to exactly the same text.
    text = 'union[float64[parameters={"wonky": "boop"}], unknown, parameters={"pratyush": "das"}]'
    parsedtype = deduce_type(text)
    assert isinstance(parsedtype, ak.types.UnionType)
    assert (str(parsedtype) == text)
class HashableDict(): def __init__(self, obj): self.keys = tuple(sorted(obj)) self.values = tuple((as_hashable(obj[k]) for k in self.keys)) self.hash = hash((HashableDict, *self.keys), self.values) def __hash__(self): return self.hash def __eq__(self, other): return (...
.lower_builtin('append', ArrayBuilderType, numba.types.UnicodeType) def lower_append_string(context, builder, sig, args): return lower_string(context, builder, sig, args)
class AugmentOp(): def __init__(self, name, prob=0.5, magnitude=10, hparams=None): hparams = (hparams or _HPARAMS_DEFAULT) self.name = name self.aug_fn = NAME_TO_OP[name] self.level_fn = LEVEL_TO_ARG[name] self.prob = prob self.magnitude = magnitude self.hpara...
def test_asymptotic_calculator_one_poi(): with pytest.raises(TypeError): AsymptoticCalculator() (loss, (mean, sigma)) = create_loss() calc = AsymptoticCalculator(loss, Minuit()) poi_null = POIarray(mean, [1.15, 1.2, 1.25]) poi_alt = POI(mean, 1.2) dataset = calc.asimov_dataset(poi_alt) ...
(st.integers()) def test_set_get_seed(seed): rng = randomness.Random() rng.seed(seed) assert (rng.get_seed() == seed)
class TimeShift(object): def __init__(self, shift_factor=(30, 50), p=0.5): self.shift_factor = shift_factor self.p = p def __call__(self, wav): if (random.random() < self.p): shift = int(((wav.shape[1] * random.randint(self.shift_factor[0], self.shift_factor[1])) / 100)) ...
class EntrySelector(object):
    """Factory base for entry selectors."""

    @staticmethod
    def from_string(spec: str) -> 'EntrySelector':
        """Build a selector from its string spec.

        '*' selects all entries; any other spec selects by field.

        Fix: the original defined this as a regular method with no ``self``
        parameter, so it only worked when called on the class
        (``EntrySelector.from_string(spec)``) and broke on instances, where
        the instance was bound as ``spec``. ``@staticmethod`` keeps the
        class-level call working and makes instance-level calls correct too.
        """
        if spec == '*':
            return AllEntrySelector()
        return FieldEntrySelector(spec)