code
stringlengths
101
5.91M
def test_make_model_with_tensors(): def make_model(nominal, lumi_sigma, corrup_data, corrdn_data, stater_data, normsys_up, normsys_dn, uncorr_data): spec = {'channels': [{'name': 'achannel', 'samples': [{'name': 'background', 'data': nominal, 'modifiers': [{'name': 'mu', 'type': 'normfactor', 'data': None},...
class Table(Node):
    """Schema node for a database table: holds its field list and an optional row count."""

    def __init__(self, name, n_name=None, caseless=True):
        # TABLE is a module-level node-kind constant; name handling (n_name,
        # caseless) is delegated entirely to the Node base class.
        super().__init__(TABLE, name, n_name, caseless)
        self.fields = []      # filled in by callers — presumably the schema parser; verify
        self.num_rows = None  # unknown until set externally (e.g. from table statistics)

    def num_fields(self):
        """Return the number of fields currently attached to this table."""
        return len(self.fields)
_level_function() def to_arrow_table(array, *, list_to32=False, string_to32=False, bytestring_to32=False, emptyarray_to=None, categorical_as_dictionary=False, extensionarray=True, count_nulls=True): (yield (array,)) return _impl(array, list_to32, string_to32, bytestring_to32, emptyarray_to, categorical_as_dicti...
def main(args): (detect_tp, correct_tp, pos, neg, fp) = (0, 0, 0, 0, 0) pred_dict = dict() truth_dict = dict() fpred = open(args.pred_file, 'r', encoding='utf-8') ftruth = open(args.truth_file, 'r', encoding='utf-8') for (idx, (pred, truth)) in enumerate(zip(fpred, ftruth)): pred_tokens ...
def TCliqueOverlap_GetRelativeComplement(A, B, Complement):
    """Thin SWIG wrapper delegating to the native ``_snap`` implementation.

    ``Complement`` is presumably an output container filled by the C++ side —
    confirm exact semantics against the SNAP TCliqueOverlap documentation.
    """
    return _snap.TCliqueOverlap_GetRelativeComplement(A, B, Complement)
class CropSegmentationDataset(SustainBenchDataset): _dataset_name = 'crop_delineation' _versions_dict = {'1.1': {'download_url': ' 'compressed_size': }} def __init__(self, version=None, root_dir='data', download=False, split_scheme='official', oracle_training_set=False, seed=111, filled_mask=False, use_ood_...
def test_choose_type_or_negate_negate(inferred_signature):
    """With negation probability forced to 1.0, the chooser must never return the given type."""
    # Force the negate branch to always fire.
    config.configuration.test_creation.negate_type = 1.0
    int_info = inferred_signature.type_system.to_type_info(int)
    chosen = inferred_signature._choose_type_or_negate(OrderedSet((int_info,)))
    assert chosen != inferred_signature.type_system.convert_type_hint(int)
class SearchTrainer(object): def __init__(self, train_data, val_data, search_optim, criterion, scheduler, config, args): self.train_data = train_data self.val_data = val_data self.search_optim = search_optim self.criterion = criterion self.scheduler = scheduler self.s...
def test_upload_image(testdir): testdir.make_petstore_test('\(endpoint="/pet/{petId}/uploadImage$")\(max_examples=5, deadline=None)\ndef test_(request, case):\n assume(case.body is not NOT_SET)\n assert_int(case.path_parameters["petId"])\n if case.operation.schema.spec_version == "2.0":\n assume("ad...
def register_coco_instances(name, metadata, json_file, image_root): assert isinstance(name, str), name assert isinstance(json_file, (str, os.PathLike)), json_file assert isinstance(image_root, (str, os.PathLike)), image_root DatasetCatalog.register(name, (lambda : load_coco_json(json_file, image_root, n...
(torch.is_vulkan_available(), 'Vulkan backend must be available for these tests.') class TestVulkanRewritePass(TestCase): def validate_transformed_module(self, pattern_count_map, data_shape, prepack_removal=False, fuse_clamping_ops=False): module_instance = self scripted_model = torch.jit.script(mod...
def _cast(value, dtype): if isinstance(value, torch.Tensor): is_eligible = (value.is_floating_point() and value.is_cuda and (value.dtype is not torch.float64)) return (value.to(dtype) if is_eligible else value) elif isinstance(value, string_classes): return value elif isinstance(valu...
def main(args): rows = [] for json_fn in args.llava_json: with open(json_fn) as f: rows.extend(json.load(f)) def gen(rows): for row in rows: img_path = row['image'] fn = os.path.join(args.image_folder, img_path) if (not os.path.exists(fn)): ...
class Config(): def __init__(self, items=None): self.dict = {} self.typed_dict = {} self.network_topology_json = None self.files = [] if (items is not None): self.typed_dict.update(items) def __getstate__(self): import io from pickle import Pic...
def Dennis_calc(TP, FP, FN, TN):
    """Compute Dennis' index from confusion-matrix counts.

    Returns the string 'None' when the index is undefined (e.g. a zero total
    or a zero expected count makes the division/sqrt fail) — this mirrors the
    surrounding metrics' convention of signalling "not computable" as 'None'.
    """
    try:
        total = TP + FP + FN + TN
        expected = (TP + FP) * (TP + FN) / total
        return (TP - expected) / math.sqrt(expected)
    except Exception:
        return 'None'
def _f_p_r_lcs(llcs, m, n): r_lcs = (llcs / m) p_lcs = (llcs / n) beta = (p_lcs / (r_lcs + 1e-12)) num = (((1 + (beta ** 2)) * r_lcs) * p_lcs) denom = (r_lcs + ((beta ** 2) * p_lcs)) f_lcs = (num / (denom + 1e-12)) return (f_lcs, p_lcs, r_lcs)
class EpsilonWrapper(object): def __init__(self, env, attrs=('distance_threshold', 'rotation_threshold'), compute_reward_with_internal=None): self.env = env if hasattr(self.env, 'mode'): assert (self.env.mode == 0) if (compute_reward_with_internal is not None): self.c...
def load_dataset(encode_labels, rng): data = load_breast_cancer() X = data.data y = data.target if (encode_labels is not None): y = np.take(encode_labels, y) (X_train, X_test, y_train, y_test) = train_test_split(X, y, test_size=0.33, random_state=rng) scalar = StandardScaler() X_trai...
def get_dataloaders(args, pipe_config: Optional[PipelineConfig]=None, dataset_keywords: Optional[Dict[(str, Any)]]=None): if (dataset_keywords is None): dataset_keywords = dict() if (not is_explicit_non_seperated_dataset(args)): (train_dl, test_dl, samplers, extra) = get_separate_dls_from_args(a...
def python_kernel_fn(n, recv_conn, send_conn, p_conn1, p_conn2): import pickle import cloudpickle import traceback import os from scannerpy import Config, DeviceType, DeviceHandle, KernelConfig p_conn1.close() p_conn2.close() try: kernel_config = KernelConfig(cloudpickle.loads(n[...
def container_checker(obj, target_type) -> bool: origin_type = get_origin(target_type) check_args_exist(target_type) if ((origin_type is list) or (origin_type is List)): check_empty_containers(obj) if (not isinstance(obj, list)): return False arg_type = get_args(target_ty...
class EvernoteManagerReadNotebook(VirtualFunctionTool): name = 'EvernoteManagerReadNotebook' summary = 'Retrieve the content of a notebook by its unique identifier.' parameters: List[ArgParameter] = [{'name': 'notebook_id', 'type': 'string', 'description': 'The unique identifier of the notebook.', 'required...
def attention_simple(inputs, timesteps): input_dim = int(inputs.shape[(- 1)]) a = Permute((2, 1), name='transpose')(inputs) a = Dense(timesteps, activation='softmax', name='attention_probs')(a) a_probs = Permute((2, 1), name='attention_vec')(a) output_attention_mul = Multiply(name='focused_attention...
_config
def task_finetune_vqa():
    """Experiment configuration for fine-tuning on VQA.

    NOTE(review): the bare ``_config`` above is almost certainly a decorator
    (e.g. ``@ex.config``) whose ``@...`` prefix was lost during extraction —
    confirm against the original source. In that (sacred-style) pattern the
    local variable names below are captured as config entries, so they must
    not be renamed.
    """
    exp_name = 'finetune_vqa'
    datasets = ['vqa']
    loss_names = _loss_names({'vqa': 1})
    batch_size = 256
    max_epoch = 10
    max_steps = None  # None: derive step count from max_epoch instead
    warmup_steps = 0.1  # < 1, so presumably a fraction of total steps — confirm
    draw_false_image = 0
    learning_rate = 0.0001
    val_check_interval = 0.5
    lr_mult = 10  # presumably a learning-rate multiplier for the task head — verify
def parse_req_from_line(name, line_source): if is_url(name): marker_sep = '; ' else: marker_sep = ';' if (marker_sep in name): (name, markers_as_string) = name.split(marker_sep, 1) markers_as_string = markers_as_string.strip() if (not markers_as_string): m...
def pretty_str(envinfo): def replace_nones(dct, replacement='Could not collect'): for key in dct.keys(): if (dct[key] is not None): continue dct[key] = replacement return dct def replace_bools(dct, true='Yes', false='No'): for key in dct.keys(): ...
def fc(input, output_shape, is_train, info=False, norm='batch', activation_fn=lrelu, name='fc'): with tf.variable_scope(name): _ = slim.fully_connected(input, output_shape, activation_fn=None) _ = bn_act(_, is_train, norm=norm, activation_fn=activation_fn) if info: log.info('{} {...
def RandomGNP(n, p, seed=None, fast=True, algorithm='Sage'): if (n < 0): raise ValueError('The number of nodes must be positive or null.') if ((0.0 > p) or (1.0 < p)): raise ValueError('The probability p must be in [0..1].') if (p == 1): from sage.graphs.generators.basic import Compl...
def output_model_structure(layer, indent=0): print(('%s%s %s' % ((' ' * indent), layer.name, type(layer)))) if hasattr(layer, 'input_layers'): for inp in layer.input_layers: output_model_structure(inp, indent=(indent + 1)) elif hasattr(layer, 'input_layer'): output_model_structu...
def compute_random_num(seed: int, num_1: int, num_2: int, modulus: int=100): network_num_1 = struct.pack('!q', num_1) network_num_2 = struct.pack('!q', num_2) network_seed = struct.pack('!q', seed) to_hash = ((network_seed + network_num_1) + network_num_2) hash_object = hashlib.sha256() hash_obj...
class KoBARTGecDataset(Dataset): def __init__(self, filename, tok, max_len, pad_index=0, ignore_index=(- 100), data_split_type='val', train_mode='normal'): super().__init__() self.tok = tok self.max_len = max_len self.docs = self.read_docs(filename) self.len = len(self.docs) ...
def _copy_sources(): shutil.rmtree(SRC_DIR_LOCAL, ignore_errors=True) os.mkdir(SRC_DIR_LOCAL) shutil.copy(os.path.join(SRC_DIR_REPO, 'LICENSE.txt'), SRC_DIR_LOCAL) shutil.copy(os.path.join(SRC_DIR_REPO, 'z3.pc.cmake.in'), SRC_DIR_LOCAL) shutil.copy(os.path.join(SRC_DIR_REPO, 'CMakeLists.txt'), SRC_D...
class CompleteDyckWords(DyckWords): Element = DyckWord_complete def __contains__(self, x) -> bool: if isinstance(x, DyckWord_complete): return True if (not isinstance(x, list)): return False if (len(x) % 2): return False return is_a(x, (len(x) ...
class Cycle(UniqueRepresentation, Parent): def __init__(self, n=5): self._n = n Parent.__init__(self, category=Graphs()) def _repr_(self): return 'An example of a graph: the {}-cycle'.format(self._n) def an_element(self): return self(0) def vertices(self): return ...
def _detector(imgfile): im = np.array(PIL.Image.open(imgfile)) if torch.cuda.is_available(): gpu_index = 0 else: gpu_index = (- 1) net = strface.detection.FasterRCNN(model_dir='../python/strface/models/detection', gpu_index=gpu_index, conf_threshold=None, rotate_flags=None, rotate_thresh...
def evaluate_summarization(args): scorer = SummarizationScorer(align=args.align) scores = [] for (doc, refs, hypo) in zip(open(args.doc).readlines(), open(args.refs).readlines(), open(args.hypo).readlines()): (doc, refs, hypo) = (doc.strip(), refs.strip().split('|||'), hypo.strip()) if ((doc...
_if_no_torch def test_llama_rms_norm(): import torch from transformers.models.llama.modeling_llama import LlamaRMSNorm as HFLlamaRMSNorm config = _get_llama_config() ln = LlamaRMSNorm.init(config.Embed, eps=config.layer_norm_epsilon, use_bias=config.use_bias) hf_ln = HFLlamaRMSNorm(config.Embed.size...
class ConvBertLayer(metaclass=DummyObject):
    # Auto-generated placeholder used when torch is not installed: constructing
    # it calls requires_backends, which raises a clear "requires torch" error
    # instead of a confusing ImportError at use time.
    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['torch'])
class MJCONTACT(Structure): _fields_ = [('dist', c_double), ('pos', (c_double * 3)), ('frame', (c_double * 9)), ('includemargin', c_double), ('friction', (c_double * 5)), ('solref', (c_double * 2)), ('solimp', (c_double * 3)), ('mu', c_double), ('coef', (c_double * 5)), ('zone', c_int), ('dim', c_int), ('geom1', c_...
class T5Paraphraser(nn.Module): def __init__(self, model_path, tokenizer, dropout=0.1): super().__init__() self.tokenizer = tokenizer (self.pad_token_id, self.sos_token_id, self.eos_token_id) = self.tokenizer.convert_tokens_to_ids(['<_PAD_>', '<sos>', '<eos>']) self.special_token_ids...
(name='compile', params=[True, False])
def _compile_fixture(request: Any) -> bool:
    # Parametrized fixture: dependent tests run once with compile=True and once
    # with compile=False.
    # NOTE(review): the bare "(name=..., params=...)" expression above is not
    # valid standalone Python — almost certainly a "@pytest.fixture(...)"
    # decorator whose "@pytest.fixture" prefix was lost during extraction;
    # confirm against the original source.
    return request.param
def full_like(g, input, fill_value, dtype=None, layout=None, device=None, pin_memory=False, memory_format=None): fill_value = sym_help._maybe_get_const(fill_value, 'f') if sym_help._is_value(fill_value): dtype = (6 if (dtype is None) else dtype) tmp = zeros_like(g, input, dtype, layout, device) ...
def pytest_generate_tests(metafunc): if ('openapi_version' in metafunc.fixturenames): marker = metafunc.definition.get_closest_marker('openapi_version') if (marker is not None): variants = [(OpenAPIVersion(variant) if isinstance(variant, str) else variant) for variant in marker.args] ...
class HerbertTokenizer(XLMTokenizer): vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES def __init__(self, **kwargs): kwarg...
class SFaceModel(): def __init__(self, model_path): self.model = cv.FaceRecognizerSF.create(model=model_path, config='', backend_id=0, target_id=0) self.layers = [_Layer()] def predict(self, image: np.ndarray) -> np.ndarray: input_blob = (image[0] * 255).astype(np.uint8) embeddin...
class VarianceEstimator(T.nn.Module): def __init__(self, model: T.nn.Module, callback=None): super().__init__() self.model = model self.state = VarianceEstimatorImpl(callback) def variance(self): return self.state.variance def forward(self, input: T.Tensor, *args, **kwargs): ...
def test_ssurgeon_existing_mwt_no_change(): semgrex_pattern = "{word:It}=it . {word:/'s/}=s" ssurgeon_edits = ["EditNode -node it -is_mwt true -is_first_mwt true -mwt_text It's", "EditNode -node s -is_mwt true -is_first_mwt false -mwt_text It's"] doc = CoNLL.conll2doc(input_str=EXISTING_MWT_DOC_INPUT) ...
class Face(Element):
    """An XML 'Face' element whose text is the comma-separated coordinate triple."""

    def __init__(self, x=0, y=0, z=0):
        Element.__init__(self, 'Face')
        # str() on each coordinate, joined by commas — identical output to the
        # original chained concatenation.
        self.text = ','.join((str(x), str(y), str(z)))
class BiLSTM(nn.Module): def __init__(self, rnn_layers, dropout, num_classes, audio_hidden_dims, audio_embed_size): super(BiLSTM, self).__init__() self.lstm_net_audio = nn.GRU(audio_embed_size, audio_hidden_dims, num_layers=rnn_layers, dropout=dropout, batch_first=True) self.fc_audio = nn.Se...
_module() class Transpose(): def __init__(self, keys, order): self.keys = keys self.order = order def __call__(self, results): for key in self.keys: results[key] = results[key].transpose(self.order) return results def __repr__(self): return (self.__class__...
class NbsDataLoaderCls(BaseDataLoader): def __init__(self, dataset, batch_size, n_a, cpus, seed=0, val_splitter=_get_split_indices_cls): super().__init__(dataset, batch_size, cpus, True, seed, val_splitter) self.n_a = n_a self.groups = _get_kfolded_indices_rgs(self.split_indices[0], self.dat...
def to_tensor(data): if isinstance(data, torch.Tensor): return data if isinstance(data, np.ndarray): return torch.from_numpy(data) if (isinstance(data, Sequence) and (not mmcv.is_str(data))): return torch.tensor(data) if isinstance(data, int): return torch.LongTensor([dat...
def test_branch_subscope_nofission(): sdfg = dace.SDFG('branch_subscope_nofission') sdfg.add_symbol('i', dace.int32) sdfg.add_array('A', [2], dace.int32) init_state = sdfg.add_state('init') guard_1 = sdfg.add_state('guard_1') guard_2 = sdfg.add_state('guard_2') right1_state = sdfg.add_state(...
def test_bound_constrained0(): def fg(x): n = len(x) c = np.arange(n) f = (x.dot(x) + c.dot(x)) g = ((2 * x) + c) return (f, g) n = 5 x0 = np.ones(n) c = np.arange(n) n2 = (n // 2) bnds = ([(0, 1)] * n2) bnds += ([((- inf), 1)] * (n - n2)) lb = (([...
class LukeForMultipleChoice(metaclass=DummyObject):
    # Auto-generated placeholder used when torch is not installed: constructing
    # it calls requires_backends, which raises a clear "requires torch" error
    # instead of a confusing ImportError at use time.
    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['torch'])
class One(nn.Module):
    """A module that ignores its input and always returns the constant 1.0."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        # The input is deliberately unused; the output is always the float 1.0.
        return 1.0
def parse_args(args=None): parser = argparse.ArgumentParser(description='Training and Testing Knowledge Graph Embedding Models', usage='main.py [<args>] [-h | --help]') parser.add_argument('--data', type=str, help='cn15k or nl27k') parser.add_argument('--task', type=str, help='mse or ndcg') return parse...
def remove_builtin_slots(dataset): filtered_dataset = deepcopy(dataset) for intent_data in itervalues(filtered_dataset[INTENTS]): for utterance in intent_data[UTTERANCES]: utterance[DATA] = [chunk for chunk in utterance[DATA] if ((ENTITY not in chunk) or (not is_builtin_entity(chunk[ENTITY])...
class TFIDF(): def __init__(self, map: t.Dict[(int, t.List[int])]): self.__map = map self.__o = Counter((feature for feature_list in self.__map.values() for feature in feature_list)) self.__maxi = max(self.__o.values()) self.__total_documents = len(self.__map) self.__idfo = {...
def suite(): suite = unittest.TestSuite() suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(AWSCreateFunction)) suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(AWSInvokeFunctionSDK)) suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(AWSInvokeFunctionHTTP)) re...
def get_job_types(trace_file):
    """Return the job type (first tab-separated column) of every line in *trace_file*."""
    with open(trace_file, 'r') as handle:
        # A line with no tab yields itself (newline included) — same as the original.
        return [line.split('\t')[0] for line in handle]
class BiasDisparityBS(BaseMetric): def __init__(self, recommendations, config, params, eval_objects, additional_data): super().__init__(recommendations, config, params, eval_objects, additional_data) self._train = self._evaluation_objects.data.train_dict self._item_clustering_path = self._ad...
def two_c4_2_bridge():
    """Build a MultiGraph: two 4-cycles connected by a doubled (2, 4) bridge edge."""
    graph = nx.MultiGraph()
    first_cycle = [(0, 1), (0, 2), (1, 3), (2, 3)]
    second_cycle = [(4, 5), (5, 6), (6, 7), (7, 4)]
    double_bridge = [(2, 4), (2, 4)]  # parallel edges — hence the MultiGraph
    graph.add_edges_from(first_cycle + second_cycle + double_bridge)
    return graph
def register_Ns3NodeContainer_methods(root_module, cls): cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')]) cls.add_constructor([]) cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')]) cls.add_constructor([param('std::string', 'nodeName')]) cls.add_constructor([param('ns3::N...
def yield_misclassified_indices(images, labels, predictions, true_label_to_consider=None, predicted_label_to_consider=None): misclassified_indicators = (predictions.cpu() != labels.cpu()) if (true_label_to_consider is not None): misclassified_indicators = (misclassified_indicators & (labels.cpu() == tru...
def load_checkpoint(args, trainer, **passthrough_args): if (args.distributed_rank == 0): os.makedirs(args.save_dir, exist_ok=True) if (args.restore_file == 'checkpoint_last.pt'): checkpoint_path = os.path.join(args.save_dir, 'checkpoint_last.pt') else: checkpoint_path = args.restore_...
def cfg(key: str=None, **kwargs):
    """Look up a single configuration value, or return the whole configuration.

    Args:
        key: configuration key to fetch; when None the entire module-level
            ``config_dict`` object is returned instead.
        **kwargs: forwarded verbatim to ``config_dict.get_item`` — presumably
            supports a default/fallback option; confirm in the ConfigDict API.
    """
    if (key is not None):
        return config_dict.get_item(key, **kwargs)
    else:
        return config_dict
def get_logger(name, log_file=None, log_level=logging.INFO): logger = logging.getLogger(name) if (name in logger_initialized): return logger for logger_name in logger_initialized: if name.startswith(logger_name): return logger stream_handler = logging.StreamHandler() hand...
def test_dont_record_objectproxy_instance_check_2():
    """An isinstance check whose tuple contains ObjectProxy itself must not be recorded."""
    wrapped = tt.ObjectProxy(42)
    with tt.shim_isinstance():
        assert isinstance(wrapped, (tt.ObjectProxy, bytes))
    trace = tt.UsageTraceNode.from_proxy(wrapped)
    assert len(trace.type_checks) == 0
class AlignModelTester(): def __init__(self, parent, text_kwargs=None, vision_kwargs=None, is_training=True): if (text_kwargs is None): text_kwargs = {} if (vision_kwargs is None): vision_kwargs = {} self.parent = parent self.text_model_tester = AlignTextModel...
def test_lit_image_classifier():
    """Smoke test: constructing the classifier with a small config must not raise."""
    input_shape = (64, 64, 3)
    num_classes = 2
    LitImageClassifier(
        input_shape,
        num_classes,
        16,
        16,
        EncoderConfig(),
        DecoderConfig(),
        optimizer_init={},
    )
class TestModel(unittest.TestCase): def test_case_ernie_csc(self): error_sentences = ['', '', '', '', ''] correct_sentences = ['', '', '', '', ''] corrector = ErnieCscCorrector() for (sent, correct) in zip(error_sentences, correct_sentences): result = corrector.correct(se...
(TEST_WITH_TSAN, 'Fails with TSAN with the following error: starting new threads after multi-threaded fork is not supported. Dying (set die_after_fork=0 to override)') class TestNamedTupleDataLoader(TestCase): def setUp(self): super(TestNamedTupleDataLoader, self).setUp() self.dataset = NamedTupleDa...
def test_panhead(): in_channels = [128] out_channels = 128 text_repr_type = 'poly' downsample_ratio = 0.25 loss = dict(type='PANLoss') with pytest.raises(AssertionError): panheader = pan_head.PANHead(128, out_channels, downsample_ratio, loss) with pytest.raises(AssertionError): ...
def feature_evaluation(cl_data_file, model, n_way=5, n_support=5, n_query=15, adaptation=False): class_list = cl_data_file.keys() select_class = random.sample(class_list, n_way) z_all = [] for cl in select_class: img_feat = cl_data_file[cl] perm_ids = np.random.permutation(len(img_feat))...
def add_arguments(parser): group = parser.add_argument_group('quant_trainer arguments') group.add_argument('--wprec', type=int, default=8, help='weight precision') group.add_argument('--aprec', type=int, default=8, help='activation precision') group.add_argument('--quant-per-tensor', action='store_true'...
class OutputField(Field):
    """A Field hard-wired to the output role (input=False); all other options are forwarded."""

    def __init__(self, *, prefix=None, desc=None, format=None):
        # Keyword-only by design; 'format' shadows the builtin intentionally to
        # mirror the parent Field's parameter name.
        super().__init__(prefix=prefix, desc=desc, input=False, format=format)
def torch_tensor_repeat(self, *sizes):
    """Meta-device implementation of ``Tensor.repeat``: compute only the output shape.

    Matches real ``torch.Tensor.repeat`` semantics:
      * ``len(sizes)`` must be >= ``self.dim()`` (torch raises otherwise — the
        original code silently produced a wrong/partial result or IndexError);
      * extra leading entries of ``sizes`` become new leading dimensions;
      * the trailing entries multiply the existing dimensions element-wise.

    Args:
        self: the tensor being repeated (only its shape is consulted).
        *sizes: per-dimension repeat counts.
    Returns:
        An uninitialized tensor on the 'meta' device with the repeated shape.
    Raises:
        RuntimeError: if fewer repeat counts than tensor dimensions are given.
    """
    shape = list(self.shape)
    if len(sizes) < len(shape):
        raise RuntimeError(
            'Number of dimensions of repeat dims can not be smaller than '
            'number of dimensions of tensor'
        )
    # Extra leading repeat counts become brand-new leading dimensions.
    lead = len(sizes) - len(shape)
    out_shape = list(sizes[:lead]) + [dim * rep for dim, rep in zip(shape, sizes[lead:])]
    return torch.empty(out_shape, device='meta')
_end_docstrings(PIPELINE_INIT_ARGS) class Text2TextGenerationPipeline(Pipeline): return_name = 'generated' def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.check_model_type((TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING if (self.framework == 'tf') else MODEL_FOR_SEQ_TO_SEQ...
class ManualCurriculumFixed(Curriculum): def __init__(self, upper_bound, allow_empty_context=False): self.upper_bound = upper_bound self.allow_empty_context = allow_empty_context def get(self, length): upper_bound = self.upper_bound[:min(length, len(self.upper_bound))] return upp...
def FreeQuadraticModule(base_ring, rank, inner_product_matrix, sparse=False, inner_product_ring=None): global _cache rank = int(rank) if (inner_product_ring is not None): raise NotImplementedError('an inner_product_ring cannot currently be defined') MS = sage.matrix.matrix_space.MatrixSpace(base...
def measure_inference_speed(cfg, checkpoint, max_iter, log_interval, is_fuse_conv_bn): if cfg.get('cudnn_benchmark', False): torch.backends.cudnn.benchmark = True cfg.model.pretrained = None cfg.data.test.test_mode = True samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1) if (samples_...
def is_enfuzzq(log):
    """True iff the fuzzers recorded in *log* are exactly the EnFuzz-Q ensemble.

    Note: the original declared ``global ENFUZZQ_FUZZERS``, but the name is only
    read, never assigned, so the declaration had no effect and is omitted.
    """
    logged = set(get_fuzzers_from_log(log))
    return logged == set(ENFUZZQ_FUZZERS)
class CustomTrainer(Trainer):
    """Trainer whose loss is the InfoNCE contrast between the two embedding views."""

    def compute_loss(self, model, inputs):
        # The model yields (center, topology) embedding pairs; InfoNCE pulls
        # matching pairs together and pushes mismatches apart.
        center_emb, topology_emb = model(**inputs)
        return infonce(center_emb, topology_emb)
class StoppingCriteriaList(list):
    """A list of stopping criteria; generation stops as soon as any member fires."""

    # NOTE(review): this bare call looks like a decorator on __call__ whose '@'
    # was lost during extraction — confirm against the upstream source.
    _start_docstrings(LOGITS_PROCESSOR_INPUTS_DOCSTRING)

    def __call__(self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs) -> bool:
        # Short-circuit on the first criterion that requests a stop.
        for criterion in self:
            if criterion(input_ids, scores):
                return True
        return False
def get_configs(dataset, al_type): keys = dict() if (dataset == constants.GLAS): keys['knn'] = [40] if (al_type == constants.AL_WSL): keys['freeze_classifier'] = [False] else: keys['freeze_classifier'] = [True] keys['segloss_l'] = [constants.BinCrossEntrop...
def _PBD_4_5_8_9_12_closure(B): BB = [] for X in B: if (len(X) not in [4, 5, 8, 9, 12]): PBD = PBD_4_5_8_9_12(len(X), check=False) X = [[X[i] for i in XX] for XX in PBD] BB.extend(X) else: BB.append(X) return BB
_driver.jit(device=True)
def wrap(x, m, M):
    """Shift x by multiples of (M - m) until it lies within [m, M].

    NOTE(review): '_driver.jit(device=True)' above appears to be a decorator
    whose '@' was lost during extraction — confirm against the original source.
    """
    span = M - m
    while x > M:
        x -= span
    while x < m:
        x += span
    return x
def get_topk_ids_aggregated_from_seq_prediction(logits, topk_per_token, topk_from_batch): (topk_logit_per_token, topk_eids_per_token) = logits.topk(topk_per_token, sorted=False, dim=(- 1)) i = torch.cat([topk_eids_per_token.view(1, (- 1)), torch.zeros(topk_eids_per_token.view((- 1)).size(), dtype=torch.long, de...
def clean_test_dir(strict=False):
    """Remove the test directory tree if present.

    Removal failures are swallowed (best-effort cleanup) unless *strict* is
    True, in which case the exception propagates.
    """
    if not os.path.isdir(_the_test_dir):
        return
    try:
        shutil.rmtree(_the_test_dir)
    except Exception:
        if strict:
            raise
def check_performance(check_str, output_folder, recipe_id, pattern='performance_check=\\[(.*?)\\]'): check = True performance_to_check = re.search(pattern, check_str) if (performance_to_check is None): return check performance_to_check = performance_to_check.group(1).split(',') filename = pe...
def process_text_node(node, sentences, sync_waiting, has_speech, concept, concept_open, task, n, time_end): if ((task == 'slu') and (concept != 'null') and (not concept_open)): sentences[n][0] += (('<' + concept) + '> ') sentences[n][1] += (('<' + concept) + '> _ ') concept_open = True s...
def generate(model_name: str, model: nn.Module, data: Union[(List[torch.Tensor], Dict[(str, torch.Tensor)])], workspace_root: str, input_names: list=None, use_onnx=True): os.makedirs(workspace_root, exist_ok=True) if (input_names is None): if isinstance(model, nn.Module): input_names = get_o...
def main(): parser = argparse.ArgumentParser() parser.add_argument('--input_path', type=str, default='/home/nchaso/EASE/downstreams/text-clustering/data/mewsc16/previous2/ja_sentences.txt') parser.add_argument('--output_path', type=str, default='/home/nchaso/EASE/downstreams/text-clustering/data/mewsc16/ja_...
def _glu_old_input(draw): dims = draw(st.lists(st.integers(min_value=1, max_value=5), min_size=1, max_size=3)) axis = draw(st.integers(min_value=0, max_value=len(dims))) axis_dim = (2 * draw(st.integers(min_value=1, max_value=2))) dims.insert(axis, axis_dim) X = draw(hu.arrays(dims, np.float32, None...
def pad_to_max(pair_targets, pad):
    """Pad every inner list in-place to the length of the longest one.

    Fixes: the original called ``max([...])`` on the lengths, which raises
    ``ValueError`` when *pair_targets* is empty; an empty input now returns []
    unchanged. Non-empty behavior is identical.

    Args:
        pair_targets: list of lists; each inner list is mutated in place.
        pad: element appended to shorter lists until all lengths match.
    Returns:
        The same (mutated) *pair_targets* object, for chaining.
    """
    max_len = max((len(pt) for pt in pair_targets), default=0)
    for pt in pair_targets:
        # extend is a single C-level call instead of a per-element append loop
        pt.extend([pad] * (max_len - len(pt)))
    return pair_targets
def effects_histogram_slider(data_dir='heterogeneous_example_data', show_estimated_effects=False): if (data_dir[(- 1)] != '/'): data_dir += '/' data_filename = (data_dir + 'heterogeneous_example_data.json') data = json.load(open(data_filename)) (x_min, x_max) = (np.inf, (- np.inf)) for n in ...
def smv(L, abstain=(- 1), uncovered=0): y_hat = [] k = np.unique(L[(L != abstain)]).astype(int) k = list(range(min(k), (max(k) + 1))) for row in L: row = list(row[(row != abstain)]) N = len(row) if (not N): y_hat.append([1.0, 0]) else: p = [] ...
class Hashes(object): def __init__(self, hashes=None): self._allowed = ({} if (hashes is None) else hashes) def check_against_chunks(self, chunks): gots = {} for hash_name in iterkeys(self._allowed): try: gots[hash_name] = hashlib.new(hash_name) ex...
class EELAN(nn.Module): def __init__(self, c1, c2, c3): super(EELAN, self).__init__() self.conv1 = Conv(c1, c2, 1, 1) self.conv2 = Conv(c1, c2, 1, 1) self.conv3 = nn.Sequential(Conv(c2, c2, 3, 1), Conv(c2, c2, 3, 1)) self.conv4 = nn.Sequential(Conv(c2, c2, 3, 1), Conv(c2, c2,...