_toolkit()
class Dropbox(FunctionToolkit):
    name_for_human = 'Dropbox'
    description_for_human = 'Toolkit for managing files and folders in Dropbox.'
    name_for_model = 'Dropbox'
    description_for_model = 'A comprehensive toolkit for managing files and folders in Dropbox, including uploading, downloading, shar...
def general_stats_data(path):
    df = pd.read_json(path)
    query_type_label = {'LOCATION': 0, 'DESCRIPTION': 0, 'NUMERIC': 0, 'ENTITY': 0, 'PERSON': 0}
    wfa = 0
    total_judgements = 0
    total_wfa_judgments = 0
    multiple_answers = 0
    multiple_wfa = 0
    total_size = len(df)
    for row in df.iterrows():
        ...
def get_parser():
    parser = argparse.ArgumentParser('plot for UAPs')
    parser.add_argument('--data_path', type=str, default='./UAP_ablation_study_data.csv', help='')
    parser.add_argument('--plot_type', choices=['UAP_length'], default='UAP_length', help='')
    args = parser.parse_args()
    return args
class TestDataNoising(unittest.TestCase):
    def _get_test_data(self, append_eos=True):
        vocab = Dictionary()
        vocab.add_symbol('')
        vocab.add_symbol('llo')
        vocab.add_symbol('how')
        vocab.add_symbol('are')
        vocab.add_symbol('')
        vocab.add_symbol('ou')
        vocab.add...
class CLIPSegVisionConfig(PretrainedConfig):
    model_type = 'clipseg_vision_model'

    def __init__(self, hidden_size=768, intermediate_size=3072, num_hidden_layers=12,
                 num_attention_heads=12, num_channels=3, image_size=224, patch_size=32,
                 hidden_act='quick_gelu', layer_norm_eps=1e-05, attention_dropout=0.0,
                 initiali...
def wrap_nncf_model(model: nn.Module, config: dict, dataloader: DataLoader,
                    init_state_dict: dict) -> tuple[CompressionAlgorithmController, NNCFNetwork]:
    nncf_config = NNCFConfig.from_dict(config)
    if (not dataloader) and (not init_state_dict):
        logger.warning('Either dataloader or NNCF pre-trained mo...
def perm_sym_domain(g):
    if isinstance(g, (tuple, list)):
        if isinstance(g[0], tuple):
            return set().union(*g)
        else:
            return set(g)
    elif isinstance(g, str):
        assert g.startswith('(') and g.endswith(')')
        domain = set().union(*[a for cyc in g[1:-1].split(')(...
def generate_EO_with_etype(sentence, entity_codes, entities_in_utterance, entity_types, empty_token):
    s = sentence.split()
    EO = ['O' for _ in s]
    ent_type_labels = [empty_token for _ in s]
    dict_code2indices_list = {}
    for (e_code, e, cur_type) in zip(entity_codes, entities_in_utterance, entity_types):
        ...
def _get_obj_from_xcomp(deps, is_pas):
    for dep in deps:
        if (dep.pos_ == 'VERB') and (dep.dep_ == 'xcomp'):
            v = dep
            rights = list(v.rights)
            objs = [tok for tok in rights if tok.dep_ in OBJECTS]
            objs.extend(_get_objs_from_prepositions(rights, is_pas))
            ...
def load_combined_test_data(output_path: str):
    id_p_te = load_data_tensors_TW(join(output_path, 'vectors', 'test', 'identifiers_param_test_datapoints_x.npy'))
    id_r_te = load_data_tensors_TW(join(output_path, 'vectors', 'test', 'identifiers_ret_test_datapoints_x.npy'))
    id_v_te = load_data_tensors_TW(join(out...
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
    cls.add_method('Cleanup', '...
def listener():
    rospy.Subscriber('/rtabmap/localization_pose', PoseWithCovarianceStamped, callback)
    sleep(2)
def register_functions(root_module):
    module = root_module
    register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
    register_functions_ns3_Hash(module.get_submodule('Hash'), root_module)
    return
def save_vocabulary(path, vocab):
    with open(path, 'w') as f:
        for w in vocab:
            print(w, file=f)
class GobangNNet():
    def __init__(self, game, args):
        (self.board_x, self.board_y) = game.getBoardSize()
        self.action_size = game.getActionSize()
        self.args = args
        self.input_boards = Input(shape=(self.board_x, self.board_y))
        x_image = Reshape((self.board_x, self.board_y, 1))(sel...
def make_linear_lut(black, white):
    lut = []
    if black == 0:
        for i in range(256):
            lut.append((white * i) // 255)
    else:
        raise NotImplementedError
    return lut
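A quick usage sketch for the LUT builder above; the values are hypothetical, and only the black == 0 path is implemented:

lut = make_linear_lut(0, 255)       # identity mapping: lut[i] == i
assert lut[0] == 0 and lut[255] == 255
lut_dim = make_linear_lut(0, 128)   # scales intensities down to 0..128
assert lut_dim[255] == 128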
def bin_predictions(y_hat: Prediction, y: Tensor, n_bins: int = 10) -> Tuple[Tensor, Tensor, Tensor]:
    (y_hat, y_hat_label) = (y_hat.soft, y_hat.hard)
    y_hat = y_hat.max(-1)[0]
    corrects = (y_hat_label == y.squeeze())
    acc_binned = torch.zeros((n_bins,), device=y_hat.device)
    conf_binned = torch.zeros...
def make_plots(statistics_file):
    print('\n Make Plots')
    with open(statistics_file, 'r') as f:
        stats = json.load(f)
    output_folder = os.path.split(statistics_file)[0]
    FILETYPE = 'eps'
    numCols = len(stats)
    numClasses = 2
    (fig, axs) = plt.subplots(1, 2, sharex=True, figsize=(6.4, 3.0))
    ...
@patch('orion.data.pd.read_csv')
@patch('orion.data.os.path.exists')
def test_download_new(exists_mock, read_csv_mock):
    exists_mock.return_value = False
    returned = download('a_signal_name')
    assert returned == read_csv_mock.return_value
    expected_url = '...'
    read_csv_mock.assert_called_once_with(expected_url)
    e...
def write_csv(filename, results):
    with io.open(filename, 'w', encoding='utf8') as f:
        writer = csv.writer(f, delimiter=',')
        writer.writerows(results)
def _single_cell(unit_type, num_units, forget_bias, dropout, mode,
                 residual_connection=False, device_str=None, residual_fn=None):
    dropout = dropout if mode == tf.contrib.learn.ModeKeys.TRAIN else 0.0
    if unit_type == 'lstm':
        utils.print_out(' LSTM, forget_bias=%g' % forget_bias, new_line=False)
        ...
def discriminator(inputs, depth=64, is_training=True, reuse=None,
                  scope='Discriminator', fused_batch_norm=False):
    normalizer_fn = slim.batch_norm
    normalizer_fn_args = {'is_training': is_training, 'zero_debias_moving_mean': True, 'fused': fused_batch_norm}
    _validate_image_inputs(inputs)
    inp_shape = input...
class Grad(object):
    def __init__(self):
        pass

    def _force_list(self, x):
        if isinstance(x, list):
            return x
        elif hasattr(x, '__iter__'):
            return [o for o in x]
        else:
            return [x]

    def _connect_on_gradient_graph(self, grad_vars, f):
        vf_vb_ma...
def ref_hard_sigmoid_backward(x, dy, **kw):
    return np.array([(dy * 0.2) if (2.5 >= i >= -2.5) else 0 for i in np.nditer(x)])
def cost(output, target):
    cross_entropy = target * tf.log(output)
    cross_entropy = -tf.reduce_sum(cross_entropy, reduction_indices=2)
    mask = tf.sign(tf.reduce_max(tf.abs(target), reduction_indices=2))
    cross_entropy *= mask
    cross_entropy = tf.reduce_sum(cross_entropy, reduction_indices=1)
    cro...
class ArcFace(ArcMargin):
    def __init__(self, in_feats, out_feats, s=64.0, m=0.5) -> None:
        super().__init__(in_feats, out_feats, s=s, m2=m)
def extract_vgg(name):
    gen = models.vgg19(pretrained=True).features
    vgg = None
    configs = ('11', '12', '21', '22', '31', '32', '33', '34',
               '41', '42', '43', '44', '51', '52', '53', '54')
    sub_mean = MeanShift()

    def sub_vgg(config):
        sub_modules = [sub_mean]
        pool_idx = 0
        conv_idx...
class Select(object):
    def __init__(self, multiple=False):
        self.multiple = multiple

    def __call__(self, field, **kwargs):
        kwargs.setdefault('id', field.id)
        if self.multiple:
            kwargs['multiple'] = True
        if ('required' not in kwargs) and ('required' in getattr(field, 'flag...
class RandomWeakHopper(ModifiableRoboschoolHopper):
    def randomize_power(self):
        self.power = uniform_exclude_inner(self.np_random.uniform,
                                           self.EXTREME_LOWER_POWER, self.EXTREME_UPPER_POWER,
                                           self.RANDOM_LOWER_POWER, self.RANDOM_UPPER_POWER)

    def _reset(self, new=True):
        if new:
            self.ran...
class Box():
    def __init__(self, name, pose, size, space):
        self.name = name
        self.pose = np.array([float(j) for j in [i for i in pose.split(' ') if i != '']])
        self.size = np.array([float(j) for j in [i for i in size.split(' ') if i != '']])
        self.space = int(space)
        return
    ...
def rgb_to_hsv(rgb):
    rgb_0_1 = [d / 255.0 for d in rgb[:3]]
    hsv_0_1 = colorsys.rgb_to_hsv(*rgb_0_1)
    return tuple(d * r for (d, r) in zip(hsv_0_1, [360.0, 100.0, 100.0]))
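A small check of the conversion above; it needs only the standard-library colorsys import the function already assumes:

import colorsys

assert rgb_to_hsv((255, 0, 0)) == (0.0, 100.0, 100.0)   # pure red
h, s, v = rgb_to_hsv((0, 0, 128))                       # dark blue
assert round(h) == 240 and round(s) == 100 and round(v) == 50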
_module()
class Atom2Points(object):
    def __call__(self, item):
        id = eval(item['id'])
        transformed = {'pos': item['atoms'][['x', 'y', 'z']].to_numpy().astype(np.float32),
                       'features': np.array([one_of_k_encoding_unk(e, prot_atoms)
                                             for e in item['atoms']['element']]).astype(np.float32).transpose(1, 0),
                       ...
class OptplanContext():
    def __init__(self) -> None:
        self._optplan_node_map = {}

    def get_node_model(self, node_meta_type: str, node_type: str) -> Optional[models.Model]:
        if node_meta_type not in self._optplan_node_map:
            return None
        if node_type not in self._optplan_node_map[...
def unpack_file(filename, location, content_type=None):
    filename = os.path.realpath(filename)
    if ((content_type == 'application/zip') or filename.lower().endswith(ZIP_EXTENSIONS)
            or zipfile.is_zipfile(filename)):
        unzip_file(filename, location, flatten=not filename.endswith('.whl'))
    elif ((content_...
def residual(input, num_filters, name, is_train, reuse, norm, pad='REFLECT', bias=False):
    with tf.variable_scope(name, reuse=reuse):
        with tf.variable_scope('res1', reuse=reuse):
            out = conv2d(input, num_filters, 3, 1, reuse, pad, bias)
            out = _norm(out, is_train, norm)
            out ...
class GodVehicle(BaseVehicle):
    def control(self, target_course, target_vel):
        state = deepcopy(self._state)
        dt = self.config.simulation.dt
        meters_traveled = dt * state.velocity
        (course_xs, course_ys, course_yaw, _, _) = target_course
        course_res = self.config.vehicle.course_r...
def print_header_eps(s, xmin, ymin, xmax, ymax):
    s += '%!PS-Adobe-3.0 EPSF-3.0\n'
    s += ('%' + '%') + ('BoundingBox: %s %s %s %s \n' % (xmin, ymin, xmax, ymax))
    return s
def dfs(node: Node) -> Iterator[Node]:
    stack = [node]
    while len(stack) > 0:
        node = stack.pop()
        yield node
        for child in reversed(node.children):
            stack.append(child)
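A minimal driver for the iterative DFS above, assuming a hypothetical Node with a children list (define it before dfs so the annotations resolve):

from typing import Iterator, List

class Node:
    def __init__(self, value, children=None):
        self.value = value
        self.children: List['Node'] = children or []

# tree:  a
#       / \
#      b   c
root = Node('a', [Node('b'), Node('c')])
print([n.value for n in dfs(root)])  # ['a', 'b', 'c']: reversed() keeps left-to-right order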
class SphericalHarmonic(BuiltinFunction):
    def __init__(self):
        BuiltinFunction.__init__(self, 'spherical_harmonic', nargs=4,
                                 conversions=dict(maple='SphericalY', mathematica='SphericalHarmonicY',
                                                  maxima='spherical_harmonic', sympy='Ynm'))

    def _eval_(self, n, m, theta, phi, **kwargs):
        if ((n in ZZ...
def group_by_key(data, key):
    obj = defaultdict(list)
    for d in data:
        value = d[key]
        obj[value].append(d)
    return obj
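Usage sketch for group_by_key, with hypothetical rows; it needs only the defaultdict import the function assumes:

from collections import defaultdict

rows = [{'lang': 'py', 'name': 'a'}, {'lang': 'c', 'name': 'b'}, {'lang': 'py', 'name': 'c'}]
by_lang = group_by_key(rows, 'lang')
assert [d['name'] for d in by_lang['py']] == ['a', 'c']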
def create_dataset(dataset, config, min_scale=0.5):
    normalize = transforms.Normalize((0., 0.4578275, 0.), (0., 0., 0.))
    transform_train = transforms.Compose([
        transforms.RandomResizedCrop(config['image_size'], scale=(min_scale, 1.0),
                                     interpolation=InterpolationMode.BICUBIC),
        transforms.RandomHorizontalFlip(),
        Ra...
def _nested_remote_schema(testdir):
    testdir.makefile('.json', bar='{"bar": {"$ref": "spam.json#/spam"}}')
    testdir.makefile('.json', spam='{"spam": {"type": "integer"}}')
class GatherOperation(Function):
    @staticmethod
    def forward(ctx, features, idx):
        ctx.save_for_backward(idx, features)
        return _ext.gather_points(features, idx)

    @staticmethod
    def backward(ctx, grad_out):
        (idx, features) = ctx.saved_tensors
        N = features.size(2)
        grad_features = _ext.gather_points_grad(gr...
def test_fix_singleton_tags():
    TESTS = [
        (['O'], ['O']),
        (['B-PER'], ['S-PER']),
        (['B-PER', 'I-PER'], ['B-PER', 'E-PER']),
        (['B-PER', 'O', 'B-PER'], ['S-PER', 'O', 'S-PER']),
        (['B-PER', 'B-PER', 'I-PER'], ['S-PER', 'B-PER', 'E-PER']),
        (['B-PER', 'I-PER', 'O', 'B-PER'], ['B-PER', 'E-PER', 'O', 'S-PER']),
        (['B-PER', '...
class DobotSuctionCup(SuctionCup):
    def __init__(self, count: int = 0):
        super().__init__(count, 'Dobot_suctionCup')
class ArchiveReuseTestCaseChromosomeFactory(cf.ChromosomeFactory[tcc.TestCaseChromosome]):
    def __init__(self, delegate: cf.ChromosomeFactory[tcc.TestCaseChromosome], archive: arch.Archive):
        self._delegate = delegate
        self._archive = archive

    def get_chromosome(self) -> tcc.TestCaseChromosome:
        ...
def build_indexers():
    indexers = {}
    indexers['words'] = SingleIdTokenIndexer()
    return indexers
def numerical_approx(x, prec=None, digits=None, algorithm=None):
    if prec is None:
        from sage.arith.numerical_approx import digits_to_bits
        prec = digits_to_bits(digits)
    try:
        n = x.numerical_approx
    except AttributeError:
        from sage.arith.numerical_approx import numerical_approx...
def BullGraph():
    edge_list = [(0, 1), (0, 2), (1, 2), (1, 3), (2, 4)]
    pos_dict = {0: (0, 0), 1: (-1, 1), 2: (1, 1), 3: (-2, 2), 4: (2, 2)}
    return Graph(edge_list, pos=pos_dict, name='Bull graph')
def residual_success(attack_rate: SuccessRate, control_rate: SuccessRate) -> SuccessRate:
    residual = (attack_rate.value - control_rate.value) / (1.0 - control_rate.value)
    der_wrt_attack = 1 / abs(1 - control_rate.value)
    der_wrt_control = (attack_rate.value - 1) / ((1 - control_rate.value) ** 2)
    ...
def get_list(data_path, output_path):
    for split in os.listdir(data_path):
        if split == 'train':
            split_path = os.path.join(data_path, split)
            if not os.path.isdir(split_path):
                continue
            f_train = open(os.path.join(output_path, split + '_datalist'), 'w')
            ...
class Urban100(base.SRBase):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def get_path(self) -> str:
        return path.join(self.dpath, 'benchmark', 'urban100')
class KRRCNonSimplyLacedElement(KRRiggedConfigurationElement, RCNonSimplyLacedElement):
    def e(self, a):
        if a == self.parent()._cartan_type.special_node():
            try:
                ret = self.to_tensor_product_of_kirillov_reshetikhin_tableaux().e(a)
                if ret is None:
                    ...
class LazyDict():
    def __init__(self, **kwargs):
        self._lazy_dict = kwargs
        self._dict = {}

    def __getitem__(self, key):
        if key not in self._dict:
            self._dict[key] = self._lazy_dict[key]()
        return self._dict[key]

    def __setitem__(self, i, y):
        self.set(i, y)
        ...
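A small illustration of the laziness above: each factory passed at construction runs at most once, on first access (hypothetical values, exercising only the two methods shown in full):

calls = []
d = LazyDict(answer=lambda: calls.append('computed') or 42)
assert calls == []          # nothing computed yet
assert d['answer'] == 42    # factory runs on first access
assert d['answer'] == 42    # cached; factory is not called again
assert calls == ['computed']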
def simSetObjectPosition(objectHandle, relativeToObjectHandle, position):
    ret = lib.simSetObjectPosition(objectHandle, relativeToObjectHandle, position)
    _check_return(ret)
class SageElement(ExpectElement):
    def _rich_repr_(self, display_manager, **kwds):
        return None

    def _repr_option(self, option):
        return False

    def __getattr__(self, attrname):
        self._check_valid()
        return SageFunction(self, attrname)

    def _sage_(self):
        P = self.parent()
        ...
def _blob_to_node(producing_ops, shapes, name):
    assert name
    n = NodeDef()
    n.name = name
    inputs = producing_ops.get(name, [])
    if inputs:
        n.op = 'Blob'
    else:
        n.op = 'Placeholder'
    n.input.extend('%s:%d' % (op.name, i) for (op, i) in inputs)
    if inputs:
        device = in...
def mask_tokens(inputs, mask_token_id=103, special_tokens_mask=None):
    labels = inputs.clone()
    probability_matrix = torch.full(labels.shape, 0.15)
    if special_tokens_mask is None:
        special_tokens_mask = [tokenizer.get_special_tokens_mask(val, already_has_special_tokens=True)
                               for val in labels.tolist(...
def simGetFloatParameter(parameter):
    ret = lib.simGetFloatParameter(parameter)
    _check_return(ret)
    return ret
@pytest.fixture(name='ensemble_size', params=[2, 5])
def _ensemble_size_fixture(request: Any) -> int:
    return request.param
class Architect(object):
    def __init__(self, model, args):
        self.network_momentum = args.momentum
        self.network_weight_decay = args.weight_decay
        self.model = model
        self.lr_alpha = args.arch_learning_rate
        self.optimizer = torch.optim.Adam(self.model.arch_parameters(), lr=0, betas...
class KM_config(CPAlgorithm):
    def __init__(self, num_runs=10):
        self.num_runs = num_runs
        self.alpha = 0.5

    def detect(self, G):
        (A, nodelabel) = utils.to_adjacency_matrix(G)
        x = None
        Q = -np.inf
        for _i in range(self.num_runs):
            (cidsi, xi) = _label_swi...
class reciprocal_gen(rv_continuous):
    def _argcheck(self, a, b):
        return (a > 0) & (b > a)

    def _shape_info(self):
        ia = _ShapeInfo('a', False, (0, np.inf), (False, False))
        ib = _ShapeInfo('b', False, (0, np.inf), (False, False))
        return [ia, ib]

    def _fitstart(self, data):
        ...
def run_treebank(mode, paths, treebank, short_name, temp_output_file, command_args, extra_args):
    ner_dir = paths['NER_DATA_DIR']
    (language, dataset) = short_name.split('_')
    train_file = os.path.join(ner_dir, f'{treebank}.train.json')
    dev_file = os.path.join(ner_dir, f'{treebank}.dev.json')
    test_file...
def read_file(input_file):
    with open(input_file, 'rb') as fin:
        text = fin.read()
    text = text.decode('utf-8', errors='replace')
    with io.StringIO(text) as fin:
        lines = fin.readlines()
    return lines
class ReturnBasedRewardScaler(RewardScaler):
    return_max: Optional[float] = None
    return_min: Optional[float] = None
    multiplier: float = 1.0

    def fit_with_transition_picker(self, episodes: Sequence[EpisodeBase],
                                   transition_picker: TransitionPickerProtocol) -> None:
        assert not self.built
        ...
def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset', type=str, default='hopper-medium-v0')
    parser.add_argument('--seed', type=int, default=1)
    parser.add_argument('--gpu', type=int)
    args = parser.parse_args()
    (dataset, env) = d3rlpy.datasets.get_dataset(args.dat...
class Extractor():
    keepLinks = False
    keepSections = True
    HtmlFormatting = False
    toJson = False
    templatePrefix = ''

    def __init__(self, id, revid, urlbase, title, page):
        self.id = id
        self.revid = revid
        self.url = get_url(urlbase, id)
        self.title = title
        self....
def compute_confidence_interval(data):
    a = 1.0 * np.array(data)
    m = np.mean(a)
    std = np.std(a)
    pm = 1.96 * (std / np.sqrt(len(a)))
    return (m, pm)
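For example, the helper above returns the sample mean and the 95% normal-approximation half-width (1.96 standard errors, using the population std); assuming numpy is imported as np, as the function itself does:

import numpy as np

m, pm = compute_confidence_interval([1.0, 2.0, 3.0, 4.0])
assert m == 2.5
# std = sqrt(1.25), so pm = 1.96 * sqrt(1.25) / sqrt(4)
assert abs(pm - 1.96 * np.sqrt(1.25) / 2) < 1e-12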
def rand_contrast(x, param):
    ratio = param.contrast
    x_mean = x.mean(dim=[1, 2, 3], keepdim=True)
    set_seed_DiffAug(param)
    randc = torch.rand(x.size(0), 1, 1, 1, dtype=x.dtype, device=x.device)
    if param.Siamese:
        randc[:] = randc[0]
    x = ((x - x_mean) * (randc + ratio)) + x_mean
    return...
def accimage_loader(path: str) -> Any:
    import accimage
    try:
        return accimage.Image(path)
    except OSError:
        return pil_loader(path)
def generate_data(func, N, range_min=DOMAIN[0], range_max=DOMAIN[1]):
    x_dim = len(signature(func).parameters)
    x = ((range_max - range_min) * np.random.random([N, x_dim])) + range_min
    y = np.random.normal([[func(*x_i)] for x_i in x], NOISE_SD)
    return (x, y)
def adjust_learning_rate(optimizer, epoch, args):
    lr = args.lr * (0.1 ** (epoch // 30))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
def string_pos(args):
    params = functionParams(args, ('target', 'pos'))
    target = params.get('target', '')
    pos = int(params.get('pos', 1) or 1)
    if pos > 0:
        pos -= 1
    return target[pos]
def capture_tagged(state):
    next_pos = state.find(')')
    if next_pos < 1:
        raise IntentFormatError('Missing ending \')\' in annotated utterance "%s"' % state.input)
    else:
        tagged_text = state[:next_pos]
        state.add_tagged(tagged_text)
        state.move(next_pos)
        capture_text(st...
def _check_comments(comments):
    for comment in comments:
        for char in comment:
            if not (0 <= ord(char) < 128):
                raise ValueError('non-ASCII character in comment')
            if char == '\n':
                raise ValueError('embedded newline in comment')
_mock.Mocker(kw='mock')
def test_parse_results_ws(**kwargs):
    mock_file = open('tests/transfer/mocks/mock_parse_results_ws', 'rb')
    mock_body = mock_file.read()
    mock_file.close()
    mock_query = 'red basketball shoes'
    query_str = mock_query.replace(' ', '+')
    url = f'{WEBSHOP_URL}/search_results/{WEBS...
class PolynomialQuotientRing_coercion(DefaultConvertMap_unique):
    def is_injective(self):
        if (self.domain().modulus().change_ring(self.codomain().base_ring()) == self.codomain().modulus()
                and self.domain().modulus().leading_coefficient().is_unit()):
            if self.codomain().base_ring().coerce_map_fro...
def main(unused_argv):
    tf.logging.set_verbosity(tf.logging.INFO)
    config = model_deploy.DeploymentConfig(num_clones=FLAGS.num_clones, clone_on_cpu=FLAGS.clone_on_cpu,
                                           replica_id=FLAGS.task, num_replicas=FLAGS.num_replicas,
                                           num_ps_tasks=FLAGS.num_ps_tasks)
    assert ((FLAGS.train_batch_size % config.num_clones) ...
def big_endian_number(number):
    if number < 256:
        return chr(number)
    return big_endian_number(number >> 8) + chr(number & 255)
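A worked example of the recursion above; note it builds a str of code points (Python 2 style) rather than a bytes object:

assert big_endian_number(0x41) == 'A'                  # single byte
assert big_endian_number(0x0102) == '\x01\x02'         # high byte emitted first
assert big_endian_number(0x010203) == '\x01\x02\x03'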
def stencil_offset(A: dace.float64[2 * N], B: dace.float64[N]):
    tmp1 = np.ndarray(shape=[N], dtype=dace.float64)

    def m1(i: _[0:N]):
        in1 << A[2 * i]
        in2 << A[(2 * i) + 1]
        out1 >> tmp1[i]
        out1 = (in1 + in2) / float(2.0)

    def m2(i: _[0:(N - 2)]):
        in1 << tmp...
def _import_file(args, ifiles):
    if args.import_format == 'NNP':
        return NnpImporter(*ifiles, expand_network=not args.nnp_no_expand_network,
                          executor_index=args.nnp_import_executor_index).execute()
    elif args.import_format == 'ONNX':
        from .onnx import OnnxImporter
        return OnnxImporter(...
_to_bytes_io
def load_audio(fhandle: BinaryIO, sr=44100) -> Tuple[np.ndarray, float]:
    (audio, sr) = librosa.load(fhandle, sr=sr, mono=True)
    return (audio, sr)
def _make_sdfg(language: str, with_data: bool = False):
    lang = dtypes.Language.Python if language == 'Python' else dtypes.Language.CPP
    endl = '\n' if language == 'Python' else ';\n'
    sdfg = dace.SDFG('map_with_tasklets')
    sdfg.add_array('A', (N,), datatype)
    sdfg.add_array('B', (M,), datatype)
    ...
@pytest.fixture(scope='module')
def dataframe_two_columns_pandas():
    data_two_columns = [
        (1, [2, 0, 0, 0, 0], [19842, -1, -1, -1, -1]),
        (1, [2, 4, 0, 0, 0], [19842, 19844, -1, -1, -1]),
        (1, [2, 4, 3, 0, 0], [19842, 19844, 19843, -1, -1]),
        (1, [2, 4, 3, 5, 0], [19842, 19844, 19843, 19845, -1]),
        (1, [2,...
class Node():
    def __init__(self, operation, sentence, index, score, height):
        self.operation = operation
        self.sentence = sentence
        self.index = index
        self.score = score
        self.height = height
        self.left = None
        self.right = None
class EfficientFormerPreTrainedModel(metaclass=DummyObject):
    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        requires_backends(self, ['torch'])
def _create_ca(ca_path: Path, ca_url: str, password: str):
    import os
    pki_dir = ca_path / CA_PKI_DIR
    step_config_dir = ca_path / CA_STEP_CONFIG_DIR
    pki_dir.mkdir(parents=True, exist_ok=True)
    step_config_dir.mkdir(parents=True, exist_ok=True)
    with open(f'{pki_dir}/pass_file', 'w', encoding='ut...
def used_namespaces():
    return {namespace.__name__
            for namespace in chain(override_dict.keys(), TracedFunctions.traced_namespaces())
            if hasattr(namespace, '__name__') and inspect.ismodule(namespace)}
class TaggingExample(task.Example):
    def __init__(self, eid, task_name, words, tags, is_token_level, label_mapping):
        super(TaggingExample, self).__init__(task_name)
        self.eid = eid
        self.words = words
        if is_token_level:
            labels = tags
        else:
            span_labels = t...
class WarmupLR(torch.optim.lr_scheduler._LRScheduler):
    def __init__(self, optimizer: torch.optim.Optimizer, warmup_factor: float = 0.1,
                 warmup_iters: int = 1000, warmup_method: str = 'linear', last_epoch: int = -1):
        self.warmup_factor = warmup_factor
        self.warmup_iters = warmup_iters
        self.warmup_m...
def convert_for_model(in_dir_path, out_dir_path, data_type, end_id, pad_id):
    data_path = os.path.join(in_dir_path, data_type + '_data_file.pkl')
    out_filename = os.path.join(out_dir_path, data_type + '.pkl')
    data = pkl.load(open(data_path, 'rb'))  # pickle files need binary mode under Python 3
    pickle_sentences = []
    for line in data:
        tex...
def train_epoch(models, criterion, optimizers, dataloaders, epoch, epoch_loss):
    models['backbone'].train()
    if AUXILIARY == 'TOD':
        models['ema'].train()
    global iters
    for data in dataloaders['train']:
        inputs = data[0].cuda()
        labels = data[1].cuda()
        iters += 1
        opti...
class Identity(nn.Module):
    def __init__(self, params=None):
        super(Identity, self).__init__()
        self.params = nn.ParameterList(params)

    def forward(self, x: (Variable or list)):
        return x
_params(name='cpu')
class CPUCodeGen(TargetCodeGenerator):
    title = 'CPU'
    target_name = 'cpu'
    language = 'cpp'

    def __init__(self, frame_codegen, sdfg):
        self._frame = frame_codegen
        self._dispatcher: TargetDispatcher = frame_codegen.dispatcher
        self.calling_codegen = self
        dis...
_utils.test(require=ti.extension.mesh)
def test_mesh_local():
    mesh_builder = ti.lang.mesh._TetMesh()
    mesh_builder.verts.place({'a': ti.i32})
    model = mesh_builder.build(ti.Mesh.load_meta(model_file_path))
    ext_a = ti.field(ti.i32, shape=len(model.verts))

    def foo(cache: ti.template()):
        if ti.st...
def bn_dense_layer(input_tensor, hn, bias, bias_start=0.0, scope=None, activation='relu',
                   enable_bn=False, wd=0.0, keep_prob=1.0, is_train=None, dup_num=1, merge_var=False):
    assert (len(input_tensor.get_shape().as_list()) == 3) or (len(input_tensor.get_shape().as_list()) == 2)
    is_seq_data = (len(input_tensor....
def test_fpn_ocr():
    in_s1 = torch.rand(1, 128, 32, 256)
    in_s2 = torch.rand(1, 256, 16, 128)
    in_s3 = torch.rand(1, 512, 8, 64)
    in_s4 = torch.rand(1, 512, 4, 32)
    fpn_ocr = FPNOCR(in_channels=[128, 256, 512, 512], out_channels=256)
    fpn_ocr.init_weights()
    fpn_ocr.train()
    out_neck = fpn_ocr((...
class HybridLoss(_Loss):
    def __init__(self, segloss_l=constants.BCEAndSoftDiceLoss, segloss_pl=constants.BCEAndSoftDiceLoss,
                 smooth=1.0, elbon=True, init_t=1.0, max_t=10.0, mulcoef=1.01, subtask=constants.SUBCLSEG,
                 scale_cl=1.0, scale_seg=1.0, scale_seg_u=1.0, scale_seg_u_end=0.001,
                 scale_seg_u_sigma=100.0, scale_s...
class TestCameras(TestCore):
    def setUp(self):
        super().setUp()
        self.camera = Camera('DefaultCamera')
        self.dummy = Dummy('dummy')

    def test_create(self):
        with self.assertRaises(NotImplementedError):
            _ = Camera.create()

    def test_get_set_position(self):
        positio...