code
stringlengths
101
5.91M
def construct_xfg_single_raw_folder(params, num_folder): (folder_counter, folder_raw) = params print('\n------ Processing raw folder', folder_raw, '(', (folder_counter + 1), '/', num_folder, ')') folder_preprocessed = (folder_raw + '_preprocessed') data_preprocessing_done_filename = os.path.join(folder_...
class Env(): def __init__(self, api): self.api = api def __enter__(self): self.ptr = ctypes.c_void_p() self.api.CreateEnv('ORT_LOGGING_LEVEL_WARNING', 'ort_api', ctypes.byref(self.ptr)) return self def __exit__(self, exc_type, exc_val, exc_tb): self.api.ReleaseEnv(sel...
def metrics(sv, rules, model, enc_dim, dim, seed, ckpt='_last'): name = get_name(sv, rules, model, enc_dim, dim, seed) print(name) if ('Monolithic' in name): return if (not os.path.exists(name)): print(f'Model not found') return else: if (not os.path.exists(f'{name}/l...
class GhostNet(nn.Module): def __init__(self, width_mult=1.0, out_stages=(4, 6, 9), act='ReLU', pretrain=True): super(GhostNet, self).__init__() self.width_mult = width_mult self.out_stages = out_stages self.cfgs = [[[3, 16, 16, 0, 1]], [[3, 48, 24, 0, 2]], [[3, 72, 24, 0, 1]], [[5, ...
class Vietnamese(MLQALanguage):
    """MLQA language handler for Vietnamese.

    Registers a regex matching common Vietnamese function words with the
    base class and tokenizes on whitespace only.
    """

    # Common Vietnamese articles/classifiers, matched as whole words.
    _ARTICLE_PATTERN = re.compile(r'\b(cua|la|cai|chiec|nhung)\b')

    def __init__(self):
        super().__init__(self._ARTICLE_PATTERN)

    def tokenize(self, text: str):
        """Split *text* on whitespace; no language-specific segmentation."""
        return whitespace_tokenize(text)
def validate(val_loader, dataset, net, criterion, optim, scheduler, curr_epoch, writer, curr_iter, save_pth=True): net.eval() val_loss = AverageMeter() iou_acc = 0 error_acc = 0 dump_images = [] for (val_idx, data) in enumerate(val_loader): (inputs, gt_image, img_names, _) = data ...
class Encoder3d(nn.Module): def __init__(self, backbone, tw, pixel_mean, pixel_std): super(Encoder3d, self).__init__() self.conv1_p = nn.Conv3d(1, 64, kernel_size=7, stride=(1, 2, 2), padding=(3, 3, 3), bias=False) resnet = get_backbone_fn(backbone.NAME)(sample_size=112, sample_duration=tw) ...
def head_match(anaphor, antecedent): if (anaphor.attributes['type'] in ['PRO', 'DEM', 'VRB']): return False elif (antecedent.attributes['type'] in ['PRO', 'DEM', 'VRB']): return False elif ((anaphor.attributes['semantic_class'] == 'NUMERIC') or (antecedent.attributes['semantic_class'] == 'NU...
def compare_ignoring_whitespace(predicted, expected):
    """Assert that two strings match after whitespace normalization.

    Normalization: strip leading/trailing whitespace, collapse runs of
    spaces/tabs to one space, and convert CRLF line endings to LF.
    Raises AssertionError when the normalized strings differ.
    """
    def canonical(text):
        # Collapse horizontal whitespace first; CRLF survives because
        # the character class matches only space and tab.
        text = re.sub('[ \t]+', ' ', text.strip())
        return re.sub('\r\n', '\n', text)

    assert canonical(predicted) == canonical(expected)
def get_activations_quantizer_for_node(node: BaseNode) -> BasePyTorchInferableQuantizer: if (node.final_activation_quantization_cfg is None): Logger.critical(f'Can not set quantizer for a node with no final activation quantization configuration') node_act_qc = node.final_activation_quantization_cfg ...
class KittiDataset(torch.utils.data.Dataset): CLASSES = ('__background__ ', 'car') def __init__(self, data_dir, split, use_difficult=False, transforms=None): self.root = data_dir self.image_set = split self.keep_difficult = use_difficult self.transforms = transforms self....
def _setup_and_check() -> (tuple[(TestCaseExecutor, ModuleTestCluster, ConstantProvider)] | None): if (not _setup_path()): return None (wrapped_constant_provider, dynamic_constant_provider) = _setup_constant_seeding() tracer = _setup_import_hook(set(config.configuration.statistics_output.coverage_me...
def test_jottings():
    """Smoke test: reading workspace variables from the parabola MAT file
    completes without raising."""
    mat_path = os.path.join(test_data_path, 'parabola.mat')
    ws_vars = read_workspace_vars(mat_path)
class GraphPaths_t(Parent, GraphPaths_common): def __init__(self, g, target): self.graph = g self.target = target Parent.__init__(self, category=FiniteEnumeratedSets()) def __repr__(self): return ('Paths in %s ending at %s' % (repr(self.graph), self.target)) def list(self): ...
class CTransClassTests(unittest.TestCase): def setUp(self): self.t2k = dt.datetime(2000, 1, 1) self.CTrans2000 = ctrans.CTrans(self.t2k) def tearDown(self): del self.CTrans2000 def test_initRaises(self): self.assertRaises(TypeError, ctrans.CTrans, 'Incorrect input') def t...
def get_forward_walk_ops(seed_ops, inclusive=True, within_ops=None, within_ops_fn=None, stop_at_ts=(), control_outputs=None): (_, control_outputs) = check_cios(False, control_outputs) if (not util.is_iterable(seed_ops)): seed_ops = [seed_ops] if (not seed_ops): return [] if isinstance(se...
def convert_to_relative_path(whole_path: str, base_path: str) -> str:
    """Return *whole_path* expressed relative to *base_path*.

    Args:
        whole_path: Absolute (or longer) path that starts with *base_path*.
        base_path: Prefix to strip, without a trailing separator.

    Returns:
        The portion of *whole_path* after *base_path* and its separator.

    Raises:
        RuntimeError: If *whole_path* does not start with *base_path*.
    """
    # Use a prefix check, not substring containment: the slice below assumes
    # base_path sits at the very start of whole_path, so a mid-string match
    # (e.g. base '/data' inside '/tmp/data/x') would silently return garbage.
    if not whole_path.startswith(base_path):
        raise RuntimeError((base_path + ' is not in ') + whole_path)
    # +1 skips the path separator that follows the base prefix.
    return whole_path[len(base_path) + 1:]
class Parameter(torch.Tensor): def __new__(cls, data=None, requires_grad=True): if (data is None): data = torch.tensor([]) return torch.Tensor._make_subclass(cls, data, requires_grad) def __deepcopy__(self, memo): if (id(self) in memo): return memo[id(self)] ...
def checkCharType(var_list):
    """Classify each code point in *var_list*.

    Returns a parallel list with 1 for an ASCII space (32), 0 for a
    Hangul code point (per isHangul), and -1 for anything else.
    """
    def classify(code_point):
        if code_point == 32:  # ASCII space
            return 1
        if isHangul(code_point):
            return 0
        return -1

    return [classify(cp) for cp in var_list]
def write_position_log(**kwargs):
    """Extract the four Alice/Bob position fields from *kwargs*, format
    them as a POSITION-type log entry, and persist it via write_log."""
    position_keys = [
        ALICE_START_POSITION,
        ALICE_END_POSITION,
        BOB_START_POSITION,
        BOB_END_POSITION,
    ]
    entry = _format_custom_logs(keys=position_keys, raw_log=kwargs, _type=POSITION)
    write_log(entry)
class CategoricalDataFrameField(BaseDataFrameField): CATEGORICAL_MAPPING_KEY = 'categorical_mapping' ORIGINAL_ATTR_KEY = 'original_key' def __init__(self, registry_key: str, attr_key: Optional[str], field_type: Literal[('obs', 'var')]=None) -> None: self.is_default = (attr_key is None) self....
def __get_error_factor(k, confidence):
    """Student-t error factor for a k-sample estimate at the given
    two-sided confidence level, scaled by 1/sqrt(k - 1)."""
    one_sided_level = __two_to_one_sided_confidence_level(confidence)
    dof = k - 1  # degrees of freedom
    return t.ppf(one_sided_level, dof) / sqrt(dof)
_end_docstrings(PIPELINE_INIT_ARGS) class TranslationPipeline(Text2TextGenerationPipeline): return_name = 'translation' def check_inputs(self, input_length: int, min_length: int, max_length: int): if (input_length > (0.9 * max_length)): logger.warning(f"Your input_length: {input_length} is b...
class VarianceScaling(ParamInit): scale = 1.0 mode = 'fan_in' distribution = 'truncated_normal' dtype: str def __init__(self, scale: float=None, mode: str=None, distribution: str=None, dtype: str=None): if (scale is not None): self.scale = scale if (mode is not None): ...
def process_packages(args): resources = json.load(open(os.path.join(args.output_dir, 'resources.json'))) for lang in resources: if (lang == 'url'): continue if ('alias' in resources[lang]): continue if all(((k in ('backward_charlm', 'forward_charlm', 'pretrain', '...
def convert_slow_tokenizer(transformer_tokenizer) -> Tokenizer: tokenizer_class_name = transformer_tokenizer.__class__.__name__ if (tokenizer_class_name not in SLOW_TO_FAST_CONVERTERS): raise ValueError(f'An instance of tokenizer class {tokenizer_class_name} cannot be converted in a Fast tokenizer insta...
_utils.test(debug=True) def test_function_keyword_args(): def foo(a, b, c=3): assert (a == 1) assert (b == 2) assert (c == 3) def bar(a, b, c=3): assert (a == 1) assert (b == 2) assert (c == 4) def baz(): foo(1, b=2) bar(b=2, a=1, c=4) baz(...
def train_one_epoch(model, optimizer, data_loader, device, epoch, print_freq): model.train() metric_logger = utils.MetricLogger(delimiter=' ') metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) header = 'Epoch: [{}]'.format(epoch) lr_scheduler = None if (epoch ...
def test_files_reousrce():
    # NOTE(review): the url= literal appears scrubbed/garbled — the original
    # URL seems to have been stripped, leaving an unbalanced string that makes
    # this line invalid Python as written. Recover the URL before relying on
    # this test. (Function-name typo "reousrce" kept: renaming changes the API.)
    resource = files_resources.FilesResource(url=' download_path=pathlib.Path('foo', 'bar.zip'), file_name='bar.test', data_dir='foo')
    # file_path should combine data_dir with file_name, not the download name.
    assert (resource.file_path == pathlib.Path('foo', 'bar.test'))
class LossRecorder(): def __init__(self, writer: SummaryWriter, base=0): self.losses = {} self.writer = writer self.base = base def report_scalar(self, group_name, name, iteration=None, value=0.0): name = ((group_name + '/') + name) if isinstance(value, torch.Tensor): ...
def removemax(array, size): assert (size > 0) if (size == 1): return (array[0], (size - 1)) elif (size == 2): return (array[1], (size - 1)) else: i = (1 if (array[1] > array[2]) else 2) elem = array[i] array[i] = array[(size - 1)] trickledown(array, i, (si...
def test_typetracer():
    """ak.almost_equal must refuse typetracer-backed (shape-only) arrays."""
    arr = ak.Array([[[1, 2, 3]], [[5, 4]]], backend='typetracer')
    doubled = 2 * arr
    with pytest.raises(NotImplementedError):
        ak.almost_equal(arr, doubled)
def map(wrapper): model = wrapper cfg = get_cfg() (test_loader, num_query) = build_reid_test_loader(cfg, 'Market1501', T.Compose([])) feats = [] pids = [] camids = [] for batch in test_loader: for image_path in batch['img_paths']: t = torch.Tensor(np.array([model.infer(cv...
def se_resnet50_fc512(num_classes, loss='softmax', pretrained=True, **kwargs): model = SENet(num_classes=num_classes, loss=loss, block=SEResNetBottleneck, layers=[3, 4, 6, 3], groups=1, reduction=16, dropout_p=None, inplanes=64, input_3x3=False, downsample_kernel_size=1, downsample_padding=0, last_stride=1, fc_dims...
class MLP(nn.Module):
    """Two-layer projection head: Linear -> BatchNorm1d -> ReLU -> Linear.

    Maps `dim`-dimensional inputs to `projection_size` outputs through a
    `hidden_size`-wide hidden layer.
    """

    def __init__(self, dim, projection_size, hidden_size=4096):
        super().__init__()
        layers = [
            nn.Linear(dim, hidden_size),
            nn.BatchNorm1d(hidden_size),
            nn.ReLU(inplace=True),
            nn.Linear(hidden_size, projection_size),
        ]
        self.net = nn.Sequential(*layers)

    def forward(self, x):
        """Apply the projection head to a (batch, dim) tensor."""
        return self.net(x)
def skip_if_win32():
    """Return a skip decorator that disables the test on Windows.

    Thin wrapper over sandcastle_skip_if keyed on sys.platform.
    """
    # Fixed typo in the skip message ("supportted" -> "supported").
    return sandcastle_skip_if(
        sys.platform == 'win32',
        'This unit test case is not supported on Windows platform',
    )
def _format(val: Any, output_format: str='standard', errors: str='coarse') -> Any: val = str(val) result: Any = [] if (val in NULL_VALUES): return [np.nan] if (not validate_cy_vat(val)): if (errors == 'raise'): raise ValueError(f'Unable to parse value {val}') error_re...
def _Subscript(t, symbols, inferred_symbols): value_type = _dispatch(t.value, symbols, inferred_symbols) slice_type = _dispatch(t.slice, symbols, inferred_symbols) if isinstance(slice_type, dtypes.pointer): raise SyntaxError('Invalid syntax (pointer given as slice)') if isinstance(t.slice, ast.S...
class QueryAndGroup(nn.Module): def __init__(self, radius: float, nsample: int, use_xyz: bool=True): super().__init__() (self.radius, self.nsample, self.use_xyz) = (radius, nsample, use_xyz) def forward(self, xyz: torch.Tensor, new_xyz: torch.Tensor, features: torch.Tensor=None, fps_idx: torch.I...
def __getattr__(name):
    """Module-level attribute hook: route lookups through the SciPy
    sub-module deprecation shim for the private `_extract` module."""
    return _sub_module_deprecation(
        sub_package='sparse',
        module='extract',
        private_modules=['_extract'],
        all=__all__,
        attribute=name,
    )
def pairwise_distance_mse(U, V, D, reg=1):
    """MSE between target distances D and the pairwise distances of U and V,
    plus an L2 penalty on both embeddings weighted by *reg*.

    Returns a scalar tensor.
    """
    predicted = torch.cdist(U, V)
    data_term = torch.norm(D - predicted) ** 2 / D.numel()
    penalty = torch.norm(U) ** 2 / U.numel() + torch.norm(V) ** 2 / V.numel()
    return data_term + reg * penalty
def train_FCNN_model(nr_neurons, input_size, nr_epochs, learing_rate, batch_size, train_size, val_size, u_range, ti_range, yaw_range, model_name, type='power', device='cpu', nr_workers=0, floris_path='.'): model_input_size = (input_size + 2) model = FCNN(model_input_size, nr_neurons, 1).to(device) (x_train,...
def get_clustering(cluster_dict):
    """Invert a node -> cluster-label mapping into a list of clusters.

    Args:
        cluster_dict: Mapping from node to its cluster label.

    Returns:
        List of clusters (lists of nodes), ordered by first appearance of
        each label; members keep the mapping's iteration order — identical
        to the original list.index() implementation but O(n) instead of
        O(n^2).
    """
    clusters = {}  # label -> members; dicts preserve insertion order (3.7+)
    for node, label in cluster_dict.items():
        clusters.setdefault(label, []).append(node)
    return list(clusters.values())
def _tsumo(state: State): c_p = state.current_player score = Yaku.score(state._hand[c_p], state._melds[c_p], state._n_meld[c_p], state._target, state._riichi[c_p], is_ron=FALSE, dora=_dora_array(state, state._riichi[c_p])) s1 = (score + ((- score) % 100)) s2 = ((score * 2) + ((- (score * 2)) % 100)) ...
def main(): args = parser.parse_args() if (not os.path.exists(args.ckpt_path)): os.mkdir(args.ckpt_path) if (args.seed is not None): random.seed(args.seed) torch.manual_seed(args.seed) cudnn.deterministic = True warnings.warn('You have chosen to seed training. This wi...
def pad(tensor, num=1):
    """Zero-pad the two middle (spatial) dims of a rank-4 tensor by *num*
    on each side; batch and channel dims are untouched."""
    paddings = [[0, 0], [num, num], [num, num], [0, 0]]
    return tf.pad(tensor, paddings, 'CONSTANT')
def baz(a, b):
    """Add two values, honoring the __torch_function__ override protocol.

    Args:
        a, b: Tensors or tensor-like objects implementing __torch_function__.

    Returns:
        a + b, or the result of the override handler when one applies.
    """
    # Dispatch only when (either argument is not a plain Tensor) AND an
    # override is actually registered. The original parenthesization made any
    # non-Tensor `a` dispatch unconditionally, bypassing has_torch_function
    # and crashing on plain Python numbers.
    if (type(a) is not Tensor or type(b) is not Tensor) and has_torch_function((a, b)):
        return handle_torch_function(baz, (a, b), a, b)
    return a + b
def nice_print(details: Dict, level: int=0) -> List: lines = [] LEVEL_OFFSET = '\t' KEY_PADDING = 20 for k in sorted(details): key = (f'* {k}:' if (level == 0) else f'- {k}:') if isinstance(details[k], dict): lines += [((level * LEVEL_OFFSET) + key)] lines += nice...
def make_index_list(maxNumImages, numImageList):
    """Build a flat 0/1 validity mask of length len(numImageList)*maxNumImages.

    Slot k's segment of maxNumImages entries has its first numImageList[k]
    positions set to 1, the rest left 0.
    """
    index = np.zeros(len(numImageList) * maxNumImages, np.int32)
    for k, num_images in enumerate(numImageList):
        start = maxNumImages * k
        index[start:start + num_images] = 1
    return index
class CDCorefScorer(nn.Module): def __init__(self, word_embeds, word_to_ix, vocab_size, char_embedding, char_to_ix, char_rep_size, dims, use_mult, use_diff, feature_size): super(CDCorefScorer, self).__init__() self.embed = nn.Embedding(vocab_size, word_embeds.shape[1]) self.embed.weight.data...
def load_sts_benchmark(data_dir: str) -> Dict[(str, Dict[(str, List[Tuple[(Tuple[(str, str)], float)]])])]: return {'sts12': load_sts12(os.path.join(data_dir, 'STS', 'STS12-en-test')), 'sts13': load_sts13(os.path.join(data_dir, 'STS', 'STS13-en-test')), 'sts14': load_sts14(os.path.join(data_dir, 'STS', 'STS14-en-te...
class Jasper10x5Config(): def __init__(self, num_classes: int, num_blocks: int, num_sub_blocks: int) -> None: super(Jasper10x5Config, self).__init__() self.num_blocks = num_blocks self.num_sub_blocks = num_sub_blocks self.preprocess_block = {'in_channels': 80, 'out_channels': 256, 'k...
def read_json(json_path, min_lane_height=20): with open(json_path, 'r') as jf: label_content = json.load(jf) _filter_lanes_by_size(label_content, min_height=min_lane_height) _filter_few_markers(label_content, min_markers=2) _fix_lane_names(label_content) content = {'projection_matrix': label...
def merge_a_into_b(a, b): if (not isinstance(a, edict)): return for (k, v) in a.items(): if (k not in b): raise KeyError(('%s is not a valid config key' % k)) old_type = type(b[k]) if (old_type is not type(v)): if isinstance(b[k], np.ndarray): ...
def use_elite_goals(out_directory: str, parameters: Dict[(str, Union[(int, float, str, bool)])], loaded_policies: List[policies.RvS], attribute_dicts: List[Dict[(str, Union[(int, float, str)])]], env: offline_env.OfflineEnv, trajectory_samples: int=200, wandb_run: Optional[Run]=None) -> None: goals = step.find_elit...
class ScaleExp(nn.Module):
    """Learnable exponential scaling: forward(x) = exp(scale * x).

    `scale` is a single-element trainable parameter initialized to
    *init_value*.
    """

    def __init__(self, init_value=1.0):
        super(ScaleExp, self).__init__()
        initial = torch.tensor([init_value], dtype=torch.float32)
        self.scale = nn.Parameter(initial)

    def forward(self, x):
        # Multiplication is commutative, so scale * x == x * scale.
        return torch.exp(self.scale * x)
_properties class Array(Data): allow_conflicts = Property(dtype=bool, default=False, desc='If enabled, allows more than one memlet to write to the same memory location without conflict resolution.') strides = ShapeProperty(desc='For each dimension, the number of elements to skip in order to obtain the next elem...
def test_scnet_aug_test():
    """Aug-test on the SCNet R50-FPN config yields two result types for the
    first image, each with entries for 80 COCO classes."""
    aug_result = model_aug_test_template('configs/scnet/scnet_r50_fpn_1x_coco.py')
    first_image = aug_result[0]
    assert len(first_image) == 2
    for per_type in first_image:
        assert len(per_type) == 80
def get_model(args): print_rank_0('building BERT model ...') model = BertModel(args) if (mpu.get_data_parallel_rank() == 0): print(' > number of parameters on model parallel rank {}: {}'.format(mpu.get_model_parallel_rank(), sum([p.nelement() for p in model.parameters()])), flush=True) model.cud...
def elementwise_linear(model, *args, **kwargs):
    """Forward to _elementwise_linear using the model's own
    ElementwiseLinear net operator; all other arguments pass through."""
    op = model.net.ElementwiseLinear
    return _elementwise_linear(model, op, *args, **kwargs)
def context_fusion_layers(rep_tensor, rep_mask, method, activation_function, scope=None, wd=0.0, is_train=None, keep_prob=1.0, **kwargs): method_name_list = ['lstm', 'gru', 'sru', 'sru_normal', 'cnn', 'multi_head', 'multi_head_git', 'disa', 'block'] (bs, sl, vec) = (tf.shape(rep_tensor)[0], tf.shape(rep_tensor)...
_quantizer(quantization_target=QuantizationTarget.Weights, quantization_method=[QuantizationMethod.UNIFORM], identifier=TrainingMethod.LSQ) class LSQUniformWeightQATQuantizer(BasePytorchQATTrainableQuantizer): def __init__(self, quantization_config: TrainableQuantizerWeightsConfig): super().__init__(quantiz...
class MEGrid(containers.Grid): def __init__(self, bin_sizes, bin_bounds, **kwargs): max_items_per_bin = (int(200) if np.all((np.array(bin_sizes) == 1)) else 1) super(MEGrid, self).__init__(shape=bin_sizes, max_items_per_bin=max_items_per_bin, features_domain=bin_bounds, fitness_domain=(((- np.inf), ...
def test_UnmaskedArray_NumpyArray():
    """drop_none over an UnmaskedArray (no missing values) is the identity."""
    values = np.array([0.0, 1.1, 2.2, 3.3])
    layout = ak.contents.unmaskedarray.UnmaskedArray(
        ak.contents.numpyarray.NumpyArray(values)
    )
    assert to_list(ak.drop_none(layout)) == [0.0, 1.1, 2.2, 3.3]
def test_countvectorizer_stop_words(): cv = CountVectorizer() cv.set_params(stop_words='english') assert (cv.get_stop_words() == ENGLISH_STOP_WORDS) cv.set_params(stop_words='_bad_str_stop_') with pytest.raises(ValueError): cv.get_stop_words() cv.set_params(stop_words='_bad_unicode_stop_...
def init_clusterings(args, model_classes): clusterings = {} for (model_name, model_class) in model_classes.items(): ds = model_class.output_dims if isinstance(ds, int): clusterings[model_name] = {'model': KMeans(args, ds, args.clustering.ncentroids)} else: cluster...
def cramers_V_calc(phi_square, classes):
    """Cramér's V association measure: sqrt(phi² / (|classes| - 1)).

    Args:
        phi_square: Phi-square statistic (non-negative).
        classes: Sized collection of class labels.

    Returns:
        The float value, or the string 'None' when undefined (kept as a
        string for backward compatibility with callers comparing == 'None').
    """
    try:
        return math.sqrt(phi_square / (len(classes) - 1))
    except (ZeroDivisionError, ValueError, TypeError):
        # Narrowed from bare `except Exception`: single class -> division by
        # zero; negative phi_square -> sqrt ValueError; un-sized classes ->
        # TypeError. Anything else now propagates instead of being hidden.
        return 'None'
def open_tsv(fname, folder):
    """Load a caption/url TSV into a DataFrame and tag every row with
    *folder* in a new 'folder' column."""
    print('Opening %s Data File...' % fname)
    frame = pd.read_csv(fname, sep='\t', names=['caption', 'url'])
    frame['folder'] = folder
    print('Processing', len(frame), ' Images:')
    return frame
('/process', methods=['POST']) def predict(): user_input = request.json if (('text' not in user_input) or ('cid' not in user_input)): return json.dumps({'action': 'ERROR'}) cid = user_input['cid'] try: ctx = dmgr.get_or_create_ctx(cid, orchestrator.policy_layer.state_manager.entity_manag...
class Camera(): def __init__(self): check_ggui_availability() self.ptr = _ti_core.PyCamera() self.position(0.0, 0.0, 0.0) self.lookat(0.0, 0.0, 1.0) self.up(0.0, 1.0, 0.0) self.last_mouse_x = None self.last_mouse_y = None self.last_time = None def ...
class MAMLTrajectoryBatch(collections.namedtuple('MAMLTrajectoryBatch', ['observations', 'actions', 'rewards', 'valids', 'baselines'])):
def accept(f):
    """Adapter that wraps *f* in a test method with a fixed, named signature.

    The wrapper exposes the five physical parameters (defaulting to the
    `not_set` sentinel) so a test framework can discover them, and forwards
    everything to *f* as keyword arguments.
    """
    def test_can_derive_other_vars_from_one_calculated(
            self, A=not_set, T_s=not_set, R_s=not_set, a=not_set, epsilon=not_set):
        return f(self=self, A=A, T_s=T_s, R_s=R_s, a=a, epsilon=epsilon)

    return test_can_derive_other_vars_from_one_calculated
def _check_puttable(state: EnvironmentState, src_node: GraphNode, dest_node: GraphNode, relation: Relation, info: ExecutionInfo): hand_rel = _find_holding_hand(state, src_node) if (hand_rel is None): char_node = _get_character_node(state) info.error('{} is not holding {}', char_node, src_node) ...
def _cat(raw, inputs, dimension=0): x = raw(inputs, dimension) bottom_blobs = [] for input in inputs: bottom_blobs.append(log.blobs(input)) layer_name = log.add_layer(name='cat') top_blobs = log.add_blobs([x], name='cat_blob') layer = caffe_net.Layer_param(name=layer_name, type='Concat',...
def load_checkpoint(fpath): if (fpath is None): raise ValueError('File path is None') fpath = osp.abspath(osp.expanduser(fpath)) if (not osp.exists(fpath)): raise FileNotFoundError('File is not found at "{}"'.format(fpath)) map_location = (None if torch.cuda.is_available() else 'cpu') ...
def safe_str_cmp(a, b): if isinstance(a, text_type): a = a.encode('utf-8') if isinstance(b, text_type): b = b.encode('utf-8') if (_builtin_safe_str_cmp is not None): return _builtin_safe_str_cmp(a, b) if (len(a) != len(b)): return False rv = 0 if PY2: for ...
def parse_readme_frontmatter(dirname): readme_filename = os.path.join(dirname, 'readme.md') with open(readme_filename) as f: lines = [line.strip() for line in f.readlines()] top = lines.index('---') bottom = lines[(top + 1):].index('---') frontmatter = yaml.load('\n'.join(lines[(top + 1):bot...
# NOTE(review): this decorator appears to have lost its "@" (and possibly a
# name prefix) during extraction — as written it is a bare call statement,
# not a decoration. Kept byte-identical pending confirmation against the
# original file.
_utils.test(arch=get_host_arch_list())
def _test_ndrange_for_mismatch2():
    """Unpacking 3 loop variables from a 2-D ti.ndrange must be rejected."""
    def func():
        # Deliberate mismatch: 3 targets vs ndrange(3, 4), which yields 2.
        for (i, j, k) in ti.ndrange(3, 4):
            print(i, j, k)

    with pytest.raises(ti.TaichiCompilationError):
        func()
def sigmoid_focal_loss_cpu(logits, targets, gamma, alpha): num_classes = logits.shape[1] dtype = targets.dtype device = targets.device class_range = torch.arange(1, (num_classes + 1), dtype=dtype, device=device).unsqueeze(0) t = targets.unsqueeze(1) p = torch.sigmoid(logits) term1 = (((1 - p...
def _variable_on_cpu(name, shape, initializer): key = (tf.get_variable_scope().name, name) if (key in shared_variables): return shared_variables[key] dtype = (tf.float16 if FLAGS.use_fp16 else tf.float32) var = tf.get_variable(name, shape, initializer=initializer, dtype=dtype) shared_variabl...
class MGRandomEnv(object): def __init__(self, args, reward=None): self.num_agents = args.num_agents self.agents = [Agent(args.history_length) for i in range(self.num_agents)] self.current_time = 0 self.coop_num = 0 self.defect_num = 0 self.coopdefect_num = 0 s...
class Tokenizer(): def __init__(self, vocab: Vocab, split_fn: Callable[([str], List[str])], pad_fn: Callable[([List[int]], List[int])]=None) -> None: self._vocab = vocab self._split = split_fn self._pad = pad_fn def split(self, string: str) -> List[str]: list_of_tokens = self._sp...
def parse_iaga(lines, iagacode=None): from dateutil.parser import parse fmt = lines.pop(0).split() if ((fmt[0] != 'Format') or (fmt[1] != 'IAGA-2002')): raise Exception('Data is not in IAGA-2002 format.') source = lines.pop(0).split()[1] lines.pop(0) code = lines.pop(0).split()[2] fo...
def squeeze(source: Tensor, axis: Dim) -> Tensor:
    """Remove the given size-1 *axis* from *source*.

    Args:
        source: Tensor to squeeze.
        axis: Dim to remove; must have dimension exactly 1.

    Returns:
        The tensor with *axis* removed, via the source's raw backend.
    """
    # Message typo fixed: "extend" -> "extent".
    assert (axis.dimension == 1), f'squeeze {source}: axis {axis} is not of extent 1'
    return source._raw_backend.squeeze(source, axis=axis)
class ConvX(nn.Module): def __init__(self, in_planes, out_planes, kernel=3, stride=1): super(ConvX, self).__init__() self.conv = nn.Conv2d(in_planes, out_planes, kernel_size=kernel, stride=stride, padding=(kernel // 2), bias=False) self.bn = nn.BatchNorm2d(out_planes) self.relu = nn....
def create_dataset(opt, rank=0):
    """Instantiate a CustomDatasetDataLoader for *opt* (and distributed
    *rank*) and return its loaded dataset."""
    loader = CustomDatasetDataLoader(opt, rank=rank)
    return loader.load_data()
def Exists(vs, body, weight=1, qid='', skid='', patterns=None, no_patterns=None):
    """Create an existential quantifier over *vs* with body *body*.

    Args:
        vs: Bound variables.
        body: Quantified formula body.
        weight, qid, skid: Solver hints/identifiers, passed through.
        patterns, no_patterns: Optional instantiation (anti-)patterns;
            default to fresh empty lists.

    Returns:
        Whatever _mk_quantifier builds (is_forall=False).
    """
    # Avoid mutable default arguments: a shared [] default would leak state
    # across calls if the callee ever mutated it. None -> fresh list keeps
    # the observable behavior identical for all existing callers.
    if patterns is None:
        patterns = []
    if no_patterns is None:
        no_patterns = []
    return _mk_quantifier(False, vs, body, weight, qid, skid, patterns, no_patterns)
def get_symmetric_quantization_range_and_scale(activation_is_signed: bool, activation_n_bits: int, activation_threshold: float): if activation_is_signed: min_value = (- (2 ** (activation_n_bits - 1))) max_value = ((2 ** (activation_n_bits - 1)) - 1) scale = (activation_threshold / (2 ** (act...
# NOTE(review): ".experimental" looks like a decorator that lost its "@" and
# object prefix (e.g. "@pytest.mark.experimental") during extraction; as
# written this line is invalid. Kept byte-identical pending the original.
.experimental
def test_predict_empty_log(log):
    """Fitting on a log then predicting on its empty slice should not raise."""
    model = ALSWrap(seed=SEED)
    model.fit(log)
    # limit(0) produces an empty log; k=1 recommendations requested.
    model.predict(log.limit(0), 1)
def insert_cad_file(filepath, model_name): if (not os.path.isfile(filepath)): raise FileNotFoundError payload = {'name': model_name, 'file': filepath, 'source': 4, 'image': filepath.replace('/tracePart/', '/image/tracePart/').replace('.stp', '.png'), 'file_size': os.path.getsize(filepath)} return ag...
class BasicBlock(nn.Module): expansion: int = 1 def __init__(self, inplanes: int, planes: int, stride: int=1, downsample: Optional[nn.Module]=None, groups: int=1, base_width: int=64, dilation: int=1, norm_layer: Optional[Callable[(..., nn.Module)]]=None) -> None: super().__init__() if (norm_laye...
def dump(path, obj):
    """Pickle *obj* to *path*, creating the parent directory if needed.

    Args:
        path: Destination path (resolved via get_absolute_path).
        obj: Any picklable object.
    """
    path = get_absolute_path(path)
    parent_path = get_dir(path)
    mkdir(parent_path)
    # pickle writes bytes: the file must be opened in binary mode ('wb').
    # The original opened in text mode ('w'), which makes pkl.dump raise
    # TypeError on Python 3.
    with open(path, 'wb') as f:
        logging.info('dumping file:' + path)
        pkl.dump(obj, f)
def download_voc(path, overwrite=False): _DOWNLOAD_URLS = [(' '34ed68851bce2a36e2a223fa52c661d592c66b3c'), (' '41a8d6e12baa5ab18ee7f8f8029b9e11805b4ef1'), (' '4e443f8a2eca6b1dac8a6c57641b67dd40621a49')] os.makedirs(path) for (url, checksum) in _DOWNLOAD_URLS: filename = download(url, path=path, over...
def test_distillation_loader(): num_total = 64 batch_size = 16 datasets = [TensorDataset(torch.rand((num_total // 4), 2, 2), torch.rand((num_total // 4))), TensorDataset(torch.rand((3 * (num_total // 4)), 2, 2), torch.rand(((3 * num_total) // 4)))] loader = DistillLoader(teacher=(lambda x: x), datasets=...
def restart():
    """Restart the current script with the same interpreter and arguments
    (interpreter run with -S, i.e. without importing `site`)."""
    if (platform.system() == 'Windows'):
        # Windows has no true exec(): run a replacement process and exit
        # immediately with its return code. `run` is presumably a
        # subprocess-style helper returning an exit status — TODO confirm.
        os._exit(run(sys.executable, '-S', *sys.argv))
    else:
        # POSIX: replace the current process image in place.
        os.execl(sys.executable, sys.executable, '-S', *sys.argv)
def set_logging_level(log_level='info'):
    """Set the module logger to DEBUG when *log_level* is 'debug';
    any other value selects INFO."""
    level = logging.DEBUG if log_level == 'debug' else logging.INFO
    logger.setLevel(level)
class Vocabulary2Id(Transformer): def __init__(self, sc, output: str, **kwargs): super().__init__(**kwargs) self.output = output self.sc = sc def __call__(self, rows: RDD): (value2index, path2index, value2freq, path2freq) = self.build_vocabularies(rows) doc2path_contexts ...
class TRANSFORMATION(object): clean = 'clean' CUR_TRANS_TYPE = clean rotate90 = 'rotate90' rotate180 = 'rotate180' rotate270 = 'rotate270' shift_left = 'shift_left' shift_right = 'shift_right' shift_up = 'shift_up' shift_down = 'shift_down' shift_top_left = 'shift_top_left' s...
.parametrize('apply', [True, False]) .parametrize('kwargs', [{}, {'autograph': False}, {'input_signature': [tf.TensorSpec(()), tf.TensorSpec(())]}]) def test_jit_function_behaviour_unchanged(apply: bool, kwargs: Any) -> None: (apply, **kwargs) def add(t: tf.Tensor, u: tf.Tensor) -> tf.Tensor: return (t ...
def register_Ns3PcapHelper_methods(root_module, cls): cls.add_constructor([param('ns3::PcapHelper const &', 'arg0')]) cls.add_constructor([]) cls.add_method('CreateFile', 'ns3::Ptr< ns3::PcapFileWrapper >', [param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode'), param('ns3::PcapHelper::D...