code
stringlengths
101
5.91M
class Game(): def __init__(self, **kwargs): self.game_id = (- 1) self.level_seed = 0 self.rl_agent_seed = 0 self.zoom = 5.5 self.bgzoom = 0.4 self.world_theme_n = (- 1) self.agent_theme_n = (- 1) self.background_themes = [] self.ground_themes =...
def centeredSketch(data): def centered(data, scale, old_center, new_center): centered_data = [] for stroke in data: x = list(map((lambda x: int((((x - old_center[0]) * scale) + new_center[0]))), stroke[0])) y = list(map((lambda x: int((((x - old_center[1]) * scale) + new_cent...
def _find_existing_fcompiler(compiler_types, osname=None, platform=None, requiref90=False, c_compiler=None): from numpy.distutils.core import get_distribution dist = get_distribution(always=True) for compiler_type in compiler_types: v = None try: c = new_fcompiler(plat=platform, ...
def collate_fn(insts):
    """Collate a batch of (source, target) instance pairs into padded batches.

    Args:
        insts: iterable of (source, target) pairs.

    Returns:
        ``(padded_sources, padded_targets)``, or ``None`` when the batch
        cannot be assembled (e.g. an empty batch) — callers are expected
        to skip ``None`` batches.
    """
    try:
        src_insts, tgt_insts = list(zip(*insts))
        src_insts = src_pad(src_insts)
        tgt_insts = tgt_pad(tgt_insts)
    except Exception:
        # Deliberate best-effort: a malformed/empty batch yields None rather
        # than aborting the data loader. The original bare `except:` also
        # swallowed KeyboardInterrupt/SystemExit — narrowed to Exception.
        return None
    return (src_insts, tgt_insts)
def _parse_args(): parser = argparse.ArgumentParser(description='CTGAN Command Line Interface') parser.add_argument('-e', '--epochs', default=300, type=int, help='Number of training epochs') parser.add_argument('-t', '--tsv', action='store_true', help='Load data in TSV format instead of CSV') parser.add...
(datatype[(N, N)], datatype[(N, M)], datatype[1], datatype[1]) def syrk(C, A, alpha, beta): def mult_c_rows(i: _[0:N]): def mult_c_cols(j: _[0:(i + 1)]): (ic << C[(i, j)]) (ib << beta) (oc >> C[(i, j)]) oc = (ic * ib) def compute(i: _[0:N], k: _[0:M]): ...
class Projection(nn.Module): def __init__(self, in_dim, hidden_mlp=2048, feat_dim=256): super(Projection, self).__init__() self.projection_head = nn.Sequential(nn.Linear(in_dim, hidden_mlp), nn.ReLU(inplace=True), nn.Linear(hidden_mlp, feat_dim)) self.output_dim = feat_dim def forward(se...
def build_data_loader(dataset, batch_size, num_workers, training=True):
    """Construct a pinned-memory DataLoader over *dataset*.

    Uses ``TrainingSampler`` when *training* is True and
    ``InferenceSampler`` otherwise; both are sized by ``len(dataset)``.
    """
    sampler_cls = TrainingSampler if training else InferenceSampler
    return torch.utils.data.DataLoader(
        dataset,
        sampler=sampler_cls(len(dataset)),
        batch_size=batch_size,
        num_workers=num_workers,
        pin_memory=True,
    )
class TFFlaubertWithLMHeadModel(metaclass=DummyObject):
    """Import-time placeholder used when TensorFlow is not installed."""

    # Backend(s) this dummy stands in for.
    _backends = ['tf']

    def __init__(self, *args, **kwargs):
        # Fails fast with an informative error if the 'tf' backend is
        # missing; the real model class is never loaded through this path.
        requires_backends(self, self._backends)
# NOTE(review): the decorator on this helper appears truncated to
# "_function" in this extract (likely "@cached_function" in the original
# Sage source) — restore before use.
_function
def _exp_term(n, R=QQ):
    """Return the sum over partitions of n of p(part)/|Aut(part)| in the
    power-sum basis of SymmetricFunctions(R) — presumably the degree-n
    term of a plethystic exponential; assumes a Sage environment (QQ,
    SymmetricFunctions, Partitions) — TODO confirm."""
    p = SymmetricFunctions(R).power()
    return sum(((p(part) / part.aut()) for part in Partitions(n)))
def calc_fullsum_scores(meta): from returnn.Util import better_repr fn = Globals.get_fullsum_scores_filename(**meta) if os.path.exists(fn): print('Existing fullsum scores filename:', fn) print(('content:\n%s\n' % open(fn).read())) return fn assert ('output_fullsum' in Globals.eng...
def is_transformers_available(): if (importlib.util.find_spec('transformers') is not None): _version = importlib_metadata.version('transformers') if (version.parse(_version) < version.parse('4.0')): raise EnvironmentError(f'Transformers found but with version {_version}. The minimum vers...
def run(args): model = getattr(importlib.import_module(args.cam_network), 'Net')() train_dataset = voc12.dataloader.VOC12ClassificationDataset(args.train_list, voc12_root=args.voc12_root, resize_long=(320, 640), hor_flip=True, crop_size=512, crop_method='random') train_data_loader = DataLoader(train_dataset...
def save_mask(mask, filename):
    """Persist a mask tensor as both a grayscale PNG and a compressed .npz.

    The extension of *filename* is ignored; '.png' and '.npz' siblings are
    written next to it. Parent directories are created as needed.
    """
    os.makedirs(os.path.dirname(filename), exist_ok=True)
    base = os.path.splitext(filename)[0]
    # Detach from the autograd graph, move to host memory and drop
    # singleton dimensions before serializing.
    array = mask.detach().cpu().numpy().squeeze()
    plt.imsave(base + '.png', array, cmap='gray', format='png')
    np.savez_compressed(base + '.npz', array)
class MaxUnpool1d(_MaxUnpoolNd): kernel_size: _size_1_t stride: _size_1_t padding: _size_1_t def __init__(self, kernel_size: _size_1_t, stride: Optional[_size_1_t]=None, padding: _size_1_t=0) -> None: super(MaxUnpool1d, self).__init__() self.kernel_size = _single(kernel_size) sel...
def sanitize_output(case: Case, response: (GenericResponse | None)=None, *, config: (Config | None)=None) -> None:
    """Sanitize a test case in place and, when a response is supplied,
    also sanitize the response and its originating request."""
    sanitize_case(case, config=config)
    if response is None:
        return
    sanitize_response(response, config=config)
    sanitize_request(response.request, config=config)
def get_data_info(cfg: Dict, augment: Optional[bool]=True) -> Dict: try: print('[get_data_info]', cfg) meta_root = cfg['meta_root'] train_manifest = cfg['train_manifest'] val_manifest = cfg['val_manifest'] label_map = cfg['label_map'] train_manifest = os.path.join(met...
# NOTE(review): the decorator appears truncated to "_numpy_output(...)" in
# this extract (likely "@compare_numpy_output(...)" from the dace test
# utilities) — restore before running.
_numpy_output(non_zero=True, positive=True)
def test_modr(A: dace.int64, B: dace.int64[(5, 5)]):
    # Element-wise modulo of scalar A by each entry of the 5x5 matrix B;
    # the decorator presumably validates the result against NumPy's A % B.
    return (A % B)
def normalization(quaternion, split_dim=1):
    """L2-normalize each 4-component quaternion packed along *split_dim*.

    The tensor is viewed as groups of shape (4, size) with
    ``size = quaternion.size(split_dim) // 4``; each group is scaled to
    unit norm across its 4 components, then the original packing is
    restored.
    """
    size = quaternion.size(split_dim) // 4
    grouped = quaternion.reshape(-1, 4, size)
    # Per-quaternion Euclidean norm, kept as a broadcastable (…, 1, size) factor.
    norms = torch.sqrt(torch.sum(grouped ** 2, 1, True))
    grouped = grouped / norms
    return grouped.reshape(-1, 4 * size)
def scenario_objective_calculator(trial: Trial, search_space: Dict[(str, List[Optional[Any]])], split_data: SplitData, recommender, criterion: Metric, k: int) -> float: params_for_trial = suggest_params(trial, search_space) recommender.set_params(**params_for_trial) return eval_quality(split_data, recommend...
def protoge_td3_config(): config = default_ddpg_config() config.gamma = 0.99 config.actor_lr = 0.001 config.critic_lr = 0.001 config.actor_weight_decay = 0.0 config.target_network_update_freq = 40 config.target_network_update_frac = 0.05 config.optimize_every = 2 config.batch_size = ...
class MyBiTModel(tf.keras.Model): def __init__(self, module): super().__init__() self.dense1 = tf.keras.layers.Dense(128) self.normalize = Lambda((lambda a: tf.math.l2_normalize(a, axis=1))) self.bit_model = module def call(self, images): bit_embedding = self.bit_model(im...
def print_usage(): ((print >> sys.stderr), 'Usage: m2scorer.py [OPTIONS] proposed_sentences gold_source') ((print >> sys.stderr), 'where') ((print >> sys.stderr), ' proposed_sentences - system output, sentence per line') ((print >> sys.stderr), ' source_gold - source sentences with gold...
def inputs(train_batch, train_epochs, vald_batch=50, test_batch=50): training_iterator = mnist_reader.inputs('train', train_batch, train_epochs) validation_iterator = mnist_reader.inputs('vald', vald_batch, None) test_iterator = mnist_reader.inputs('test', test_batch, None) handle = tf.placeholder(tf.st...
def gen_aval_types_datapoints(df_params, df_ret, df_var, set_type, output_path, cached_file: bool=False): if (((not (os.path.exists(os.path.join(output_path, f'params_{set_type}_aval_types_dp.npy')) and os.path.exists(os.path.join(output_path, f'ret_{set_type}_aval_types_dp.npy')))) and os.path.exists(os.path.join(...
class Mul_both(Problem): name = 'Mul_both' dependencies = {Operations: (lambda config: config)} symbols = ['<MUL_BOTH>', '<SOLVE>', '<SEP>', 'x', 'y', '+', '-'] def generate(self): (x_coef, y_coef, const) = self.sample_linear_2d(self.config['max_digits']) max_coef = (10 ** self.config['m...
def add_word_number_mapping(answers): added_ans = [] for ans in answers: if ans.isdigit(): added_ans.append(num2words(ans)) else: try: temp = str(w2n.word_to_num(ans)) added_ans.append(temp) except: pass retu...
def save_to_sa(name, data):
    """Copy *data* into a newly created shared-memory array registered
    under *name* (SharedArray backend); shape and dtype are preserved."""
    shared = sa.create(name, data.shape, data.dtype)
    np.copyto(shared, data)
class JankPolicy2(NaivePolicy): def act(self, observations): interaction_per_cas = [sum([len(arm_ob) for arm_ob in cas_ob]) for cas_ob in observations] cas_id = np.argmin(interaction_per_cas) cas_ob = observations[cas_id] cas_interactions = interaction_per_cas[cas_id] if (len...
def _weight_mean_color(graph, src, dst, n): diff = (graph.nodes[dst]['mean color'] - graph.nodes[n]['mean color']) diff = np.linalg.norm(diff) return {'weight': diff}
def test_mlp_bias(): lin1 = torch.nn.Linear(1, 5) lin2 = torch.nn.Linear(5, 1) torch.nn.init.normal_(lin1.weight, std=0.01) torch.nn.init.normal_(lin1.bias, std=0.01) torch.nn.init.normal_(lin2.weight, std=0.01) torch.nn.init.normal_(lin2.bias, std=1e-08) mlp = torch.nn.Sequential(lin1, torc...
_cache def get_sequences(num_sequences=1000, num_workers=None): possible_conditions = {'led': [0, 1], 'lightbulb': [0, 1], 'slider': ['right', 'left'], 'drawer': ['closed', 'open'], 'red_block': ['table', 'slider_right', 'slider_left'], 'blue_block': ['table', 'slider_right', 'slider_left'], 'pink_block': ['table',...
# NOTE(review): the decorator appears truncated in this extract — it reads
# as a bare tuple "(tf.float32, ...)" without a decorator name (likely
# "@function.Defun(...)" from TensorFlow) — restore before use.
(tf.float32, tf.float32, tf.float32, func_name='att_sum_bahdanau', noinline=True)
def att_sum_bahdanau(v_att, keys, query):
    # Bahdanau (additive) attention scores: broadcast the query across the
    # keys' second axis, apply tanh, scale by v_att, and reduce over the
    # feature axis (axis 2).
    return tf.reduce_sum((v_att * tf.tanh((keys + tf.expand_dims(query, 1)))), [2])
def register_mhpv2_parsing(root): root = os.path.join(root, 'mhpv2') meta = _get_mhpv2_parsing_meta() for (name, (image_root, category_gt_root, instance_gt_root, human_gt_root)) in _PREDEFINED_SPLITS.items(): image_root = os.path.join(root, image_root) category_gt_root = os.path.join(root, c...
def get_name_from_qid(qid): candidate = qid_name_mapping.find_one({'qid': qid}) if candidate: return candidate['name'] else: time.sleep(1) url = ' query = '\n SELECT ?label\n WHERE {{\n {} rdfs:label ?label.\n FILTER(LANG(?label) = "en").\n }}\n '.format...
class Evaluate(CMD): def __call__(self, args, eval_dep=False, decode_type='mbr'): super(Evaluate, self).__call__(args) self.device = args.device self.args = args dataset = DataModule(args) self.model = get_model(args.model, dataset) best_model_path = (self.args.load_f...
def parse_effect(alist, type_dict, predicate_dict): tag = alist[0] if (tag == 'and'): return pddl.ConjunctiveEffect([parse_effect(eff, type_dict, predicate_dict) for eff in alist[1:]]) elif (tag == 'forall'): assert (len(alist) == 3) parameters = parse_typed_list(alist[1]) ef...
class TestCase(unittest.TestCase): def setUpClass(cls): workspace.GlobalInit(get_default_test_flags()) core.SetEnginePref({}, {}) def setUp(self): test_method = getattr(self, self._testMethodName) is_flaky_test = getattr(test_method, '__caffe2_flaky__', False) if (is_flak...
class WarmupPolyLR(torch.optim.lr_scheduler._LRScheduler): def __init__(self, optimizer: torch.optim.Optimizer, max_iters: int, warmup_factor: float=0.001, warmup_iters: int=1000, warmup_method: str='linear', last_epoch: int=(- 1), power: float=0.9, constant_ending: float=0.0): self.max_iters = max_iters ...
def _vgg(arch, cfg, batch_norm, pretrained, progress, **kwargs): if pretrained: kwargs['init_weights'] = False model = VGG(make_layers(cfgs[cfg], batch_norm=batch_norm), **kwargs) if pretrained: raise Exception('no pretrained model supported') return model
def register_Ns3CallbackImplBase_methods(root_module, cls): cls.add_constructor([]) cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')]) cls.add_method('GetTypeid', 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True) cls.add_method('IsEqual', 'bool', [param('ns3::Pt...
def write_ascii_data_cols(_file_path, _cols, _str_sep, _str_head=None, _i_col_start=0, _i_col_end=(- 1)): f = open(_file_path, 'w') if (_str_head != None): lenStrHead = len(_str_head) if (lenStrHead > 0): strHead = _str_head if (_str_head[(lenStrHead - 1)] != '\n'): ...
class VQAClassificationDataset(Dataset): def __init__(self, task: str, dataroot: str, annotations_jsonpath: str, split: str, image_features_reader: ImageFeaturesH5Reader, gt_image_features_reader: ImageFeaturesH5Reader, tokenizer: BertTokenizer, padding_index: int=0, max_seq_length: int=16, max_region_num: int=37):...
class epub2txt(): def __init__(self, epubfile=None): self.epub = epubfile def convert(self): file = zipfile.ZipFile(self.epub, 'r') rootfile = ContainerParser(file.read('META-INF/container.xml')).parseContainer() (title, author, ncx) = BookParser(file.read(rootfile)).parseBook() ...
class DataLoader(object): __initialized = False def __init__(self, dataset, batch_size=1, shuffle=False, sampler=None, batch_sampler=None, num_workers=0, collate_fn=default_collate, pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None): self.dataset = dataset self.batch_size = batch...
def get_lr_scheduler(config: DictConfig, optimizer, epoch_time_step) -> LearningRateScheduler: if (config.train.lr_scheduler == 'tri_stage_lr_scheduler'): lr_scheduler = TriStageLRScheduler(optimizer=optimizer, init_lr=config.train.init_lr, peak_lr=config.train.peak_lr, final_lr=config.train.final_lr, init_...
def update_limb_lengths(length: float) -> None:
    """Set the Y translation of every left/right knee and wheel joint so
    each limb has total *length*, compensating the per-joint offset from
    ``known_offsets``. Mutates the module-level ``model`` in place."""
    for side in ('left', 'right'):
        for joint in ('knee', 'wheel'):
            name = f'{side}_{joint}'
            placement = model.jointPlacements[model.getJointId(name)]
            placement.translation[1] = length - known_offsets[joint]
def summ(body_file, summ_file, param_file, oracle_len, w_exp, jamr=False): logger.debug('start testing...') logger.debug(('[settings]: len_%s_exp_%d' % (oracle_len, w_exp))) corpus = buildCorpus(body_file, summ_file, w_exp) decoder = Decoder() decoder.weights.load(param_file) estimator = ParamEs...
def FedAvg(models, weights=None): models = [model.to('cpu') for model in models] new_model = models[0] state_dicts = [model.state_dict() for model in models] state_dict = new_model.state_dict() for key in models[1].state_dict(): state_dict[key] = torch.from_numpy(np.average([state[key].numpy...
def parse_title(csv_filename, items=('seed', 'real', 'coda', 'noise', 'dyna', 'roll', 'mbpo', 'c3xm')):
    """Extract the value following each marker in *items* from a CSV filename.

    For a basename like ``seed42_real7_x.csv`` with items ``['seed', 'real']``
    this returns ``['42', '7']`` — the text between each marker and the next
    underscore. Markers absent from the basename map to ``None``.

    Fixed: the default was a mutable list (shared across calls); it is now a
    tuple with the same contents, which is backward-compatible.
    """
    basename = csv_filename.split('/')[-1]
    results = []
    for item in items:
        if item in basename:
            results.append(basename.split(item)[-1].split('_')[0])
        else:
            results.append(None)
    return results
def _encode(values, *, uniques, check_unknown=True): if (values.dtype.kind in 'OUS'): try: return _map_to_integer(values, uniques) except KeyError as e: raise ValueError(f'y contains previously unseen labels: {str(e)}') else: if check_unknown: diff = _...
class PowellBenchmark(Benchmark): def __init__(self, nb_features: int=2): self.nb_features = nb_features ind_domain = ((- 4.0), 5.0) super().__init__(fn=algorithms.partial(illumination_powell, nb_features=nb_features), ind_domain=ind_domain, fitness_domain=((0.0, math.inf),), features_domain...
def cse_codegen(symbols): cse_results = sympy.cse(symbols, sympy.numbered_symbols('c')) output = io.StringIO() for helper in cse_results[0]: output.write('Scalar const ') output.write(sympy.printing.ccode(helper[1], helper[0])) output.write('\n') assert (len(cse_results[1]) == 1)...
def extract_eval_track_list(annotation_path, n_train, init_train): track_list = defaultdict(list) with open(annotation_path, 'r') as ann_file: lines = ann_file.readlines() for line in lines: line = line.split(',') (img_id, track_id, xmin, ymin, xmax, ymax) = line[:6] ...
class DataCreator(): def __init__(self, weather_raw_dir, start_date, end_date, spatial_range, target_dim, downsample_mode, dump_data_folder, weather_freq=3, features=None, atm_dim=0, check_files=False, rebuild=True, smooth=False, smooth_win_len=31): self.data_dir = os.path.abspath(os.path.dirname(os.path.ab...
def best_match(arg_types, functions, pos=None, env=None, args=None): actual_nargs = len(arg_types) candidates = [] errors = [] for func in functions: error_mesg = '' func_type = func.type if func_type.is_ptr: func_type = func_type.base_type if (not func_type.i...
def blift(LF, Li, p, k, S=None, all_orbits=False): P = LF[0].parent() keepScaledIneqs = [scale(P(coeff), Li, p) for coeff in LF if (coeff != 0)] keptVals = [i[2] for i in keepScaledIneqs if i[0]] if keptVals: pass else: if all_orbits: return [[True, t] for t in range(p)] ...
def main(): argparser = argparse.ArgumentParser(description='dacelab: An Octave to SDFG compiler') argparser.add_argument('infile', metavar='infile', type=argparse.FileType('r'), help='Input file (Octave code)') argparser.add_argument('-o', '--outfile', metavar='outfile', type=argparse.FileType('w'), defaul...
class Indexer(): def __init__(self, checkpoint, config=None, verbose: int=3): self.index_path = None self.verbose = verbose self.checkpoint = checkpoint self.checkpoint_config = ColBERTConfig.load_from_checkpoint(checkpoint) self.config = ColBERTConfig.from_existing(self.chec...
def check_all_progs(verbose=False, raise_error=False, filehandle=None, debug=False, assembler=None): for prog in sorted(prog_name_to_default): if debug: print('__________ checking', prog, '____________', flush=True) make_and_check_prog(prog, verbose=verbose, raise_error=raise_error, file...
class IndependentGenerator(ModuleToDistributionGenerator): def __init__(self, *args, reinterpreted_batch_ndims=1): super().__init__(*args) self.reinterpreted_batch_ndims = reinterpreted_batch_ndims def forward(self, *input): distribution = super().forward(*input) return Independe...
def sample_point_from_triangle(p1, p2, p3):
    """Draw a point uniformly at random from the triangle (p1, p2, p3).

    Vertices may be scalars or numpy arrays of matching shape.

    Bug fixed: the original used raw (a, b) uniform on [0, 1]^2, which
    samples the whole parallelogram spanned by (p2 - p1, p3 - p1) — half
    of the points fell outside the triangle. When a + b > 1 the pair is
    reflected to (1 - a, 1 - b), the standard uniform-triangle sampling
    trick, which folds those points back inside.
    """
    a = np.random.uniform(0, 1)
    b = np.random.uniform(0, 1)
    if a + b > 1:
        a, b = 1 - a, 1 - b
    return p1 + a * (p2 - p1) + b * (p3 - p1)
def _unflatten_sparse_tensors(flat, tensors): (flat_indices, flat_values) = flat indices = _unflatten_dense_tensors(flat_indices, [t._indices() for t in tensors]) values = _unflatten_dense_tensors(flat_values, [t._values() for t in tensors]) outputs = [] for (t, i, v) in zip(tensors, indices, values...
def simple_reduce_tests(rank, world_size): tests = [(c10d.ReduceOp.SUM, torch.tensor([(rank + 1.0)]), torch.tensor([float(((world_size * (world_size + 1)) / 2))])), (c10d.ReduceOp.PRODUCT, torch.tensor([(rank + 1.0)]), torch.tensor([float(math.factorial(world_size))])), (c10d.ReduceOp.MIN, torch.tensor([(rank + 1.0...
def find_equivalent_sets(clusters, nodes): node_2_set = {} set_2_nodes = {} non_singletons = set() set_index = 0 for cluster in clusters: for element in cluster[2]: node_2_set[element] = set_index non_singletons.add(element) try: set_2_node...
class AverageMeter(): def __init__(self, name=None, momentum=0.997): if ((momentum >= 1) or (momentum <= 0)): raise AssertionError('`momentum` should be a non zero float less than 1') self.name = name self.momentum = momentum self._averaged_value = None self._coun...
class MapWrapper(object): def __init__(self, pool=1): self.pool = None self._mapfunc = map self._own_pool = False if callable(pool): self.pool = pool self._mapfunc = self.pool elif (int(pool) == (- 1)): self.pool = Pool() self._...
def F1_close(x, y):
    """Series expansion of F1 for y near x, in powers of e = y - x,
    scaled by sqrt(pi).

    NOTE(review): the coefficients follow the original source verbatim;
    the underlying special-function identity is not visible here — verify
    against the derivation before altering any term.
    """
    e = y - x
    # Grouping kept identical to the original so results match bit-for-bit.
    series = (
        (x + (1 / 2) * e)
        - (1 / 6) * (e ** 2)
        - (1 / 12) * (e ** 3)
        + ((1 / 90) * x) * ((x ** 2) + 1.0) * (e ** 4)
    )
    return np.sqrt(np.pi) * series
_properties class Vectorization(transformation.SingleStateTransformation): vector_len = Property(desc='Vector length', dtype=int, default=4) propagate_parent = Property(desc='Propagate vector length through parent SDFGs', dtype=bool, default=False) strided_map = Property(desc='Use strided map range (jump by...
def train_truncated_bptt(loggers, loaders, model, optimizer, scheduler, datasets, **kwargs): start_epoch = 0 if cfg.train.auto_resume: start_epoch = load_ckpt(model, optimizer, scheduler) if (start_epoch == cfg.optim.max_epoch): logging.info('Checkpoint found, Task already done') else: ...
class Reconstruct(SequenceDataset): _name_ = 'reconstruct' def init_defaults(self): return {'l_seq': 1024, 'l_mem': 512, 'dt': 0.001, 'freq': 1.0, 'seed': 0, 'static': False, 'n_train': 10000, 'n_eval': 1000} def d_input(self): return 1 def d_output(self): return self.l_mem d...
class ConvolutionLayer(nn.Module): def __init__(self, in_channels: int, out_channels: int): super(ConvolutionLayer, self).__init__() self.conv1 = nn.Conv1d(in_channels=in_channels, out_channels=(out_channels // 3), stride=1, kernel_size=2) self.conv2 = nn.Conv1d(in_channels=in_channels, out_...
def collate(samples, pad_idx, eos_idx): if (len(samples) == 0): return {} def merge(key): return data_utils.collate_tokens([s[key] for s in samples], pad_idx, eos_idx=eos_idx) id = np.array([s['id'] for s in samples]) src_tokens = merge('source') src_lengths = torch.LongTensor([s['so...
class AStarAgent(Agent): def getSolution(self, state, balance=1, maxIterations=(- 1)): iterations = 0 bestNode = None Node.balance = balance queue = PriorityQueue() queue.put(Node(state.clone(), None, None)) visisted = set() while (((iterations < maxIterations...
class COCO(data.Dataset): num_classes = 80 default_resolution = [512, 512] mean = np.array([0., 0., 0.], dtype=np.float32).reshape(1, 1, 3) std = np.array([0., 0., 0.], dtype=np.float32).reshape(1, 1, 3) def __init__(self, opt, split): super(COCO, self).__init__() self.data_dir = os....
def stringify_terms(terms: List[List[int]], variable_names: List[str]=list('xyz')) -> List[str]: def stringify_power(index: int, degree: int) -> str: var = variable_names[index] if (degree == 0): return '' if (degree == 1): return var return f'{var}^{degree}' ...
class BarycenterRegularization(ShapeRegularizationTerm): def __init__(self, db: database.Database) -> None: super().__init__(db) self.geometric_dimension = db.geometry_db.mesh.geometric_dimension() self.spatial_coordinate = fenics.SpatialCoordinate(self.mesh) self.mu = self.config.ge...
def _resolve_NameNode(env, node): try: resolved_name = env.lookup(node.name).name except AttributeError: raise CompileError(node.pos, INVALID_ERR) viewscope = env.global_scope().context.cython_scope.viewscope entry = viewscope.lookup(resolved_name) if (entry is None): raise C...
def _correlate_sparse(image, kernel_shape, kernel_indices, kernel_values): (idx, val) = (kernel_indices[0], kernel_values[0]) if (tuple(idx) != ((0,) * image.ndim)): raise RuntimeError('Unexpected initial index in kernel_indices') out = _get_view(image, kernel_shape, idx, val).copy() for (idx, v...
.parametrize(['distance_trace', 'time_explosion', 'mu', 'r'], [(0, 1, 0, 0), (0, 1, 1, 0), (0, 1, 0, 1)]) def test_packet_energy_limit_one(distance_trace, time_explosion, mu, r): initial_energy = 0.9 nu = 0.4 packet = r_packet.RPacket(r, mu, nu, initial_energy) new_energy = r_packet.calc_packet_energy(p...
class FunnelForPreTraining(metaclass=DummyObject):
    """Import-time placeholder used when PyTorch is not installed."""

    # Backend(s) this dummy stands in for.
    _backends = ['torch']

    def __init__(self, *args, **kwargs):
        # Fails fast with an informative error if the 'torch' backend is
        # missing; the real model class is never loaded through this path.
        requires_backends(self, self._backends)
class ImageDataset(Dataset): def show_train(self): (num_train_pids, num_train_cams) = self.parse_data(self.train) headers = ['subset', '# ids', '# images', '# cameras'] csv_results = [['train', num_train_pids, len(self.train), num_train_cams]] table = tabulate(csv_results, tablefmt='...
class DataAugmentationForVideoMAE(object): def __init__(self, args): self.input_mean = [0.485, 0.456, 0.406] self.input_std = [0.229, 0.224, 0.225] normalize = GroupNormalize(self.input_mean, self.input_std) if (not args.no_augmentation): self.train_augmentation = GroupMu...
class CppCodePrinter(CXX11CodePrinter): def __init__(self, settings: T.Optional[T.Dict[(str, T.Any)]]=None, override_methods: T.Optional[T.Dict[(sympy.Function, str)]]=None) -> None: settings = dict((settings or {}), math_macros={key: f'Scalar({macro})' for (key, macro) in get_math_macros().items()}) ...
def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
    """Call *function* with os.environ temporarily augmented by *extra_env*.

    The prior environment is always restored, even when *function* raises.

    Fixed: the snapshot is taken BEFORE entering the try block. In the
    original, a failure in ``environ.copy()`` would have left
    ``original_env`` unbound while the ``finally`` clause still referenced
    it, masking the real error with a NameError.

    Returns:
        Whatever *function(*args, **kwargs)* returns.
    """
    original_env = environ.copy()
    try:
        environ.update(extra_env)
        return function(*args, **kwargs)
    finally:
        # Restore the exact prior state, dropping any keys added meanwhile.
        environ.clear()
        environ.update(original_env)
def parse_xml(xml_string):
    """Parse *xml_string* and return the root Element, or None when the
    input is not well-formed XML.

    Fixed: the original bare ``except:`` also swallowed
    KeyboardInterrupt/SystemExit and programming errors (e.g. passing a
    non-string); only genuine parse failures are treated as "not XML" now.
    """
    try:
        return ET.fromstring(xml_string)
    except ET.ParseError:
        return None
.parametrize('with_data_key', (True, False)) def test_data_key(graphql_url, with_data_key): response = requests.post(graphql_url, json={'query': get_introspection_query()}, timeout=1) decoded = response.json() if (not with_data_key): decoded = decoded['data'] schema = schemathesis.graphql.from_d...
class SimCityWrapper(gym.Wrapper): def __init__(self, game, cfg: Config): self.env = game self.env.configure(map_width=16) super(SimCityWrapper, self).__init__(self.env) def step(self, action, **kwargs): (obs, rew, done, truncated, info) = super().step(action, **kwargs) o...
(params=DDPG_PARAMS) def ddpg_actor_param(request): param = request.param return (ActorDRR(user_num=param['user_num'], item_num=param['item_num'], embedding_dim=param['embedding_dim'], hidden_dim=param['hidden_dim'], memory_size=param['memory_size'], env_gamma_alpha=param['env_gamma_alpha'], device=param['devic...
class OrthogonalRegression(MultiOutputMixin, RegressorMixin): def __init__(self, use_orthogonal_projector=True, linear_estimator=None): self.use_orthogonal_projector = use_orthogonal_projector self.linear_estimator = linear_estimator def fit(self, X, y): (X, y) = check_X_y(X, y, y_numeri...
class FeedForward(nn.Module): def __init__(self, dim: int, dim_out: Optional[int]=None, mult: int=4, glu: bool=False, dropout: float=0.0): super().__init__() inner_dim = int((dim * mult)) dim_out = (dim_out if (dim_out is not None) else dim) project_in = GEGLU(dim, inner_dim) ...
def three_way_fen_coding(fen): fen_arr = parse_fen(fen) three_way_repr = np.zeros((64, 6)) for (idx, fen_val) in enumerate(fen_arr): if (fen_val > (- 1)): piece_type = (fen_val // 2) parity = (1 if (fen_val % 2) else (- 1)) three_way_repr[(idx, piece_type)] = pari...
def require_jieba(test_case):
    """Decorator: skip *test_case* unless the jieba package is available."""
    skip_unless = unittest.skipUnless(is_jieba_available(), 'test requires jieba')
    return skip_unless(test_case)
class ImagenetDataProvider(DataProvider): DEFAULT_PATH = 'data/ImageNet/imagenet_ilsvrc/ILSVRC/Data/CLS-LOC/' def __init__(self, save_path=None, train_batch_size=256, test_batch_size=512, valid_size=None, n_worker=8, resize_scale=0.08, distort_color=None, image_size=224, num_replicas=None, rank=None): w...
def WarmupWrapper(scheduler_type): class Wrapped(scheduler_type): def __init__(self, warmup_epochs, *args): self.warmup_epochs = warmup_epochs super(Wrapped, self).__init__(*args) def get_lr(self): if (self.last_epoch < self.warmup_epochs): return ...
class CosineLinear(Module): def __init__(self, in_features, out_features, sigma=True): super(CosineLinear, self).__init__() self.in_features = in_features self.out_features = out_features self.weight = Parameter(torch.Tensor(out_features, in_features)) if sigma: s...
class TFConvBertForMultipleChoice(metaclass=DummyObject):
    """Import-time placeholder used when TensorFlow is not installed."""

    # Backend(s) this dummy stands in for.
    _backends = ['tf']

    def __init__(self, *args, **kwargs):
        # Fails fast with an informative error if the 'tf' backend is
        # missing; the real model class is never loaded through this path.
        requires_backends(self, self._backends)
def main(): parser = argparse.ArgumentParser(description='Runs clang-tidy over all files in a compilation database. Requires clang-tidy and clang-apply-replacements in $PATH.') parser.add_argument('-clang-tidy-binary', metavar='PATH', default='clang-tidy', help='path to clang-tidy binary') parser.add_argume...
def _warmup_mmap_file(path): with open(path, 'rb') as stream: while stream.read(((100 * 1024) * 1024)): pass
def test_cache_and_get_tensor(nparray, tensor_key):
    """Round-trip check: a tensor cached under *tensor_key* in a TensorDB
    must come back equal from the cache."""
    store = TensorDB()
    store.cache_tensor({tensor_key: nparray})
    fetched = store.get_tensor_from_cache(tensor_key)
    assert np.array_equal(nparray, fetched)