code
stringlengths
281
23.7M
class FilteredDataset(Dataset): def __init__(self, source, filterer=(lambda i, s: s[1]), target=[], verbosity=make_verbose()): self.source = source if (not isinstance(target, list)): target = [target] self.indices = [i for (i, s) in wrap_with_tqdm(enumerate(source), verbosity) if...
class SimpleMajorityVote(Aggregate): def aggregate(answers: Union[(str, List[str])], **kwargs: Any) -> Union[(str, List[str])]: answer_count = {} for answer in answers: if isinstance(answer, str): if (answer not in answer_count): answer_count[answer] =...
class NBD(): def __init__(self, **server_settings): self.bd = server_settings.get('block_device', '') self.netboot_directory = server_settings.get('netboot_directory', '.') self.write = server_settings.get('write', False) self.cow = server_settings.get('cow', True) self.in_me...
def quantize_sharded_embeddings(module: torch.nn.Module, dtype: torch.dtype) -> torch.nn.Module: qconfig = quant.QConfigDynamic(activation=quant.PlaceholderObserver, weight=quant.PlaceholderObserver.with_args(dtype=dtype)) return quant.quantize_dynamic(module, qconfig_spec={BatchedFusedEmbeddingBag: qconfig, Ba...
class TestOrderedLogistic(BaseTestDistributionRandom): pymc_dist = _OrderedLogistic pymc_dist_params = {'eta': 0, 'cutpoints': np.array([(- 2), 0, 2])} expected_rv_op_params = {'p': np.array([0., 0., 0., 0.])} checks_to_run = ['check_pymc_params_match_rv_op', 'check_rv_size'] .parametrize('eta, cutp...
def main(): parser = argparse.ArgumentParser() parser.add_argument('--model', help='Path of the detection model.', required=True) parser.add_argument('--label', help='Path of the labels file.') parser.add_argument('--output', help='File path of the output image.') args = parser.parse_args() if a...
class F10_TestCase(F9_TestCase):
    """F10 xconfig test case: runs the inherited F9 checks, then verifies
    that the removed xconfig options emit deprecation warnings."""

    def runTest(self):
        # Re-run all checks from the previous release's test case first.
        F9_TestCase.runTest(self)
        # Each of these xconfig options is deprecated in this release.
        for deprecated_option in ('--driver', '--depth', '--resolution', '--videoram'):
            self.assert_deprecated('xconfig', deprecated_option)
def _send_message_to_trio(trio_token: (TrioToken | None), message_to_trio: (Run[RetT] | RunSync[RetT])) -> RetT: token_provided = (trio_token is not None) if (not token_provided): try: trio_token = PARENT_TASK_DATA.token except AttributeError: raise RuntimeError("this thr...
def get_head_node_ip(cluster_cfg: str) -> str: logger.info(f"Getting Ray cluster head node IP for '{cluster_cfg}'") proc = subprocess.run(f'ray get-head-ip {cluster_cfg}', shell=True, capture_output=True, text=True, check=True) head_node_ip = proc.stdout.splitlines()[(- 1)] logger.info(f"Ray cluster hea...
_end_docstrings(PIPELINE_INIT_ARGS, '\n ignore_labels (`List[str]`, defaults to `["O"]`):\n A list of labels to ignore.\n grouped_entities (`bool`, *optional*, defaults to `False`):\n DEPRECATED, use `aggregation_strategy` instead. Whether or not to group the tokens corresponding to ...
def test_value_radio(db, mocker): mocker.patch('rdmo.options.models.Option.trans', mocked_trans) value = Value.objects.get(id=4) assert (value.value == 'Text: Lorem ipsum') assert (value.value_and_unit == 'Text: Lorem ipsum') assert (value.option_text == 'Text') assert (value.option_additional_i...
def _oom_observer(output_dir: str) -> Callable[([Union[(int, torch.device)], int, int, int], None)]: def oom_logger(device: Union[(int, torch.device)], alloc: int, device_alloc: int, device_free: int) -> None: logger.info(f'Saving memory snapshot device: {device}, alloc: {_bytes_to_mb_gb(alloc)}, device_all...
def compute_dataset_normalization(dataloader, no_last_dim_norm=True): states_list = [] action_list = [] print('computing normalization....') for (i_batch, sample_batched) in enumerate(dataloader): if ('actions' not in sample_batched): raise NotImplementedError('todo!') states...
class VAE(base_ae.SingleLatentWithPriorAE): def forward(self, x, beta): return self.elbo(x, beta, return_extra_vals=False) def elbo(self, x, beta=1.0, return_extra_vals=False): self.encoder.update(x) z_sample = self.encoder.sample_via_reparam(1)[0] self._last_z_sample_on_obj = z_...
_ordering (eq=False, order=False, slots=True, frozen=True) class VersionInfo(): year = attrib(type=int) minor = attrib(type=int) micro = attrib(type=int) releaselevel = attrib(type=str) def _from_version_string(cls, s): v = s.split('.') if (len(v) == 3): v.append('final')...
_canonicalize('fast_compile') _specialize _rewriter([sparse.DenseFromSparse]) def local_dense_from_sparse_sparse_from_dense(fgraph, node): if isinstance(node.op, sparse.DenseFromSparse): inp = node.inputs[0] if (inp.owner and isinstance(inp.owner.op, sparse.SparseFromDense)): return inp....
def recall(pr, gt, eps=1e-07, threshold=None, ignore_channels=None):
    """Compute recall = TP / (TP + FN) between a prediction and ground truth.

    Args:
        pr: predicted tensor (binarized via ``threshold`` when given).
        gt: ground-truth tensor.
        eps: small constant added to numerator and denominator to avoid
            division by zero on empty masks.
        threshold: optional cutoff passed to ``_threshold`` to binarize ``pr``.
        ignore_channels: optional channels dropped from both tensors.

    Returns:
        Scalar tensor with the recall score.
    """
    pr = _threshold(pr, threshold=threshold)
    pr, gt = _take_channels(pr, gt, ignore_channels=ignore_channels)
    true_positives = torch.sum(gt * pr)
    false_negatives = torch.sum(gt) - true_positives
    return (true_positives + eps) / (true_positives + false_negatives + eps)
class ProjectMemberAllManager(RetrieveMixin, RESTManager): _path = '/projects/{project_id}/members/all' _obj_cls = ProjectMemberAll _from_parent_attrs = {'project_id': 'id'} def get(self, id: Union[(str, int)], lazy: bool=False, **kwargs: Any) -> ProjectMemberAll: return cast(ProjectMemberAll, s...
def get_dataset(dataset_name): if (dataset_name.lower() == 'cifar10'): return cifar10(data_augmentation=True) elif (dataset_name.lower() == 'cifar100'): return cifar100(data_augmentation=True) elif (dataset_name.lower() == 'cifarfs'): return cifarfs(data_augmentation=True) elif (...
def test_taxon__listed_taxa(): taxon = Taxon.from_json(j_taxon_1) listed_taxon = taxon.listed_taxa[0] assert isinstance(listed_taxon, ListedTaxon) assert (listed_taxon.taxon_id == taxon.id) assert (listed_taxon.list.id == 299) assert (listed_taxon.list.title == 'United States Check List') as...
class TestRedundantAssignmentChecker(pylint.testutils.CheckerTestCase): CHECKER_CLASS = RedundantAssignmentChecker def setUp(self): self.setup_method() def test_no_messages_simple(self): src = '\n x = 10\n print(x)\n x = 10\n ' mod = astroid.parse(src) ...
def cl_parse(command, args, setup=None, details=None): usage = subcommand_usages[command] descr = subcommand_descriptions[command] if isinstance(usage, str): usage = [usage] susage = ('%s %s' % (program_name, usage[0])) for s in usage[1:]: susage += ('\n%s%s %s' % ((' ' * 7), program...
def test_set_deployment_placement_options(): deployment_config = {'ray_actor_options': {'num_cpus': 2, 'resources': {'custom_resource': 1}}} scaling_config = ScalingConfig(num_workers=2, resources_per_worker={'custom_resource_2': 1}, placement_group_strategy='PACK') deployment_config = set_deployment_placem...
class EventThread(Thread): display = None _stop = None def __init__(self, display): super(EventThread, self).__init__() self.display = display self.daemon = True def run(self): while True: event = self.display.next_event() print(('event: %r' % even...
class TestOpenGLInfo(): (autouse=True) def cache_clear(self): version.opengl_info.cache_clear() def test_func(self, qapp): pytest.importorskip('qutebrowser.qt.opengl') version.opengl_info() def test_func_fake(self, qapp, monkeypatch): monkeypatch.setenv('QUTE_FAKE_OPENGL'...
def test_reg_field_configure(): field = uvm_reg_field() parent = uvm_reg() field.configure(parent, 8, 16, 'RW', True, 15) assert (field.get_parent() == parent) assert (field.get_n_bits() == 8) assert (field.get_lsb_pos() == 16) assert (field.get_access() == 'RW') assert field.is_volatile...
def _find_chromium_mac() -> Optional[str]: default_dir = '/Applications/Chromium.app/Contents/MacOS/Chromium' if os.path.exists(default_dir): return default_dir name = 'Chromium.app' alternate_dirs = [x for x in sps.check_output(['mdfind', name]).decode().split('\n') if x.endswith(name)] if ...
class TestCollectCfDataset(): def test_collect_cf_dataset(self): from satpy.cf.datasets import _collect_cf_dataset geos = AreaDefinition(area_id='geos', description='geos', proj_id='geos', projection={'proj': 'geos', 'h': .0, 'a': 6378169.0, 'b': 6356583.8}, width=2, height=2, area_extent=[(- 1), (-...
def validate(val_loader, model, criterion): losses = AverageMeter() top1 = AverageMeter() top5 = AverageMeter() for (i, (input, target)) in enumerate(val_loader): input_var = torch.autograd.Variable(input, volatile=True).cuda() target_var = torch.autograd.Variable(target, volatile=True)....
def collect_results_gpu(result_part, size): (rank, world_size) = get_dist_info() part_tensor = torch.tensor(bytearray(pickle.dumps(result_part)), dtype=torch.uint8, device='cuda') shape_tensor = torch.tensor(part_tensor.shape, device='cuda') shape_list = [shape_tensor.clone() for _ in range(world_size)]...
def categorical_df_concat(df_list, inplace=False): if (not inplace): df_list = deepcopy(df_list) df = df_list[0] if (not all([df.dtypes.equals(df_i.dtypes) for df_i in df_list[1:]])): raise ValueError('Input DataFrames must have the same columns/dtypes.') categorical_columns = df.columns...
def test_trigger_level(): with expected_protocol(Racal1992, [(' SLA 1.500000', None), (' SLB 1.500000', None), (' RLA', 'LA+.50E+00'), (' RLB', 'LB+.50E+00')]) as instr: instr.trigger_level_a = 1.5 instr.trigger_level_b = 1.5 assert (instr.trigger_level_a == 1.5) assert (instr.trigge...
class TestTowerQPSMetric(TestMetric): def __init__(self, world_size: int, rec_tasks: List[RecTaskInfo]) -> None: super().__init__(world_size, rec_tasks) def _get_states(labels: torch.Tensor, predictions: torch.Tensor, weights: torch.Tensor) -> Dict[(str, torch.Tensor)]: return {} def _reduce...
def combine_dataset_vids(datasets, combined_dataset, do_filter_by_true_starts=False, do_filter_by_affine_breaks=False, do_filter_by_excluded_vids=False, load_n=None, _print=print): all_train_vid_names = [] if (len(datasets) == 1): ds = datasets[0] ds._print = _print _print('Loading vids ...
class Generator(QIODevice): def __init__(self, format, durationUs, sampleRate, parent): super(Generator, self).__init__(parent) self.m_pos = 0 self.m_buffer = QByteArray() self.generateData(format, durationUs, sampleRate) def start(self): self.open(QIODevice.ReadOnly) ...
def test_feature_path_ok_running_outside_rootdir(pytester): base_dir = 'features' prepare_testdir(pytester, base_dir) old_dir = os.getcwd() os.chdir('/') try: result = pytester.runpytest(pytester.path, '-k', 'test_ok_by_ini') result.assert_outcomes(passed=2) finally: os.c...
class SymbolTableNode(): __slots__ = ('kind', 'node', 'module_public', 'module_hidden', 'cross_ref', 'implicit', 'plugin_generated', 'no_serialize') def __init__(self, kind: int, node: (SymbolNode | None), module_public: bool=True, implicit: bool=False, module_hidden: bool=False, *, plugin_generated: bool=False...
def validate(config): def is_bad_str(s): return ((s is None) or (len(s) == 0)) if is_bad_str(config.database): return 'database missing' if is_bad_str(config.useragent): return 'useragent missing' if (config.ratelimit < 0): warning("Rate limit can't be negative, defaultin...
class FC6_Iscsi(KickstartCommand): removedKeywords = KickstartCommand.removedKeywords removedAttrs = KickstartCommand.removedAttrs def __init__(self, writePriority=71, *args, **kwargs): KickstartCommand.__init__(self, writePriority, *args, **kwargs) self.op = self._getParser() self.i...
class DBlock(nn.Module): def __init__(self, in_channels, out_channels, which_conv=SNConv2d, wide=True, preactivation=False, activation=None, downsample=None): super(DBlock, self).__init__() (self.in_channels, self.out_channels) = (in_channels, out_channels) self.hidden_channels = (self.out_c...
def _set_cuda_rng_state(new_state, device=(- 1)): if (hasattr(_C, '_cuda_setRNGState') and callable(_C._cuda_setRNGState)): def cb(): with device_ctx_manager(device): _C._cuda_setRNGState(new_state) else: if (device == (- 1)): device = torch.device('cuda')...
class Channel(CommonBase): placeholder = 'ch' def __init__(self, parent, id): self.parent = parent self.id = id super().__init__() def insert_id(self, command): return command.format_map({self.placeholder: self.id}) def write(self, command, **kwargs): self.parent....
class KnownValues(unittest.TestCase): def test_tda_lda(self): td = tdscf.TDA(mf_lda).run(nstates=nstates) tdg = td.nuc_grad_method() g1 = tdg.kernel(td.xy[2]) self.assertAlmostEqual(g1[(0, 2)], (- 0.), 6) td_solver = td.as_scanner() e1 = td_solver(pmol.set_geom_('H 0 ...
def deprecate_stdlib(tc, vers=None): if ((vers is None) or (sys.version_info >= vers)): return pytest.deprecated_call() class _deprecate(): def __init__(self, tc): pass def __enter__(self): return self def __exit__(self, *tb): pass return _...
class AttnUpDecoderBlock2D(nn.Module): def __init__(self, in_channels: int, out_channels: int, dropout: float=0.0, num_layers: int=1, resnet_eps: float=1e-06, resnet_time_scale_shift: str='default', resnet_act_fn: str='swish', resnet_groups: int=32, resnet_pre_norm: bool=True, attn_num_head_channels=1, output_scale...
class Widget(QWidget): def __init__(self, helper, parent): super(Widget, self).__init__(parent) self.helper = helper self.elapsed = 0 self.setFixedSize(200, 200) def animate(self): self.elapsed = ((self.elapsed + self.sender().interval()) % 1000) self.repaint() ...
def _convert_dep_info_to_data_query(dep_info):
    """Build a DataQuery from a dependency-info dict.

    Drops the 'prerequisites' / 'optional_prerequisites' entries (they are
    not part of the query key) and converts 'modifiers' to a tuple so the
    resulting query is hashable. The input dict is not mutated.
    """
    query_spec = dict(dep_info)
    query_spec.pop('prerequisites', None)
    query_spec.pop('optional_prerequisites', None)
    if 'modifiers' in query_spec:
        query_spec['modifiers'] = tuple(query_spec['modifiers'])
    return DataQuery.from_dict(query_spec)
class DistilBertModel(nn.Module): def __init__(self, embedding, projection, config=None) -> None: super().__init__() self.model = DistilBertForMaskedLM(config).to(device) self.embedding = copy.deepcopy(embedding.requires_grad_(False)) self.projection = copy.deepcopy(projection.requir...
def test_complete_headers_rpt(test_model_02): headers = get_rpt_sections_details(test_model_02.rpt.path) sections_in_rpt = ['Link Flow Summary', 'Link Flow Summary', 'Subcatchment Summary', 'Cross Section Summary', 'Link Summary'] assert all(((section in headers) for section in sections_in_rpt)) assert ...
class ModelFormSingleTagFieldOptionalTest(TagTestManager, TestCase): manage_models = [test_models.SingleTagFieldOptionalModel] def setUpExtra(self): self.form = test_forms.SingleTagFieldOptionalModelForm self.model = test_models.SingleTagFieldOptionalModel self.tag_model = self.model.tag...
def _shufflenetv2(arch, pretrained, progress, *args, **kwargs): model = ShuffleNetV2(*args, **kwargs) if pretrained: model_url = model_urls[arch] if (model_url is None): raise NotImplementedError('pretrained {} is not supported as of now'.format(arch)) else: state...
def main(): os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_app.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError("Couldn't import Django. Are you sure it's installed and available on your PYTHONPATH environment va...
def get_local_rank(world_size: Optional[int]=None, rank: Optional[int]=None) -> int: my_local_rank = _env2int(['LOCAL_RANK', 'MPI_LOCALRANKID', 'OMPI_COMM_WORLD_LOCAL_RANK', 'MV2_COMM_WORLD_LOCAL_RANK'], (- 1)) local_size = get_local_size(world_size) if ((my_local_rank == (- 1)) or (my_local_rank >= local_s...
def wing_loss(output: torch.Tensor, target: torch.Tensor, width=5, curvature=0.5, reduction='mean'): diff_abs = (target - output).abs() loss = diff_abs.clone() idx_smaller = (diff_abs < width) idx_bigger = (diff_abs >= width) loss[idx_smaller] = (width * torch.log((1 + (diff_abs[idx_smaller] / curva...
class AH2500A(Instrument): _reclv = re.compile('[FHZ0-9.=\\s]*C=\\s*(-?[0-9.]+)\\s*PF L=\\s*(-?[0-9.]+)\\s*NS V=\\s*(-?[0-9.]+)\\s*V') _renumeric = re.compile('[-+]?(\\d*\\.?\\d+)') def __init__(self, adapter, name=None, timeout=3000, write_termination='\n', read_termination='\n', **kwargs): kwargs....
class FakeDBusMessage(): def __init__(self, signature: str, *arguments: Any, typ: QDBusMessage.MessageType=QDBusMessage.MessageType.ReplyMessage, error_name: Optional[str]=None) -> None: self._signature = signature self._arguments = arguments self._type = typ self._error_name = error...
class HurdlePoisson(): def __new__(cls, name, psi, mu, **kwargs): return _hurdle_mixture(name=name, nonzero_p=psi, nonzero_dist=Poisson.dist(mu=mu), dtype='int', **kwargs) def dist(cls, psi, mu, **kwargs): return _hurdle_mixture(name=None, nonzero_p=psi, nonzero_dist=Poisson.dist(mu=mu), dtype='...
def feature_set_dates_output_dataframe(spark_context, spark_session): data = [{'id': 1, 'timestamp': '2016-04-11 11:31:11', 'feature': 200}, {'id': 1, 'timestamp': '2016-04-12 11:44:12', 'feature': 300}] df = spark_session.read.json(spark_context.parallelize(data, 1)) df = df.withColumn('timestamp', df.time...
def write_tsp_file(fp, xs, ys, norm, name): if (len(xs) != len(ys)): raise ValueError('x and y coordinate vector must have the same length ({} != {})'.format(len(xs), len(ys))) if (norm not in EDGE_WEIGHT_TYPES): raise ValueError('Norm {!r} must be one of {}'.format(norm, ', '.join(EDGE_WEIGHT_T...
def train(epochs, ctx): net.initialize(mx.init.Xavier(magnitude=2.24), ctx=ctx) trainer = gluon.Trainer(net.collect_params(), opt.optimizer, {'learning_rate': opt.lr, 'momentum': opt.momentum}) metric = mx.metric.Accuracy() loss = gluon.loss.SoftmaxCrossEntropyLoss() for epoch in range(epochs): ...
class _DSFID3(ID3): _error(IOError, error) def _pre_load_header(self, fileobj): fileobj.seek(0) id3_location = DSDChunk(fileobj).offset_metdata_chunk if (id3_location == 0): raise ID3NoHeaderError('File has no existing ID3 tag') fileobj.seek(id3_location) _error(I...
class TestHuffmanDecoder(): (data=binary()) (b'\xff') (b'_\xff\xff\xff\xff') (b'\x00?\xff\xff\xff') def test_huffman_decoder_properly_handles_all_bytestrings(self, data): try: result = decode_huffman(data) except HPACKDecodingError: result = b'' assert...
class CommandLoader(Loadable, SignalDispatcher, FileManagerAware): finished = False process = None def __init__(self, args, descr, silent=False, read=False, input=None, kill_on_pause=False, popenArgs=None): SignalDispatcher.__init__(self) Loadable.__init__(self, self.generate(), descr) ...
def make_patches_from_region(slide_path): with openslide.open_slide(slide_path) as slide: thumbnail = slide.read_region((x, y), zoom_level, (1000, 1000)) patches_dir = ((str(BASE_TRUTH_DIR) + str(exp_folder_name)) + '/') print('patches_dir', patches_dir) assure_path_exists(patches_dir) plt.i...
def main(local_rank, args): set_random_seed(args.seed) torch.backends.cudnn.deterministic = False torch.backends.cudnn.benchmark = True cfg = Config.fromfile(args.py_config) cfg.work_dir = args.work_dir if (args.gpus > 1): distributed = True ip = os.environ.get('MASTER_ADDR', '12...
def generate_loc_dict(G_edges_data): loc_dict = {} index_dict = {} count = 0 for triple in G_edges_data: print(triple) (h, t, attribute) = triple l = attribute['loc'] if (l is not None): if (l not in loc_dict): loc_dict[l] = {} ...
class BaseTestWindow(window.Window): def __init__(self, multiline, wrap_lines, msg, *args, **kwargs): super(BaseTestWindow, self).__init__(*args, width=640, height=480, **kwargs) self.batch = graphics.Batch() self.document = text.decode_attributed(msg) self.margin = 2 self.la...
def to_ani(frames: List[CursorFrame]) -> bytes: ani_header = ANIParser.ANIH_HEADER.pack(ANIParser.ANIH_HEADER.size, len(frames), len(frames), 0, 0, 32, 1, 1, ANIParser.ICON_FLAG) cur_list = get_ani_cur_list(frames) chunks = [ANIParser.CHUNK_HEADER.pack(ANIParser.HEADER_CHUNK, len(ani_header)), ani_header, A...
def test_single_only(hatch, helpers, temp_dir, config_file): config_file.model.template.plugins['default']['tests'] = False config_file.save() project_name = 'My.App' with temp_dir.as_cwd(): result = hatch('new', project_name) assert (result.exit_code == 0), result.output project_path = ...
def test_search_for_file_sdist_with_extras(provider: Provider, fixture_dir: FixtureDirGetter) -> None: dependency = FileDependency('demo', (fixture_dir('distributions') / 'demo-0.1.0.tar.gz'), extras=['foo']) package = provider.search_for_direct_origin_dependency(dependency) assert (package.name == 'demo') ...
class CrumblingWall(TutorialObject, DefaultExit): def at_init(self): self.reset() def at_object_creation(self): super().at_object_creation() self.aliases.add(['secret passage', 'passage', 'crack', 'opening', 'secret door']) self.db.root_pos = {'yellow': 0, 'green': 0, 'red': 0, '...
def main(args): dataset = Dataset(args) os.makedirs(args.save_dir, exist_ok=True) with open(os.path.join(args.save_dir, 'dataset_info'), 'wb') as wf: pickle.dump(dataset.dataset_info, wf) if (args.task == 'rhyme'): with open(os.path.join(args.save_dir, 'rhyme_info'), 'wb') as wf: ...
def select_cross_entropy_loss(pred, label): pred = pred.view((- 1), 2) label = label.view((- 1)) pos = label.data.eq(1).nonzero().squeeze().cuda() neg = label.data.eq(0).nonzero().squeeze().cuda() loss_pos = get_cls_loss(pred, label, pos) loss_neg = get_cls_loss(pred, label, neg) return ((lo...
def _valid_command_options(cmdclass: Mapping=EMPTY) -> Dict[(str, Set[str])]: from .._importlib import metadata from setuptools.dist import Distribution valid_options = {'global': _normalise_cmd_options(Distribution.global_options)} unloaded_entry_points = metadata.entry_points(group='distutils.commands...
def getProposedModelC(size=224, seq_len=32, cnn_weight='imagenet', cnn_trainable=True, lstm_type='sepconv', weight_decay=2e-05, frame_diff_interval=1, mode='both', cnn_dropout=0.25, lstm_dropout=0.25, dense_dropout=0.3, seed=42): print('cnn_trainable:', cnn_trainable) print('cnn dropout : ', cnn_dropout) pr...
def setUp(): for phase_class in uvm_common_phases: phase_func = phase_class.__name__[4:] phase_list[phase_func] = [] top = my_comp('top', None) aa = my_comp('aa', top) bb = my_comp('bb', top) my_comp('cc', aa) my_comp('dd', aa) my_comp('ee', bb) my_comp('ff', bb) retu...
class Session(): def __init__(self, model_dir=None, model_name=None): logger.debug('Initializing %s: (model_dir: %s, model_name: %s)', self.__class__.__name__, model_dir, model_name) self.serializer = JSONSerializer self.state = None self.modeldir = model_dir self.modelname =...
def _check_closed_doors(state: EnvironmentState, room1: GraphNode, room2: GraphNode): return [] graph_adj_lists = _create_walkable_graph(state) bfs_prev = BFS_check_closed(state, graph_adj_lists, room1.id) if (room2.id in bfs_prev): return [] bfs_prev = BFS(graph_adj_lists, room1.id) if ...
class BindTransmitterResp(Command): params = {'system_id': Param(type=str, max=16), 'sc_interface_version': Param(type=int, size=1)} params_order = ('system_id', 'sc_interface_version') def __init__(self, command, **kwargs): super(BindTransmitterResp, self).__init__(command, need_sequence=False, **k...
class ClientStatusDB(SocketDB): def __init__(self, sock_port): super().__init__() self.sock.connect(('localhost', sock_port)) def _set(self, i: str, k: str, v: int): self._sock_send(self.sock, '|'.join(('set', i, k, str(v)))) def _get(self, i: str, k: str) -> int: self._sock_...
def add_metadata(runner): runner.metadata['description'] = 'Async tree workloads.' runner.metadata['async_tree_recurse_levels'] = NUM_RECURSE_LEVELS runner.metadata['async_tree_recurse_branches'] = NUM_RECURSE_BRANCHES runner.metadata['async_tree_random_seed'] = RANDOM_SEED runner.metadata['async_tr...
def get_directory_list(path): directory_list = [] if os.path.isfile(path): return [] if (len([f for f in os.listdir(path) if (f == 'config.json')]) > 0): directory_list.append(path) for d in os.listdir(path): new_path = os.path.join(path, d) if os.path.isdir(new_path): ...
class GraspNetStage1(nn.Module): def __init__(self, input_feature_dim=0, num_view=300): super().__init__() self.backbone = Pointnet2Backbone(input_feature_dim) self.vpmodule = ApproachNet(num_view, 256) def forward(self, end_points): pointcloud = end_points['point_clouds'] ...
class BamResNet(nn.Module): def __init__(self, channels, init_block_channels, bottleneck, in_channels=3, in_size=(224, 224), num_classes=1000): super(BamResNet, self).__init__() self.in_size = in_size self.num_classes = num_classes self.features = nn.Sequential() self.feature...
class TestClose(): def adapterC(self): return VISAAdapter(SIM_RESOURCE, visa_library='') def test_connection_session_closed(self, adapterC): assert (adapterC.connection.session is not None) adapterC.close() with pytest.raises(pyvisa.errors.InvalidSession, match='Invalid session')...
class TestLRUDict(unittest.TestCase): def test_lrudict_defaultbehaviour(self): d = LRUDict() dd = dict() for count in range(1, 100): d[count] = f'v{count}' dd[count] = f'v{count}' if ((count % 5) == 0): d.get((count - 2)) dd...
class CustomApi(): def __init__(self, port: int=24859): self._handler: Optional[Callable] = None self._app: web.Application = web.Application() self._runner: Optional[web.AppRunner] = None self._port = port def on_update_custom_api(self) -> Callable: if (self._handler is ...
def test_custom_validator_class_can_detect_custom_conditions(run_line, tmp_path): doc = (tmp_path / 'invalid.json') doc.write_text(json.dumps(INVALID_DOC)) schema = (tmp_path / 'schema.json') schema.write_text(json.dumps(SCHEMA)) result = run_line(['check-jsonschema', '--schemafile', str(schema), st...
def rtn_errno_location(se: 'SymbolicExecutor', pstate: 'ProcessState'): logger.debug('__errno_location hooked') segs = pstate.memory.find_map(pstate.EXTERN_SEG) if segs: map = segs[0] ERRNO = ((map.start + map.size) - 4) else: assert False pstate.memory.write_dword(ERRNO, 0) ...
class LongNegativeCategoryEntryTestCase(unittest.TestCase): def setUpClass(cls): cls.entry = CategoryEntry(name='This is quite a LOOONG Category', entries=[BaseEntry('entry', (- 100), '2000-08-13')]) def test_name(self): self.assertEqual(self.entry.name, 'this is quite a looong category') de...
def list_pods(cli, namespace, label_selector=None): pods = [] try: if label_selector: ret = cli.list_namespaced_pod(namespace, pretty=True, label_selector=label_selector) else: ret = cli.list_namespaced_pod(namespace, pretty=True) except ApiException as e: log...
def getSplittedDataset(trainpart, testpart, predictpart, expset): assert (((parameters['trainpart'] + parameters['testpart']) + parameters['predictpart']) == 1), 'Train + Test + Prediction should be 1' (x, y) = expset[0] logging.critical('\n[FUNCTION]: Splitting dataset by getSplittedDataset()......') l...
class PostNorm_Classifier(nn.Module): def __init__(self, num_classes=10, in_dim=640, norm=False, feature_norm=False, lws=False, tau=0, bias=False, avg_T=1): super(PostNorm_Classifier, self).__init__() self.fc = nn.Linear(in_dim, num_classes) self.weight_norm = norm self.feature_norm ...
def quantity_delta(base, changed): old = base.mean() new = changed.mean() is_time = (base.get_unit() == 'second') if ((old == 0) or (new == 0)): return 'incomparable (one result was zero)' if (new > old): if is_time: return ('%.2fx slower' % (new / old)) else: ...
def init(disp, info): disp.extension_add_method('display', 'xrandr_query_version', query_version) disp.extension_add_method('window', 'xrandr_select_input', select_input) disp.extension_add_method('window', 'xrandr_get_screen_info', get_screen_info) disp.extension_add_method('drawable', 'xrandr_1_0set_s...
def test_update_once(): class A(Component): _port def recv(s, v): s.v = v def construct(s): s.send = CallerPort() s.v = None _once def up(): if (s.v is not None): s.send(s.v) s.add_con...
def num_to_str(num, unit=None, precision=2, number_only=False, auto_select_unit=False): unit_list = ['K', 'M', 'G', 'T', 'P'] if (auto_select_unit and (unit is None)): for (i, tmp) in enumerate(unit_list): unit_num = (1024 ** (i + 1)) if (num < unit_num): break ...
def calculate_metrics(task_type: str, y: np.ndarray, prediction: np.ndarray, classification_mode: str, y_info: ty.Optional[ty.Dict[(str, ty.Any)]]) -> ty.Dict[(str, float)]: if (task_type == util.REGRESSION): del classification_mode rmse = (skm.mean_squared_error(y, prediction) ** 0.5) if y_...
class Process(): def par(func, iterables, num_processes, desc=''): pool = multiprocessing.Pool(processes=num_processes) pool_func = pool.imap(func=func, iterable=iterables) pool_func = tqdm(pool_func, total=len(iterables), ncols=100, desc=desc) results = [r for r in pool_func] ...
class HistogramTests(unittest.TestCase): def setUp(self): self.transport = mock.Mock(spec=metrics.NullTransport) def test_log(self): histogram = metrics.Histogram(self.transport, b'example_hist') histogram.add_sample(33) self.assertEqual(self.transport.send.call_count, 1) ...