code
stringlengths
281
23.7M
class Tester(Manager): def __init__(self, args): tic = time.time() super(Tester, self).__init__(args) self.model.load_state_dict(torch.load(args.PathModel)) self.create_thinningsampler(args.NumSample, args.NumExp) print(f'time spent on initializatin : {(time.time() - tic):.2f...
class TestLt(unittest.TestCase): def test_lt_matrix(self): base = Ga('a b', g=[1, 1], coords=symbols('x, y', real=True)) (a, b) = base.mv() A = base.lt([(a + b), ((2 * a) - b)]) assert (str(A) == 'Lt(a) = a + b\nLt(b) = 2*a - b') assert (str(A.matrix()) == 'Matrix([[1, 2], [1...
def test_speedaction_rel(): speedaction = OSC.RelativeSpeedAction(1, 'Ego', TD) prettyprint(speedaction.get_element()) speedaction2 = OSC.RelativeSpeedAction(1, 'Ego', TD) speedaction3 = OSC.RelativeSpeedAction(1, 'Ego1', TD) assert (speedaction == speedaction2) assert (speedaction != speedactio...
class ins(object): def ssh(self): Mylogo() print((('\n\n\x1b[01;31m [\x1b[01;33m+\x1b[01;31m] \x1b[01;36mOpenSSH \x1b[01;31mis not installed in your ' + system) + '.')) opt = input('\n\x1b[01;33m Do you want to install OpenSSH [\x1b[01;32mY/n\x1b[01;33m] >>\x1b[01;36m ') if ((opt =...
def pretf_blocks(var): private_label = 'private' private = (yield resource.aws_security_group[private_label](name='pretf-example-aws-private')) public_label = 'public' public = (yield resource.aws_security_group[public_label](name='pretf-example-aws-public')) for cidr in sorted(set(var.security_grou...
def initiator_init(raiden: 'RaidenService', transfer_identifier: PaymentID, transfer_amount: PaymentAmount, transfer_secret: Secret, transfer_secrethash: SecretHash, token_network_address: TokenNetworkAddress, target_address: TargetAddress, lock_timeout: BlockTimeout=None, route_states: List[RouteState]=None) -> Tuple[...
class MetricName(MetricNameBase): DEFAULT = '' NE = 'ne' SEGMENTED_NE = 'segmented_ne' LOG_LOSS = 'logloss' THROUGHPUT = 'throughput' TOTAL_EXAMPLES = 'total_examples' CTR = 'ctr' CALIBRATION = 'calibration' MSE = 'mse' MAE = 'mae' RMSE = 'rmse' AUC = 'auc' AUPRC = 'a...
class TestRowWiseNormalize(unittest.TestCase): def test_3by3_matrix(self): matrix = np.array([[0.5, 2.0, 3.0], [3.0, 4.0, 5.0], [4.0, 2.0, 1.0]]) adjusted_matrix = refinement.RowWiseNormalize().refine(matrix) expected = np.array([[0.167, 0.667, 1.0], [0.6, 0.8, 1.0], [1.0, 0.5, 0.25]]) ...
class Notification(): def __init__(self, id: int): self.device_id = id self.device_handle: Optional[str] = None self.host_id = 0 def show(self, data: ShowNotificationData) -> None: self.device_handle = data.device_handle actions = [] if (data.positive_action is no...
def create_vgg16bn(models_path, task, save_type, get_params=False): print('Creating VGG16BN untrained {} models...'.format(task)) model_params = get_task_params(task) if (model_params['input_size'] == 32): model_params['fc_layers'] = [512, 512] elif (model_params['input_size'] == 64): mo...
class MakeClientTests(unittest.TestCase): def test_no_endpoint(self): client = metrics.make_client('namespace', None, log_if_unconfigured=False) self.assertIsInstance(client.transport, metrics.NullTransport) def test_valid_endpoint(self): client = metrics.make_client('namespace', EXAMPLE...
def get_valid_gpu_id(gpu_status): for (i, _gpu_status) in enumerate(gpu_status): if (_gpu_status.occupied is False): current_gpu_id = _gpu_status.gpu_id gpu_status_id = i _gpu_status.occupied = True return (current_gpu_id, gpu_status_id) return (None, None...
class Pirani(SensorChannel): _id = Sources.PIRANI gas_factor = gas_factor statistics = Channel.measurement('0PM011', 'Get the sensor statistics as a tuple: wear in percent (negative: corrosion,\n positive: contamination), time since last adjustment in hours.', preprocess_reply=(lambda msg: msg.st...
class _DiscreteBounded(Discrete): rv_op = discrete_boundrv def __new__(cls, *args, **kwargs): kwargs.setdefault('transform', None) if (kwargs.get('transform') is not None): raise ValueError('Cannot transform discrete variable.') return super().__new__(cls, *args, **kwargs) ...
class TransitionWithSkip(nn.Module):
    """Apply a sequence of submodules to the input, additionally returning
    the activation captured immediately after the last ``nn.ReLU`` layer.

    ``forward`` returns a ``(output, skip)`` tuple where ``skip`` is the
    tensor produced by the final ReLU encountered in the chain.
    """

    def __init__(self, module):
        super().__init__()
        # Iterable of layers applied in order by forward().
        self.module = module

    def forward(self, x):
        out = x
        for layer in self.module:
            out = layer(out)
            # Remember the output of every ReLU; the last one wins.
            if isinstance(layer, nn.ReLU):
                skip = out
        # NOTE: if the chain contains no nn.ReLU, `skip` is never bound and
        # this raises NameError — identical to the original behavior.
        return (out, skip)
def load_checkpoint(model_load_path, model): my_model_dict = model.state_dict() pre_weight = torch.load(model_load_path) part_load = {} match_size = 0 nomatch_size = 0 for k in pre_weight.keys(): value = pre_weight[k] if ((k in my_model_dict) and (my_model_dict[k].shape == value....
class Effect6655(BaseEffect):
    """Passive effect: boosts the 'speed' attribute of all fitted modules
    whose item requires the 'Capital Projectile Turret' skill, using the
    source ship's 'shipBonusTitanM2' attribute, scaled by the 'Minmatar
    Titan' skill.
    """

    # Passive effect — no module activation required.
    type = 'passive'

    def handler(fit, src, context, projectionRange, **kwargs):
        # Filter to capital-projectile-turret modules only, then apply the
        # titan hull bonus ('shipBonusTitanM2') to their 'speed' attribute,
        # per level of 'Minmatar Titan'.
        fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Capital Projectile Turret')), 'speed', src.getModifiedItemAttr('shipBonusTitanM2'), skill='Minmatar Titan', **kwargs)
class TestForestPlot(): def plotter(self): return EffectMeasurePlot def data(self): labs = ['Overall', 'Adjusted', '', '2012-2013', 'Adjusted', '', '2013-2014', 'Adjusted', '', '2014-2015', 'Adjusted'] measure = [np.nan, 0.94, np.nan, np.nan, 1.22, np.nan, np.nan, 0.59, np.nan, np.nan, 1...
class BasicBlock(nn.Module): outchannel_ratio = 1 def __init__(self, inplanes, planes, stride=1, downsample=None): super(BasicBlock, self).__init__() self.bn1 = nn.BatchNorm2d(inplanes) self.conv1 = conv3x3(inplanes, planes, stride) self.bn2 = nn.BatchNorm2d(planes) self....
def main(): pgen_dataset_id = '2020-03-19' dcol_dataset_id = datetime.datetime.now().isoformat(timespec='minutes') hardware_grid_problem_task = HardwareGridProblemGenerationTask(dataset_id=pgen_dataset_id, device_name='Sycamore23', instance_i=0, n_qubits=23) sk_problem_task = SKProblemGenerationTask(dat...
class SSDPResponse(object): class _FakeSocket(io.BytesIO): def makefile(self, *args, **kw): return self def __init__(self, response): r = r.begin() self.location = r.getheader('location') self.usn = r.getheader('usn') self.st = r.getheader('st') ...
def simplify_network_to_base_voltage(n, linetype, base_voltage): logger.info(f'Mapping all network lines onto a single {int(base_voltage)}kV layer') n.lines['type'] = linetype n.lines['v_nom'] = base_voltage n.lines['i_nom'] = n.line_types.i_nom[linetype] n.lines['num_parallel'] = n.lines.eval('s_no...
def create_train_op(loss, optimizer, global_step, params): with tf.name_scope('create_train_op'): grads_and_vars = optimizer.compute_gradients(loss, tf.trainable_variables(params.trainable_scope), colocate_gradients_with_ops=True) gradients = [item[0] for item in grads_and_vars] variables = ...
class Effect6799(BaseEffect): type = 'passive' def handler(fit, module, context, projectionRange, **kwargs): types = ('thermal', 'em', 'explosive', 'kinetic') for type in types: fit.modules.filteredChargeMultiply((lambda mod: (mod.charge.requiresSkill('Rockets') or mod.charge.require...
class PtableTourney(models.Model): class Meta(): table = 'ptable_tourney' id = fields.IntField(pk=True) guild_id = fields.BigIntField() associative_id = fields.CharField(max_length=10) title = fields.CharField(max_length=100) secondary_title = fields.CharField(max_length=100) footer ...
def attention(query, key, value, mask=None, dropout=None): d_k = query.size((- 1)) scores = (torch.matmul(query, key.transpose((- 2), (- 1))) / math.sqrt(d_k)) if (mask is not None): scores = scores.masked_fill((mask > 0), (- .0)) p_attn = torch.softmax(scores, dim=(- 1)) if (dropout is not ...
(trylast=True) def pytask_extend_command_line_interface(cli: click.Group) -> None: for command in ('build', 'clean', 'collect', 'dag', 'profile'): cli.commands[command].params.extend((_PATH_ARGUMENT, _DATABASE_URL_OPTION)) for command in ('build', 'clean', 'collect', 'dag', 'markers', 'profile'): ...
class SelectSponsorshipApplicationBenefitsViewTests(TestCase): url = reverse_lazy('select_sponsorship_application_benefits') def setUp(self): self.current_year = SponsorshipCurrentYear.get_year() self.psf = baker.make('sponsors.SponsorshipProgram', name='PSF') self.wk = baker.make('spons...
def retrieve_wikisql_query_answer_tapas(table, example) -> List: (answer_coordinates, aggregation_op) = _get_answer_coordinates(table, example) float_answer = _get_float_answer(table, answer_coordinates, aggregation_op) answer_text = _get_answer_text(table, answer_coordinates, float_answer) if (len(answ...
def updown_linear_approx(eigvals_lower, eigvals_upper, nv): nal = len(eigvals_lower) nau = len(eigvals_upper) if (nv < (nal + nau)): raise ValueError('Number of supplied eigenvalues ({0} lower and {1} upper) is higher than number of nodes ({2})!'.format(nal, nau, nv)) ret = np.zeros(nv) ret[...
.parametrize('extra_headers', [[], [(b'connection', b'Keep-Alive')]]) def test_handshake_response_broken_connection_header(extra_headers: Headers) -> None: with pytest.raises(RemoteProtocolError) as excinfo: _make_handshake(101, ([(b'upgrade', b'websocket')] + extra_headers)) assert (str(excinfo.value) ...
def test_sep_fcn_head(): head = DepthwiseSeparableFCNHead(in_channels=128, channels=128, concat_input=False, num_classes=19, in_index=(- 1), norm_cfg=dict(type='BN', requires_grad=True, momentum=0.01)) x = [torch.rand(2, 128, 32, 32)] output = head(x) assert (output.shape == (2, head.num_classes, 32, 32...
class TestCopyGC(EndianTest): def setUp(self): self.req_args_0 = {'dst_gc': , 'mask': , 'src_gc': } self.req_bin_0 = b'9\x00\x04\x00q$\xbaF\xbb\xbc\xf5\\x047' def testPackRequest0(self): bin = request.CopyGC._request.to_binary(*(), **self.req_args_0) self.assertBinaryEqual(bin, s...
def test__check_initialpaths_for_relpath() -> None: cwd = Path.cwd() class FakeSession1(): _initialpaths = frozenset({cwd}) session = cast(pytest.Session, FakeSession1) assert (nodes._check_initialpaths_for_relpath(session, cwd) == '') sub = (cwd / 'file') class FakeSession2(): _...
def convert_checkpoint_to_pytorch(tf_checkpoint_path: str, config_path: str, pytorch_dump_path: str): def get_masked_lm_array(name: str): full_name = f'masked_lm/{name}/.ATTRIBUTES/VARIABLE_VALUE' array = tf.train.load_variable(tf_checkpoint_path, full_name) if ('kernel' in name): ...
class MetaMapAttribute(AttributeContainerMeta): def __init__(self, name, bases, namespace, discriminator=None): super().__init__(name, bases, namespace, discriminator=discriminator) for (attr_name, attr) in self._attributes.items(): if (isinstance(attr, (BinaryAttribute, BinarySetAttribu...
def prompt_for_value(prompt, values, default): def completer(text, state): for val in values: if (text.upper() in val.upper()): if (not state): return val else: state -= 1 return None readline.set_completer_delim...
class GatedTanh(nn.Module): def __init__(self, in_dim, out_dim): super(GatedTanh, self).__init__() self.fc = nn.Linear(in_dim, out_dim) self.gate_fc = nn.Linear(in_dim, out_dim) def forward(self, x): y_tilda = torch.tanh(self.fc(x)) gated = torch.sigmoid(self.gate_fc(x)) ...
def cdnod(data: ndarray, c_indx: ndarray, alpha: float=0.05, indep_test: str=fisherz, stable: bool=True, uc_rule: int=0, uc_priority: int=2, mvcdnod: bool=False, correction_name: str='MV_Crtn_Fisher_Z', background_knowledge: Optional[BackgroundKnowledge]=None, verbose: bool=False, show_progress: bool=True, **kwargs) ->...
class PPO(): def __init__(self, config): self.lr = config.lr self.gamma = config.gamma self.gae_lambda = config.gae_lambda self.eps_clip = config.eps_clip self.k_epochs = config.k_epochs self.tau = config.tau self.ploss_coef = config.ploss_coef self.vl...
def validate_environment_marker(em): clauses = re.split('\\s+(?:and|or)\\s+', em) problems = [] for c in clauses: parts = MARKER_OP.split(c.strip('()')) if (len(parts) != 3): problems.append('Invalid expression in environment marker: {!r}'.format(c)) continue ...
class ProbMaskConv(nn.Conv2d): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.scores = nn.Parameter(torch.Tensor(self.weight.size())) self.subnet = None self.train_weights = False if (parser_args.score_init_constant is not None): self....
(wrapper=True, tryfirst=True) def pytest_runtest_makereport(item: Item, call) -> Generator[(None, TestReport, TestReport)]: rep = (yield) assert (rep.when is not None) empty: Dict[(str, bool)] = {} item.stash.setdefault(tmppath_result_key, empty)[rep.when] = rep.passed return rep
class ItemCircleAnimation(DemoItem): def __init__(self, parent=None): super(ItemCircleAnimation, self).__init__(parent) self.letterList = [] self.letterCount = Colors.tickerLetterCount self.scale = 1.0 self.showCount = (- 1) self.tickOnPaint = False self.pause...
class KeypointBoxCoderTest(tf.test.TestCase): def test_get_correct_relative_codes_after_encoding(self): boxes = [[10.0, 10.0, 20.0, 15.0], [0.2, 0.1, 0.5, 0.4]] keypoints = [[[15.0, 12.0], [10.0, 15.0]], [[0.5, 0.3], [0.2, 0.4]]] num_keypoints = len(keypoints[0]) anchors = [[15.0, 12...
class UncertaintyProblem(CircuitFactory, ABC): def __init__(self, num_qubits: int) -> None: warnings.warn('The {0} is deprecated as of Aqua 0.8.0 and will be removed no earlier than 3 months after the release date. Instead, you can construct the circuits manually using the respective circuit components. See...
class TestUCASAODKF(TestUCASAOD): def eval(self): kf = build_whole_network.DetectionNetworkKF(cfgs=self.cfgs, is_training=False) all_boxes_r = self.eval_with_plac(img_dir=self.args.img_dir, det_net=kf, image_ext=self.args.image_ext) imgs = os.listdir(self.args.img_dir) real_test_imgn...
class DriveLetterSupportTest(TestCase): def setUp(self): self.filesystem = fake_filesystem.FakeFilesystem(path_separator='!') self.filesystem.alternative_path_separator = '^' self.filesystem.is_windows_fs = True def test_initial_value(self): filesystem = fake_filesystem.FakeFiles...
def get_data_lst(data, data_root, entities=None): if (type(entities) == str): entities_lst = entities.split(',') elif (type(entities) == list): entities_lst = entities else: raise ValueError('wrong entities') name_lst = [] train_df_lst = [] test_df_lst = [] label_lst ...
.parametrize('test_input,expected', [([1, 2, 3], b'OUTP#13\x01\x02\x03'), (range(100), (b'OUTP#3100' + bytes(range(100))))]) def test_adapter_write_binary_values(adapter, test_input, expected): adapter.write_binary_values('OUTP', test_input, datatype='B') assert (adapter.connection.read((len(expected) + 10)) ==...
def main(): (gen, dis) = load_models() (loader, loader_test) = make_dataset() optimizer = get_optimizer(dis.parameters()) loss_f = nn.CrossEntropyLoss() noise = torch.FloatTensor(opt.batch_size, opt.nz).zero_().cuda() noise_v = Variable(noise) noise_y = torch.LongTensor(opt.batch_size).zero_...
def _make_json_rpc_null_response(succeed_at: int) -> Callable[([PreparedRequest], Tuple[(int, Dict[(str, Any)], str)])]: request_count = 0 def make_response(request: PreparedRequest) -> Tuple[(int, Dict[(str, Any)], str)]: nonlocal request_count assert isinstance(request.body, bytes), MYPY_ANNOT...
class SawyerPlateSlideV1Policy(Policy): _fully_parsed def _parse_obs(obs): return {'hand_pos': obs[:3], 'puck_pos': obs[3:6], 'shelf_x': obs[(- 3)], 'unused_info': obs[[6, 7, 8, 10, 11]]} def get_action(self, obs): o_d = self._parse_obs(obs) action = Action({'delta_pos': np.arange(3)...
def test_reject_vote_when_voting_is_not_open(graphql_client, user, conference_factory, submission_factory, requests_mock): graphql_client.force_login(user) conference = conference_factory() requests_mock.post(f'{settings.PRETIX_API}organizers/{conference.pretix_organizer_id}/events/{conference.pretix_event_...
class ESPNetv2(nn.Module): def __init__(self, channels, init_block_channels, final_block_channels, final_block_groups, dilations, dropout_rate=0.2, in_channels=3, in_size=(224, 224), num_classes=1000): super(ESPNetv2, self).__init__() self.in_size = in_size self.num_classes = num_classes ...
class Array(BaseRTLIRType): def __init__(s, dim_sizes, sub_type, obj=None, unpacked=False): assert isinstance(sub_type, BaseRTLIRType), f'array subtype {sub_type} is not RTLIR type!' assert (not isinstance(sub_type, Array)), f'array subtype {sub_type} should not be array RTLIR type!' assert ...
def get_zone_id(longitude, latitude): taxi_zone_shape_dic = get_taxi_zones_dic('./Data/mh-180/mh-180.shp') find_count = 0 for obj_id in taxi_zone_shape_dic: if taxi_zone_shape_dic[obj_id].intersects(Point(longitude, latitude)): taxi_zone = obj_id find_count = (find_count + 1)...
class PEMachineType(Enum): UNKNOWN = 0 I386 = 332 R3000 = 354 R4000 = 358 R10000 = 360 WCEMIPSV2 = 361 ALPHA = 388 SH3 = 418 SH3DSP = 419 SH3E = 420 SH4 = 422 SH5 = 424 ARM = 448 AARCH64 = 43620 THUMB = 450 ARMNT = 452 AM33 = 467 POWERPC = 496 ...
class ROIKeypointHead(torch.nn.Module): def __init__(self, cfg): super(ROIKeypointHead, self).__init__() self.cfg = cfg.clone() self.feature_extractor = make_roi_keypoint_feature_extractor(cfg) self.predictor = make_roi_keypoint_predictor(cfg) self.post_processor = make_roi_k...
def handle_deprecations(doc): if ('parser' in doc): warnings.warn('The parser attribute is deprecated, use expect instead', DeprecationWarning, stacklevel=2) doc['expect'] = (doc.get('expect', []) + [doc.pop('parser')]) if ('body' in doc): warnings.warn('The body attribute is deprecated,...
class Spinner(): busy = False delay = 0.1 def spinning_cursor(): while 1: for cursor in '|/-\\': (yield cursor) def __init__(self, delay=None): self.spinner_generator = self.spinning_cursor() if (delay and float(delay)): self.delay = delay ...
def add_rorp(rorp, dest_rorp=None): if ((not rorp.isreg()) or (rorp.getnumlinks() < 2)): return None rp_inode_key = _get_inode_key(rorp) if (rp_inode_key not in _inode_index): if (not dest_rorp): dest_key = None else: if (dest_rorp.getnumlinks() == 1): ...
.parametrize('username,password', users) def test_create(db, client, username, password): client.login(username=username, password=password) url = reverse(urlnames['list']) data = {'title': 'Lorem ipsum dolor sit amet', 'description': 'At vero eos et accusam et justo duo dolores et ea rebum.', 'catalog': ca...
def test_hash_algo_off(main): main(['--hash-algo=off']) assert (main.app._pypiserver_config.hash_algo is None) main(['--hash-algo=0']) assert (main.app._pypiserver_config.hash_algo is None) main(['--hash-algo=no']) assert (main.app._pypiserver_config.hash_algo is None) main(['--hash-algo=fal...
def find_evaluated_item_indexes_by_schema(validator, instance, schema): if validator.is_type(schema, 'boolean'): return [] evaluated_indexes = [] if ('items' in schema): return list(range(0, len(instance))) ref = schema.get('$ref') if (ref is not None): resolved = validator._...
('beeref.selection.SelectableMixin.mouseMoveEvent') def test_mouse_move_when_not_crop_mode(mouse_mock, qapp, item): event = MagicMock() event.pos.return_value = QtCore.QPointF(30, 50) item.mouseMoveEvent(event) event.accept.assert_not_called() mouse_mock.assert_called_once_with(event)
class GrapevineBot(Bot): factory_path = 'evennia.server.portal.grapevine.RestartingWebsocketServerFactory' def start(self, ev_channel=None, grapevine_channel=None): if (not _GRAPEVINE_ENABLED): self.delete() return global _SESSIONS if (not _SESSIONS): ...
class AutocleanView(ScrimsView): def __init__(self, ctx: Context, scrim: Scrim): super().__init__(ctx) self.ctx = ctx self.record = scrim async def initial_embed(self) -> discord.Embed: autoclean_time = (self.record.autoclean_time.strftime('%I:%M %p') if self.record.autoclean_tim...
def check_marian_cfg_assumptions(marian_cfg): assumed_settings = {'layer-normalization': False, 'right-left': False, 'transformer-ffn-depth': 2, 'transformer-aan-depth': 2, 'transformer-no-projection': False, 'transformer-postprocess-emb': 'd', 'transformer-postprocess': 'dan', 'transformer-preprocess': '', 'type':...
def lamda_scheduler(start_warmup_value, base_value, epochs, niter_per_ep, warmup_epochs=5): warmup_schedule = np.array([]) warmup_iters = (warmup_epochs * niter_per_ep) if (warmup_epochs > 0): warmup_schedule = np.linspace(start_warmup_value, base_value, warmup_iters) schedule = (np.ones(((epoch...
def test_team_member_sync_info_synced_superuser(app): with mock_ldap() as ldap: with patch('endpoints.api.team.authentication', ldap): with client_with_identity('devtable', app) as cl: resp = conduct_api_call(cl, TeamMemberList, 'GET', SYNCED_TEAM_PARAMS) assert (...
def main_validation(default_evaluation_params_fn, validate_data_fn): try: p = dict([s[1:].split('=') for s in sys.argv[1:]]) evalParams = default_evaluation_params_fn() if ('p' in p.keys()): evalParams.update((p['p'] if isinstance(p['p'], dict) else json.loads(p['p']))) v...
def _test(): import torch pretrained = False models = [resnet10, resnet12, resnet14, resnetbc14b, resnet16, resnet18_wd4, resnet18_wd2, resnet18_w3d4, resnet18, resnet26, resnetbc26b, resnet34, resnetbc38b, resnet50, resnet50b, resnet101, resnet101b, resnet152, resnet152b, resnet200, resnet200b] for mod...
def count(object, cache=None): cache = (cache or []) if (isstruct(object) or isinstance(object, (dict, list))): if (id(object) in cache): raise RuntimeError('recursion!') cache.append(id(object)) n = 1 if (isstruct(object) or isinstance(object, dict)): for key in obje...
def generate_dispatch_glue_native_function(builder: IRBuilder, fitem: FuncDef, callable_class_decl: FuncDecl, dispatch_name: str) -> FuncIR: line = fitem.line builder.enter() callable_class = builder.load_global_str(dispatch_name, line) decl = builder.mapper.func_to_decl[fitem] arg_info = get_args(b...
def get_input_device(input): if isinstance(input, list): for item in input: input_device = get_input_device(item) if (input_device != (- 1)): return input_device return (- 1) elif isinstance(input, torch.Tensor): return (input.get_device() if input...
def resolve_locale_filename(name: (os.PathLike[str] | str)) -> str:
    """Return the path of the ``.dat`` locale file for *name* under ``_dirname``.

    Only the basename of *name* is used; any directory components are
    stripped. On Windows, a basename whose stem matches
    ``_windows_reserved_name_re`` (presumably reserved device names such as
    CON/NUL) raises :class:`ValueError`.
    """
    base = os.path.basename(name)
    stem = os.path.splitext(base)[0]
    # Reject names that are invalid on Windows before building the path.
    if sys.platform == 'win32' and _windows_reserved_name_re.match(stem):
        raise ValueError(f'Name {base} is invalid on Windows')
    return os.path.join(_dirname, f'{base}.dat')
class ButtonMask(object): def __init__(self, value, length): self._value = value self._length = length def __len__(self): return self._length def __getitem__(self, key): return (self._value & (1 << key)) def __str__(self): return repr(self) def __repr__(self):...
class Effect3864(BaseEffect): type = 'passive' def handler(fit, module, context, projectionRange, **kwargs): fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Afterburner')), 'speedFactor', module.getModifiedItemAttr('subsystemBonusAmarrPropulsion'), skill='Amarr Propulsion Systems', **...
def xavier_init(module, gain=1, bias=0, distribution='normal'): assert (distribution in ['uniform', 'normal']) if (distribution == 'uniform'): nn.init.xavier_uniform_(module.weight, gain=gain) else: nn.init.xavier_normal_(module.weight, gain=gain) if hasattr(module, 'bias'): nn.i...
def project(): extension = None metadata = extract_metadata('pystiche') project = metadata['name'] author = metadata['author'] copyright = f'{datetime.now().year}, {author}' release = metadata['version'] version = release.split('.dev')[0] config = dict(project=project, author=author, cop...
def tarfile_samples(src, handler=wds.handlers.reraise_exception, select_files=None, rename_files=rename): streams = wds.tariterators.url_opener(src, handler=handler) files = wds.tariterators.tar_file_expander(streams, handler=handler, select_files=select_files, rename_files=rename_files) samples = wds.tarit...
def add_docs_parser(dev_subparsers): parser = dev_subparsers.add_parser('docs') parser.add_argument('--output', help='Custom output directory for the built docs.') parser.add_argument('--clean', help='Delete all of the built files.', action='store_true') parser.add_argument('--prep', help='Undergo prepa...
_rewriter([IfElse]) def cond_merge_ifs_false(fgraph, node): op = node.op if (not isinstance(op, IfElse)): return False f_ins = node.inputs[1:][op.n_outs:] replace = {} for (idx, fval) in enumerate(f_ins): if (fval.owner and isinstance(fval.owner.op, IfElse) and (fval.owner.inputs[0] ...
class CatalogRendererMixin(): def render_catalog(self, xml, catalog): if (catalog['uri'] not in self.uris): self.uris.add(catalog['uri']) xml.startElement('catalog', {'dc:uri': catalog['uri']}) self.render_text_element(xml, 'uri_prefix', {}, catalog['uri_prefix']) ...
def install_packages(struct: Structure, opts: ScaffoldOpts) -> ActionParams: packages = opts.get('venv_install') if (not packages): return (struct, opts) pretend = opts.get('pretend') venv_path = get_path(opts) if (not pretend): pip = get_command('pip', venv_path, include_path=False)...
def _soquet_to_proto(soq: Soquet, bloq_to_idx: Dict[(Bloq, int)]) -> bloq_pb2.Soquet: if isinstance(soq.binst, DanglingT): return bloq_pb2.Soquet(dangling_t=repr(soq.binst), register=registers.register_to_proto(soq.reg), index=soq.idx) else: return bloq_pb2.Soquet(bloq_instance=_bloq_instance_to...
def test(): import hydra from omegaconf import OmegaConf OmegaConf.register_new_resolver('eval', eval, replace=True) with hydra.initialize('../diffusion_policy/config'): cfg = hydra.compose('train_robomimic_real_image_workspace') OmegaConf.resolve(cfg) dataset = hydra.utils.insta...
def get_real_obs_resolution(shape_meta: dict) -> Tuple[(int, int)]: out_res = None obs_shape_meta = shape_meta['obs'] for (key, attr) in obs_shape_meta.items(): type = attr.get('type', 'low_dim') shape = attr.get('shape') if (type == 'rgb'): (co, ho, wo) = shape ...
def _adapt_splice_sites(exons, genome, strand='+'): for e in range(1, (len(exons) - 1)): if (strand == '-'): genome = ((genome[:exons[(e - 1)][1]] + 'CT') + genome[(exons[(e - 1)][1] + 2):]) genome = ((genome[:(exons[e][0] - 2)] + 'AC') + genome[exons[e][0]:]) else: ...
class INTF(IntEnum): GIF0 = (1 << 0) FTFIF0 = (1 << 1) HTFIF0 = (1 << 2) ERRIF0 = (1 << 3) GIF1 = (1 << 4) FTFIF1 = (1 << 5) HTFIF1 = (1 << 6) ERRIF1 = (1 << 7) GIF2 = (1 << 8) FTFIF2 = (1 << 9) HTFIF2 = (1 << 10) ERRIF2 = (1 << 11) GIF3 = (1 << 12) FTFIF3 = (1 <<...
class AsyncQdrantFastembedMixin(AsyncQdrantBase): DEFAULT_EMBEDDING_MODEL = 'BAAI/bge-small-en' embedding_models: Dict[(str, 'DefaultEmbedding')] = {} def __init__(self, **kwargs: Any): self.embedding_model_name = self.DEFAULT_EMBEDDING_MODEL super().__init__(**kwargs) def set_model(self...
def test_make_iou(): privkey = bytes(([2] * 32)) sender = Address(privatekey_to_address(privkey)) receiver = Address(bytes(([1] * 20))) one_to_n_address = Address(bytes(([2] * 20))) chain_id = ChainID(4) max_fee = 100 pfs_config_copy = replace(PFS_CONFIG) pfs_config_copy.info = replace(p...
.parametrize('map_variables', [True, False]) .parametrize('endpoint,function,params,json_response', [('historic/radiation_and_weather', pvlib.iotools.get_solcast_historic, dict(api_key='1234', latitude=(- 33.856784), longitude=51.215297, start='2023-01-01T08:00', duration='P1D', period='PT1H', output_parameters='dni'),...
class CAB(nn.Module): def __init__(self, in_channels, out_channels): super(CAB, self).__init__() self.global_pooling = nn.AdaptiveAvgPool2d(1) self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, padding=0) self.relu = nn.ReLU() self.conv2 = nn.Conv2d(ou...
class TestSetFontPath(EndianTest): def setUp(self): self.req_args_0 = {'path': ['foo', 'bar', 'gazonk']} self.req_bin_0 = b'3\x00\x06\x00\x03\x00\x00\x00\x03foo\x03bar\x06gazonk\x00' self.req_args_1 = {'path': []} self.req_bin_1 = b'3\x00\x02\x00\x00\x00\x00\x00' def testPackRequ...
class CmdArmPuzzle(MuxCommand): key = '' locks = 'cmd:perm(armpuzzle) or perm(Builder)' help_category = 'Puzzles' def func(self): caller = self.caller if ((self.args is None) or (not utils.dbref(self.args))): caller.msg("A puzzle recipe's #dbref must be specified") ...
class Process(InstanceModule): def filter(self, **filters): match = [] for attrs in self._get_processes(**filters): for (key, value) in filters.items(): if (str(attrs[key]) != str(value)): break else: attrs['_get_process_att...
def _init_notebook(path_stem: str, overwrite=False, directory: str='.') -> Tuple[(nbformat.NotebookNode, Path)]: nb_path = (Path(f'{directory}') / f'{path_stem}.ipynb') if overwrite: nb_path.unlink(missing_ok=True) if nb_path.exists(): print(f'[{path_stem}] Loading existing: {nb_path}') ...
class ListPortInfo(object): def __init__(self, device, skip_link_detection=False): self.device = device self.name = os.path.basename(device) self.description = 'n/a' self.hwid = 'n/a' self.vid = None self.pid = None self.serial_number = None self.locat...