code
stringlengths
281
23.7M
def test_project_only(hatch, helpers, temp_dir, config_file): config_file.model.template.plugins['default']['tests'] = False config_file.save() project_name = 'My.App' with temp_dir.as_cwd(): result = hatch('new', project_name) assert (result.exit_code == 0), result.output project_path =...
def _purview_inclusion(distinction_attr, distinctions, min_order, max_order): purview_inclusion_by_order = defaultdict(defaultdict_set) for distinction in distinctions: for subset in map(frozenset, utils.powerset(getattr(distinction, distinction_attr), nonempty=True, min_size=min_order, max_size=max_ord...
def check_internal_api_for_subscription(namespace_user): plans = [] if namespace_user.organization: query = organization_skus.get_org_subscriptions(namespace_user.id) org_subscriptions = (list(query.dicts()) if (query is not None) else []) for subscription in org_subscriptions: ...
class RegExpSub(Gtk.HBox, RenameFilesPlugin, TagsFromPathPlugin): PLUGIN_ID = 'Regex Substitution' PLUGIN_NAME = _('Regex Substitution') PLUGIN_DESC_MARKUP = _('Allows arbitrary regex substitutions (<tt>s/from/to/</tt>) when tagging or renaming files.') PLUGIN_ICON = Icons.EDIT_FIND_REPLACE __gsigna...
def run_meteor(ref_path, mt_path, metric_path, lang='en'): (_, out_path) = tempfile.mkstemp() subprocess.call(['java', '-Xmx2G', '-jar', metric_path, mt_path, ref_path, '-p', '0.5 0.2 0.6 0.75', '-norm', '-l', lang], stdout=open(out_path, 'w')) os.remove(ref_path) os.remove(mt_path) sys.stderr.write...
def test_config_parse_error(capsys) -> None: curr_dir = os.path.dirname(__file__) config = os.path.join(curr_dir, 'file_fixtures', 'test_f0011.pylintrc') reporter = python_ta.check_all(module_name='examples/nodes/name.py', config=config) msg_id = reporter.messages[config][0].msg_id assert (msg_id ==...
class JsonReaderStringsTest(Json, ReaderTest, TestCase): input_data = (('[' + ',\n'.join(map(json.dumps, ('foo', 'bar', 'baz')))) + ']') () def test_nofields(self, context): context.write_sync(EMPTY) context.stop() assert (context.get_buffer() == [('foo',), ('bar',), ('baz',)]) (...
def isolated_environment(environ: dict[str, Any] | None = None, clear: bool = False) -> Iterator[None]:
    """Temporarily modify ``os.environ``, restoring the original state on exit.

    Args:
        environ: Variables to set on top of (or instead of) the current
            environment. ``None`` or empty means no additions.
        clear: If True, wipe the existing environment before applying
            ``environ``.

    Yields:
        None. Intended to be used as a generator-based context manager.
    """
    original_environ = dict(os.environ)
    if clear:
        os.environ.clear()
    if environ:
        os.environ.update(environ)
    try:
        yield
    finally:
        # Restore even when the caller's block raises; without this, an
        # exception would permanently leak the modified environment.
        os.environ.clear()
        os.environ.update(original_environ)
def recursive_update(directory, text_to_search, replacement_text): filenames = glob.glob(osp.join(directory, '*')) for filename in filenames: if osp.isfile(filename): if (not is_python_file(filename)): continue update_file(filename, text_to_search, replacement_tex...
def test_binary(hatch, helpers, temp_dir_data, dist_name): install_dir = ((temp_dir_data / 'data') / 'pythons') dist = helpers.write_distribution(install_dir, dist_name) result = hatch('python', 'find', dist_name) assert (result.exit_code == 0), result.output assert (result.output == helpers.dedent(...
def get_registrykey(hive, keyname, cache_tag='', cache_size=2, maxage=timedelta(seconds=0), targetID=None, use_volatile=False, **cmdargs): reg_cmd = ops.cmd.getDszCommand('registryquery', hive=hive, key=keyname, **cmdargs) return ops.project.generic_cache_get(reg_cmd, cache_tag=cache_tag, cache_size=cache_size,...
def test_add_sphinx_deprecated_directive_whit_titles(): original = 'foo\n\n Introduction\n \n\n something foo del baz ham eggs' with_directive = deprecate.add_sphinx_deprecated_directive(original, reason='yes', version=0.9) expected = 'foo\n\n .. deprecated:: 0.9\n yes\n\n\n Introduction\n \n\n somethin...
('evaluation', blacklist=['model_dir', 'output_dir', 'overwrite']) def evaluate(model_dir, output_dir, overwrite=False, evaluation_fn=gin.REQUIRED, random_seed=gin.REQUIRED, name=''): del name if tf.gfile.IsDirectory(output_dir): if overwrite: tf.gfile.DeleteRecursively(output_dir) e...
class TrainerBase(): def __init__(self): self._models = OrderedDict() self._optims = OrderedDict() self._scheds = OrderedDict() self._writer = None def register_model(self, name='model', model=None, optim=None, sched=None): if (self.__dict__.get('_models') is None): ...
.slow def test_conversions(): data_paths = pymedphys.zip_data_paths('trf-references-and-baselines.zip') files_with_references = [path for path in data_paths if ((path.parent.name == 'with_reference') and (path.suffix == '.trf'))] assert (len(files_with_references) >= 5) files_without_references = [path ...
def pointwise_loss(loss_function, y_rea, y_pre): loss = None if (loss_function.lower() == 'cross_entropy'): loss = tf.losses.sigmoid_cross_entropy(y_rea, y_pre) elif (loss_function.lower() == 'square'): loss = tf.reduce_sum(tf.square((y_rea - y_pre))) else: raise Exception('pleas...
def run(config): config['drop_last'] = False loaders = utils.get_data_loaders(**config) net = inception_utils.load_inception_net(parallel=config['parallel']) (pool, logits, labels) = ([], [], []) device = 'cuda' for (i, (x, y)) in enumerate(tqdm(loaders[0])): x = x.to(device) wit...
def _qcore_assert_impl(ctx: CallContext, constraint_type: ConstraintType, positive: bool) -> ImplReturn: left_varname = ctx.varname_for_arg('expected') right_varname = ctx.varname_for_arg('actual') if ((left_varname is not None) and isinstance(ctx.vars['actual'], KnownValue)): varname = left_varname...
def _evp_aead_encrypt(backend: Backend, cipher: _AEADTypes, nonce: bytes, data: bytes, associated_data: list[bytes], tag_length: int, ctx: typing.Any) -> bytes: assert (ctx is not None) aead_cipher = _evp_aead_get_cipher(backend, cipher) assert (aead_cipher is not None) out_len = backend._ffi.new('size_...
.slow .pydicom def test_round_trip_dd2dcm2dd(loaded_dicom_dataset, logfile_delivery_data: Delivery): original = logfile_delivery_data._filter_cps() template = loaded_dicom_dataset dicom = original.to_dicom(template) processed = Delivery.from_dicom(dicom, FRACTION_GROUP) assert np.all((np.around(orig...
def __batch_normalization(input, is_training, decay=0.999, eps=0.001): shape = input.get_shape().as_list()[(- 1)] beta = tf.Variable(tf.zeros(shape), name='beta') gamma = tf.Variable(tf.ones(shape), name='gamma') population_mean = tf.Variable(tf.zeros(shape)) population_var = tf.Variable(tf.ones(sha...
.parametrize('return_index', [False]) .parametrize('return_counts', [False]) .parametrize('return_inverse', [False]) def test_local_Unique_scalar(return_index, return_counts, return_inverse): x = dscalar() y = unique(x, return_index=return_index, return_counts=return_counts, return_inverse=return_inverse, axis=...
def test_context_raw_positive(local_client, grpc_client): random_image_vector = random_vector(image_vector_size) def f(client: QdrantBase, **kwargs: Dict[(str, Any)]) -> List[models.ScoredPoint]: return client.discover(collection_name=COLLECTION_NAME, target=10, context=[models.ContextExamplePair(posit...
class AsyncSchemaGenerator(AsyncVisitor, SchemaGenerator): async def _can_create_table(self, table): self.dialect.validate_identifier(table.name) effective_schema = self.connection.schema_for_object(table) if effective_schema: self.dialect.validate_identifier(effective_schema) ...
class DatetimeFormatter(logging.Formatter): def formatTime(self, record: LogRecord, datefmt: Optional[str]=None) -> str: if (datefmt and ('%f' in datefmt)): ct = self.converter(record.created) tz = timezone(timedelta(seconds=ct.tm_gmtoff), ct.tm_zone) dt = datetime(*ct[0:...
class Grower(Processor): def __init__(self, tflush=None): Processor.__init__(self) self._tflush = tflush def process(self, trace): buffer = self.get_buffer(trace) if (buffer is None): buffer = trace self.set_buffer(buffer) else: buffer....
def appendIncompleteTraceLog(testruns): testcnt = len(testruns) testidx = 0 testrun = [] for data in testruns: testrun.append(TestRun(data)) sysvals.vprint(('Analyzing the ftrace data (%s)...' % os.path.basename(sysvals.ftracefile))) tp = TestProps() tf = sysvals.openlog(sysvals.ftra...
.parametrize('x, axis, return_index, return_inverse, return_counts, exc', [(set_test_value(pt.lscalar(), np.array(1, dtype='int64')), None, False, False, False, None), (set_test_value(pt.lvector(), np.array([1, 1, 2], dtype='int64')), None, False, False, False, None), (set_test_value(pt.lmatrix(), np.array([[1, 1], [2,...
class ResourceBaseUnapprovedListView(LoginRequiredMixin, ResourceBaseListView, ResourceSearchMixin): def get_queryset(self): qs = self.model.unapproved_objects.all() qs = self.get_queryset_search_and_is_creator(qs) return qs def get_context_data(self, **kwargs): context = super()...
def test_py_with_closure_scope(): pycode = "list1.append(1)\nlist2 = ['a', 'b', 'c']\nout = [(x, y) for x in list1 for y in list2]\nsave('out')\n" context = Context({'list1': [0], 'list2': ['a', 'b'], 'py': pycode}) pypyr.steps.py.run_step(context) assert (context == {'list1': [0, 1], 'list2': ['a', 'b'...
def fcn(split): n = caffe.NetSpec() pydata_params = dict(split=split, mean=(104.00699, 116.66877, 122.67892), seed=1337) if (split == 'train'): pydata_params['sbdd_dir'] = '../data/sbdd/dataset' pylayer = 'SBDDSegDataLayer' else: pydata_params['voc_dir'] = '../data/pascal/VOC2011...
class SawyerDoorLockV2Policy(Policy): _fully_parsed def _parse_obs(obs): return {'hand_pos': obs[:3], 'gripper': obs[3], 'lock_pos': obs[4:7], 'unused_info': obs[7:]} def get_action(self, obs): o_d = self._parse_obs(obs) action = Action({'delta_pos': np.arange(3), 'grab_effort': 3}) ...
def test_postcmd_exception_first(capsys): app = PluggedApp() app.register_postcmd_hook(app.postcmd_hook_exception) stop = app.onecmd_plus_hooks('say hello') (out, err) = capsys.readouterr() assert (not stop) assert (out == 'hello\n') assert err assert (app.called_postcmd == 1) app.re...
def bn_reestimation_example(): (model, use_cuda) = load_fp32_model() if use_cuda: device = torch.device('cuda') else: device = torch.device('cpu') dummy_input = torch.rand(1, 3, 224, 224, device=device) quant_sim = create_quant_sim(model, dummy_input, use_cuda) perform_qat(quant_...
class LayoutLMv2TokenizerFast(PreTrainedTokenizerFast): vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP pretrained_init_configuration = PRETRAINED_INIT_CONFIGURATION max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES slow_tokenizer_class = La...
class MathQA(MultipleChoiceTask): VERSION = 0 DATASET_PATH = 'math_qa' DATASET_NAME = None def has_training_docs(self): return True def has_validation_docs(self): return True def has_test_docs(self): return True def training_docs(self): if (self._training_docs...
_on_failure .parametrize('number_of_nodes', [2]) .parametrize('resolver_ports', [[None, 8000]]) .parametrize('enable_rest_api', [True]) .usefixtures('resolvers') def test_api_payments_with_resolver(api_server_test_instance: APIServer, raiden_network: List[RaidenService], token_addresses, pfs_mock): (_, app1) = raid...
def basic_validate(net, criterion, val_batches): print('running validation ... ', end='') net.eval() start = time() with torch.no_grad(): validate_fn = val_step(compute_loss(net, criterion)) (n_data, tot_loss) = reduce((lambda a, b: ((a[0] + b[0]), (a[1] + b[1]))), starmap(validate_fn, v...
class InfluenceMartingale(): def __init__(self, nm_solver, a_parameter, quad_limit): self._nm_solver = nm_solver self._quad_limit = quad_limit self._a_parameter = a_parameter self.reset() def reset(self): self._t_prev = None self._continuous_martingale_at_t_prev =...
def optimization_step(A, Y, Z, beta, apply_cg, mode='prox_cg1'): apply_A = partial(apply_cg, A) if (mode == 'prox_exact'): AA = A.t().mm(A) I = torch.eye(A.size(1)).type_as(A) A_tilde = (AA + (beta * I)) b_tilde = (A.t().mm(Y) + (beta * Z)) (X, _) = torch.gesv(b_tilde, A_...
class BuiltinExtraMoveAndPoliciesMaker(ExtraMoveMaker, ExtraPoliciesMaker): def _create_extra_targets(self, extra: Union[(str, Sequence[str])]) -> ExtraTargets: if isinstance(extra, str): return ExtraTargets((extra,)) return ExtraTargets(tuple(extra)) def make_inp_extra_move(self, me...
class GithubOrganizationOAuth2FailTest(GithubOAuth2Test): backend_path = 'social_core.backends.github.GithubOrganizationOAuth2' def auth_handlers(self, start_url): url = ' HTTPretty.register_uri(HTTPretty.GET, url, status=404, body='{"message": "Not Found"}', content_type='application/json') ...
def icon_from_app(app_path): plist_path = os.path.join(app_path, 'Contents', 'Info.plist') with open(plist_path, 'rb') as h: plist = plistlib.load(h) icon_name = plist['CFBundleIconFile'] (icon_root, icon_ext) = os.path.splitext(icon_name) if (not icon_ext): icon_ext = '.icns' ic...
class TestConfigureWindow(EndianTest): def setUp(self): self.req_args_0 = {'attrs': {'width': 39387, 'stack_mode': 2, 'height': 57679, 'sibling': , 'y': (- 17512), 'x': (- 27539), 'border_width': (- 14551)}, 'window': } self.req_bin_0 = b'\x0c\x00\x00\n\x14\xd2\xd9t\x00\x7f\x00\x00\x94m\x00\x00\xbb\...
def test_infer_model_family(): assert (_infer_model_family('facebook/mbart-large-50-many-to-many-mmt') == 'mbart50') assert (_infer_model_family('facebook/m2m100_418M') == 'm2m100') assert (_infer_model_family('facebook/m2m100_1.2B') == 'm2m100') with pytest.raises(ValueError): _infer_model_fami...
def get_seq(dname): data_dir = ('%s/softmotion30_44k/%s' % (opt.data_dir, dname)) filenames = gfile.Glob(os.path.join(data_dir, '*')) if (not filenames): raise RuntimeError('No data files found.') for f in filenames: k = 0 for serialized_example in tf.python_io.tf_record_iterator...
def _format_to_bert(params): (_, json_file, args, file_counter, save_file) = params if os.path.exists(save_file): logger.info(('Ignore %s' % save_file)) return bert = BertData(args) logger.info(('Processing %s' % json_file)) jobs = json.load(open(json_file)) if args.tokenize: ...
def create_split(data, split_size): random.seed(SEED) (inputs, outputs) = data assert (len(inputs) == len(outputs)) indices = random.sample(range(len(inputs)), split_size) inputs1 = [inputs[i] for i in indices] outputs1 = [outputs[i] for i in indices] inputs2 = [inputs[i] for i in range(len(...
class ubmark_vvadd_opt(): def verify(memory): is_pass = True first_failed = (- 1) for i in range(c_vvadd_size): x = struct.unpack('i', memory[(c_vvadd_dest_ptr + (i * 4)):(c_vvadd_dest_ptr + ((i + 1) * 4))])[0] if (not (x == ref[i])): is_pass = False ...
def test_nested_struct_record_types(client): client = BigQueryClient(client) recap_schema = client.schema('test_project', 'test_dataset', 'test_table_struct') recap_fields = recap_schema.fields assert (recap_fields[0] == UnionType(types=[NullType(), StructType(fields=[UnionType(types=[NullType(), BoolTy...
def getConcentrableEntanglementStateSet(num_qubits, num_states, ce_mean, ce_variance=0.05): training_set = [] for _ in range(num_states): state_accepted = False while (not state_accepted): state = randomQubitState(num_qubits) ce = getConcentrableEntanglementState(state) ...
def test_mediator_lock_expired_with_new_block(): block_number = BlockNumber(5) pseudo_random_generator = random.Random() channels = mediator_make_channel_pair() payer_transfer = factories.make_signed_transfer_for(channels[0], LockedTransferSignedStateProperties(initiator=HOP1, expiration=BlockExpiration...
class Attribute(): __slots__ = ('name', 'default', 'validator', 'repr', 'eq', 'eq_key', 'order', 'order_key', 'hash', 'init', 'metadata', 'type', 'converter', 'kw_only', 'inherited', 'on_setattr', 'alias') def __init__(self, name, default, validator, repr, cmp, hash, init, inherited, metadata=None, type=None, c...
def test_send_mail_cc(db): send_mail('Subject', 'Message', to=[''], cc=['']) assert (len(mail.outbox) == 1) assert (mail.outbox[0].subject == '[example.com] Subject') assert (mail.outbox[0].body == 'Message') assert (mail.outbox[0].from_email == settings.DEFAULT_FROM_EMAIL) assert (mail.outbox[0...
class SecurityCheck(AuthError): def __init__(self, phone_prefix=None, phone_postfix=None, response=None): super(SecurityCheck, self).__init__() self.phone_prefix = phone_prefix self.phone_postfix = phone_postfix self.response = response def __str__(self): if (self.phone_p...
class TestTreeSelectFunc(EvenniaTest): def test_tree_functions(self): self.assertTrue((tree_select.dashcount('--test') == 2)) self.assertTrue((tree_select.is_category(TREE_MENU_TESTSTR, 1) == True)) self.assertTrue((tree_select.parse_opts(TREE_MENU_TESTSTR, category_index=2) == [(3, 'Baz 1')...
() ('-D', '--debug', is_flag=True, help='Set logging level to DEBUG to print verbose messages.') ('-q', '--quiet', is_flag=True, help='Silence all messages, this option has higher priority to `-D/--debug`.') ('images', type=click.Path(exists=True, file_okay=False, resolve_path=True, path_type=pathlib.Path), required=Tr...
class RFCNMetaArchTest(faster_rcnn_meta_arch_test_lib.FasterRCNNMetaArchTestBase): def _get_second_stage_box_predictor_text_proto(self): box_predictor_text_proto = '\n rfcn_box_predictor {\n conv_hyperparams {\n op: CONV\n activation: NONE\n regularizer {\n ...
def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('in_file', help='Annotation file for closeset.') parser.add_argument('out_file', help='Annotation file for openset.') parser.add_argument('--merge', action='store_true', help='Merge two classes: "background" and "others" in closese...
def test_SingleAxisTrackerMount_get_orientation(single_axis_tracker_mount): expected = {'surface_tilt': 19., 'surface_azimuth': } actual = single_axis_tracker_mount.get_orientation(45, 190) for (key, expected_value) in expected.items(): err_msg = f'{key} value incorrect' assert (actual[key] ...
def main(): args = create_argparser().parse_args() dist_util.setup_dist() logger.configure(dir=args.save_dir) logger.log('creating model and diffusion...') (model, diffusion) = create_model_and_diffusion(image_size=args.img_size, model_name=args.model_name, dataset=args.dataset, **args_to_dict(args,...
def get_training_set(upscale_factor, crop_size=256):
    """Build the BSD300 training dataset for super-resolution training.

    Args:
        upscale_factor: Super-resolution scale factor, forwarded to
            ``calculate_valid_crop_size`` and the input transform.
        crop_size: Base crop size before validity adjustment. Defaults to
            256, the previously hard-coded value, so existing callers are
            unaffected.

    Returns:
        A ``DatasetFromFolder`` over the downloaded ``train`` split with
        matching input/target transforms.
    """
    root_dir = download_bsd300()
    train_dir = join(root_dir, 'train')
    # presumably rounds crop_size down to a multiple of upscale_factor —
    # TODO confirm against calculate_valid_crop_size.
    valid_crop = calculate_valid_crop_size(crop_size, upscale_factor)
    return DatasetFromFolder(
        train_dir,
        input_transform=input_transform(valid_crop, upscale_factor),
        target_transform=target_transform(valid_crop),
    )
_module() class DvcliveLoggerHook(LoggerHook): def __init__(self, path, interval=10, ignore_last=True, reset_flag=True, by_epoch=True): super(DvcliveLoggerHook, self).__init__(interval, ignore_last, reset_flag, by_epoch) self.path = path self.import_dvclive() def import_dvclive(self): ...
def printsystemtokens(args):
    """Print the SYSTEM tokens reachable from the current thread.

    Args:
        args: Mapping with keys 'pid', 'oneMaxByPid', 'impPossibleOnly'
            and 'printFull', forwarded to the Impersonate helper.
    """
    printT('All nt authority\\system tokens which are accessible from current thread:')
    Impersonate().printSystemTokensAccessible(
        targetPID=args['pid'],
        oneMaxByPid=args['oneMaxByPid'],
        impersonationPossibleOnly=args['impPossibleOnly'],
        printFull=args['printFull'],
    )
def segment_buffer_line(buffer_line): is_wide_char = False text = '' start = 0 counter = 0 fg = 'default' bg = 'default' bold = False reverse = False if buffer_line: last_index = (max(buffer_line.keys()) + 1) else: last_index = 0 for i in range(last_index): ...
class JSONCacheHelper(): def __init__(self, func: Callable, cache_config_key: str, cache_version: int=1): self._callable = func self._cache_config_key = cache_config_key self._cache_version = cache_version self._uncacheable_arg_type_names = ('',) def cache_clear(cache_dir: (str |...
def get_image_info(databytes): head = databytes[0:32] if (len(head) != 32): return what = imghdr.what(None, head) if (what == 'png'): check = struct.unpack('>i', head[4:8])[0] if (check != ): return (width, height) = struct.unpack('>ii', head[16:24]) elif ...
class Question(Factory.Popup): def __init__(self, msg, callback, *, yes_str: str=None, no_str: str=None, title: str=None): Factory.Popup.__init__(self) self.yes_str = (yes_str or _('Yes')) self.no_str = (no_str or _('No')) self.title = (title or _('Question')) self.message = ...
class PostgresBase(): def get_tokens_unprocessed(self, text, *args): self.text = text (yield from super().get_tokens_unprocessed(text, *args)) def _get_lexer(self, lang): if (lang.lower() == 'sql'): return get_lexer_by_name('postgresql', **self.options) tries = [lang]...
def setup(app): app.setup_extension('sphinx.ext.autodoc') app.add_config_value('todo_include_todos', False, False) app.add_node(Todolist) app.add_node(Todo, html=(visit_todo_node, depart_todo_node), latex=(visit_todo_node, depart_todo_node), text=(visit_todo_node, depart_todo_node)) app.add_directiv...
def main(): Format() basic_multivector_operations_3D() basic_multivector_operations_2D() basic_multivector_operations_2D_orthogonal() check_generalized_BAC_CAB_formulas() rounding_numerical_components() derivatives_in_rectangular_coordinates() derivatives_in_spherical_coordinates() n...
def create_model(opt): model_type = opt['model_type'] for module in _model_modules: model_cls = getattr(module, model_type, None) if (model_cls is not None): break if (model_cls is None): raise ValueError(f'Model {model_type} is not found.') model = model_cls(opt) ...
def _require_equal_type(method): (method) def out(self, other): if (other == 0): return method(self, other) if ((self.type in ('oper', 'super')) and (self._dims[0] == self._dims[1]) and isinstance(other, numbers.Number)): scale = complex(other) other = Qobj(_d...
def _setattr_wrapper(setattr_: Callable, expected_keys: set[str]) -> Callable: (setattr_) def wrapper(self, key: str, value: Any) -> None: __dict__ = self.__dict__ if (('_tensordict' not in __dict__) or ('_non_tensordict' not in __dict__) or (key in SET_ATTRIBUTES)): return setattr_(...
def test_arguments_contains_all(): def manually_get_args(arg_node) -> set: names = set() if arg_node.args.vararg: names.add(arg_node.args.vararg) if arg_node.args.kwarg: names.add(arg_node.args.kwarg) names.update([x.name for x in arg_node.args.args]) ...
def build_host(host: str, port: int, secure: bool) -> str:
    """Format a host/port pair for use in a Host header or URL authority.

    IPv6 address literals are wrapped in brackets, and the port is
    omitted when it matches the scheme's default (443 for secure,
    80 otherwise).
    """
    try:
        address = ipaddress.ip_address(host)
    except ValueError:
        # Not an IP literal (e.g. a DNS name) — use it verbatim.
        pass
    else:
        if address.version == 6:
            host = f'[{host}]'

    default_port = 443 if secure else 80
    return host if port == default_port else f'{host}:{port}'
class MetafileChecker(ScriptBaseWithConfig): ARGS_HELP = '<metafile> [<data-dir-or-file>]' def add_options(self): super(MetafileChecker, self).add_options() def mainloop(self): if (not self.args): self.parser.print_help() self.parser.exit() elif (len(self.args...
def _extract_expressions(node: nodes.NodeNG) -> Iterator[nodes.NodeNG]: if (isinstance(node, nodes.Call) and isinstance(node.func, nodes.Name) and (node.func.name == _TRANSIENT_FUNCTION)): real_expr = node.args[0] assert node.parent real_expr.parent = node.parent for name in node.par...
def string_cp866_mutator(data: str): t_data = data.translate(CP866_CHAR_REPLACES) try: t_data.encode('cp866', 'strict') except UnicodeEncodeError as e: bad_char = e.object[e.start:e.end] raise ValueLoadError(f'Char {bad_char!r} can not be represented at CP866', data) return t_dat...
('meta-baseline') class MetaBaseline(nn.Module): def __init__(self, encoder, encoder_args={}, method='cos', temp=10.0, temp_learnable=True): super().__init__() self.encoder = models.make(encoder, **encoder_args) self.method = method if temp_learnable: self.temp = nn.Param...
class VerticalFlip(DualTransform): identity_param = False def __init__(self): super().__init__('apply', [False, True]) def apply_aug_image(self, image, apply=False, **kwargs): if apply: image = F.vflip(image) return image def apply_deaug_mask(self, mask, apply=False, ...
def gather_embeddings(input_dir: Path, output_path: Optional[Path]=None, glob_pattern: Optional[str]=None, verbose: bool=False) -> None: if (glob_pattern is None): glob_pattern = '*.h5' if (output_path is None): output_path = (input_dir / 'embeddings_gathered.h5') input_files = list(input_di...
.parametrize('new_state', [False, True]) .parametrize('old_state', [False, True]) def test_admin_set_allow_everyone_claim(flask_app, two_player_session, mock_audit, mock_emit_session_update, old_state, new_state): sa = MagicMock() sa.get_current_user.return_value = database.User.get_by_id(1234) two_player_s...
def G_logistic_ns_pathreg(G, D, latents, pl_avg, latent_labels=None, pl_decay=0.01, gamma=2, *args, **kwargs): (fakes, dlatents) = G(latents, labels=latent_labels, return_dlatents=True) fake_scores = D(fakes, labels=latent_labels).float() loss = F.binary_cross_entropy_with_logits(fake_scores, torch.ones_lik...
def trybaseget(targetfilename, start=(- 1), end=(- 1), tail=(- 1), name=''): if (targetfilename[0] == '"'): targetfilename = targetfilename[1:(- 1)] cmd = ('get "%s" ' % targetfilename) if (name != ''): cmd += ('-name %s ' % name) if ((start > (- 1)) and (end == (- 1))): cmd += (...
def train(args, trainer, task, epoch_itr): update_freq = (args.update_freq[(epoch_itr.epoch - 1)] if (epoch_itr.epoch <= len(args.update_freq)) else args.update_freq[(- 1)]) itr = epoch_itr.next_epoch_itr(fix_batches_to_gpus=args.fix_batches_to_gpus, shuffle=(epoch_itr.epoch >= args.curriculum)) itr = itera...
class BaseReader(object): def open(path): raise NotImplementedError('Should be implemented in derived class!') def close(path): raise NotImplementedError('Should be implemented in derived class!') def open_anno_file(path, anno_filename=None): raise NotImplementedError('Should be impl...
class UdataBaseOAuth2(BaseOAuth2): SCOPE_SEPARATOR = ',' REDIRECT_STATE = False DEFAULT_SCOPE = ['default'] ACCESS_TOKEN_METHOD = 'POST' def get_user_details(self, response): return {'username': response.get('first_name'), 'email': (response.get('email') or ''), 'first_name': response.get('f...
def assert_module_equivalence(name: str, expected: Iterable[str], actual: Iterable[str]) -> None: expected_normalized = sorted(expected) actual_normalized = sorted(set(actual).difference({'__main__'})) assert_string_arrays_equal(expected_normalized, actual_normalized, 'Actual modules ({}) do not match expec...
def get_Nash_equilibrium(alphas): a = alphas[0] b = alphas[1] if (((a + b) > 1.0) or ((a < eps) and (b < eps))): return (0.0, 0.0, 1.0, 1.0) x = 0.0 y = 0.0 while True: (X, R1) = get_optimal_strategy(a, b, y) (Y, R2) = get_optimal_strategy(b, a, x) if ((abs((X - x...
def test_local_filename_installed_malformed(tmpdir, monkeypatch, caplog): monkeypatch.setattr(spell, 'dictionary_dir', (lambda : str(tmpdir))) for lang_file in ['en-US-11-0.bdic', 'en-US-7-1.bdic', 'en-US.bdic']: (tmpdir / lang_file).ensure() with caplog.at_level(logging.WARNING): assert (sp...
def test_ls(client):
    """Check dataset and table listings returned by BigQueryClient.ls."""
    bq_client = BigQueryClient(client)
    assert bq_client.ls('test_project') == ['test_dataset']
    expected_tables = [
        'test_table',
        'test_table_required',
        'test_table_struct',
        'test_table_repeated',
        'test_table_repeated_records',
        'test_table_description',
    ]
    assert bq_client.ls('test_project', 'test_dataset') == expected_tables
def save_snapshot(model, dataset_name, uid, typ, optimizer=None, root='models_checkpoints'): snapshot_path = os.path.join(root, dataset_name, model.name, ('%s_%s_%s' % (dataset_name, model.name, uid))) fname = ('%s_%s_%s_%%s.pth.tar' % (dataset_name, model.name, uid)) save_path = os.path.join(snapshot_path,...
class _FunctionType(PySMTType): _instances = {} def __init__(self, return_type, param_types): PySMTType.__init__(self) self._return_type = return_type self._param_types = tuple(param_types) self._hash = (hash(return_type) + sum((hash(p) for p in param_types))) self.args =...
def translate(opt): ArgumentParser.validate_translate_opts(opt) logger = init_logger(opt.log_file) translator = build_translator(opt, report_score=True) src_shards = split_corpus(opt.src, opt.shard_size) edge_index_shards = split_corpus(opt.edge_index, opt.shard_size) edge_type_shards = split_co...
def batch_ber(output, target, class_ids=[1, 2]): predict = torch.argmax(output.long(), 1) target = target.long() bers = torch.zeros(3) bers_count = torch.zeros(3) bers_count[0] = 1 for class_id in class_ids: valid = (target == class_id) if (valid.sum() == 0): continue...
class MHTMLWriter(): def __init__(self, root_content, content_location, content_type): self.root_content = root_content self.content_location = content_location self.content_type = content_type self._files: MutableMapping[(QUrl, _File)] = {} def add_file(self, location, content, ...
def get_scene_graphs(start_index=0, end_index=(- 1), data_dir='data/', image_data_dir='data/by-id/', min_rels=0, max_rels=100): images = {img.id: img for img in get_all_image_data(data_dir)} scene_graphs = [] img_fnames = os.listdir(image_data_dir) if (end_index < 1): end_index = len(img_fnames)...
def _dir2(obj, pref=_NN, excl=(), slots=None, itor=_NN): if slots: if hasattr(obj, slots): s = {} for c in type(obj).mro(): n = _nameof(c) for a in getattr(c, slots, ()): if a.startswith('__'): a = (('_' + n)...
class CriterionAdv(nn.Module): def __init__(self, adv_type): super(CriterionAdv, self).__init__() if ((adv_type != 'wgan-gp') and (adv_type != 'hinge')): raise ValueError('adv_type should be wgan-gp or hinge') self.adv_loss = adv_type def forward(self, d_out_S, d_out_T): ...