code
stringlengths
281
23.7M
class CaseInsensitiveModelBackend(ModelBackend): def authenticate(self, request, username=None, password=None, autologin=None): if autologin: autologin.backend = 'evennia.web.utils.backends.CaseInsensitiveModelBackend' return autologin else: Account = get_user_mod...
class Solution(): def reverse(self, x): is_neg = False if (x < 0): x = (- x) is_neg = True res = 0 while (x > 0): res *= 10 res += (x % 10) x //= 10 if is_neg: res = (- res) if ((res < (- (2 ** 31...
class FileException():
    """Fake file object that enforces a byte quota on reads.

    Each ``read`` returns a run of ``b'a'`` bytes.  Once the running total
    of requested bytes exceeds ``max``, ``OSError(13, 'Permission Denied')``
    is raised — useful for simulating permission failures in tests.
    """

    def __init__(self, max):
        # Running total of bytes requested so far.
        self.count = 0
        # Quota; requesting past this makes read() raise.
        self.max = max

    def read(self, chars):
        """Return ``chars`` bytes of ``b'a'``, or raise once over quota."""
        new_total = self.count + chars
        self.count = new_total
        if new_total > self.max:
            raise OSError(13, 'Permission Denied')
        return b'a' * chars

    def close(self):
        """No-op close, mimicking the file-object interface."""
        return None
class Unsubscribe(HookHandlerCollection): def _subscribe(self, event: str, func: Callable) -> None: registry = subscriptions.setdefault(self.registry_name, dict()) lst = registry.setdefault(event, []) try: lst.remove(func) except ValueError: raise utils.QtileE...
def jwk_dict_to_public_key(jwk_dict): jwk = JsonWebKey.import_key(jwk_dict) if isinstance(jwk, RSAKey): rsa_pk = jwk.as_key() return RSAPublicNumbers(e=rsa_pk.public_numbers().e, n=rsa_pk.public_numbers().n).public_key(default_backend()) elif isinstance(jwk, ECKey): ec_pk = jwk.as_ke...
def test_qvariant(tmpdir): settings = qt_api.QtCore.QSettings(str((tmpdir / 'foo.ini')), qt_api.QtCore.QSettings.Format.IniFormat) settings.setValue('int', 42) settings.setValue('str', 'Hello') settings.setValue('empty', None) assert (settings.value('int') == 42) assert (settings.value('str') ==...
def process_batch_augmentation(input_filename_list, input_label_list, dim_input, batch_sample_num): new_path_list = [] new_label_list = [] for k in range(batch_sample_num): class_idxs = list(range(0, FLAGS.way_num)) random.shuffle(class_idxs) for class_idx in class_idxs: ...
class FootageInterpretation(PymiereBaseObject): def __init__(self, pymiere_id=None): super(FootageInterpretation, self).__init__(pymiere_id) def frameRate(self): return self._eval_on_this_object('frameRate') def frameRate(self, frameRate): self._eval_on_this_object('frameRate = {}'.f...
def find_module_paths_using_imports(modules: list[str], packages: list[str], verbose: bool, quiet: bool) -> tuple[(list[StubSource], list[StubSource])]: with ModuleInspect() as inspect: py_modules: list[StubSource] = [] c_modules: list[StubSource] = [] found = list(walk_packages(inspect, pac...
class BottleNeck(nn.Module): def __init__(self, in_channels, out_channels=None, activation=None, dilation=1, downsample=False, proj_ratio=4, upsample=False, asymetric=False, regularize=True, p_drop=None, use_prelu=True): super(BottleNeck, self).__init__() self.pad = 0 self.upsample = upsampl...
class Plating(BasePlating): def __init__(self, param, domain, x_average, options): super().__init__(param, domain, options=options) self.x_average = x_average pybamm.citations.register('OKane2020') pybamm.citations.register('OKane2022') def get_fundamental_variables(self): ...
class T5Tokenizer(PreTrainedTokenizer): vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES model_input_names = ['input_ids', 'attention_mask'] def __init__(self, vocab_file, eos_token='</s>', un...
def init_distributed_mode(args): if (('RANK' in os.environ) and ('WORLD_SIZE' in os.environ)): rank = int(os.environ['RANK']) world_size = int(os.environ['WORLD_SIZE']) print(f'RANK and WORLD_SIZE in environment: {rank}/{world_size}') else: rank = (- 1) world_size = (- 1)...
class QuaterionProgressBar(RichProgressBar): def __init__(self, refresh_rate: int=1, leave: bool=False, theme: RichProgressBarTheme=None, console_kwargs: Optional[Dict[(str, Any)]]=None): if (theme is None): theme = RichProgressBarTheme(description='white', progress_bar='#4881AD', progress_bar_f...
class PositiveResourceRequirement(ResourceRequirement): def __post_init__(self) -> None: assert (self.resource.resource_type != ResourceType.DAMAGE) assert (not self.negate) def satisfied(self, current_resources: ResourceCollection, current_energy: int, database: ResourceDatabase) -> bool: ...
_ansi_style(ansi.AllowStyle.NEVER) def test_poutput_ansi_never(outsim_app): msg = 'Hello World' colored_msg = ansi.style(msg, fg=ansi.Fg.CYAN) outsim_app.poutput(colored_msg, apply_style=False) out = outsim_app.stdout.getvalue() expected = (msg + '\n') assert (colored_msg != msg) assert (out...
class TypeAreaScaledDotProductAttention(nn.Module): def __init__(self, scale, attn_dropout=0.1): super().__init__() self.scale = scale self.dropout = nn.Dropout(attn_dropout) def forward(self, q_a: torch.Tensor, k_a: torch.Tensor, v_a: torch.Tensor, q_s: torch.Tensor, k_s: torch.Tensor, ...
def evaluate(model, loader, n_class, device, dtype, iter_idx, writer): hist = np.zeros((n_class, n_class)) for (batch_idx, (data, target)) in enumerate(loader): data = data.to(device=device, dtype=dtype) with torch.no_grad(): output = model(data) (_, h, w) = target.shape ...
class Operand(object): value = None kind = oUNK text = '?' def __init__(self, akind=None, avalue=None, arank=0, atext='?'): if (akind is not None): self.kind = akind if (avalue is not None): self.value = avalue self.rank = arank self.text = atext ...
class Corr3dMMGradWeights(BaseCorr3dMM): _direction = 'backprop weights' def make_node(self, img, topgrad, shape=None): img = as_tensor_variable(img) topgrad = as_tensor_variable(topgrad) (img, topgrad) = self.as_common_dtype(img, topgrad) if (img.type.ndim != 5): rai...
class WindowWithoutMessageLoopFocusTests(unittest.TestCase): def setUp(self): Timings.fast() self.app1 = Application().start(u'cmd.exe', create_new_console=True, wait_for_idle=False) self.app2 = Application().start(os.path.join(mfc_samples_folder, u'CmnCtrl2.exe')) self.app2.wait_cpu...
class SWDADataLoader(LongDataLoader): def __init__(self, name, data, config, labeled=False): self.name = name self.data = data self.data_size = len(data) self.data_lens = all_lens = [len(line) for line in self.data] self.max_utt_size = config.max_utt_len self.max_dial...
class PoolBase(object):
    """Minimal pool base that caches the flattened dimensions of an env spec."""

    def __init__(self, env_spec):
        # Cache flat sizes once so accessors don't touch env_spec again.
        obs_space = env_spec.observation_space
        act_space = env_spec.action_space
        self._observation_dim = obs_space.flat_dim
        self._action_dim = act_space.flat_dim

    def observation_dim(self):
        """Return the flattened observation-space dimension."""
        return self._observation_dim

    def action_dim(self):
        """Return the flattened action-space dimension."""
        return self._action_dim
.parametrize('menu_mod', [False, True]) def test_edit_menu_mod(editor: PresetEditor, initial_layout_configuration_params: dict, default_echoes_configuration, menu_mod): editor._configuration = dataclasses.replace(default_echoes_configuration, **initial_layout_configuration_params) editor._nested_autosave_level ...
def main(): args = parser.parse_args() cfg_file = args.config_file prog_name = 'Running ResDepth: Training' print('\n{}\n{}\n'.format(prog_name, ('=' * len(prog_name)))) if (not fdutil.file_exists(cfg_file)): print(f'ERROR: Cannot find the configuration file: {cfg_file}') sys.exit(1)...
def WriteDialogToFile(filename, props): try: props[0].keys() except (TypeError, AttributeError): props = controls.get_dialog_props_from_handle(props) root = Element('DIALOG') root.set('_version_', '2.0') for ctrl in props: ctrlelem = SubElement(root, 'CONTROL') for (n...
class TestTrainingExtensionBnFold(): def test_fold_resnet18(self): torch.manual_seed(10) model = models.resnet18() _initialize_bn_params(model) model = model.eval() random_input = torch.rand(1, 3, 224, 224) baseline_output = model(random_input) layer_list = [(...
class PluginCliOptionsTestCase(unittest.TestCase): def test_cli_options(self): test_plugin = plugin.ServicePlugin(name='test-plugin', config=None, cli_options=TestPluginCliOptions(), client=TestClient) args = cli._parse_command('bird --sound tweet'.split(), plugins=[test_plugin]) self.assert...
def get_args(): parser = argparse.ArgumentParser(description='process the textgrid files') parser.add_argument('--path', type=str, required=True, help='Data path') parser.add_argument('--mars', type=strtobool, default=False, help='Whether to process mars data set.') args = parser.parse_args() return...
class Model(OriginalModel): def __init__(self, *args, **kwargs): logger.debug('Initializing %s: (args: %s, kwargs: %s', self.__class__.__name__, args, kwargs) kwargs['input_shape'] = (64, 64, 3) kwargs['encoder_dim'] = 512 super().__init__(*args, **kwargs) logger.debug('Initi...
def collate(samples, pad_idx, eos_idx, vocab, left_pad_source=False, left_pad_target=False, input_feeding=True, pad_to_length=None): assert input_feeding if (len(samples) == 0): return {} def merge(key, left_pad, move_eos_to_beginning=False, pad_to_length=None): return data_utils.collate_tok...
def infer_enum_class(node: nodes.ClassDef) -> nodes.ClassDef: for basename in (b for cls in node.mro() for b in cls.basenames): if (node.root().name == 'enum'): break dunder_members = {} target_names = set() for (local, values) in node.locals.items(): if (any(...
class TradeTestCase(unittest.TestCase): def test_basic_long_only_strategy(self): class BuyBelow10(Moonshot): CODE = 'buy-below-10' def prices_to_signals(self, prices): signals = (prices.loc['Close'] < 10) return signals.astype(int) def mock_get...
def test_get_our_capacity_for_token_network(): test_state = factories.make_chain_state(number_of_channels=3) chain_state = test_state.chain_state test_state.channels[(- 1)].close_transaction = TransactionExecutionStatus(started_block_number=chain_state.block_number, finished_block_number=chain_state.block_n...
def get_config(prefix): class AnimOsdConfig(): plugin_conf = PluginConfig(prefix) font = ConfProp(plugin_conf, 'font', 'Sans 22') string = ConfProp(plugin_conf, 'string', DEFAULT_PATTERN) pos_x = FloatConfProp(plugin_conf, 'pos_x', 0.5) pos_y = FloatConfProp(plugin_conf, 'pos...
def main(): args = parser.parse_args() args.use_ema = (not args.no_use_ema) args.sort = (not args.no_sort) if os.path.exists(args.output): print('Error: Output filename ({}) already exists.'.format(args.output)) exit(1) pattern = args.input if ((not args.input.endswith(os.path.se...
class Stack(Layout): defaults = [('border_focus', '#0000ff', 'Border colour(s) for the focused window.'), ('border_normal', '#000000', 'Border colour(s) for un-focused windows.'), ('border_focus_stack', None, "Border colour(s) for the focused stacked window. If 'None' will default to border_focus."), ('bor...
def test_search_with_persistence_and_skipped_vectors(): import tempfile fixture_records = generate_sparse_fixtures(skip_vectors=True) searcher = TestSimpleSparseSearcher() with tempfile.TemporaryDirectory() as tmpdir: local_client = init_local(tmpdir) init_client(local_client, fixture_re...
class CheckTable(QtWidgets.QWidget): sigStateChanged = QtCore.Signal(object, object, object) def __init__(self, columns): QtWidgets.QWidget.__init__(self) self.layout = QtWidgets.QGridLayout() self.layout.setSpacing(0) self.setLayout(self.layout) self.headers = [] ...
def test_do_dims(): coords = {'test_dim': range(10)} with pm.Model(coords=coords) as m: x = pm.Normal('x', dims='test_dim') y = pm.Deterministic('y', (x + 5), dims='test_dim') do_m = do(m, {'x': np.zeros(10, dtype=config.floatX)}) assert (do_m.named_vars_to_dims['x'] == ['test_dim']) ...
class AdvertiserStripePortalView(AdvertiserAccessMixin, UserPassesTestMixin, View): = ['get'] def get(self, request, *args, **kwargs): advertiser = get_object_or_404(Advertiser, slug=self.kwargs['advertiser_slug']) return_url = reverse('advertiser_main', args=[advertiser.slug]) if (not ...
def test_window_count(manager_nospawn, minimal_conf_noscreen): config = minimal_conf_noscreen config.screens = [libqtile.config.Screen(top=libqtile.bar.Bar([WindowCount()], 10))] manager_nospawn.start(config) assert (int(manager_nospawn.c.widget['windowcount'].get()) == 0) one = manager_nospawn.test...
class TestAcceptContext(_GSSAPIKerberosTestCase): def setUp(self): self.target_name = gb.import_name(TARGET_SERVICE_NAME, gb.NameType.hostbased_service) ctx_resp = gb.init_sec_context(self.target_name) self.client_token = ctx_resp[3] self.client_ctx = ctx_resp[0] self.assertI...
class PythonSpec(): def __init__(self, str_spec, implementation, major, minor, micro, architecture, path) -> None: self.str_spec = str_spec self.implementation = implementation self.major = major self.minor = minor self.micro = micro self.architecture = architecture ...
class UniF_BiAGRU(nn.Module): def __init__(self, emodict, worddict, embedding, args): super(UniF_BiAGRU, self).__init__() self.num_classes = emodict.n_words self.embeddings = embedding self.gpu = args.gpu self.hops = args.hops self.wind_1 = args.wind1 self.utt...
class ReduceState(nn.Module): def __init__(self): super(ReduceState, self).__init__() self.reduce_h = nn.Linear((config.hidden_dim * 2), config.hidden_dim) init_linear_wt(self.reduce_h) self.reduce_c = nn.Linear((config.hidden_dim * 2), config.hidden_dim) init_linear_wt(self....
def sapm_sd_scatter(c_data, label=None, **kwargs): fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111, aspect='equal') sc = ax.scatter(p_acs['sd'], p_acs['sapm'], c=c_data, alpha=1, **kwargs) ax.plot(yxline, yxline, 'r', linewidth=3) ax.set_xlim(0, None) ax.set_ylim(0, None) ax.set_...
def get_dataset_inner(cfg: DatasetConfig, shard_info: seqio.ShardInfo, feature_converter_cls: Callable[(..., seqio.FeatureConverter)], seed: Optional[int]=None, num_epochs: Optional[int]=None): batch_size = (cfg.batch_size // shard_info.num_shards) if isinstance(cfg.mixture_or_task_name, seqio.DatasetProviderBa...
class F36Handler(BaseHandler): version = F36 commandMap = {'auth': commands.authconfig.F35_Authconfig, 'authconfig': commands.authconfig.F35_Authconfig, 'authselect': commands.authselect.F28_Authselect, 'autopart': commands.autopart.F29_AutoPart, 'autostep': commands.autostep.F34_AutoStep, 'bootloader': command...
def _find_registerable_files_readers(readers=None):
    """Load reader configs so their file handlers become registered.

    Registration is best-effort: a reader whose optional dependency is
    missing or whose YAML config is malformed is silently skipped.
    """
    import yaml
    from satpy.readers import configs_for_reader, load_reader

    def _try_load(configs):
        # Skip readers with absent deps or unparsable configs.
        try:
            load_reader(configs)
        except (ModuleNotFoundError, yaml.YAMLError):
            pass

    for reader_configs in configs_for_reader(reader=readers):
        _try_load(reader_configs)
def build_graph_from_triplets(num_nodes, num_rels, triplets): g = dgl.DGLGraph() g.add_nodes(num_nodes) (src, rel, dst) = triplets (src, dst) = (np.concatenate((src, dst)), np.concatenate((dst, src))) rel = np.concatenate((rel, (rel + num_rels))) edges = sorted(zip(dst, src, rel)) (dst, src,...
def test_envget_pass(): os.environ['ARB_DELETE_ME1'] = 'arb value from $ENV ARB_DELETE_ME1' os.environ['ARB_DELETE_ME2'] = 'arb value from $ENV ARB_DELETE_ME2' context = Context({'key1': 'value1', 'key2': 'value2', 'key3': 'value3', 'envGet': [{'env': 'ARB_DELETE_ME1', 'key': 'key2', 'default': 'blah'}, {'e...
def make_blocks(block_fn, channels, block_repeats, inplanes, reduce_first=1, output_stride=32, down_kernel_size=1, avg_down=False, drop_block_rate=0.0, drop_path_rate=0.0, **kwargs): stages = [] feature_info = [] net_num_blocks = sum(block_repeats) net_block_idx = 0 net_stride = 4 dilation = pre...
class VarnishRoleTest(ProvyTestCase): def setUp(self): super(VarnishRoleTest, self).setUp() self.role = VarnishRole(prov=None, context={'owner': 'some-owner'}) def installs_necessary_packages_to_provision(self): with self.using_stub(AptitudeRole) as aptitude: self.role.provis...
def test(db, split, testiter, debug=False, suffix=None): result_dir = system_configs.result_dir result_dir = os.path.join(result_dir, str(testiter), split) if (suffix is not None): result_dir = os.path.join(result_dir, suffix) make_dirs([result_dir]) test_iter = (system_configs.max_iter if (...
class GLUETransformer(BaseTransformer): mode = 'sequence-classification' def __init__(self, hparams): if (type(hparams) == dict): hparams = Namespace(**hparams) hparams.glue_output_mode = glue_output_modes[hparams.task] num_labels = glue_tasks_num_labels[hparams.task] ...
class ModelPruner(): def _copy_model(cls, model, model_layers): model_copy = copy.deepcopy(model) model_layers_copy = {} modules_in_copy = list(model_copy.modules()) for (index, module) in enumerate(model.modules()): if (id(module) in model_layers): existi...
def _parse_paren(source, info): if source.match(u'?'): if source.match(u'<'): if source.match(u'='): return _parse_lookaround(source, info, behind=True, positive=True) elif source.match(u'!'): return _parse_lookaround(source, info, behind=True, positiv...
_torch _pytesseract class LayoutLMv3ImageProcessingTest(ImageProcessingSavingTestMixin, unittest.TestCase): image_processing_class = (LayoutLMv3ImageProcessor if is_pytesseract_available() else None) def setUp(self): self.image_processor_tester = LayoutLMv3ImageProcessingTester(self) def image_proce...
('/v1/superuser/keys/<kid>') _param('kid', 'The unique identifier for a service key') _if(features.SUPER_USERS) class SuperUserServiceKey(ApiResource): schemas = {'PutServiceKey': {'id': 'PutServiceKey', 'type': 'object', 'description': 'Description of updates for a service key', 'properties': {'name': {'type': 'st...
class OverlayLabel(QtWidgets.QLabel): STYLESHEET = '\n QLabel, QLabel link {\n color: rgb(0, 0, 0);\n background-color: rgb(248, 240, 200);\n border: 1px solid;\n border-color: rgb(255, 114, 47);\n padding: 2px;\n }\n ' def __init__(self, text, parent): super(...
_head('san_graph') class SANGraphHead(nn.Module): def __init__(self, dim_in, dim_out, L=2): super().__init__() self.pooling_fun = register.pooling_dict[cfg.model.graph_pooling] list_FC_layers = [nn.Linear((dim_in // (2 ** l)), (dim_in // (2 ** (l + 1))), bias=True) for l in range(L)] ...
class ClassInstanceHandler(): called = False args = None kwargs = None def mock_event(self, *args, **kwargs): self.called = True self.args = args self.kwargs = kwargs return EVENT_HANDLED def mock_event2(self, *args, **kwargs): self.called = True self....
def test_struct_prefab(): m = run_mod("\n #lang pycket\n (require racket/private/kw)\n\n (define lunch '#s(sprout bean))\n (struct sprout (kind) #:prefab)\n (define t (sprout? lunch))\n (define f (sprout? #s(sprout bean #f 17)))\n\n (define result (and (not f) t))\n ") assert (m.defs[W_S...
class _LSBBitReader(BitReader): def _lsb(self, count): value = (self._buffer & (255 >> (8 - count))) self._buffer = (self._buffer >> count) self._bits -= count return value def bits(self, count): if (count < 0): raise ValueError value = 0 if (c...
def low_depth_second_order_trotter_error_operator(terms, indices=None, is_hopping_operator=None, jellium_only=False, verbose=False): more_info = bool(indices) n_terms = len(terms) if verbose: import time start = time.time() error_operator = FermionOperator.zero() for beta in range(n_...
(post_save, sender=Item) def update_cls_score(instance, **kwargs): if (not settings.CLS_ENABLED): return if instance._disable_signals: return try: item = ItemClsCheck.objects.get(item=instance) item.check_cls(False) except (ObjectDoesNotExist, ItemClsCheck.DoesNotExist): ...
def check_test_output_files(testcase: DataDrivenTestCase, step: int, strip_prefix: str='') -> None: for (path, expected_content) in testcase.output_files: if path.startswith(strip_prefix): path = path[len(strip_prefix):] if (not os.path.exists(path)): raise AssertionError('Ex...
class SawyerDrawerCloseEnvV2(SawyerXYZEnv): _TARGET_RADIUS = 0.04 def __init__(self): hand_low = ((- 0.5), 0.4, 0.05) hand_high = (0.5, 1, 0.5) obj_low = ((- 0.1), 0.9, 0.0) obj_high = (0.1, 0.9, 0.0) super().__init__(self.model_name, hand_low=hand_low, hand_high=hand_hig...
class TestCrossNet(unittest.TestCase): def test_cross_net_numercial_forward(self) -> None: torch.manual_seed(0) batch_size = 3 num_layers = 20 in_features = 2 input = torch.randn(batch_size, in_features) dcn = CrossNet(in_features=in_features, num_layers=num_layers) ...
def download_models(): home = path.expanduser('~') models_path = path.join(home, NFH_DIR) if (not path.exists(models_path)): print("couldn't fine nfh directory... creating one [directory]") os.makedirs(models_path, exist_ok=True) identification_path = path.join(home, NFH_DIR, IDENTIFICAT...
class Data(): def __init__(self, data, shuffle=False, n_node=None): self.raw = np.asarray(data[0]) H_T = data_masks(self.raw, n_node) BH_T = H_T.T.multiply((1.0 / H_T.sum(axis=1).reshape(1, (- 1)))) BH_T = BH_T.T H = H_T.T DH = H.T.multiply((1.0 / H.sum(axis=1).reshap...
class Encryptor(object): magic_number = b'\xd1\x08' def __init__(self, key, iv=None): self.iv = make_aes_iv(iv) self.aes = AES.new(make_aes_key(key), AES.MODE_CBC, self.iv) self.random = Random.new() def encrypt(self, msg): blob = self.magic_number blob += '{}:'.forma...
def check_create_test_db(database=_connect.TEST_DB_NAME): with pymssql.connect(_connect.MSQ_SERVER, port=_connect.MSQ_PORT, user=_connect.SA_USER, password=_connect.SA_PASSWORD) as sql_sa_connection: sql_sa_connection.autocommit(True) with sql_sa_connection.cursor() as cursor: cursor.exe...
def pyarrow_read(path: str, columns: (list[str] | None)=None) -> pa.Table:
    """Read a Parquet object from S3 into a PyArrow Table.

    Args:
        path: Full object path; must start with ``'s3://'``.
        columns: Optional subset of column names to read; ``None`` reads all.

    Returns:
        The object's contents as a ``pyarrow.Table``.

    Raises:
        AssertionError: If ``path`` does not start with ``'s3://'``.
    """
    assert path.startswith('s3://'), f"Expected file path to start with 's3://', but got {path}."
    fs = pafs.S3FileSystem()
    # Strip only the leading scheme.  The previous str.replace() removed
    # EVERY 's3://' occurrence, which would corrupt a key that happened to
    # contain the substring later in the path.
    path = path.removeprefix('s3://')
    return papq.read_table(path, columns=columns, filesystem=fs)
def convert_gpt2_checkpoint_to_pytorch(gpt2_checkpoint_path, gpt2_config_file, pytorch_dump_folder_path): if (gpt2_config_file == ''): config = GPT2Config() else: config = GPT2Config.from_json_file(gpt2_config_file) model = GPT2Model(config) load_tf_weights_in_gpt2(model, config, gpt2_ch...
class residualBlockIN(nn.Module): def __init__(self, in_channels=64, kernel=3, mid_channels=64, out_channels=64, stride=1, activation=relu): super(residualBlockIN, self).__init__() self.conv1 = nn.Conv2d(in_channels=in_channels, out_channels=mid_channels, kernel_size=kernel, stride=stride, padding=(...
def test_double_reflect_stratonovich(): (f, g, ts, y0, args) = make_example_sde() b = make_brownian_motion(ts[0], np.zeros(y0.shape), ts[(- 1)], rng) (f2, g2, b2, t2) = time_reflect_stratonovich(*time_reflect_stratonovich(f, g, b, ts)) t = 0.1 assert np.all((ts == t2)) assert np.all((f(y0, t, ar...
def test_activate_does_not_recreate_when_switching_minor(tmp_path: Path, manager: EnvManager, poetry: Poetry, config: Config, mocker: MockerFixture, venv_name: str) -> None: if ('VIRTUAL_ENV' in os.environ): del os.environ['VIRTUAL_ENV'] envs_file = TOMLFile((tmp_path / 'envs.toml')) doc = tomlkit.d...
def test_doc_extras(): pdf = pdfium.PdfDocument(TestFiles.empty) assert (len(pdf) == 1) page = pdf[0] pdf = pdfium.PdfDocument.new() assert (len(pdf) == 0) sizes = [(50, 100), (100, 150), (150, 200), (200, 250)] for size in sizes: page = pdf.new_page(*size) for (i, (size, page)) ...
def test_sigma_dir(runner, mocker): mocker.patch('products.vmware_cb_response.CbResponse._authenticate') mocked_nested_process_search = mocker.patch('products.vmware_cb_response.CbResponse.nested_process_search') with runner.isolated_filesystem() as temp_dir: sigma_file_path1 = os.path.join(temp_dir...
def rearrange_dir(root_dir): image_dir = os.path.join(root_dir, 'images') label_dir = os.path.join(root_dir, 'labels') if (not os.path.exists(image_dir)): os.makedirs(image_dir) print((('Created' + image_dir) + '...')) if (not os.path.exists(label_dir)): os.makedirs(label_dir) ...
class RequestCacheMiddleware(HttpCacheMiddleware): _decorator_ignore_request_apikey def process_request(self, request, spider): if request.meta.get('dont_cache', False): return None if (not self.policy.should_cache_request(request)): request.meta['_dont_cache'] = True ...
class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase): def test_get_expected_feature_map_shapes_with_inception_v2(self): image_features = {'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32), 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32), 'Mixed_5c': tf.random_unif...
def run_sift_matching(img_dir, db_file, remove_exist=False): print('Running sift matching...') if (remove_exist and os.path.exists(db_file)): os.remove(db_file) cmd = ' feature_extractor --database_path {} --image_path {} --Imag...
def read_csv_cirrus(filename): with open(filename, 'r') as csvfile: csvreader = csvfile.read() csvreader = re.sub(' dB', '', csvreader) dialect = csv.Sniffer().sniff(csvreader, delimiters=',;') separator = dialect.delimiter decimal_sep = re.search('\\"\\d{2,3}(\\.|,)\\d{1,2}\...
class _ArchARM(Arch): NAME = 'ARM' INS_PTR = Reg('pc') STK_PTR = Reg('sp') _CSD = capstone.Cs(capstone.CS_ARCH_ARM, capstone.CS_MODE_ARM) nop_instruction = b'\x00\xf0 \xe3' class optypes(IntEnum): INVALID = arm_const.ARM_OP_INVALID REG = arm_const.ARM_OP_REG IMM = arm_con...
class InfoAction(actions.BaseAction): name = 'info' security = None def setup(self): return Globals.RET_CODE_OK def run(self): ret_code = super().run() if (ret_code & Globals.RET_CODE_ERR): return ret_code runtime_info = Globals.get_runtime_info() prin...
def make_optimizer(cfg, model): params = [] for (key, value) in model.named_parameters(): if (not value.requires_grad): continue lr = cfg.SOLVER.BASE_LR weight_decay = cfg.SOLVER.WEIGHT_DECAY if ('bias' in key): lr = (cfg.SOLVER.BASE_LR * cfg.SOLVER.BIAS_L...
class ListInput(Input, QtWidgets.QComboBox): def __init__(self, parameter, parent=None, **kwargs): super().__init__(parameter=parameter, parent=parent, **kwargs) self._stringChoices = None self.setEditable(False) def set_parameter(self, parameter): try: if (hasattr(pa...
class DeformRoIPoolingFunction(Function): def forward(ctx, input, rois, offset, spatial_scale, pooled_size, output_dim, no_trans, group_size=1, part_size=None, sample_per_part=4, trans_std=0.0): ctx.spatial_scale = spatial_scale ctx.no_trans = int(no_trans) ctx.output_dim = output_dim ...
def test_difflib_ops_to_text_edit_ops_insert(workspace): expected = {'range': {'start': {'line': 5, 'character': 0}, 'end': {'line': 5, 'character': 0}}, 'newText': 'are = "here"\nred = "here"\n'} new_document = create_document(workspace, 'many_changes_inlined.py') difflib_ops = ('insert', 5, 5, 6, 8) t...
def trainable_initial_state(batch_size, state_size, initializer=None, name='initial_state'): flat_state_size = nest.flatten(state_size) if (not initializer): flat_initializer = tuple((tf.zeros_initializer for _ in flat_state_size)) else: flat_initializer = tuple((tf.zeros_initializer for ini...
class Seq2SeqTSPredictionOutput(ModelOutput): loss: Optional[torch.FloatTensor] = None params: Optional[Tuple[torch.FloatTensor]] = None past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None decoder_hidden_states: Optional[Tuple[torch.FloatTensor]] = None decoder_attentions: Optional[Tup...
def _zip_equal(*iterables): try: first_size = len(iterables[0]) for (i, it) in enumerate(iterables[1:], 1): size = len(it) if (size != first_size): raise UnequalIterablesError(details=(first_size, i, size)) return zip(*iterables) except TypeError: ...
class MathSAT5Model(Model): def __init__(self, environment, msat_env): Model.__init__(self, environment) self.msat_env = msat_env self.converter = MSatConverter(environment, self.msat_env) self.msat_model = None msat_model = mathsat.msat_get_model(self.msat_env()) if ...
class DataProcessor(object): def get_train_examples(self, data_dir): raise NotImplementedError() def get_dev_examples(self, data_dir): raise NotImplementedError() def get_labels(self, data_dir): raise NotImplementedError() def _read_tsv(cls, input_file, quotechar=None): w...
def add_arg_scope(func): def func_with_args(*args, **kwargs): current_scope = current_arg_scope() current_args = kwargs key_func = arg_scope_func_key(func) if (key_func in current_scope): current_args = current_scope[key_func].copy() current_args.update(kwargs...
def create_holdings_chart(positions: QFDataFrame) -> LineChart: result = LineChart() positions = positions.copy().drop('cash', axis='columns') holdings = positions.apply((lambda x: np.sum((x != 0))), axis='columns') holdings_by_month = holdings.resample('1M').mean() holdings_decorator = DataElementD...
_tests('ecdh_test.json', 'ecdh_brainpoolP224r1_test.json', 'ecdh_brainpoolP256r1_test.json', 'ecdh_brainpoolP320r1_test.json', 'ecdh_brainpoolP384r1_test.json', 'ecdh_brainpoolP512r1_test.json', 'ecdh_secp224r1_test.json', 'ecdh_secp256k1_test.json', 'ecdh_secp256r1_test.json', 'ecdh_secp384r1_test.json', 'ecdh_secp521...