code
stringlengths
281
23.7M
class HeadersTests(unittest.TestCase): def setUp(self): self.headers = Headers([('Connection', 'Upgrade'), ('Server', 'websockets')]) def test_init(self): self.assertEqual(Headers(), Headers()) def test_init_from_kwargs(self): self.assertEqual(Headers(connection='Upgrade', server='we...
def run_private_blockchain(web3: Web3, eth_nodes: List[EthNodeDescription], base_datadir: str, log_dir: str, verbosity: str, genesis_description: GenesisDescription) -> Iterator[List[JSONRPCExecutor]]: password_path = os.path.join(base_datadir, 'pw') with open(password_path, 'w') as handler: handler.wri...
def test_async_subproc_command_output_handles_dev_null(): cmd = Command('arb', stdout='/dev/null', stderr='/dev/null') with cmd.output_handles() as (stdout, stderr): assert (stdout == subprocess.DEVNULL) assert (stderr == subprocess.DEVNULL) cmd = Command('arb', stderr='/dev/null') with ...
def _subprocess_transform(): communicate = (bytes('string', 'ascii'), bytes('string', 'ascii')) communicate_signature = 'def communicate(self, input=None, timeout=None)' args = ' self, args, bufsize=-1, executable=None, stdin=None, stdout=None, stderr=None,\n preexec_fn=None, close_fds=True, s...
class AudioFormat(): def __init__(self, channels: int, sample_size: int, sample_rate: int) -> None: self.channels = channels self.sample_size = sample_size self.sample_rate = sample_rate self.bytes_per_frame = ((sample_size // 8) * channels) self.bytes_per_second = (self.byte...
def integral_mini_interval_P_CDFmethod__min_piece(I, J, E): if (interval_intersection(I, J) is not None): raise ValueError('I and J should have a void intersection') if (not interval_subset(J, E)): raise ValueError('J should be included in E') if (not interval_subset(I, E)): raise Va...
def build_opt(net, opt_class, opt_kwargs, reg, reg_interval): opt_kwargs['lr'] = opt_kwargs.get('lr', 0.001) if ((reg not in [None, False]) and reg_interval): mb_ratio = (reg_interval / (reg_interval + 1.0)) opt_kwargs['lr'] *= mb_ratio if ('momentum' in opt_kwargs): opt_kwar...
def test_connection__subsequent_client_is_not_cached_when_credentials_none(): with patch('pynamodb.connection.Connection.session') as session_mock: session_mock.create_client.return_value._request_signer._credentials = None conn = Connection() assert conn.client conn.client s...
class DatasetIterater(object): def __init__(self, batches, batch_size, device): self.batch_size = batch_size self.batches = batches self.n_batches = (len(batches) // batch_size) self.residue = False if ((len(batches) % self.n_batches) != 0): self.residue = True ...
def get_latest_version(): env = dict(os.environ) env.pop('HATCH_ENV_ACTIVE', None) output = subprocess.check_output(['hatch', '--no-color', 'version'], env=env).decode('utf-8').strip() version = output.replace('dev', '') parts = list(map(int, version.split('.'))) (major, minor, patch) = parts[:S...
class change_filter(): def __init__(self, option: str, function: bool=False) -> None: self._option = option self._function = function change_filters.append(self) def validate(self) -> None: if ((self._option not in configdata.DATA) and (not configdata.is_valid_prefix(self._option...
def _add_comparison_operator_attributes_and_methods(comparison_op_cls): comparison_op_cls.target_guide = property(_target_guide) comparison_op_cls.has_target_guide = property(_has_target_guide) comparison_op_cls.set_target_guide = _set_target_guide comparison_op_cls.target_image = property(_target_image...
def load_annotations(descriptor_path: Path, min_object_area: float, category_mapping: Dict[(str, str)], category_no_for_id: Dict[(str, int)]) -> Dict[(str, List[Annotation])]: annotations: Dict[(str, List[Annotation])] = defaultdict(list) with open(descriptor_path) as file: reader = DictReader(file) ...
_interface(PairingAgentAPI.interface) class PairingAgent(): def __init__(self, server: AdvertisingAPI): self.server = server def Release(self) -> None: pass def RequestPinCode(self, device: ObjPath) -> Str: raise PairingRejected def DisplayPinCode(self, device: ObjPath, pincode: ...
def test_sequential_model_weight_init(): seq_model_cfg = [dict(type='FooConv1d', init_cfg=dict(type='Constant', layer='Conv1d', val=0.0, bias=1.0)), dict(type='FooConv2d', init_cfg=dict(type='Constant', layer='Conv2d', val=2.0, bias=3.0))] layers = [build_from_cfg(cfg, COMPONENTS) for cfg in seq_model_cfg] ...
class Version(): _VERSION_REGEX = SEMVER_REGEX def __init__(self, major: int, minor: int, patch: int, *, prerelease_token: str='rc', prerelease_revision: (int | None)=None, build_metadata: str='', tag_format: str='v{version}') -> None: self.major = major self.minor = minor self.patch = p...
class PKCS7SignatureBuilder(): def __init__(self, data: (bytes | None)=None, signers: list[tuple[(x509.Certificate, PKCS7PrivateKeyTypes, PKCS7HashTypes, ((padding.PSS | padding.PKCS1v15) | None))]]=[], additional_certs: list[x509.Certificate]=[]): self._data = data self._signers = signers s...
class WaitLoadBase(): def __init__(self, count=0, text='', initial=None, limit=3): super().__init__() self._label = Gtk.Label() self._label.set_use_markup(True) self._progress = Gtk.ProgressBar() self._progress.set_pulse_step(0.08) self.pulse = self._progress.pulse ...
def plot_confounding_effects(exp_sim, rat_sim, conf_coeff, save_root, fmt): rat_true = rat_sim rat_obsv = (rat_sim * exp_sim) (fig, ax) = plt.subplots(figsize=(3.5, 2.0)) title = 'Confounding Effects for {:.1f}.'.format(conf_coeff) ax.set_title(title, title_font) labels = (ax.get_xticklabels() +...
class MultiHeadAttentionLayer(nn.Sequential): def __init__(self, n_heads, embed_dim, feed_forward_hidden=512, normalization='batch'): super(MultiHeadAttentionLayer, self).__init__(SkipConnection(MultiHeadAttention(n_heads, input_dim=embed_dim, embed_dim=embed_dim)), Normalization(embed_dim, normalization), ...
def run_setup_py(cmd, pypath=None, path=None, data_stream=0, env=None): if (env is None): env = dict() for envname in os.environ: env[envname] = os.environ[envname] if (pypath is not None): env['PYTHONPATH'] = pypath if (path is not None): env['PATH'] = path i...
class TrainLogger(object): def __init__(self, log_steps, tensorboard_logdir=None): self._log_steps = log_steps self._tensorboard_writer = None if tensorboard_logdir: self._tensorboard_writer = tf.summary.FileWriter(tensorboard_logdir) self._cleanup() def log_info(self...
def write_to_version_file(filename, versions):
    """Render *versions* into the SHORT_VERSION_PY template and write it to *filename*.

    Args:
        filename: Path of the generated version file to (re)create.
        versions: Dict with at least a 'version' key; serialized as stable JSON
            (sorted keys, indent=1) and substituted into SHORT_VERSION_PY.

    Side effects: replaces *filename* on disk and prints a confirmation line.
    """
    # Remove any stale copy first; tolerate the file not existing yet
    # (first run), since open(..., 'w') below recreates it anyway.
    try:
        os.unlink(filename)
    except FileNotFoundError:
        pass
    contents = json.dumps(versions, sort_keys=True, indent=1, separators=(',', ': '))
    with open(filename, 'w') as f:
        f.write(SHORT_VERSION_PY % contents)
    print("set {} to '{}'".format(filename, versions['version']))
def getAWSOrganizationAccounts(data_path, account_name): organization_accounts_details = [] for items in os.walk(os.path.join(data_path, account_name, 'organizations', 'ou_tree')): for dir in items: if ((type(dir) != list) and (not dir.endswith('ou_tree'))): with open(os.path...
def EdgeHoldLoss(y_true, y_pred): y_pred2 = tf.sigmoid(y_pred) y_true_edge = tfLaplace(y_true) y_pred_edge = tfLaplace(y_pred2) y_pred_edge = logit(y_pred_edge) edge_loss = K.mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=y_true_edge, logits=y_pred_edge), axis=(- 1)) saliency_pos = 1.12 ...
class MeanSquaredError(Metric[torch.Tensor]): def __init__(self: TMeanSquaredError, *, multioutput: str='uniform_average', device: Optional[torch.device]=None) -> None: super().__init__(device=device) _mean_squared_error_param_check(multioutput) self.multioutput = multioutput self._a...
def main(config): svname = args.name if (svname is None): svname = 'pretrain-multi' if (args.tag is not None): svname += ('_' + args.tag) save_path = os.path.join('./save', svname) utils.ensure_path(save_path) utils.set_log_path(save_path) writer = SummaryWriter(os.path.join(...
_model('multilingual_transformer') class MultilingualTransformerModel(FairseqMultiModel): def __init__(self, encoders, decoders): super().__init__(encoders, decoders) def add_args(parser): TransformerModel.add_args(parser) parser.add_argument('--share-encoder-embeddings', action='store_t...
def print_test_case(tcase):
    """Pretty-print one test-case dict.

    Prints a header from the 'id' and 'name' entries, then each remaining
    entry on its own line; list values are delegated to print_list.
    """
    print('\n\nTest {}\t{}\n'.format(tcase['id'], tcase['name']))
    for key, value in tcase.items():
        if isinstance(value, list):
            print(key + ':')
            print_list(value)
        elif key not in ('id', 'name'):
            print(key + ': ' + str(value))
class setopt(option_base): description = 'set an option in setup.cfg or another config file' user_options = ([('command=', 'c', 'command to set an option for'), ('option=', 'o', 'option to set'), ('set-value=', 's', 'value of the option'), ('remove', 'r', 'remove (unset) the value')] + option_base.user_options)...
def loss_specific_parameters(parser): parser.add_argument('--loss_contrastive_pos_margin', default=0, type=float, help='positive margin for contrastive pairs.') parser.add_argument('--loss_contrastive_neg_margin', default=1, type=float, help='negative margin for contrastive pairs.') parser.add_argument('--l...
class BasePattern(object): type = None content = None name = None def __new__(cls, *args, **kwds): assert (cls is not BasePattern), 'Cannot instantiate BasePattern' return object.__new__(cls) def __repr__(self): args = [type_repr(self.type), self.content, self.name] w...
class SiameseNetwork(nn.Module): def __init__(self): super(SiameseNetwork, self).__init__() self.resnet = torchvision.models.resnet18(weights=None) self.resnet.conv1 = nn.Conv2d(1, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False) self.fc_in_features = self.resnet.fc...
class Trainer(): def __init__(self, args, model, optimizer, lr_scheduler, train_dataloader, eval_dataloader, external_dataloader, logger, accelerator, metric, label_list, tokenizer, from_checkpoint=None, test_dataloader=None): self.args = args self.model = model self.optimizer = optimizer ...
class With(_base_nodes.MultiLineWithElseBlockNode, _base_nodes.AssignTypeNode, _base_nodes.Statement): _astroid_fields = ('items', 'body') _other_other_fields = ('type_annotation',) _multi_line_block_fields = ('body',) def __init__(self, lineno: (int | None)=None, col_offset: (int | None)=None, parent: ...
def image_to_edge_size(image_size: Tuple[(int, int)], edge: str='short') -> int: edge = verify_str_arg(edge, 'edge', ('short', 'long', 'vert', 'horz')) if (edge == 'short'): return min(image_size) elif (edge == 'long'): return max(image_size) elif (edge == 'vert'): return image_s...
class HISAL6(FinTS3Segment): account = DataElementGroupField(type=Account3, _d='Kontoverbindung Auftraggeber') account_product = DataElementField(type='an', max_length=30, _d='Kontoproduktbezeichnung') currency = DataElementField(type='cur', _d='Kontowahrung') balance_booked = DataElementGroupField(type...
class CmdWear(MuxCommand): key = 'wear' help_category = 'clothing' def func(self): if (not self.args): self.caller.msg('Usage: wear <obj> [wear style]') return clothing = self.caller.search(self.arglist[0], candidates=self.caller.contents) wearstyle = True ...
def smooth_and_resample(image, isotropic_voxel_size_mm=None, shrink_factor=None, smoothing_sigma=None, interpolator=sitk.sitkLinear): if smoothing_sigma: if hasattr(smoothing_sigma, '__iter__'): smoothing_variance = [(i * i) for i in smoothing_sigma] else: smoothing_variance ...
def cached_per_instance(): def cache_fun(fun): argspec = inspect.getfullargspec(fun) arg_names = (argspec.args[1:] + argspec.kwonlyargs) kwargs_defaults = get_kwargs_defaults(argspec) cache = {} def cache_key(args, kwargs): return get_args_tuple(args, kwargs, arg_...
def test_stored_collapse_operators_and_times(): size = 10 a = qutip.destroy(size) H = qutip.num(size) state = qutip.basis(size, (size - 1)) times = np.linspace(0, 10, 100) c_ops = [a, a] result = mcsolve(H, state, times, c_ops, ntraj=3, options={'map': 'serial'}) assert (len(result.col_t...
def _generatetensors(dynmaps, threshold): Tensors = [] diff = [0.0] for n in range(len(dynmaps)): T = dynmaps[n] for m in range(1, n): T -= (Tensors[(n - m)] dynmaps[m]) Tensors.append(T) if (n > 1): diff.append((Tensors[(- 1)] - Tensors[(- 2)]).norm(...
class LimitedSizeQueue(queue.Queue): def get(self): try: return super().get(block=False) except queue.Empty: return [] def wait_empty(self, timeout=None): with self.not_full: if (self._qsize() == 0): return if (timeout is No...
class Quant_Linear(Module): def __init__(self, weight_bit, full_precision_flag=False): super(Quant_Linear, self).__init__() self.full_precision_flag = full_precision_flag self.weight_bit = weight_bit self.weight_function = AsymmetricQuantFunction.apply def __repr__(self): ...
class AddressBox(Form): def __init__(self, view, address): form_name = ('address_%s' % address.id) super().__init__(view, form_name) self.use_layout(FormLayout()) par = self.add_child(P(view, text=('%s: %s ' % (address.name, address.email_address)))) par.add_child(Button(self...
def resolve_environment(name): if ('point_goal1' in name): return 'PointGoal1' elif ('point_goal2' in name): return 'PointGoal2' elif ('car_goal1' in name): return 'CarGoal1' elif ('point_button1' in name): return 'PointButton1' elif ('point_push1' in name): r...
def bpftool_map_list(expected=None, ns=''): (_, maps) = bpftool('map show', JSON=True, ns=ns, fail=True) maps = [m for m in maps if ((m not in base_maps) and (m.get('name') not in base_map_names))] if (expected is not None): if (len(maps) != expected): fail(True, ('%d BPF maps loaded, ex...
_lr_scheduler('cosine', dataclass=CosineLRScheduleConfig) class CosineLRSchedule(FairseqLRScheduler): def __init__(self, cfg: CosineLRScheduleConfig, fairseq_optimizer): super().__init__(cfg, fairseq_optimizer) if (isinstance(cfg.lr, Collection) and (len(cfg.lr) > 1)): raise ValueError(f...
class Model(ABC): def __init__(self, test_batch_size: int, **kwargs): self.test_batch_size = test_batch_size def __call__(self, *args, **kwargs) -> Tuple[(np.ndarray, np.ndarray)]: return self.apply(*args, **kwargs) def provides(self) -> Set[str]: def type_(self) -> str: def train(se...
class PythonModuleDependency(Dependency): def require(self): if (not self.is_available): raise ModuleNotFoundError(str(self)) def is_available(self): return _util.module_exists(self.name) def status_message(self): if self.is_available: return 'INSTALLED {0!s}'...
class Response(object): def __init__(self, response, resource, chunk_size=8192, stream=False): self._response = response self._chunk_size = chunk_size self._count = 0 self._resource = resource self._stream = stream def headers(self): return self._response.headers ...
class CbamResNet(nn.Module): def __init__(self, channels, init_block_channels, bottleneck, in_channels=3, in_size=(224, 224), num_classes=1000): super(CbamResNet, self).__init__() self.in_size = in_size self.num_classes = num_classes self.features = nn.Sequential() self.featu...
class TerminalSessionTransport_getPeer(object): noisy = False def __init__(self, proto, chainedProtocol, avatar, width, height): self.proto = proto self.avatar = avatar self.chainedProtocol = chainedProtocol session = self.proto.session self.proto.makeConnection(_Glue(wri...
def find_xinit_users(): global _xinit_users if (_xinit_users is not None): return _xinit_users xorg_re = re.compile('/X(?:org|wayland)$') while True: users = [] xinits = [] for p in process_dict_iter(('exe', 'pid')): if p['exe'].endswith('/xinit'): ...
# NOTE(review): '_model' looks like a mangled decorator (probably '@register_model'
# from the timm model registry, with the '@' and line break lost in extraction) —
# as written this line is not valid Python; confirm against the original source.
# Builds an ECA-ResNet-101-D variant: Bottleneck blocks [3, 4, 23, 3], deep stem
# (stem_width=32), average-pool downsampling shortcuts, 'eca' attention layer,
# constructed via _create_resnet with pruned=True.
_model def ecaresnet101d_pruned(pretrained=False, **kwargs): model_args = dict(block=Bottleneck, layers=[3, 4, 23, 3], stem_width=32, stem_type='deep', avg_down=True, block_args=dict(attn_layer='eca'), **kwargs) return _create_resnet('ecaresnet101d_pruned', pretrained, pruned=True, **model_args)
class BaseConfigurator(): DISTANCE_MAPPING = {} def __init__(self, host, collection_params: dict, connection_params: dict): self.host = host self.collection_params = collection_params self.connection_params = connection_params def clean(self): raise NotImplementedError() ...
(params=[({}, {MeanFieldGroup: (None, {})}), ({}, {FullRankGroup: (None, {}), MeanFieldGroup: (['one'], {})}), ({}, {MeanFieldGroup: (['one'], {}), FullRankGroup: (['two', 'three'], {})}), ({}, {MeanFieldGroup: (['one'], {}), EmpiricalGroup: (['two', 'three'], {'size': 100})})], ids=(lambda t: ', '.join((f'{k.__name__}...
class ParallelQuark(Quark): def _worker_initializer(apk, core_library): global _quark _quark = Quark(apk, core_library) def _worker_analysis(rule_obj): _quark.quark_analysis = QuarkAnalysis() _quark.run(rule_obj) def to_raw_method(methodobject): return (method...
class TextworldGameEnv(gym.Env): metadata = {'render.modes': ['human', 'ansi']} def __init__(self, gamefile, ob_max_length, act_max_length, vocab=None, mode='word'): self.gamefile = gamefile self.game_env = textworld.play(gamefile) self.action_space = text_spaces.Char(max_length=act_max_...
def get_norm_act_layer(norm_layer, act_layer=None): assert isinstance(norm_layer, (type, str, types.FunctionType, functools.partial)) assert ((act_layer is None) or isinstance(act_layer, (type, str, types.FunctionType, functools.partial))) norm_act_kwargs = {} if isinstance(norm_layer, functools.partial...
def get_bottleneck_to_seq_model(rnn_dim, q2c: bool, res_rnn: bool, res_self_att: bool, seq_len=50): recurrent_layer = CudnnGru(rnn_dim, w_init=TruncatedNormal(stddev=0.05)) answer_encoder = BinaryAnswerEncoder() res_model = get_res_fc_seq_fc(model_rnn_dim=rnn_dim, rnn=res_rnn, self_att=res_self_att) que...
def flax_shard_checkpoint(params, max_shard_size='10GB'): max_shard_size = convert_file_size_to_int(max_shard_size) sharded_state_dicts = [] current_block = {} current_block_size = 0 total_size = 0 weights = flatten_dict(params, sep='/') for item in weights: weight_size = (weights[it...
class IHandler(object): def start(self): def stop(self): def select(self): def socket(self): def create_connection(self): def event_object(self): def lock_object(self): def rlock_object(self): def async_result(self): def spawn(self, func, *args, **kwargs): def dispatch_callba...
def test_empty_pretend_version_named(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.delenv('SETUPTOOLS_SCM_DEBUG') monkeypatch.setenv('SETUPTOOLS_SCM_PRETEND_VERSION', '1.23') monkeypatch.setenv('SETUPTOOLS_SCM_PRETEND_VERSION_FOR_MYSCM', '') p = tmp_path.joinpath('sub/package') ...
def ffmpeg_video_write(data, video_path, fps=1): assert (len(data.shape) == 4), f'input shape is not valid! Got {data.shape}!' (_, height, width, _) = data.shape os.makedirs(os.path.dirname(video_path), exist_ok=True) writer = ffmpeg.input('pipe:', framerate=fps, format='rawvideo', pix_fmt='rgb24', s='{...
def clean(): integrate_file_manager(clean=True) conf = get_config_path() if exists(conf): remove(conf) if exists(global_config): try: remove(global_config) except OSError: print(markup("Can't delete /etc/telegram-send.conf", 'red')) print(('Ple...
def rename_in_tree(path, in_list, out_list, excl_list, fileend_list, is_interactive): repl_mapping = list(zip(in_list, out_list)) for (root, dirs, files) in os.walk(path): print(('\ndir: %s\n' % root)) if any((fnmatch.fnmatch(root, excl) for excl in excl_list)): print(('%s skipped (e...
def one_of(strs: Union[(typing.Iterable[str], str)], caseless: bool=False, use_regex: bool=True, as_keyword: bool=False, *, useRegex: bool=True, asKeyword: bool=False) -> ParserElement: asKeyword = (asKeyword or as_keyword) useRegex = (useRegex and use_regex) if (isinstance(caseless, str_type) and __diag__....
class AppBase(object): def find(cls, app_name): module_name = ('%s.app' % app_name) module = try_import(module_name) if (module is None): return None try: app_class = get_class(module, cls) except AttributeError: app_class = None re...
def get_block_samples_mapping(block_dataset, title_dataset, data_prefix, num_epochs, max_num_samples, max_seq_length, seed, name, use_one_sent_docs=False): if (not num_epochs): if (not max_num_samples): raise ValueError('Need to specify either max_num_samples or num_epochs') num_epochs =...
class DEC_AE(nn.Module): def __init__(self, num_classes, num_features): super(DEC_AE, self).__init__() self.dropout = nn.Dropout(p=0.1) self.fc1 = nn.Linear((28 * 28), 500) self.fc2 = nn.Linear(500, 500) self.fc3 = nn.Linear(500, 2000) self.fc4 = nn.Linear(2000, num_f...
class TestTrainingExtensionsSpatialSvdCostCalculator(unittest.TestCase): def test_calculate_spatial_svd_cost(self): conv = nn.Conv2d(32, 64, kernel_size=5, padding=(2, 2)) layer = Layer(conv, 'conv', output_shape=[1, 64, 28, 28]) self.assertEqual((32 * 5), cc.SpatialSvdCostCalculator.calcula...
def all_reduce_op(tensor: torch.Tensor, op: torch.distributed.ReduceOp, after_op_func: Callable[([torch.Tensor], torch.Tensor)]=None) -> torch.Tensor: if is_distributed_training_run(): (tensor, orig_device) = convert_to_distributed_tensor(tensor) torch.distributed.all_reduce(tensor, op) if (...
_fixtures(WebFixture) def test_the_lifecycle_of_a_ui(web_fixture): def current_view_is_plugged_in(page): return (page.slot_contents['main_slot'].__class__ is Div) (UserInterface) class UserInterfaceStub(UserInterface): assembled = False def assemble(self, **ui_arguments): ...
_staging_test class ImageProcessorPushToHubTester(unittest.TestCase): def setUpClass(cls): cls._token = TOKEN HfFolder.save_token(TOKEN) def tearDownClass(cls): try: delete_repo(token=cls._token, repo_id='test-image-processor') except HTTPError: pass ...
('mmcv.__path__', [osp.join(osp.dirname(__file__), 'data/')]) def test_get_external_models(): os.environ.pop(ENV_MMCV_HOME, None) mmcv_home = osp.join(osp.dirname(__file__), 'data/model_zoo/mmcv_home/') os.environ[ENV_MMCV_HOME] = mmcv_home ext_urls = get_external_models() assert (ext_urls == {'trai...
.parametrize('input_value, expected_value', [('10000', '1E4'), ('1', '1E0'), ('1.0', '1E0'), ('1.1', '1.1E0'), ('1.11', '1.11E0'), ('1.110', '1.11E0'), ('1.001', '1.001E0'), ('1.00100', '1.001E0'), ('01.00100', '1.001E0'), ('101.00100', '1.01001E2'), ('00000', '0E0'), ('0', '0E0'), ('0.0', '0E0'), ('0.1', '1E-1'), ('0....
.functions def test_transform_column_with_incomplete_new_names(dataframe): df = dataframe.add_column('another', 10).add_column('column', 100).transform_columns(['another', 'column'], np.log10, new_column_names={'another': 'hello', 'fakecol': 'world'}) assert ('another' in df.columns) assert ('column' in df....
def export_cli(args): output_directory = args.output_directory verbose = args.verbose modality = args.modality plan_name = args.plan trial = args.trial list_available = args.list image_series = args.image uid_prefix = args.uid_prefix roiskip = args.roiskip input_path = args.input...
def min_k(dist, k): top_k_indices = [] top_k_dists = [] for i in range(k): base_idx = np.arange(dist.shape[0]) argmin_idx = dist.argmin(axis=(- 1)) top_k_indices.append(argmin_idx) top_k_dists.append(dist[(base_idx, argmin_idx)]) dist[(base_idx, argmin_idx)] = np.floa...
def test_requirement_source_disable_pip_editable_skip(req_file): source = _init_requirement([(req_file(), '-e file:flask.py#egg=flask==2.0.1')], disable_pip=True, no_deps=True, skip_editable=True) specs = list(source.collect()) assert (SkippedDependency(name='flask', skip_reason='requirement marked as edita...
class GCMCipher(AEADCipher): PYTHON = True NONCE_LENGTH = 12 TAG_LENGTH = 16 def setup(self): self.cipher = self.CIPHER.new(self.key) self.hkey = [] x = int.from_bytes(self.cipher.encrypt(0), 'big') for i in range(128): self.hkey.insert(0, x) x = (...
_on_failure .parametrize('enable_rest_api', [True]) def test_get_many_notifications(client, api_server_test_instance): total_notifications = 3 for _ in range(total_notifications): create_notification(api_server_test_instance) response = client.get(notifications_endpoint) assert (len(response.get...
def migrate_stream_options(stream_obj): stream_dict = stream_obj.from_json() for b in stream_dict: model_class = get_model_by_string(b['model_name']) options = _get_default_options(model_class) options.update(b['options']) b['options'] = options return StreamObject(stream_dic...
def main(): parser = argparse.ArgumentParser() parser.add_argument('--model_name_or_path', type=str, help="Transformers' model name or path") parser.add_argument('--pooler', type=str, choices=['cls', 'cls_before_pooler', 'avg', 'avg_top2', 'avg_first_last'], default='cls_before_pooler', help='Which pooler t...
def output_parent_function_graph(rule_classification_data_bundle): (report_dict, reference_dict) = rule_classification_data_bundle identifier_dict = {parent: f'p{index}' for (index, parent) in enumerate(report_dict.keys())} dot = Digraph(**_GRAPH_SETTINGS) for (parent, identifier) in identifier_dict.ite...
class CustomWav2Vec2Model(nn.Module): def __init__(self, hidden_states, dropout_rate=0.2, **kwargs): super(CustomWav2Vec2Model, self).__init__(**kwargs) pretrained_name = 'facebook/wav2vec2-base-960h' self.model = Wav2Vec2ForCTC.from_pretrained(pretrained_name, vocab_size=hidden_states, igno...
class MountPoint(Module): def __init__(self, path, _attrs_cache=None): self.path = path self._attrs_cache = _attrs_cache super().__init__() def _iter_mountpoints(cls): raise NotImplementedError def exists(self): return bool(self._attrs) def _attrs(self): i...
def _insert_version(lines): (first_line, rest) = _first_and_rest(lines) date = datetime.datetime.now() gitCommand = ('git', 'rev-list', 'HEAD', '--count', '--after="{} days+%Y-%m-%dT23:59"'.format(date.day), '--', args.outfile) numberOfCommitsInMonth = (1 + int(subprocess.Popen(gitCommand, stdout=subpro...
class SplitPathTest(PathManipulationTestBase): def test_empty_path(self): self.assertEqual(('', ''), self.filesystem.splitpath('')) def test_no_separators(self): self.assertEqual(('', 'ab'), self.filesystem.splitpath('ab')) def test_slashes_do_not_split(self): self.assertEqual(('', '...
.skipif(GIT_NOT_INSTALLLED, reason='These tests requires git cli') class TestSystemGit(): def test_clone_success(self, tmp_path: Path, temp_repo: TempRepoFixture) -> None: target_dir = (tmp_path / 'test-repo') stdout = SystemGit.clone(temp_repo.path.as_uri(), target_dir) assert re.search("Cl...
('randovania.interface_common.options.Options._save_to_disk', autospec=True) def test_single_save_with_nested_context_manager(mock_save_to_disk: MagicMock, option: Options): option._dark_mode = False with option: option.dark_mode = True with option: pass mock_save_to_disk.assert_...
def test_windows(runner, path_rgb_byte_tif): result = runner.invoke(main_group, ['blocks', path_rgb_byte_tif]) assert (result.exit_code == 0) fc = json.loads(result.output) with rasterio.open(path_rgb_byte_tif) as src: block_windows = tuple(src.block_windows()) actual_first = fc['feature...
class SliderWrapper(uiawrapper.UIAWrapper): _control_types = ['Slider'] has_title = False def __init__(self, elem): super(SliderWrapper, self).__init__(elem) def min_value(self): return self.iface_range_value.CurrentMinimum def max_value(self): return self.iface_range_value.C...
def test_argument() -> None: arg = argument('foo', 'Foo') assert (arg.description == 'Foo') assert arg.is_required() assert (not arg.is_list()) assert (arg.default is None) arg = argument('foo', 'Foo', optional=True, default='bar') assert (not arg.is_required()) assert (not arg.is_list()...
class ResnetGenerator(nn.Module): def __init__(self, input_nc, output_nc, ngf, norm_type, act_type='selu', use_dropout=False, n_blocks=2, padding_type='reflect'): assert (n_blocks >= 0) super(ResnetGenerator, self).__init__() self.name = 'resnet' self.input_nc = input_nc self...
class CtypesPointer(CtypesType): def __init__(self, destination, qualifiers): self.destination = destination def get_required_type_names(self): if self.destination: return self.destination.get_required_type_names() else: return () def visit(self, visitor): ...
class DQN(VanillaDQN): def __init__(self, cfg): super().__init__(cfg) self.Q_net_target = [None] self.Q_net_target[0] = self.createNN(cfg['env']['input_type']).to(self.device) self.Q_net_target[0].load_state_dict(self.Q_net[0].state_dict()) self.Q_net_target[0].eval() def...
class DataLoader2ConsistencyTest(TestCase): def _get_no_reading_service(): return None def _get_mp_reading_service(): return MultiProcessingReadingService(num_workers=2) def _get_in_process_reading_service(): return InProcessReadingService() def _collect_data(self, datapipe, read...
('mmseg.apis.multi_gpu_test', multi_gpu_test) def test_dist_eval_hook(): with pytest.raises(TypeError): test_dataset = ExampleModel() data_loader = [DataLoader(test_dataset, batch_size=1, sampler=None, num_worker=0, shuffle=False)] DistEvalHook(data_loader) test_dataset = ExampleDataset(...