code
stringlengths
281
23.7M
def ql_syscall_connect_flags(ql: Qiling, pid, coid, mask, bits, *args, **kw): assert (pid == 0), 'Is it possible to change the connection flags of another process?' assert (coid in ql.os.connections), 'Connection Id must exist in connections mapping' assert (mask == 1), 'Is the mask is always FD_CLOEXEC?' ...
def put_actions(name: str, *, label: str='', buttons: List[Union[(Dict[(str, Any)], Tuple, List, str)]]=None, help_text: str=None, scope: str=None, position: int=OutputPosition.BOTTOM) -> Output: from pywebio.input import actions check_dom_name_value(name, 'pin `name`') single_input_return = actions(name=na...
def train(args, train_dataset, model, tokenizer, teacher=None): if (args.local_rank in [(- 1), 0]): tb_writer = SummaryWriter(log_dir=args.output_dir) args.train_batch_size = (args.per_gpu_train_batch_size * max(1, args.n_gpu)) train_sampler = (RandomSampler(train_dataset) if (args.local_rank == (- ...
class TrainerX(SimpleTrainer): def run_epoch(self): self.set_model_mode('train') losses = MetricMeter() batch_time = AverageMeter() data_time = AverageMeter() self.num_batches = len(self.train_loader_x) end = time.time() for (self.batch_idx, batch) in enumerat...
def handle_offchain_secretreveal(target_state: TargetTransferState, state_change: ReceiveSecretReveal, channel_state: NettingChannelState, pseudo_random_generator: random.Random, block_number: BlockNumber) -> TransitionResult[TargetTransferState]: valid_secret = is_valid_secret_reveal(state_change=state_change, tra...
def test_format_returns_text_edit_per_line(workspace): single_space_indent = 'def wow():\n log("x")\n log("hi")' doc = Document(DOC_URI, workspace, single_space_indent) res = pylsp_format_document(workspace, doc, options=None) assert (len(res) == 4) assert (res[0]['newText'] == '') assert (res[1...
def _check_health_group(filename, group_content, health_is_called): has_error = False domain = 'enterprise-attack' if ('domain' in group_content): if (not (group_content['domain'].lower() in DETTECT_DOMAIN_SUPPORT)): has_error = _print_error_msg(('[!] INVALID domain value in group admini...
def test_base_recognizer(): tmp_dir = tempfile.TemporaryDirectory() dict_file = osp.join(tmp_dir.name, 'fake_chars.txt') _create_dummy_dict_file(dict_file) label_convertor = dict(type='CTCConvertor', dict_file=dict_file, with_unknown=False) preprocessor = None backbone = dict(type='VeryDeepVgg',...
def detect(first512): if (len(first512) < size_record_header): return False (label, version, size_record, size_payload, hash, type) = unpack('>4s4sQQ20s20s', first512[:size_record_header]) if ((label == b'YAFF') and (version == b'0000') and (type.strip() == b'trace')): return True return...
class RandomRotate(object): def __init__(self, degree): self.degree = degree def __call__(self, sample): img = sample['image'] mask = sample['label'] rotate_degree = random.uniform(((- 1) * self.degree), self.degree) img = img.rotate(rotate_degree, Image.BILINEAR) ...
class TestJSHandle(BaseTestCase): async def test_get_property(self): handle1 = (await self.page.evaluateHandle('() => ({one: 1, two: 2, three: 3})')) handle2 = (await handle1.getProperty('two')) self.assertEqual((await handle2.jsonValue()), 2) async def test_json_value(self): han...
def test_perform_per_layer_analysis_by_disabling_quant_ops(cpu_session): (sim, quant_analyzer) = get_quantsim_and_quantanalyzer(cpu_session) try: quant_analyzer._perform_per_op_analysis_by_disabling_quant_ops(sim, results_dir='./tmp/') assert os.path.isfile('./tmp/per_op_quant_disabled.html') ...
def test_for_with_continue_in_if_else() -> None: src = '\n for i in range(10):\n if i > 5:\n print(i)\n else:\n continue\n i -= 1\n ' cfg = build_cfg(src) expected_blocks = [['range(10)'], ['i'], ['i > 5'], ['print(i)'], ['i -= 1']...
def test_main_no_spec(capsys: pytest.CaptureFixture[str]) -> None: with pytest.raises(SystemExit) as excinfo: find_missing_reqs.main(arguments=[]) expected_code = 2 assert (excinfo.value.code == expected_code) err = capsys.readouterr().err assert err.endswith('error: no source files or direc...
.parametrize('proc_name', ['s1', 's2', 's3']) def test_runtime_error_on_start_fail(tcp_port, proc_name, xprocess): restart = False class Starter(ProcessStarter): pattern = 'I will not be matched!' args = [sys.executable, server_path, tcp_port, '--no-children', '--ignore-sigterm'] with pytest...
def create_index_file(html_root: Path, builder: str) -> None: pep_zero_file = ('pep-0000.html' if (builder == 'html') else 'pep-0000/index.html') try: pep_zero_text = html_root.joinpath(pep_zero_file).read_text(encoding='utf-8') except FileNotFoundError: return None if (builder == 'dirht...
_module() class DAHead(BaseDecodeHead): def __init__(self, pam_channels, **kwargs): super(DAHead, self).__init__(**kwargs) self.pam_channels = pam_channels self.pam_in_conv = ConvModule(self.in_channels, self.channels, 3, padding=1, conv_cfg=self.conv_cfg, norm_cfg=self.norm_cfg, act_cfg=sel...
def fold_given_batch_norms(model: tf.keras.Model, layer_pairs: List[PairType]) -> Optional[tf.keras.Model]: conv_bn_paris = [] bn_conv_pairs = [] def is_batchnorm(layer: tf.keras.layers.Layer) -> bool: if isinstance(layer, QcQuantizeWrapper): layer = layer._layer_to_wrap return i...
def antlrConverter(antlrGrammarTree): pyparsingRules = {} antlrTokens = {} for antlrToken in antlrGrammarTree.tokens: antlrTokens[antlrToken.token_ref] = antlrToken.lit for (antlrTokenName, antlrToken) in list(antlrTokens.items()): pyparsingRules[antlrTokenName] = Literal(antlrToken) ...
def choose_conv_method(in1, in2, mode='full', measure=False): volume = cp.asarray(in1) kernel = cp.asarray(in2) if measure: times = {} for method in ('fft', 'direct'): times[method] = _timeit_fast((lambda : convolve(volume, kernel, mode=mode, method=method))) chosen_metho...
class XLNetTokenizer(PreTrainedTokenizer): vocab_files_names = VOCAB_FILES_NAMES pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES padding_side = 'left' def __init__(self, vocab_file, do_lower_case=False, remove_space=True, keep_ac...
class Transformer_Reattention(nn.Module): def __init__(self, dim, depth, heads, dim_head, mlp_dim=1024, dropout=0.0, num_patches=128): super().__init__() self.layers = nn.ModuleList([]) for _ in range(depth): self.layers.append(nn.ModuleList([PreNorm(dim, ReAttention(dim, heads=h...
def main(): parser = ArgumentParser(description='COCO Evaluation') parser.add_argument('result', help='result file path') parser.add_argument('--ann', help='annotation file path') parser.add_argument('--types', type=str, nargs='+', choices=['proposal_fast', 'proposal', 'bbox', 'segm', 'keypoint'], defau...
def test_gdalversion_class_at_least(): assert GDALVersion(2, 1).at_least(GDALVersion(1, 9)) assert GDALVersion(2, 1).at_least((1, 9)) assert GDALVersion(2, 1).at_least('1.9') assert (not GDALVersion(2, 1).at_least(GDALVersion(2, 2))) assert (not GDALVersion(2, 1).at_least((2, 2))) assert (not GD...
class Indenter(PostLex, ABC): paren_level: int indent_level: List[int] def __init__(self) -> None: self.paren_level = 0 self.indent_level = [0] assert (self.tab_len > 0) def handle_NL(self, token: Token) -> Iterator[Token]: if (self.paren_level > 0): return ...
def check_model_type_doc_match(): model_doc_folder = (Path(PATH_TO_DOC) / 'model_doc') model_docs = [m.stem for m in model_doc_folder.glob('*.mdx')] model_types = list(transformers.models.auto.configuration_auto.MODEL_NAMES_MAPPING.keys()) model_types = [(MODEL_TYPE_TO_DOC_MAPPING[m] if (m in MODEL_TYPE...
class FilesystemStorage(StoragePlugin): name = 'filesystem' PATH_BACKEND: type[pathlib.Path] = pathlib.Path def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) def get_lock(self, path: (str | None)=None) -> filelock.FileLock: if (path is None): ...
class TestListAttribute(): def test_roundtrip_untyped(self) -> None: string_list_attribute = ListAttribute() values = [None, 'foo', '', 42, True, b'foo', {42, 43}, {42.5, 43.5}, {42, 43.5}, {'foo', 'bar'}, {b'foo', b'bar'}, {'foo': 'bar'}, ['foo', 'bar']] serialized = string_list_attribute.s...
def make_typeddict(cls_name: str, attrs: Dict[(str, type)], total: bool=True, bases: List=[]) -> TypedDictType: globs = {'TypedDict': TypedDict} lines = [] bases_snippet = ', '.join((f'_base{ix}' for ix in range(len(bases)))) for (ix, base) in enumerate(bases): globs[f'_base{ix}'] = base if ...
def build_unique_dict(controls): name_control_map = UniqueDict() text_ctrls = [ctrl_ for ctrl_ in controls if (ctrl_.can_be_label and ctrl_.is_visible() and ctrl_.window_text())] for ctrl in controls: ctrl_names = get_control_names(ctrl, controls, text_ctrls) for name in ctrl_names: ...
class MultiscaleDiscriminator(nn.Module): def __init__(self, input_nc, ndf=64, norm_type='batch', mode='CNA', num_D=3, n_layers=3, getIntermFeat=False): super(MultiscaleDiscriminator, self).__init__() self.num_D = num_D self.getIntermFeat = getIntermFeat for i in range(num_D): ...
def diguipd(k, sample, R): sup = 0 m = [[(- 1)] for i in range(len(sample))] ll = 0 i = 0 while (i < len(k)): l = 0 for j in range((len(sample) - 1), (- 1), (- 1)): if ((j == 0) and (k[i] == sample[j])): m[j][ll] = i break elif ...
def test_return_padded_repr(): node_feats = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [9, 10, 11], [11, 11.1, 12.4], [18, 11.1, 22.4], [24, 15.31, 18.4], [16, 10.1, 17.4]]) graph_ids = np.array([0, 0, 0, 1, 1, 2, 2, 2]) edges = {'asingle': np.array([[0, 1, 7, 6, 3, 4], [1, 2, 6, 5, 4, 3]]), 'bdouble': np.a...
class PeopleList(LoginRequiredMixin, ListView): template_name = 'dictionary/list/people_list.html' paginate_by = 15 tab = None tabs = {'following': gettext_lazy('following list'), 'blocked': gettext_lazy('blocked list')} def get_queryset(self): queryset = getattr(self, self.tab)() if...
def pytest_addoption(parser): parser.addoption('--ip', action='store', default=None, help='run against device on given ip') parser.addoption('--username', action='store', default=None, help='authentication username') parser.addoption('--password', action='store', default=None, help='authentication password'...
(scope='module') def test_image_small_mid_atlantic_K_L(test_area_tiny_eqc_sphere): arr = xr.DataArray(_get_fake_da(((- 80) + 273.15), (40 + 273.15), (test_area_tiny_eqc_sphere.shape + (1,))), dims=('y', 'x', 'bands'), attrs={'name': 'test-small-mid-atlantic', 'start_time': datetime.datetime(1985, 8, 13, 13, 0), 'ar...
def annualise_total_return(total_return: float, period_length_in_years: float, returns_type: type) -> float: assert issubclass(returns_type, ReturnsSeries) annualised_return = None if issubclass(returns_type, SimpleReturnsSeries): annualised_return = (pow((1 + total_return), (1 / period_length_in_ye...
class _TestAMP(TwistedTestCase): def setUp(self): super(_TestAMP, self).setUp() self.account = mommy.make('accounts.AccountDB', id=1) self.server = server.Evennia(MagicMock()) self.server.sessions.data_in = MagicMock() self.server.sessions.data_out = MagicMock() self....
class TestNoselikeTestAttribute(): def test_module_with_global_test(self, pytester: Pytester) -> None: pytester.makepyfile('\n __test__ = False\n def test_hello():\n pass\n ') reprec = pytester.inline_run() assert (not reprec.getfailedcollections()...
def export_scores(c, test_img, scores, threshold): image_dirs = os.path.join(OUT_DIR, c.model, ('sc_images_' + datetime.datetime.now().strftime('%Y-%m-%d-%H:%M:%S'))) if (not os.path.isdir(image_dirs)): print('Exporting scores...') os.makedirs(image_dirs, exist_ok=True) num = len(test_im...
class install(Command): description = 'install everything from build directory' user_options = [('prefix=', None, 'installation prefix'), ('exec-prefix=', None, '(Unix only) prefix for platform-specific files'), ('home=', None, '(Unix only) home directory to install under'), ('install-base=', None, 'base instal...
def test_create_binst_graph(): (cxns, signature) = _manually_make_test_cbloq_cxns() binst1 = cxns[2].left.binst binst2 = cxns[2].right.binst binst_graph = _create_binst_graph(cxns) assert nx.is_isomorphic(binst_graph, CompositeBloq(cxns, signature)._binst_graph) binst_generations = list(nx.topol...
class ProcessMonitor(): def __init__(self): self.proclist = dict() self.running = False def procstat(self): c = ['cat /proc/[1-9]*/stat 2>/dev/null'] process = Popen(c, shell=True, stdout=PIPE) running = dict() for line in process.stdout: data = ascii(...
def simplify_links(n, costs, renewable_config, hvdc_as_lines, config_lines, config_links, output, exclude_carriers=[], aggregation_strategies=dict()): logger.info('Simplifying connected link components') if n.links.empty: with open(output.connection_costs, 'w') as fp: pass return (n,...
class PreprocessImage(ObservationWrapper): def __init__(self, env, height=64, width=64, grayscale=True, crop=(lambda img: img)): super(PreprocessImage, self).__init__(env) self.img_size = (height, width) self.grayscale = grayscale self.crop = crop n_colors = (1 if self.graysc...
class BaseAgent(ExtendedModule): def __init__(self, *args, **kwargs): super(BaseAgent, self).__init__(*args, **kwargs) self._device_ids = None self._be_data_parallel = False self._tmp_attrs = {} self.obs_processor = None self.obs_rms = None self.rew_rms = None...
def test_binder_install(): class ModuleA(Module): def configure(self, binder): binder.bind(str, to='hello world') class ModuleB(Module): def configure(self, binder): binder.install(ModuleA()) injector = Injector([ModuleB()]) assert (injector.get(str) == 'hello wor...
class HakushHsrCharacterSkillTree(Struct): Anchor: str DefaultUnlock: bool Icon: str LevelUpSkillID: List[int] MaterialList: List[Union[(HakushHsrMaterial, None)]] MaxLevel: int ParamList: List[float] PointID: int PointName: str PointDesc: str PointTriggerKey: int PointTy...
class AttrVI_ATTR_FILE_APPEND_EN(BooleanAttribute): resources = [(constants.InterfaceType.gpib, 'INSTR'), (constants.InterfaceType.gpib, 'INTFC'), (constants.InterfaceType.asrl, 'INSTR'), (constants.InterfaceType.tcpip, 'INSTR'), (constants.InterfaceType.tcpip, 'SOCKET'), (constants.InterfaceType.usb, 'INSTR'), (co...
class MainWindow(QMainWindow): def __init__(self, parent=None): super(MainWindow, self).__init__(parent) self.createMenu() self.completingTextEdit = TextEdit() self.completer = QCompleter(self) self.completer.setModel(self.modelFromFile(':/resources/wordlist.txt')) se...
def default_zero_weight_decay_condition(module_name, module, parameter_name, parameter): del module_name, parameter return (parameter_name.endswith('bias') or isinstance(module, (nn.BatchNorm1d, nn.LayerNorm, nn.InstanceNorm1d, rtdl.CLSToken, rtdl.NumericalFeatureTokenizer, rtdl.CategoricalFeatureTokenizer, Per...
class Receiver(QDialog): def __init__(self, parent=None): super(Receiver, self).__init__(parent) self.statusLabel = QLabel('Listening for broadcasted messages') quitButton = QPushButton('&Quit') self.udpSocket = QUdpSocket(self) self.udpSocket.bind(45454) self.udpSock...
class DatasetMapperTTA(): def __init__(self, min_sizes: List[int], max_size: int, flip: bool): self.min_sizes = min_sizes self.max_size = max_size self.flip = flip def from_config(cls, cfg): return {'min_sizes': cfg.TEST.AUG.MIN_SIZES, 'max_size': cfg.TEST.AUG.MAX_SIZE, 'flip': c...
def add_target(domain): for word in wordlist: patterns = [word] if args.alt: probes = ['dev', 'prod', 'stg', 'qa', 'uat', 'api', 'alpha', 'beta', 'cms', 'test', 'internal', 'staging', 'origin', 'stage'] for probe in probes: if (probe not in word): ...
def evaluate_model(model, generator, save_path, score_threshold, iou_threshold=0.5, max_detections=100, diameter_threshold=0.1): (average_precisions, add_metric, add_s_metric, metric_5cm_5degree, translation_diff_metric, rotation_diff_metric, metric_2d_projection, mixed_add_and_add_s_metric, average_point_distance_...
def min_sigma(): global sequence_num global sigmasize global sigma global list counter = {'a': 0, 'b': 0, 'c': 0, 'd': 0, 'e': 0, 'f': 0, 'g': 0, 'h': 0, 'i': 0, 'j': 0, 'k': 0, 'l': 0, 'm': 0, 'n': 0, 'o': 0, 'p': 0, 'q': 0, 'r': 0, 's': 0, 't': 0, 'u': 0, 'v': 0, 'w': 0, 'x': 0, 'y': 0, 'z': 0} ...
class Inference(): def __init__(self, op, approx, tf, **kwargs): self.hist = np.asarray(()) self.objective = op(approx, **kwargs)(tf) self.state = None approx = property((lambda self: self.objective.approx)) def _maybe_score(self, score): returns_loss = self.objective.op.retu...
class FileDownload(Response): chunk_size = 4096 def __init__(self, a_file): self.file = a_file super().__init__(app_iter=self, conditional_response=True) self.content_type = (self.file.mime_type if self.file.mime_type else None) self.charset = (self.file.encoding if self.file.enc...
def transform_index_expr(builder: IRBuilder, expr: IndexExpr) -> Value: index = expr.index base_type = builder.node_type(expr.base) is_list = is_list_rprimitive(base_type) can_borrow_base = (is_list and is_borrow_friendly_expr(builder, index)) base = builder.accept(expr.base, can_borrow=can_borrow_b...
class closeable_response(): closeable_response = None def __init__(self, fp, headers, url, code, msg): self._set_fp(fp) self._headers = headers self._url = url self.code = code self.msg = msg def _set_fp(self, fp): self.fp = fp self.read = self.fp.read...
class VGGTrunk(nn.Module): def __init__(self): super(VGGTrunk, self).__init__() def _make_layers(self, batch_norm=True): layers = [] in_channels = self.in_channels for tup in self.cfg: assert (len(tup) == 2) (out, dilation) = tup sz = self.conv...
def get_quad_operator(operator, hbar=1.0): quad_operator = QuadOperator() if isinstance(operator, BosonOperator): for (term, coefficient) in operator.terms.items(): tmp = QuadOperator('', coefficient) for (i, d) in term: tmp *= ((1.0 / numpy.sqrt((2.0 * hbar))) * ...
def npairs_loss(labels, embeddings_anchor, embeddings_positive, reg_lambda=0.003, print_losses=False): reg_anchor = math_ops.reduce_mean(math_ops.reduce_sum(math_ops.square(embeddings_anchor), 1)) reg_positive = math_ops.reduce_mean(math_ops.reduce_sum(math_ops.square(embeddings_positive), 1)) l2loss = math...
def modify_model_bn_mutable(model: tf.keras.Model): for layer in model.layers: if isinstance(layer, tf.keras.layers.BatchNormalization): momentum = layer.momentum bn_momentum_var = tf.Variable(momentum, trainable=False, name=(layer.name + _BN_MOMENTUM_NAME)) layer.momentu...
class SparseConvTranspose2d(SparseConvolution): def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, indice_key=None): super(SparseConvTranspose2d, self).__init__(2, in_channels, out_channels, kernel_size, stride, padding, dilation, groups, bias, t...
class TestSetInputFocus(EndianTest): def setUp(self): self.req_args_0 = {'focus': , 'revert_to': 2, 'time': } self.req_bin_0 = b'*\x02\x00\x03S\xa5m\xe7}\xfa (' def testPackRequest0(self): bin = request.SetInputFocus._request.to_binary(*(), **self.req_args_0) self.assertBinaryEqu...
def create_data(source_sents, target_sents): (de2idx, idx2de) = load_de_vocab() (en2idx, idx2en) = load_en_vocab() (x_list, y_list, Sources, Targets) = ([], [], [], []) for (source_sent, target_sent) in zip(source_sents, target_sents): x = [de2idx.get(word, 1) for word in (source_sent + u' </S>'...
def record_tabular_misc_stat(key, values):
    """Record summary statistics of *values* under keys derived from *key*.

    Logs the average, standard deviation, median, minimum, and maximum of
    *values* via ``record_tabular``, appending the statistic name to *key*
    (e.g. ``key + 'Average'``).
    """
    # One (suffix, reducer) pair per logged statistic; iterate instead of
    # repeating five near-identical call lines.
    reducers = (
        ('Average', np.average),
        ('Std', np.std),
        ('Median', np.median),
        ('Min', np.amin),
        ('Max', np.amax),
    )
    for suffix, reduce_fn in reducers:
        record_tabular(key + suffix, reduce_fn(values))
class JobOfferListCreateAPIView(APIView): def get(self, request): jobs = JobOffer.objects.filter(available=True) serializer = JobOfferSerializer(jobs, many=True) return Response(serializer.data) def post(self, request): serializer = JobOfferSerializer(data=request.data) i...
def get_viirs_sdr__1229(base_dir=None, channels=('I01', 'I02', 'I03', 'I04', 'I05', 'M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', 'M09', 'M10', 'M11', 'M12', 'M13', 'M14', 'M15', 'M16', 'DNB'), granules=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)): base_dir = (base_dir or config.get('demo_data_dir', '.')) subdir ...
class BoundFunction(torch.autograd.Function): def forward(ctx, x, lower_bound, upper_bound): ctx.save_for_backward(x, torch.tensor(lower_bound).to(x.device), torch.tensor(upper_bound).to(x.device)) return bound_fwd(x, lower_bound, upper_bound) def backward(ctx, grad_output): (x, lower_bo...
def test_object(accum): retort = Retort(recipe=[accum]) loader = retort.get_loader(ExampleObject) assert (loader({'field1': 1, 'field2': 1}) == ExampleObject(field1=1, field2=1)) dumper = retort.get_dumper(ExampleObject) assert (dumper(ExampleObject(field1=1, field2=1)) == {'field1': 1, 'field2': 1}...
class ViewProviderAsmBase(object): def __init__(self, vobj): vobj.Visibility = False self.attach(vobj) vobj.Proxy = self def canReplaceObject(self, _old, _new): return False def replaceObject(self, _old, _new): return False def canReorderObject(self, _obj, _before...
def _check_errors(response): errors = {'02': 'Command does not exist or is not executable.', '03': 'Register number does not exist.', '04': 'Out of setpoint range.', '05': 'Out of data number range.', '06': 'Executed monitor without specifying what to monitor.', '08': 'Illegal parameter is set.', '42': 'Sum does no...
_serializable class TFResNetMainLayer(tf.keras.layers.Layer): config_class = ResNetConfig def __init__(self, config: ResNetConfig, **kwargs) -> None: super().__init__(**kwargs) self.config = config self.embedder = TFResNetEmbeddings(config, name='embedder') self.encoder = TFResNe...
class HelpTextsTest(TestCase): def test_help_ndarray(self): def func(arr: NDArray[(Shape['2, 2'], Int)]): ... help_text = pydoc.render_doc(func) self.assertIn("arr: NDArray[Shape['2, 2'], Int]", help_text) self.assertEqual('nptyping.ndarray', NDArray.__module__) def t...
def extract_stations(fns): import io import sys from pyrocko.model import Station from pyrocko.guts import dump_all stations = {} for fn in fns: sta_name = os.path.splitext(fn)[1].lstrip('.') if (sta_name in stations): logger.warning('Cube %s already in list!', sta_na...
class GetScreenSize(rq.ReplyRequest): _request = rq.Struct(rq.Card8('opcode'), rq.Opcode(3), rq.RequestLength(), rq.Window('window'), rq.Card32('screen')) _reply = rq.Struct(rq.ReplyCode(), rq.Pad(1), rq.Card16('sequence_number'), rq.Card32('length'), rq.Card32('width'), rq.Card32('height'), rq.Window('window')...
def apply_memit_to_model(model: AutoModelForCausalLM, tok: AutoTokenizer, requests: List[Dict], hparams: MEMITHyperParams, copy=False, return_orig_weights=False, cache_template: Optional[str]=None) -> Tuple[(AutoModelForCausalLM, Dict[(str, Any)])]: weights_copy = {} if copy: model = deepcopy(model) ...
class TestOptimizerWrapper(unittest.TestCase): def test_load_state_dict(self) -> None: param_1_t = torch.tensor([1.0, 2.0]) param_1 = Variable(param_1_t) keyed_optimizer = KeyedOptimizer({'param_1': param_1}, {param_1: {'one': 1.0}}, [{'params': [param_1], 'param_group_val_0': 2.0}]) ...
.parametrize(['alias', 'dtype'], zip(dtype_names, dtype_types), ids=[str(dtype) for dtype in dtype_names]) .parametrize(['func', 'args'], [(qutip.basis, (5, 1)), (qutip.fock, (5, 1)), (qutip.fock_dm, (5, 1)), (qutip.coherent, (5, 1)), (qutip.coherent_dm, (5, 1)), (qutip.thermal_dm, (5, 1)), (qutip.maximally_mixed_dm, (...
def get_logger(setting_getter, name, fail_to_local=False, filter=None): global got_logger if got_logger: return got_logger if filter: def log_filter(r, h): if server_pipe_log_filter_re.search(r.message): return False return filter(r, h) else: ...
class WavefrontDetailView(ResourceMixin, ResourceBaseDetailView): is_3d_model = True js = ({'src': 'wavefront/js/3d_view.js', 'type': 'module'},) css = ('wavefront/css/wavefront.css',) def get_context_data(self, **kwargs): context = super(WavefrontDetailView, self).get_context_data() obj...
def main(): cv2.setNumThreads(1) p = create_config(args.config_env, args.config_exp) sys.stdout = Logger(p['log_file']) print('Python script is {}'.format(os.path.abspath(__file__))) print(colored(p, 'red')) print(colored('Retrieve model', 'blue')) model = get_model(p) print(model) m...
def create_model(args): model = AsyncTFBase(args.extract_feat_dim, args.s_class, args.o_class, args.v_class).cuda() optimizer = torch.optim.SGD(model.parameters(), args.lr, momentum=args.momentum, weight_decay=args.weight_decay) (rgb_model, rgb_optimizer) = sub_create_model(args) criterion = AsyncTFCrit...
def get_commit_info(show_modified_files=False, show_untracked_files=False): import git try: repo = git.Repo(PACKAGE_DIR.parent) except git.InvalidGitRepositoryError as err: logger.warning('mani_skill2 is not installed with git.') return None else: commit_info = {} ...
def test_userdefinedaction(): cca = OSC.CustomCommandAction('custom_command', 'content') cca2 = OSC.CustomCommandAction('another_custom_command', 'content') uda = OSC.UserDefinedAction(cca) prettyprint(uda) uda2 = OSC.UserDefinedAction(cca) assert (uda == uda2) uda3 = OSC.UserDefinedAction(c...
(is_safe=True) def render_email(value): if value: (mailbox, domain) = value.split('') mailbox_tokens = mailbox.split('.') domain_tokens = domain.split('.') mailbox = '<span>.</span>'.join(mailbox_tokens) domain = '<span>.</span>'.join(domain_tokens) return format_html...
class main(list): def __init__(self, domains, campaign, mod, project_id): global module global domain_names global campaign_list campaign_list = campaign domain_names = domains if (mod is not None): module = mod i = cmd_main() i.prompt = ((...
def test_register_service_with_custom_ttl(): zc = Zeroconf(interfaces=['127.0.0.1']) type_ = '_homeassistant._tcp.local.' name = 'MyTestHome' info_service = r.ServiceInfo(type_, f'{name}.{type_}', 80, 0, 0, {'path': '/~paulsm/'}, 'ash-90.local.', addresses=[socket.inet_aton('10.0.1.2')]) zc.register...
_start_docstrings('\n CamemBERT Model with a token classification head on top (a linear layer on top of the hidden-states output) e.g.\n for Named-Entity-Recognition (NER) tasks.\n ', CAMEMBERT_START_DOCSTRING) class TFCamembertForTokenClassification(TFRobertaForTokenClassification): config_class = Camembe...
def check_dataset(args): if (args.dataset.lower() == 'msvd'): args.dataset = 'Youtube2Text' assert (args.dataset in ['Youtube2Text', 'MSRVTT']), 'We now only support Youtube2Text (MSVD) and MSRVTT datasets.' if args.default: if (args.dataset == 'Youtube2Text'): args.beta = [0, 1]...
def transform_member_expr(builder: IRBuilder, expr: MemberExpr) -> Value: final = builder.get_final_ref(expr) if (final is not None): (fullname, final_var, native) = final value = builder.emit_load_final(final_var, fullname, final_var.name, native, builder.types[expr], expr.line) if (val...
def find_targets_recursive(manager: BuildManager, graph: Graph, triggers: set[str], deps: dict[(str, set[str])], up_to_date_modules: set[str]) -> tuple[(dict[(str, set[FineGrainedDeferredNode])], set[str], set[TypeInfo])]: result: dict[(str, set[FineGrainedDeferredNode])] = {} worklist = triggers processed:...
def get_fix_hint_for_unpinned(remediation): secure_options: List[str] = [str(fix) for fix in remediation.get('other_recommended_versions', [])] fixes_hint = f"Version {remediation.get('recommended_version')} has no known vulnerabilities and falls within your current specifier range." if (len(secure_options)...
def test_estimate_parallel_two_qubit_xeb_fidelity_on_grid_no_noise(tmpdir): base_dir = os.path.abspath(tmpdir) qubits = cirq.GridQubit.square(2) two_qubit_gate = (cirq.ISWAP ** 0.5) cycles = [5, 10, 15] data_collection_id = collect_grid_parallel_two_qubit_xeb_data(sampler=cirq.Simulator(seed=34310, ...
def create_logger(logdir, phase='train'): os.makedirs(logdir, exist_ok=True) log_file = osp.join(logdir, f'{phase}_log.txt') head = '%(asctime)-15s %(message)s' logging.basicConfig(filename=log_file, format=head) logger = logging.getLogger() logger.setLevel(logging.INFO) console = logging.St...
def test_nl_head(): head = NLHead(in_channels=32, channels=16, num_classes=19) assert (len(head.convs) == 2) assert hasattr(head, 'nl_block') inputs = [torch.randn(1, 32, 45, 45)] if torch.cuda.is_available(): (head, inputs) = to_cuda(head, inputs) outputs = head(inputs) assert (outp...
_metaclass(ABCMeta) class PermissionDataInterface(object): def get_repo_permissions_by_user(self, namespace_name, repository_name): def get_repo_roles(self, username, namespace_name, repository_name): def get_repo_permission_for_user(self, username, namespace_name, repository_name): def set_repo_permiss...
class SimpleDownloader(BaseDownloader): __name__ = 'SimpleDownloader' __type__ = 'downloader' __version__ = '2.42' __status__ = 'stable' __pattern__ = '^unmatchable$' __config__ = [('enabled', 'bool', 'Activated', True), ('use_premium', 'bool', 'Use premium account if available', True), ('fallba...