code
stringlengths
281
23.7M
# NOTE(review): the decorator was truncated to `.parametrize(...)` in the source;
# restored to `@pytest.mark.parametrize` — the only form matching the argument shape.
@pytest.mark.parametrize('prefer_grpc', [False, True])
def test_qdrant_client_integration_update_collection(prefer_grpc):
    """Integration test: update_collection must apply both per-vector diffs and
    collection-level HNSW / quantization / optimizer diffs (REST and gRPC paths).
    """
    client = QdrantClient(prefer_grpc=prefer_grpc, timeout=TIMEOUT)
    client.recreate_collection(
        collection_name=COLLECTION_NAME,
        vectors_config={'text': VectorParams(size=DIM, distance=Distance.DOT)},
        timeout=TIMEOUT,
    )
    client.update_collection(
        collection_name=COLLECTION_NAME,
        vectors_config={
            'text': VectorParamsDiff(
                hnsw_config=HnswConfigDiff(m=32, ef_construct=123),
                quantization_config=ProductQuantization(
                    product=ProductQuantizationConfig(
                        compression=CompressionRatio.X32, always_ram=True
                    )
                ),
                on_disk=True,
            )
        },
        hnsw_config=HnswConfigDiff(ef_construct=123),
        quantization_config=ScalarQuantization(
            scalar=ScalarQuantizationConfig(
                type=ScalarType.INT8, quantile=0.8, always_ram=False
            )
        ),
        optimizers_config=OptimizersConfigDiff(max_segment_size=10000),
    )
    collection_info = client.get_collection(COLLECTION_NAME)
    version = os.getenv('QDRANT_VERSION')
    # NOTE(review): lexicographic version comparison ('v1.10.0' < 'v1.4.0' as strings);
    # adequate for the versions exercised here but worth confirming against CI tags.
    if (version is None) or ((version >= 'v1.4.0') or (version == 'dev')):
        assert (collection_info.config.params.vectors['text'].hnsw_config.m == 32)
        assert (collection_info.config.params.vectors['text'].hnsw_config.ef_construct == 123)
        assert (collection_info.config.params.vectors['text'].quantization_config.product.compression == CompressionRatio.X32)
        assert collection_info.config.params.vectors['text'].quantization_config.product.always_ram
        assert collection_info.config.params.vectors['text'].on_disk
        assert (collection_info.config.hnsw_config.ef_construct == 123)
        assert (collection_info.config.quantization_config.scalar.type == ScalarType.INT8)
        # quantile round-trips through float serialization; compare with a tolerance
        assert (0.7999 < collection_info.config.quantization_config.scalar.quantile < 0.8001)
        assert (not collection_info.config.quantization_config.scalar.always_ram)
        assert (collection_info.config.optimizer_config.max_segment_size == 10000)
class ProxyFactory(QNetworkProxyFactory):
    """Network proxy factory that honours the configured proxy (including PAC)."""

    def get_error(self):
        """Return the PAC fetch error string, or None when not using PAC / no error."""
        configured = config.val.content.proxy
        if isinstance(configured, pac.PACFetcher):
            return configured.fetch_error()
        return None

    def _set_capabilities(self, proxy):
        """Toggle hostname-lookup capability based on content.proxy_dns_requests."""
        if proxy.type() == QNetworkProxy.ProxyType.NoProxy:
            return
        caps = proxy.capabilities()
        lookup = QNetworkProxy.Capability.HostNameLookupCapability
        if config.val.content.proxy_dns_requests:
            caps |= lookup
        else:
            caps &= ~lookup
        proxy.setCapabilities(caps)

    def queryProxy(self, query):
        """Resolve the list of proxies to use for the given network query."""
        configured = config.val.content.proxy
        if configured is configtypes.SYSTEM_PROXY:
            proxies = QNetworkProxyFactory.systemProxyForQuery(query)
        elif isinstance(configured, pac.PACFetcher):
            if objects.backend == usertypes.Backend.QtWebEngine:
                # PAC is not resolved through this path with QtWebEngine;
                # fall back to a direct connection.
                direct = urlutils.proxy_from_url(QUrl('direct://'))
                assert not isinstance(direct, pac.PACFetcher)
                proxies = [direct]
            elif objects.backend == usertypes.Backend.QtWebKit:
                proxies = configured.resolve(query)
            else:
                raise utils.Unreachable(objects.backend)
        else:
            proxies = [configured]
        for proxy in proxies:
            self._set_capabilities(proxy)
        return proxies
class IndexedDataset(FairseqDataset):
    """Dataset backed by a binarized .idx/.bin file pair.

    The index file stores dtype code, element size, per-item dimension offsets,
    data offsets and sizes; items are read lazily from the data file on access.
    """

    _HDR_MAGIC = b'TNTIDX\x00\x00'  # magic bytes identifying the index format

    def __init__(self, path, fix_lua_indexing=False):
        super().__init__()
        self.path = path
        # When True, item values are shifted by -1 (Lua-style 1-based ids).
        self.fix_lua_indexing = fix_lua_indexing
        self.data_file = None  # opened lazily on first __getitem__
        self.read_index(path)

    def read_index(self, path):
        """Parse the index file: header, dtype, offsets and sizes."""
        with open(index_file_path(path), 'rb') as f:
            magic = f.read(8)
            assert (magic == self._HDR_MAGIC), "Index file doesn't match expected format. Make sure that --dataset-impl is configured properly."
            version = f.read(8)
            assert (struct.unpack('<Q', version) == (1,))
            (code, self.element_size) = struct.unpack('<QQ', f.read(16))
            self.dtype = dtypes[code]
            (self._len, self.s) = struct.unpack('<QQ', f.read(16))
            self.dim_offsets = read_longs(f, (self._len + 1))
            self.data_offsets = read_longs(f, (self._len + 1))
            self.sizes = read_longs(f, self.s)

    def read_data(self, path):
        # buffering=0: raw unbuffered handle — every item is read via seek+readinto.
        self.data_file = open(data_file_path(path), 'rb', buffering=0)

    def check_index(self, i):
        """Raise IndexError when i is outside [0, len)."""
        if (i < 0) or (i >= self._len):
            raise IndexError('index out of range')

    def __del__(self):
        if self.data_file:
            self.data_file.close()

    # NOTE(review): the source carried a stray `_cache(maxsize=8)` statement here —
    # a bare call to an undefined name that would raise NameError at class creation.
    # It is the residue of a mangled `@lru_cache(maxsize=8)` decorator; restore the
    # decorator if functools.lru_cache is imported at module level.
    def __getitem__(self, i):
        if not self.data_file:
            self.read_data(self.path)
        self.check_index(i)
        tensor_size = self.sizes[self.dim_offsets[i]:self.dim_offsets[(i + 1)]]
        a = np.empty(tensor_size, dtype=self.dtype)
        self.data_file.seek((self.data_offsets[i] * self.element_size))
        self.data_file.readinto(a)
        item = torch.from_numpy(a).long()
        if self.fix_lua_indexing:
            item -= 1  # convert 1-based (Lua) ids to 0-based
        return item

    def __len__(self):
        return self._len

    def num_tokens(self, index):
        return self.sizes[index]

    def size(self, index):
        return self.sizes[index]

    @staticmethod
    def exists(path):
        # Defined without `self` in the source — restored as @staticmethod.
        return (os.path.exists(index_file_path(path)) and os.path.exists(data_file_path(path)))

    @property
    def supports_prefetch(self):
        # NOTE(review): restored as @property to match the decorator-stripped
        # source's apparent intent — confirm against the FairseqDataset base class.
        return False
class TestMongoMultiHostDBCollector(CollectorTestCase):
    """Tests for TokuMXCollector publishing stats for multiple mongo hosts."""

    def setUp(self):
        config = get_collector_config('TokuMXCollector', {'hosts': ['localhost:27017', 'localhost:27057'], 'databases': '^db'})
        self.collector = TokuMXCollector(config, None)
        self.connection = MagicMock()

    def test_import(self):
        self.assertTrue(TokuMXCollector)

    # NOTE(review): the decorators on the tests below were garbled in the source
    # to `_only_if_pymongo_is_available ('pymongo.Connection') (Collector, 'publish')`.
    # They are restored to the conventional form (mock argument order matches the
    # (publish_mock, connector_mock) parameters) — confirm the names against the
    # module's sibling collector tests.
    @run_only_if_pymongo_is_available
    @patch('pymongo.Connection')
    @patch.object(Collector, 'publish')
    def test_should_publish_nested_keys_for_server_stats(self, publish_mock, connector_mock):
        data = {'more_keys': {'nested_key': 1}, 'key': 2, 'string': 'str'}
        self._annotate_connection(connector_mock, data)
        self.collector.collect()
        self.connection.db.command.assert_called_with('engineStatus')
        self.assertPublishedMany(publish_mock, {'localhost_27017.more_keys.nested_key': 1, 'localhost_27057.more_keys.nested_key': 1, 'localhost_27017.key': 2, 'localhost_27057.key': 2})

    @run_only_if_pymongo_is_available
    @patch('pymongo.Connection')
    @patch.object(Collector, 'publish')
    def test_should_publish_nested_keys_for_db_stats(self, publish_mock, connector_mock):
        data = {'db_keys': {'db_nested_key': 1}, 'dbkey': 2, 'dbstring': 'str'}
        self._annotate_connection(connector_mock, data)
        self.collector.collect()
        self.connection['db1'].command.assert_called_with('dbStats')
        metrics = {'localhost_27017.db_keys.db_nested_key': 1, 'localhost_27057.db_keys.db_nested_key': 1, 'localhost_27017.dbkey': 2, 'localhost_27057.dbkey': 2}
        self.setDocExample(collector=self.collector.__class__.__name__, metrics=metrics, defaultpath=self.collector.config['path'])
        self.assertPublishedMany(publish_mock, metrics)

    @run_only_if_pymongo_is_available
    @patch('pymongo.Connection')
    @patch.object(Collector, 'publish')
    def test_should_publish_stats_with_long_type(self, publish_mock, connector_mock):
        # `long` is the Python 2 builtin — this test file targets Python 2.
        data = {'more_keys': long(1), 'key': 2, 'string': 'str'}
        self._annotate_connection(connector_mock, data)
        self.collector.collect()
        self.connection.db.command.assert_called_with('engineStatus')
        self.assertPublishedMany(publish_mock, {'localhost_27017.more_keys': 1, 'localhost_27057.more_keys': 1, 'localhost_27017.key': 2, 'localhost_27057.key': 2})

    @run_only_if_pymongo_is_available
    @patch('pymongo.Connection')
    @patch.object(Collector, 'publish')
    def test_should_ignore_unneeded_databases(self, publish_mock, connector_mock):
        self._annotate_connection(connector_mock, {})
        self.collector.collect()
        assert (call('baddb') not in self.connection.__getitem__.call_args_list)

    @run_only_if_pymongo_is_available
    @patch('pymongo.Connection')
    @patch.object(Collector, 'publish')
    def test_should_ignore_unneeded_collections(self, publish_mock, connector_mock):
        data = {'more_keys': long(1), 'key': 2, 'string': 'str'}
        self._annotate_connection(connector_mock, data)
        self.connection['db1'].collection_names.return_value = ['collection1', 'tmp.mr.tmp1']
        self.connection['db1'].command.return_value = {'key': 2, 'string': 'str'}
        self.collector.collect()
        self.connection.db.command.assert_has_calls([call('serverStatus'), call('engineStatus')], any_order=False)
        self.connection['db1'].collection_names.assert_called_with()
        self.connection['db1'].command.assert_any_call('dbStats')
        self.connection['db1'].command.assert_any_call('collstats', 'collection1')
        # temporary map-reduce collections must be skipped
        assert (call('collstats', 'tmp.mr.tmp1') not in self.connection['db1'].command.call_args_list)
        metrics = {'localhost_27017.databases.db1.collection1.key': 2, 'localhost_27057.databases.db1.collection1.key': 2}
        self.assertPublishedMany(publish_mock, metrics)

    def _annotate_connection(self, connector_mock, data):
        """Wire the mocked pymongo Connection to return `data` for commands."""
        connector_mock.return_value = self.connection
        self.connection.db.command.return_value = data
        self.connection.database_names.return_value = ['db1', 'baddb']
def test_collectignore_via_conftest(pytester: Pytester) -> None:
    """`collect_ignore` in a conftest must keep pytest out of the ignored dir."""
    tests_dir = pytester.mkpydir('tests')
    tests_dir.joinpath('conftest.py').write_text("collect_ignore = ['ignore_me']", encoding='utf-8')
    ignored = tests_dir.joinpath('ignore_me')
    ignored.mkdir()
    ignored.joinpath('__init__.py').touch()
    # This conftest asserts immediately — it would fail the run if ever loaded.
    ignored.joinpath('conftest.py').write_text("assert 0, 'should_not_be_called'", encoding='utf-8')
    result = pytester.runpytest()
    assert result.ret == ExitCode.NO_TESTS_COLLECTED
class PrepareAnonTerminals(Transformer_InPlace):
    """Create a unique list of anonymous terminals, giving them meaningful
    names where possible (string value, known symbol name, or __ANON_n)."""

    def __init__(self, terminals):
        self.terminals = terminals
        self.term_set = {td.name for td in self.terminals}
        self.term_reverse = {td.pattern: td for td in terminals}
        self.i = 0  # counter for __ANON_n names
        self.rule_options = None

    # NOTE(review): the source carried a stray `_args` statement here — residue of
    # a mangled `@inline_args` decorator (pattern receives the single child
    # directly). Confirm against the module's other transformers.
    @inline_args
    def pattern(self, p):
        value = p.value
        if (p in self.term_reverse) and (p.flags != self.term_reverse[p].pattern.flags):
            raise GrammarError((u'Conflicting flags for the same terminal: %s' % p))
        term_name = None
        if isinstance(p, PatternStr):
            try:
                # Reuse the terminal already registered for this pattern.
                term_name = self.term_reverse[p].name
            except KeyError:
                try:
                    # Well-known symbols get their conventional names.
                    term_name = _TERMINAL_NAMES[value]
                except KeyError:
                    # An identifier-like literal can name itself (upper-cased).
                    if (value and is_id_continue(value) and is_id_start(value[0]) and (value.upper() not in self.term_set)):
                        term_name = value.upper()
                if (term_name in self.term_set):
                    term_name = None
        elif isinstance(p, PatternRE):
            if (p in self.term_reverse):
                term_name = self.term_reverse[p].name
        else:
            assert False, p
        if term_name is None:
            term_name = ('__ANON_%d' % self.i)
            self.i += 1
        if term_name not in self.term_set:
            assert (p not in self.term_reverse)
            self.term_set.add(term_name)
            termdef = TerminalDef(term_name, p)
            self.term_reverse[p] = termdef
            self.terminals.append(termdef)
        # String literals are filtered from the tree unless keep_all_tokens is set.
        filter_out = (False if (self.rule_options and self.rule_options.keep_all_tokens) else isinstance(p, PatternStr))
        return Terminal(term_name, filter_out=filter_out)
class TestEncryptionBuilder:
    """Validation behaviour of PrivateFormat.encryption_builder()."""

    def test_unsupported_format(self):
        # Only formats supporting builder-based encryption may create one.
        fmt = PrivateFormat.PKCS8
        with pytest.raises(ValueError):
            fmt.encryption_builder()

    def test_duplicate_kdf_rounds(self):
        builder = PrivateFormat.OpenSSH.encryption_builder().kdf_rounds(12)
        with pytest.raises(ValueError):
            builder.kdf_rounds(12)

    def test_invalid_kdf_rounds(self):
        builder = PrivateFormat.OpenSSH.encryption_builder()
        with pytest.raises(ValueError):
            builder.kdf_rounds(0)
        with pytest.raises(ValueError):
            builder.kdf_rounds(-1)
        with pytest.raises(TypeError):
            builder.kdf_rounds('string')

    def test_invalid_password(self):
        builder = PrivateFormat.OpenSSH.encryption_builder()
        with pytest.raises(ValueError):
            builder.build(12)
        with pytest.raises(ValueError):
            builder.build(b'')

    def test_unsupported_type_for_methods(self):
        # OpenSSH builders reject PKCS12-only knobs.
        builder = PrivateFormat.OpenSSH.encryption_builder()
        with pytest.raises(TypeError):
            builder.key_cert_algorithm(PBES.PBESv1SHA1And3KeyTripleDESCBC)
        with pytest.raises(TypeError):
            builder.hmac_hash(SHA1())

    def test_duplicate_hmac_hash(self):
        builder = PrivateFormat.PKCS12.encryption_builder().hmac_hash(SHA1())
        with pytest.raises(ValueError):
            builder.hmac_hash(SHA1())

    def test_duplicate_key_cert_algorithm(self):
        builder = PrivateFormat.PKCS12.encryption_builder().key_cert_algorithm(PBES.PBESv1SHA1And3KeyTripleDESCBC)
        with pytest.raises(ValueError):
            builder.key_cert_algorithm(PBES.PBESv1SHA1And3KeyTripleDESCBC)
class BotUpdateTest(TestCase):
    """Bot.update must resolve the working branch correctly."""

    def test_branch_is_none(self):
        # No branch passed: fall back to the provider's default branch.
        bot = bot_factory()
        bot.provider.get_default_branch.return_value = 'the foo'
        bot.provider.get_file.return_value = (None, None)
        bot.get_all_requirements = Mock()
        bot.apply_updates = Mock()
        bot.update()
        self.assertEqual(bot.config.branch, 'the foo')

    def test_branch_is_set(self):
        # An explicitly passed branch wins over the provider default.
        bot = bot_factory()
        bot.provider.get_file.return_value = (None, None)
        bot.get_all_requirements = Mock()
        bot.apply_updates = Mock()
        bot.update(branch='the branch')
        self.assertEqual(bot.config.branch, 'the branch')
class TmpfsUsage_TestCase(CommandSequenceTest):
    """tmpfs partitions: size may come from --size or fsoptions, but
    --grow/--maxsize are rejected."""

    def runTest(self):
        # Accepted tmpfs forms.
        for command in ('part /foo --size=100 --fstype=tmpfs --fsoptions="noexec"',
                        'part /ham --fstype=tmpfs --fsoptions="size=250%"',
                        'part /tmp --size=20000 --fstype=tmpfs'):
            self.assert_parse(command)
        # tmpfs cannot grow or carry a maximum size.
        for command in ('part --fstype=tmpfs /tmp --grow',
                        'part --fstype=tmpfs /tmp --grow --maxsize=1000',
                        'part --fstype=tmpfs /tmp --maxsize=1000'):
            self.assert_parse_error(command)
# NOTE(review): these two bare calls look like mangled decorators for the
# function below (likely `@assert_equal.register(...)` dispatch registration);
# kept verbatim pending confirmation against the original module.
_equal.register(list, list)
_equal.register(tuple, tuple)


def assert_sequence_equal(result, expected, path=(), msg='', **kwargs):
    """Assert two sequences are element-wise equal, reporting a readable path."""
    result_len = len(result)
    expected_len = len(expected)
    assert result_len == expected_len, ('%s%s lengths do not match: %d != %d\n%s' % (
        _fmt_msg(msg), type(result).__name__, result_len, expected_len, _fmt_path(path)))
    for index, (result_item, expected_item) in enumerate(zip(result, expected)):
        assert_equal(result_item, expected_item, path=path + (('[%d]' % index),), msg=msg, **kwargs)
def get_growing_subgraphs(device_graph: nx.Graph, central_qubit: cirq.Qid, min_size=2, max_size=None) -> Dict[(int, Tuple[cirq.Qid])]:
    """Return {size: qubit-tuple} for nested subgraphs grown outward from
    central_qubit, adding qubits radius by radius (sorted within each radius).

    Only sizes in [min_size, max_size] are kept; max_size defaults to the
    full node count. Later (larger) subgraphs of equal size overwrite earlier
    ones in the returned dict.
    """
    by_radius = defaultdict(list)
    for qubit, distance in nx.shortest_path_length(device_graph, source=central_qubit).items():
        by_radius[distance].append(qubit)
    # Grow one qubit at a time: each subgraph extends the previous one.
    subgraphs = []
    current = tuple()
    for radius in by_radius:
        for qubit in sorted(by_radius[radius]):
            current = current + (qubit,)
            subgraphs.append(current)
    if max_size is None:
        max_size = device_graph.number_of_nodes()
    return {len(subgraph): subgraph
            for subgraph in subgraphs
            if min_size <= len(subgraph) <= max_size}
class DirectionalLight(Light):
    """Light at infinity: fixed direction, fixed (skybox) distance, and
    irradiance that scales only with the N·L term."""

    def __init__(self, Ldir, color):
        self.Ldir = Ldir    # light direction vector
        self.color = color  # light color / intensity

    def get_L(self):
        """Light direction (independent of the shaded point)."""
        return self.Ldir

    def get_distance(self, M):
        """Treated as infinitely far away — returns the skybox distance."""
        return SKYBOX_DISTANCE

    def get_irradiance(self, dist_light, NdotL):
        """No distance falloff; irradiance is color scaled by N·L."""
        return self.color * NdotL
class SegmentationDataset(Dataset):
    """Paired (image, mask) dataset: images mapped to [-1, 1] tensors, masks
    thresholded to booleans. Mask files are aligned to image names."""

    def __init__(self, images_root, masks_root, crop=True, size=None, mask_thr=0.5):
        self.mask_thr = mask_thr
        images_ds = UnannotatedDataset(images_root, transform=None)
        masks_ds = UnannotatedDataset(masks_root, transform=None)
        # Make mask ordering follow the image file ordering.
        masks_ds.align_names(images_ds.img_files)
        pipeline = [central_crop if crop else _id]
        pipeline.append(transforms.Resize(size) if size is not None else _id)
        pipeline.append(lambda img: img.convert('RGB'))
        pipeline.append(transforms.ToTensor())
        resize = transforms.Compose(pipeline)
        shift_to_zero = lambda x: (2 * x) - 1  # [0, 1] -> [-1, 1]
        self.images_ds = TransformedDataset(images_ds, transforms.Compose([resize, shift_to_zero]))
        self.masks_ds = TransformedDataset(masks_ds, resize)

    def __len__(self):
        return len(self.images_ds)

    def __getitem__(self, index):
        mask = self.masks_ds[index] >= self.mask_thr
        # mask[0]: first channel of the thresholded mask tensor
        return (self.images_ds[index], mask[0])
class BrowserWidget(QtWidgets.QWidget):
    """Browser view with a row of control buttons (show/hide/clear/open)."""

    def __init__(self, *args, parent=None):
        super().__init__(parent)
        self.browser_args = args
        self._setup_ui()
        self._layout()

    def _setup_ui(self):
        """Create the browser and its four control buttons."""
        self.browser = Browser(*self.browser_args, parent=self)
        for attr, label, enabled in (('clear_button', 'Clear all', False),
                                     ('hide_button', 'Hide all', False),
                                     ('show_button', 'Show all', False),
                                     ('open_button', 'Open', True)):
            button = QtWidgets.QPushButton(label, self)
            button.setEnabled(enabled)
            setattr(self, attr, button)

    def _layout(self):
        """Button row on top, browser below."""
        vbox = QtWidgets.QVBoxLayout(self)
        vbox.setSpacing(0)
        hbox = QtWidgets.QHBoxLayout()
        hbox.setSpacing(10)
        hbox.setContentsMargins(-1, 6, -1, 6)
        for button in (self.show_button, self.hide_button, self.clear_button):
            hbox.addWidget(button)
        hbox.addStretch()
        hbox.addWidget(self.open_button)
        vbox.addLayout(hbox)
        vbox.addWidget(self.browser)
        self.setLayout(vbox)
def __match_identation_stack(identation_stack, level, level_limits, folding_ranges, current_line):
    """Pop every open indentation level >= `level` off the stack, closing its
    folding range at `current_line`; push back the first shallower level.

    Mutates and returns (identation_stack, folding_ranges). The "identation"
    spelling is kept to match existing callers.
    """
    top = identation_stack.pop(0)
    while top >= level:
        start_line = level_limits.pop(top)
        folding_ranges.append((start_line, current_line))
        top = identation_stack.pop(0)
    identation_stack.insert(0, top)
    return identation_stack, folding_ranges
class RRDB(nn.Module):
    """Residual-in-Residual Dense Block: three chained dense blocks with a
    scaled (0.2) residual connection back to the input."""

    def __init__(self, nf, gc=32):
        super(RRDB, self).__init__()
        self.RDB1 = ResidualDenseBlock_5C(nf, gc)
        self.RDB2 = ResidualDenseBlock_5C(nf, gc)
        self.RDB3 = ResidualDenseBlock_5C(nf, gc)

    def forward(self, x):
        out = x
        for dense_block in (self.RDB1, self.RDB2, self.RDB3):
            out = dense_block(out)
        # scaled residual connection
        return out * 0.2 + x
# NOTE(review): reconstructed from the mangled `_grad()` residue — conversion
# needs no gradient tracking. Requires `torch` to be imported at module level.
@torch.no_grad()
def convert_wav2vec2_checkpoint(checkpoint_path, pytorch_dump_folder_path, config_path=None, dict_path=None, is_finetuned=True):
    """Convert a fairseq wav2vec2 checkpoint into a HuggingFace model directory.

    For fine-tuned checkpoints a CTC head, tokenizer vocab and processor are
    written; otherwise a pretraining model is produced. Weights are then copied
    from the fairseq model and the result saved to pytorch_dump_folder_path.
    """
    if config_path is not None:
        config = Wav2Vec2Config.from_pretrained(config_path)
    else:
        config = Wav2Vec2Config()
    if is_finetuned:
        if dict_path:
            target_dict = Dictionary.load(dict_path)
            # NOTE(review): bos/pad indices are deliberately swapped relative to
            # the fairseq dictionary — presumably because the CTC blank symbol
            # is <pad>, not <s>; confirm against the upstream conversion script.
            config.bos_token_id = target_dict.pad_index
            config.pad_token_id = target_dict.bos_index
            config.eos_token_id = target_dict.eos_index
            config.vocab_size = len(target_dict.symbols)
            vocab_path = os.path.join(pytorch_dump_folder_path, 'vocab.json')
            if not os.path.isdir(pytorch_dump_folder_path):
                logger.error('--pytorch_dump_folder_path ({}) should be a directory'.format(pytorch_dump_folder_path))
                return
            os.makedirs(pytorch_dump_folder_path, exist_ok=True)
            vocab_dict = target_dict.indices
            # Force <pad>/<s> onto ids 0 and 1 to match the swapped config above.
            vocab_dict['<pad>'] = 0
            vocab_dict['<s>'] = 1
            with open(vocab_path, 'w', encoding='utf-8') as vocab_handle:
                json.dump(vocab_dict, vocab_handle)
            tokenizer = Wav2Vec2CTCTokenizer(vocab_path, unk_token=target_dict.unk_word, pad_token=target_dict.pad_word, bos_token=target_dict.bos_word, eos_token=target_dict.eos_word, word_delimiter_token='|', do_lower_case=False)
            # Layer-norm feature extractors expect an attention mask.
            return_attention_mask = True if config.feat_extract_norm == 'layer' else False
            feature_extractor = Wav2Vec2FeatureExtractor(feature_size=1, sampling_rate=16000, padding_value=0, do_normalize=True, return_attention_mask=return_attention_mask)
            processor = Wav2Vec2Processor(feature_extractor=feature_extractor, tokenizer=tokenizer)
            processor.save_pretrained(pytorch_dump_folder_path)
        hf_wav2vec = Wav2Vec2ForCTC(config)
    else:
        hf_wav2vec = Wav2Vec2ForPreTraining(config)
    if is_finetuned:
        # Fine-tuned checkpoints need the dictionary directory as `data`.
        (model, _, _) = fairseq.checkpoint_utils.load_model_ensemble_and_task([checkpoint_path], arg_overrides={'data': '/'.join(dict_path.split('/')[:-1])})
    else:
        (model, _, _) = fairseq.checkpoint_utils.load_model_ensemble_and_task([checkpoint_path])
    model = model[0].eval()
    recursively_load_weights(model, hf_wav2vec, (not is_finetuned))
    hf_wav2vec.save_pretrained(pytorch_dump_folder_path)
class Effect6636(BaseEffect):
    """Passive fit effect: boosts the damage multiplier of modules requiring
    the Capital Hybrid Turret skill by the ship's 'shipBonusTitanG1' attribute,
    scaled per level of the Gallente Titan skill.
    """

    type = 'passive'

    # NOTE(review): defined without `self`, matching the handler calling
    # convention apparently used by the effect framework — confirm against
    # BaseEffect before changing the signature.
    def handler(fit, src, context, projectionRange, **kwargs):
        fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Capital Hybrid Turret')), 'damageMultiplier', src.getModifiedItemAttr('shipBonusTitanG1'), skill='Gallente Titan', **kwargs)
def main(argv):
    """Parse CLI args, expand the sweep config for the given index, fill in
    defaults and log paths, then run the experiment."""
    parser = argparse.ArgumentParser(description='Config file')
    parser.add_argument('--config_file', type=str, default='./configs/catcher.json', help='Configuration file for the chosen model')
    parser.add_argument('--config_idx', type=int, default=1, help='Configuration index')
    parser.add_argument('--slurm_dir', type=str, default='', help='slurm tempory directory')
    args = parser.parse_args()
    cfg = Sweeper(args.config_file).generate_config_for_idx(args.config_idx)
    # Defaults applied only when the sweep config did not set them.
    for key, value in (('network_update_steps', 1), ('show_tb', False),
                       ('render', False), ('gradient_clip', -1),
                       ('hidden_act', 'ReLU'), ('output_act', 'Linear')):
        cfg.setdefault(key, value)
    cfg['env'].setdefault('max_episode_steps', -1)
    # Experiment name = config file basename without extension.
    cfg['exp'] = args.config_file.split('/')[-1].split('.')[0]
    if args.slurm_dir:
        cfg['logs_dir'] = f"{args.slurm_dir}/{cfg['exp']}/{cfg['config_idx']}/"
    else:
        cfg['logs_dir'] = f"./logs/{cfg['exp']}/{cfg['config_idx']}/"
    make_dir(cfg['logs_dir'])
    cfg['train_log_path'] = cfg['logs_dir'] + 'result_Train.feather'
    cfg['test_log_path'] = cfg['logs_dir'] + 'result_Test.feather'
    cfg['model_path'] = cfg['logs_dir'] + 'model.pt'
    cfg['cfg_path'] = cfg['logs_dir'] + 'config.json'
    Experiment(cfg).run()
class Migration(migrations.Migration):
    """Add the boolean `is_optional` flag (default False) to the Question model."""

    dependencies = [('questions', '0055_catalog_locked')]

    operations = [migrations.AddField(model_name='question', name='is_optional', field=models.BooleanField(default=False, help_text='Designates whether this question is optional.', verbose_name='is optional'))]
# NOTE(review): reconstructed from the mangled `_grad()` residue — caption
# generation is inference-only, so gradients are disabled for the whole pass.
@torch.no_grad()
def evaluation(model, data_loader, device, config):
    """Generate a caption per image and return [{'image_id', 'caption'}, ...]."""
    model.eval()
    # Unwrap DistributedDataParallel so we can call .generate() directly.
    model_without_ddp = model.module if hasattr(model, 'module') else model
    metric_logger = utils.MetricLogger(delimiter=' ')
    header = 'Caption generation:'
    print_freq = 50
    result = []
    for batch in metric_logger.log_every(data_loader, print_freq, header):
        captions = model_without_ddp.generate(image=batch.get('image'), clip_image_embs=batch.get('clip_image_embs'), lang=config['eval_lang'], sample=False, num_beams=config['num_beams'], max_length=config['max_length'], min_length=config['min_length'])
        for caption, img_id in zip(captions, batch['image_id']):
            result.append({'image_id': img_id.item(), 'caption': caption})
    return result
# NOTE(review): the decorator was mangled to a bare `()` in the source; the
# yield-based shape strongly suggests a pytest fixture — confirm the exact
# decorator (e.g. scope arguments) against the original test module.
@pytest.fixture()
def empty_database():
    """Yield an in-memory SQLite database bound to all model classes, restoring
    the previous database object afterwards."""
    old_db = database.db
    try:
        test_db = SqliteDatabase(':memory:')
        database.db = test_db
        with test_db.bind_ctx(database.all_classes):
            test_db.connect(reuse_if_open=True)
            yield test_db
    finally:
        # Always restore the real database, even if the test raised.
        database.db = old_db
def get_last_checkpoint(work_dir, steps=None):
    """Load the most recent checkpoint under work_dir.

    Returns (checkpoint_dict, path), or (None, None) when no checkpoint exists.
    """
    ckpt_paths = get_all_ckpts(work_dir, steps)
    if not ckpt_paths:
        return None, None
    last_ckpt_path = ckpt_paths[0]  # newest first
    checkpoint = torch.load(last_ckpt_path, map_location='cpu')
    logging.info(f'load module from checkpoint: {last_ckpt_path}')
    return checkpoint, last_ckpt_path
class GRAMLoss(BaseLoss):
    """GAN loss for GRAM training: logistic D/G losses, an R1 gradient penalty
    on real images, and a camera-pose ("ID") consistency penalty between the
    poses the discriminator predicts and the ground-truth/generator poses.

    NOTE(review): this class was recovered from a single-line source. The
    grouping of statements inside `with autocast(...)` / `with torch.no_grad()`
    blocks was reconstructed (only the tensor-producing ops kept inside) and
    should be confirmed against the original project.
    """

    def __init__(self, runner, d_loss_kwargs=None, g_loss_kwargs=None):
        self.d_loss_kwargs = d_loss_kwargs or dict()
        self.g_loss_kwargs = g_loss_kwargs or dict()
        self.r1_gamma = self.d_loss_kwargs.get('r1_gamma', 10.0)
        # Camera-penalty weight is configured under the 'position_gamma' key.
        self.camera_gamma = self.d_loss_kwargs.get('position_gamma', 15.0)
        self.batch_split = self.d_loss_kwargs.get('batch_split', 1)
        runner.running_stats.add('Loss/D Real', log_name=f'loss_d_real', log_format='.3f', log_strategy='AVERAGE')
        runner.running_stats.add('Loss/D Fake', log_name=f'loss_d_fake', log_format='.3f', log_strategy='AVERAGE')
        runner.running_stats.add('Loss/G', log_name=f'loss_g', log_format='.3f', log_strategy='AVERAGE')
        if self.r1_gamma > 0.0:
            runner.running_stats.add('Loss/Real Grad Penalty', log_name='loss_gp_real', log_format='.2e', log_strategy='AVERAGE')
        if self.camera_gamma > 0.0:
            runner.running_stats.add(f'Loss/G Fake ID Penalty', log_format='.3f', log_name='loss_g_id', log_strategy='AVERAGE')
            runner.running_stats.add(f'Loss/D Fake ID Penalty', log_name='loss_d_ld', log_format='.3f', log_strategy='AVERAGE')
        runner.logger.info('real gradient penalty:', indent_level=1)
        runner.logger.info(f'r1_gamma: {self.r1_gamma}', indent_level=2)

    # Restored as @staticmethod: the (decorator-stripped) source defined the three
    # helpers below with a plain `runner`/argument signature and no `self`, while
    # call sites invoke them as `self.run_D(runner, images=..., ...)` — keyword
    # arguments that only line up when these are static methods.
    @staticmethod
    def run_G(runner, batch_size=None, sync=True, split=1, _G_kwargs=dict()):
        """Sample latents/labels and run the generator, optionally in sub-batches,
        concatenating tensor outputs across the splits."""
        G = runner.ddp_models['generator']
        G_kwargs = runner.model_kwargs_train['generator']
        batch_size = batch_size or runner.batch_size
        assert batch_size % split == 0
        split_batch_size = batch_size // split
        latent_dim = runner.models['generator'].latent_dim
        label_dim = runner.models['generator'].label_dim
        latents = torch.randn((batch_size, *latent_dim), device=runner.device)
        labels = None
        if label_dim > 0:
            rnd_labels = torch.randint(0, label_dim, (batch_size,), device=runner.device)
            labels = F.one_hot(rnd_labels, num_classes=label_dim)
        with ddp_sync(G, sync=sync):
            results = {}
            for batch_idx in range(0, batch_size, split_batch_size):
                latent = latents[batch_idx:batch_idx + split_batch_size]
                label = labels[batch_idx:batch_idx + split_batch_size] if labels is not None else labels
                result = G(latent, label, **G_kwargs, **_G_kwargs)
                for key, val in result.items():
                    if key in results:
                        if isinstance(val, (torch.Tensor,)):
                            results[key] = torch.cat([results[key], val])
                        elif val is None:
                            results[key] = None
                        else:
                            raise NotImplementedError
                    else:
                        results[key] = val
        return results

    @staticmethod
    def run_D(runner, images, labels, sync=True, split=1, _D_kwargs=dict()):
        """Run the discriminator on (augmented) images, optionally in sub-batches,
        concatenating tensor outputs across the splits."""
        batch_size = images.shape[0]
        assert batch_size % split == 0
        split_batch_size = batch_size // split
        D = runner.ddp_models['discriminator']
        D_kwargs = runner.model_kwargs_train['discriminator']
        with ddp_sync(D, sync=sync):
            results = {}
            for batch_idx in range(0, batch_size, split_batch_size):
                image = images[batch_idx:batch_idx + split_batch_size]
                label = labels[batch_idx:batch_idx + split_batch_size] if labels is not None else None
                result = D(runner.augment(image, **runner.augment_kwargs), label, **D_kwargs, **_D_kwargs)
                for key, val in result.items():
                    if key in results:
                        if isinstance(val, (torch.Tensor,)):
                            results[key] = torch.cat([results[key], val])
                        elif val is None:
                            results[key] = None
                        else:
                            raise NotImplementedError
                    else:
                        results[key] = val
        return results

    @staticmethod
    def compute_grad_penalty(images, scores, amp_scaler):
        """R1 penalty: squared norm of d(score)/d(image), AMP-scale aware."""
        scores = amp_scaler.scale(scores)
        image_grad = torch.autograd.grad(outputs=[scores.sum()], inputs=[images], create_graph=True, retain_graph=True, only_inputs=True)[0]
        if amp_scaler.is_enabled():
            # Undo the loss scaling so the penalty is in unscaled units.
            image_grad = image_grad / amp_scaler.get_scale()
        with autocast(enabled=amp_scaler.is_enabled()):
            penalty = image_grad.square().sum((1, 2, 3))
        return penalty

    def d_loss(self, runner, data, sync=True):
        """Discriminator loss: softplus real/fake terms + R1 penalty + pose penalty."""
        _G_kwargs = dict()
        _D_kwargs = dict()
        real_images = data['image'].detach()
        # Image gradients are only needed when the R1 penalty is enabled.
        real_images.requires_grad_(self.r1_gamma > 0.0)
        real_labels = data.get('label', None)
        real_poses = data.get('pose', None)
        real_pred_results = self.run_D(runner, images=real_images, labels=real_labels, sync=sync, split=1, _D_kwargs=_D_kwargs)
        real_scores = real_pred_results['score']
        with autocast(enabled=runner.enable_amp):
            d_real_loss = F.softplus(-real_scores)
        runner.running_stats.update({'Loss/D Real': d_real_loss})
        d_real_loss = runner.amp_scaler.scale(d_real_loss)
        if hasattr(runner.augment, 'prob_tracker'):
            # Adaptive augmentation tracks the sign of real scores.
            runner.augment.prob_tracker.update(real_scores.sign())
        with torch.no_grad():
            # Generator is frozen for the D step.
            fake_results = self.run_G(runner, sync=False, split=self.batch_split, _G_kwargs=_G_kwargs)
        fake_pred_results = self.run_D(runner, images=fake_results['image'], labels=fake_results['label'], sync=sync, split=1, _D_kwargs=_D_kwargs)
        fake_scores = fake_pred_results['score']
        with autocast(enabled=runner.enable_amp):
            d_fake_loss = F.softplus(fake_scores)
        runner.running_stats.update({'Loss/D Fake': d_fake_loss})
        d_fake_loss = runner.amp_scaler.scale(d_fake_loss)
        r1_penalty = torch.zeros_like(d_real_loss)
        if self.r1_gamma > 0.0:
            r1_penalty = self.compute_grad_penalty(images=real_images, scores=real_scores, amp_scaler=runner.amp_scaler)
            runner.running_stats.update({'Loss/Real Grad Penalty': r1_penalty})
            r1_penalty = runner.amp_scaler.scale(r1_penalty)
        # NOTE(review): computed unconditionally — requires data['pose'] to be
        # present and camera outputs from both D passes; confirm upstream.
        with autocast(enabled=runner.enable_amp):
            camera_penalty = F.mse_loss(fake_pred_results['camera'], fake_results['camera'])
            camera_penalty += F.mse_loss(real_pred_results['camera'], real_poses)
            id_penalty = camera_penalty * self.camera_gamma
        id_penalty = runner.amp_scaler.scale(id_penalty)
        runner.running_stats.update({'Loss/D Fake ID Penalty': id_penalty})
        return (d_real_loss + d_fake_loss + r1_penalty * (self.r1_gamma * 0.5) + id_penalty).mean()

    def g_loss(self, runner, _data, sync=True):
        """Generator loss: non-saturating softplus (top-k over scores) + pose penalty."""
        _G_kwargs = dict()
        _D_kwargs = dict()
        # top-k with k == batch size keeps all scores (identity here).
        topk_num = int(runner.batch_size)
        fake_results = self.run_G(runner, sync=sync, split=self.batch_split, _G_kwargs=_G_kwargs)
        fake_pred_results = self.run_D(runner, images=fake_results['image'], labels=fake_results['label'], sync=False, split=1, _D_kwargs=_D_kwargs)
        with autocast(enabled=runner.enable_amp):
            fake_scores = torch.topk(fake_pred_results['score'], topk_num, dim=0).values
            g_loss = F.softplus(-fake_scores)
        runner.running_stats.update({'Loss/G': g_loss})
        g_loss = runner.amp_scaler.scale(g_loss)
        with autocast(enabled=runner.enable_amp):
            camera_penalty = F.mse_loss(fake_pred_results['camera'], fake_results['camera'])
            id_penalty = camera_penalty * self.camera_gamma
        id_penalty = runner.amp_scaler.scale(id_penalty)
        runner.running_stats.update({'Loss/G Fake ID Penalty': id_penalty})
        return (g_loss + id_penalty).mean()
# NOTE(review): both decorators were truncated to `.parametrize(...)` in the
# source; restored to `@pytest.mark.parametrize` stacking.
@pytest.mark.parametrize('use_path', [True, False], ids=['Path', 'str'])
@pytest.mark.parametrize('suffix', ['', '.qu', '.dat'])
def test_qsave_qload(use_path, suffix):
    """qsave/qload round-trip for both str and Path filenames, with and
    without an explicit suffix."""
    ops_in = [qutip.sigmax(), qutip.num(_dimension), qutip.coherent_dm(_dimension, 1j)]
    filename = _random_file_name() + suffix
    if use_path:
        filename = Path.cwd() / filename
    qutip.qsave(ops_in, filename)
    ops_out = qutip.qload(filename)
    assert ops_in == ops_out
    # qsave appends '.qu' to the given name on disk.
    assert Path(str(filename) + '.qu').exists()
def load_data_for_all_tasks(json_files):
    """Load and merge the examples of every task described by `json_files`.

    Each file is a JSON dataset descriptor with a 'task' key; examples from
    later files overwrite duplicate keys from earlier ones.
    """
    data_dict = {}
    for json_file in json_files:
        # `with` closes the handle — the original `json.load(open(...))` leaked it.
        with open(json_file) as handle:
            dataset_json = json.load(handle)
        logging.info(f"loading dataset file: {json_file} for {dataset_json['task']} task")
        print(f"loading dataset file: {json_file} for {dataset_json['task']} task")
        task_data = load_data_for_one_task(dataset_json)
        data_dict.update(task_data)
    logging.info(f'from all json files, we have {len(data_dict)} examples')
    print(f'from all json files, we have {len(data_dict)} examples')
    return data_dict
# NOTE(review): reconstructed from the mangled `_options_exempt` residue —
# confirm this is django's @xframe_options_exempt (clickjacking decorator).
@xframe_options_exempt
def pp_inst(request, inst_index):
    """Serve the particle-picking instruction page for the given index.

    Raises Http404 for a non-integer index or a missing template file.
    """
    try:
        inst_index = int(inst_index)
    except ValueError:
        # BUGFIX: was `return Http404()` — returning the exception instead of
        # raising it would hand django an exception object as the response.
        raise Http404()
    html_path = (PROJECT_APP_PATH + '/frontend/templates/frontend/particle-picking-inst/inst{}.html'.format(inst_index))
    if not os.path.exists(html_path):
        # BUGFIX: was `return Http404` (the class itself, not even an instance).
        raise Http404()
    return render(request, html_path)
class CustomIcon(Icon):
    """Leaflet marker icon built from a user-supplied image (plus optional shadow)."""

    _template = Template('\n {% macro script(this, kwargs) %}\n var {{ this.get_name() }} = L.icon({{ this.options|tojson }});\n {{ this._parent.get_name() }}.setIcon({{ this.get_name() }});\n {% endmacro %}\n ')

    def __init__(self, icon_image: Any, icon_size: Optional[Tuple[(int, int)]]=None, icon_anchor: Optional[Tuple[(int, int)]]=None, shadow_image: Any=None, shadow_size: Optional[Tuple[(int, int)]]=None, shadow_anchor: Optional[Tuple[(int, int)]]=None, popup_anchor: Optional[Tuple[(int, int)]]=None):
        # Intentionally calls Icon's *parent* initializer (skips Icon.__init__),
        # preserved from the original implementation.
        super(Icon, self).__init__()
        self._name = 'CustomIcon'
        icon_url = image_to_url(icon_image)
        # `and` keeps the original short-circuit: no shadow image -> no URL.
        shadow_url = shadow_image and image_to_url(shadow_image)
        self.options = parse_options(icon_url=icon_url, icon_size=icon_size, icon_anchor=icon_anchor, shadow_url=shadow_url, shadow_size=shadow_size, shadow_anchor=shadow_anchor, popup_anchor=popup_anchor)
def _check_required_metadata(metadata):
    """Raise ValidationError when any required plugin metadata key is missing
    or empty in the given (key, value) metadata pairs.
    """
    # Hoisted: the original rebuilt dict(metadata) up to three times per key.
    metadata_dict = dict(metadata)
    for md in PLUGIN_REQUIRED_METADATA:
        if not metadata_dict.get(md):
            # NOTE(review): the href in this message is truncated in the source
            # (extraction damage) — restore the documentation URL upstream.
            raise ValidationError((_('Cannot find metadata <strong>%s</strong> in metadata source <code>%s</code>.<br />For further informations about metadata, please see: <a target="_blank" href=" documentation</a>') % (md, metadata_dict.get('metadata_source'))))
class CustomTestSet(qpbenchmark.TestSet):
    """Minimal benchmark test set yielding a single custom problem (unit tests)."""

    def description(self) -> str:
        return 'Unit test test set'

    def title(self) -> str:
        return 'Unit test test set'

    def sparse_only(self) -> bool:
        # Dense solvers are allowed on this test set.
        return False

    def __iter__(self):
        # The test set contains exactly one problem.
        (yield custom_problem(name='custom'))
def add_player_class_ex(teamid: int, model_id: int, spawn_x: float, spawn_y: float, spawn_z: float, z_angle: float, weapon1: int, weapon1_ammo: int, weapon2: int, weapon2_ammo: int, weapon3: int, weapon3_ammo: int) -> int:
    """Thin snake_case wrapper forwarding all arguments, unchanged and in
    order, to the native AddPlayerClassEx call and returning its result.
    """
    return AddPlayerClassEx(teamid, model_id, spawn_x, spawn_y, spawn_z, z_angle, weapon1, weapon1_ammo, weapon2, weapon2_ammo, weapon3, weapon3_ammo)
class VERSE():
    """ctypes wrapper around the VERSE graph-embedding shared library."""

    def __init__(self, cpath=None):
        """Locate, load and declare signatures for the verse .so/.dll.

        Args:
            cpath: directory holding the library; defaults to this file's
                directory.

        Raises:
            RuntimeError: if no matching library can be found or loaded.
        """
        path = (os.path.dirname(os.path.realpath(__file__)) if (cpath is None) else cpath)
        try:
            sofile = (glob.glob(os.path.join(path, 'verse*.so')) + glob.glob(os.path.join(path, '*verse*.dll')))[0]
            self.C = ctypes.cdll.LoadLibrary(os.path.join(path, sofile))
        except (IndexError, OSError):
            raise RuntimeError('Cannot find/open VERSE shared library')
        # C signatures:
        # verse_ppr_train(w, indptr, indices, nv, ne, n_hidden, steps,
        #                 n_neg_samples, lr, alpha, rng_seed, n_threads)
        self.C.verse_ppr_train.restype = ctypes.c_int
        self.C.verse_ppr_train.argtypes = [ndpointer(ctypes.c_float), ndpointer(ctypes.c_int), ndpointer(ctypes.c_int), ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_float, ctypes.c_float, ctypes.c_int, ctypes.c_int]
        # verse_neigh_train(...) — same layout, minus alpha.
        self.C.verse_neigh_train.restype = ctypes.c_int
        self.C.verse_neigh_train.argtypes = [ndpointer(ctypes.c_float), ndpointer(ctypes.c_int), ndpointer(ctypes.c_int), ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_float, ctypes.c_int, ctypes.c_int]

    def verse_ppr(self, graph, w=None, n_hidden=128, alpha=0.85, steps=100000, n_neg_samples=3, lr=0.0025, rng_seed=0, n_threads=(- 1)):
        """Train VERSE with PPR similarity on a CSR-style *graph*.

        Returns the (nv, n_hidden) float32 embedding matrix, trained in place
        (a fresh random one is allocated when *w* is None).
        """
        nv = graph.shape[0]
        ne = graph.nnz
        if (w is None):
            w = (np.random.rand(nv, n_hidden).astype(np.float32) - 0.5)
        if (n_threads < 0):
            # Negative values count back from the CPU count (-1 == all cores).
            n_threads = ((multiprocessing.cpu_count() + 1) + n_threads)
        if (n_threads == 0):
            raise RuntimeError('Number of threds can not be zero!')
        self.C.verse_ppr_train(w, graph.indptr, graph.indices, nv, ne, n_hidden, steps, n_neg_samples, lr, alpha, rng_seed, n_threads)
        return w

    def verse_neigh(self, graph, w=None, n_hidden=128, steps=100000, n_neg_samples=3, lr=0.0025, rng_seed=0, n_threads=(- 1)):
        """Train VERSE with adjacency similarity; same contract as verse_ppr, minus alpha."""
        nv = graph.shape[0]
        ne = graph.nnz
        if (w is None):
            w = (np.random.rand(nv, n_hidden).astype(np.float32) - 0.5)
        if (n_threads < 0):
            n_threads = ((multiprocessing.cpu_count() + 1) + n_threads)
        if (n_threads == 0):
            raise RuntimeError('Number of threds can not be zero!')
        self.C.verse_neigh_train(w, graph.indptr, graph.indices, nv, ne, n_hidden, steps, n_neg_samples, lr, rng_seed, n_threads)
        return w
class nnUNetTrainerDA5_10epochs(nnUNetTrainerDA5):
    """nnUNetTrainerDA5 variant capped at 10 training epochs."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool=True, device: torch.device=torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        # Shorten training — useful for quick experiments and debugging runs.
        self.num_epochs = 10
class SneakerSchema(BaseModel):
    """Pydantic schema for a sneaker product.

    The ``example=`` values feed the generated OpenAPI documentation.
    """
    brand_name: str = Field(example='Nike')
    name: str = Field(example="Nike Air Force 1 '07")
    description: str = Field(example=DESC_EXAMPLE)
    # Shoe size constrained to the 38-53 range.
    size: conint(ge=38, le=53) = Field(example=42)
    color: str = Field(example='White')
    # Optional flag; may be omitted by the client.
    free_delivery: Optional[bool] = Field(example=False)
def test_licenses_deprecated(dummy_dist, monkeypatch, tmp_path):
    """Building with the deprecated singular ``license_file`` key still bundles the file in dist-info."""
    dummy_dist.joinpath('setup.cfg').write_text('[metadata]\nlicense_file=licenses/DUMMYFILE', encoding='utf-8')
    monkeypatch.chdir(dummy_dist)
    # Build the wheel in a throwaway build dir, then inspect its contents.
    subprocess.check_call([sys.executable, 'setup.py', 'bdist_wheel', '-b', str(tmp_path), '--universal'])
    with WheelFile('dist/dummy_dist-1.0-py2.py3-none-any.whl') as wf:
        license_files = {'dummy_dist-1.0.dist-info/DUMMYFILE'}
        assert (set(wf.namelist()) == (DEFAULT_FILES | license_files))
.parametrize('template, expected', [('{{ func1(conf.aliases) }} {{ func2(conf.backend) }}', ['aliases', 'backend']), ('{{ conf.aliases["a"].propname }}', ['aliases']), ('{{ conf.auto_save.interval + conf.hints.min_chars }}', ['auto_save.interval', 'hints.min_chars']), ('{{ notconf.a.b.c }}', [])])
def test_template_config_variables(template, expected, config_stub):
    """template_config_variables extracts the set of option names referenced via ``conf.`` (other names are ignored)."""
    assert (jinja.template_config_variables(template) == frozenset(expected))
class UserInterfacePluginHandler(PluginHandler):
    """Plugin handler that mounts UserInterfacePlugin sidebars into the main window."""

    def __init__(self):
        # plugin class -> live plugin instance
        self.__plugins = {}
        # plugin -> sidebar widget added to the window
        self.__sidebars = {}

    def plugin_handle(self, plugin):
        """Only handle plugins derived from UserInterfacePlugin."""
        return issubclass(plugin.cls, UserInterfacePlugin)

    def plugin_enable(self, plugin):
        """Instantiate the plugin and, if it provides a sidebar, show it."""
        self.__plugins[plugin.cls] = pl_obj = plugin.get_instance()
        sidebar = pl_obj.create_sidebar()
        app.window.hide_side_book()
        if sidebar:
            print_d(('Enabling sidebar for %s' % plugin.cls))
            self.__sidebars[plugin] = app.window.add_sidebar(sidebar, name=plugin.name)
            sidebar.show_all()

    def plugin_disable(self, plugin):
        """Remove the plugin's sidebar (if any) and drop its instance."""
        widget = self.__sidebars.get(plugin)
        if widget:
            print_d(('Removing sidebar %s' % widget))
            app.window.remove_sidebar(widget)
        self.__plugins.pop(plugin.cls)
def _link_objs(value): result = '' delims = '(\\s*[\\[\\]\\(\\),]\\s*)' delims_re = re.compile(delims) sub_targets = re.split(delims, value.strip()) for sub_target in sub_targets: sub_target = sub_target.strip() if delims_re.match(sub_target): result += f'{sub_target}' if sub_target.endswith(','): result += ' ' else: result += '\\ ' elif sub_target: result += f':py:obj:`{sub_target}`\ ' return result[:(- 2)]
.usefixtures('config_tmpdir')
class TestFile():
    """Tests for configtypes.File and its unrequired variant.

    The ``os_mock`` fixture replaces the os module seen by configtypes, so
    filesystem results can be scripted per test.
    """

    (params=[configtypes.File, unrequired_class])
    def klass(self, request):
        # Fixture: run the shared tests against both the required and the
        # unrequired File type.
        return request.param

    def test_to_py_does_not_exist_file(self, os_mock):
        """A missing file fails validation for the required type."""
        os_mock.path.isfile.return_value = False
        with pytest.raises(configexc.ValidationError):
            configtypes.File().to_py('foobar')

    def test_to_py_does_not_exist_optional_file(self, os_mock):
        """A missing file is accepted verbatim by the unrequired variant."""
        os_mock.path.isfile.return_value = False
        assert (unrequired_class().to_py('foobar') == 'foobar')

    .parametrize('val, expected', [('/foobar', '/foobar'), ('~/foobar', '/home/foo/foobar'), ('$HOME/foobar', '/home/foo/foobar')])
    def test_to_py_exists_abs(self, klass, os_mock, val, expected):
        """Absolute paths (incl. ~ and $HOME forms) pass through expanded."""
        os_mock.path.isfile.return_value = True
        assert (klass().to_py(val) == expected)

    def test_to_py_exists_rel(self, klass, os_mock, monkeypatch):
        """Relative paths are resolved against the config directory."""
        monkeypatch.setattr('qutebrowser.config.configtypes.standarddir.config', (lambda : '/home/foo/.config'))
        os_mock.path.isfile.return_value = True
        os_mock.path.isabs.return_value = False
        assert (klass().to_py('foobar') == '/home/foo/.config/foobar')
        os_mock.path.join.assert_called_once_with('/home/foo/.config', 'foobar')

    def test_to_py_expanduser(self, klass, os_mock):
        """'~' is expanded before the existence check."""
        os_mock.path.isfile.side_effect = (lambda path: (path == '/home/foo/foobar'))
        os_mock.path.isabs.return_value = True
        assert (klass().to_py('~/foobar') == '/home/foo/foobar')

    def test_to_py_expandvars(self, klass, os_mock):
        """Environment variables are expanded before the existence check."""
        os_mock.path.isfile.side_effect = (lambda path: (path == '/home/foo/foobar'))
        os_mock.path.isabs.return_value = True
        assert (klass().to_py('$HOME/foobar') == '/home/foo/foobar')

    def test_to_py_invalid_encoding(self, klass, os_mock, unicode_encode_err):
        """UnicodeEncodeError from filesystem checks surfaces as ValidationError."""
        os_mock.path.isfile.side_effect = unicode_encode_err
        os_mock.path.isabs.side_effect = unicode_encode_err
        with pytest.raises(configexc.ValidationError):
            klass().to_py('foobar')
def test_volume_sample_i(volume: wp.uint64, points: wp.array(dtype=wp.vec3)):
    """Warp kernel: verify integer volume sampling and the index<->world round trip.

    The volume under test stores ``i*j*k`` at integer coordinates inside
    [-10, 10]^3 and the background value 10 outside (see the expected-value
    computation below).
    """
    tid = wp.tid()
    p = points[tid]
    i = round(p[0])
    j = round(p[1])
    k = round(p[2])
    expected = int(((i * j) * k))
    if ((abs(i) > 10.0) or (abs(j) > 10.0) or (abs(k) > 10.0)):
        # Outside the stored region the sampler returns the background value.
        expected = 10
    expect_eq(wp.volume_sample_i(volume, p), expected)
    # index -> world -> index must be the identity.
    q = wp.volume_index_to_world(volume, p)
    q_inv = wp.volume_world_to_index(volume, q)
    expect_eq(p, q_inv)
def get_config():
    """Return the VE-SDE NCSN++ configuration for 320x320 single-channel CT
    with predictor-corrector sparse-MAR sampling."""
    config = get_default_configs()
    # --- training ---
    training = config.training
    training.batch_size = 64
    training.n_iters = 2400001
    training.snapshot_sampling = True
    training.sde = 'vesde'
    training.continuous = True
    # --- evaluation ---
    evaluate = config.eval
    evaluate.num_samples = 50000
    evaluate.ckpt_id = 101
    evaluate.batch_size = 128
    # --- sampling (predictor-corrector, sparse metal-artifact reduction) ---
    sampling = config.sampling
    sampling.method = 'pc'
    sampling.predictor = 'reverse_diffusion'
    sampling.corrector = 'none'
    sampling.iradon_K = 1.8
    sampling.snr = 0.0
    sampling.coeff = 0.27
    sampling.task = 'sparse_mar'
    sampling.n_projections = 180
    sampling.expansion = 4
    sampling.cs_solver = 'projection'
    # --- data ---
    data = config.data
    data.dataset = 'ct2d_320'
    data.image_size = 320
    data.num_channels = 1
    data.centered = False
    data.random_flip = False
    data.uniform_dequantization = False
    # --- model (NCSN++ backbone) ---
    model = config.model
    model.name = 'ncsnpp'
    model.scale_by_sigma = True
    model.sigma_max = 128.0
    model.num_scales = 1000
    model.ema_rate = 0.999
    model.sigma_min = 0.01
    model.beta_min = 0.1
    model.beta_max = 20.0
    model.normalization = 'GroupNorm'
    model.nonlinearity = 'swish'
    model.nf = 32
    model.ch_mult = (1, 1, 2, 2, 2, 2, 2)
    model.num_res_blocks = 2
    model.attn_resolutions = (20,)
    model.dropout = 0.0
    model.resamp_with_conv = True
    model.conditional = True
    model.fir = True
    model.fir_kernel = [1, 3, 3, 1]
    model.skip_rescale = True
    model.resblock_type = 'biggan'
    model.progressive = 'output_skip'
    model.progressive_input = 'input_skip'
    model.progressive_combine = 'sum'
    model.attention_type = 'ddpm'
    model.init_scale = 0.0
    model.fourier_scale = 16
    model.conv_size = 3
    # --- optimizer ---
    optim = config.optim
    optim.weight_decay = 0
    optim.optimizer = 'Adam'
    optim.lr = 0.0002
    optim.beta1 = 0.9
    optim.amsgrad = False
    optim.eps = 1e-08
    optim.warmup = 5000
    optim.grad_clip = 1.0
    config.seed = 42
    return config
def init_distributed_device(args):
    """Initialise torch.distributed when the environment requests it and pick a device.

    Mutates *args* in place (``distributed``, ``world_size``, ``rank``,
    ``local_rank``, ``device``) and returns the selected ``torch.device``.
    """
    args.distributed = False
    args.world_size = 1
    args.rank = 0
    args.local_rank = 0
    dist_backend = getattr(args, 'dist_backend', 'nccl')
    dist_url = getattr(args, 'dist_url', 'env://')
    if is_distributed_env():
        if ('SLURM_PROCID' in os.environ):
            # SLURM run: derive ranks from the scheduler and export the
            # standard env vars for libraries that read them.
            (args.local_rank, args.rank, args.world_size) = world_info_from_env()
            os.environ['LOCAL_RANK'] = str(args.local_rank)
            os.environ['RANK'] = str(args.rank)
            os.environ['WORLD_SIZE'] = str(args.world_size)
            torch.distributed.init_process_group(backend=dist_backend, init_method=dist_url, world_size=args.world_size, rank=args.rank)
        else:
            # torchrun-style launch: world size / rank come from the group.
            (args.local_rank, _, _) = world_info_from_env()
            torch.distributed.init_process_group(backend=dist_backend, init_method=dist_url)
            args.world_size = torch.distributed.get_world_size()
            args.rank = torch.distributed.get_rank()
        args.distributed = True
    if torch.cuda.is_available():
        if args.distributed:
            # One GPU per local rank.
            device = ('cuda:%d' % args.local_rank)
        else:
            device = 'cuda:0'
        torch.cuda.set_device(device)
    else:
        device = 'cpu'
    args.device = device
    device = torch.device(device)
    return device
def transform_all_binary_images(root_path):
    """Recursively convert every binary mask image under *root_path* into a
    contour text file stored next to it.

    For a directory, recurse into each visible entry (names starting with
    ``.`` or ``__`` are skipped).  For a file, write ``<name>.txt`` via
    ``export_contour_as_text`` unless the path already refers to a .txt file
    or the contour file exists.

    Raises:
        FileNotFoundError: if *root_path* is neither a file nor a directory.
    """
    if os.path.isdir(root_path):
        entries = [entry for entry in os.listdir(root_path) if ((entry[0] != '.') and (entry[:2] != '__'))]
        for entry in entries:
            try:
                transform_all_binary_images(os.path.join(root_path, entry))
            except Exception:
                # Best-effort batch conversion: skip entries that fail rather
                # than aborting the walk.  (Was a bare ``except:`` which also
                # swallowed KeyboardInterrupt/SystemExit.)
                continue
    elif os.path.isfile(root_path):
        target_contour_name = (root_path[:(- 4)] + '.txt')
        if (('.txt' not in root_path) and (not os.path.exists(target_contour_name))):
            export_contour_as_text(target_contour_name, read_binary_image(root_path))
    else:
        raise FileNotFoundError('Invalid Filename')
def calculate_jaccard_index(R1, R2):
    """Compute pairwise Jaccard similarities between two nested collections.

    For every outer index ``n`` and inner index ``i``, the similarity of
    ``R1[n][i]`` and ``R2[n][i]`` is computed; the result mirrors R1's
    two-level structure.  R2 must be at least as large as R1 in both
    dimensions (an IndexError is raised otherwise).
    """
    overlaps = []
    for n, subsets in enumerate(R1):
        # Indexing R2 explicitly (instead of zip) preserves the IndexError
        # behaviour on mismatched shapes.
        overlaps.append([jaccard_similarity(subset, R2[n][i]) for i, subset in enumerate(subsets)])
    return overlaps
class Net(nn.Module):
    """Small MNIST-style CNN: two conv layers, max-pool, two dropouts, two
    fully-connected layers, log-softmax output over 10 classes."""

    def __init__(self) -> None:
        super().__init__()
        # Attribute names are part of the checkpoint format — keep them.
        self.conv1 = nn.Conv2d(1, 32, 3, 1)
        self.conv2 = nn.Conv2d(32, 64, 3, 1)
        self.dropout1 = nn.Dropout(0.25)
        self.dropout2 = nn.Dropout(0.5)
        self.fc1 = nn.Linear(9216, 128)
        self.fc2 = nn.Linear(128, 10)

    def forward(self, x: Tensor) -> Tensor:
        """Map a (N, 1, 28, 28) batch to (N, 10) class log-probabilities."""
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = self.dropout1(F.max_pool2d(x, 2))
        x = torch.flatten(x, 1)
        x = self.dropout2(F.relu(self.fc1(x)))
        return F.log_softmax(self.fc2(x), dim=1)
def build_auth(xpaths, username, password):
    """Build the login-form description from predefined element XPaths.

    Args:
        xpaths: mapping that must contain the keys ``'username'``,
            ``'password'`` and ``'submit'``; the entries are consumed
            (popped) from it.
        username: value to type into the username field.
        password: value to type into the password field.

    Returns:
        dict mapping each field to ``[xpath, value]`` (``[xpath]`` for
        submit).

    Raises:
        ValueError: if any of the three required keys is missing.
    """
    auth = {}
    # Catch only the KeyError from dict.pop — the original bare ``except``
    # would also have masked unrelated errors.
    try:
        auth['username'] = [xpaths.pop('username'), username]
    except KeyError:
        raise ValueError('username not in predefined')
    try:
        auth['password'] = [xpaths.pop('password'), password]
    except KeyError:
        raise ValueError('password not in predefined')
    try:
        auth['submit'] = [xpaths.pop('submit')]
    except KeyError:
        raise ValueError('submit not in predefined')
    return auth
class H2Protocol(Protocol):
    """Minimal HTTP/2 static-file server built on Twisted + hyper-h2."""

    def __init__(self, root):
        # Server-side h2 state machine.
        config = H2Configuration(client_side=False)
        self.conn = H2Connection(config=config)
        self.known_proto = None
        # Directory against which request paths are resolved.
        self.root = root
        # stream_id -> Deferred fired when that stream's flow-control window
        # reopens (see windowUpdated / wait_for_flow_control).
        self._flow_control_deferreds = {}

    def connectionMade(self):
        """Send the HTTP/2 connection preamble."""
        self.conn.initiate_connection()
        self.transport.write(self.conn.data_to_send())

    def dataReceived(self, data):
        """Feed raw bytes to h2 and dispatch the resulting events."""
        if (not self.known_proto):
            self.known_proto = True
        try:
            events = self.conn.receive_data(data)
        except ProtocolError:
            # NOTE(review): ``data_to_send`` is a method — this tests the
            # bound-method object (always truthy) rather than pending bytes;
            # presumably ``self.conn.data_to_send()`` was intended.  Harmless
            # here since writing b'' is a no-op.
            if self.conn.data_to_send:
                self.transport.write(self.conn.data_to_send())
            self.transport.loseConnection()
        else:
            for event in events:
                if isinstance(event, RequestReceived):
                    self.requestReceived(event.headers, event.stream_id)
                elif isinstance(event, DataReceived):
                    self.dataFrameReceived(event.stream_id)
                elif isinstance(event, WindowUpdated):
                    self.windowUpdated(event)
            # NOTE(review): same always-truthy method check as above.
            if self.conn.data_to_send:
                self.transport.write(self.conn.data_to_send())

    def requestReceived(self, headers, stream_id):
        """Serve a GET request: 404 for missing paths, otherwise stream the file."""
        headers = dict(headers)
        assert (headers[b':method'] == b'GET')
        path = headers[b':path'].lstrip(b'/')
        full_path = os.path.join(self.root, path)
        if (not os.path.exists(full_path)):
            response_headers = ((':status', '404'), ('content-length', '0'), ('server', 'twisted-h2'))
            self.conn.send_headers(stream_id, response_headers, end_stream=True)
            self.transport.write(self.conn.data_to_send())
        else:
            self.sendFile(full_path, stream_id)
        return

    def dataFrameReceived(self, stream_id):
        # Request bodies are not supported: reset any stream that sends DATA.
        self.conn.reset_stream(stream_id)
        self.transport.write(self.conn.data_to_send())

    def sendFile(self, file_path, stream_id):
        """Send response headers for *file_path* and start streaming its body."""
        filesize = os.stat(file_path).st_size
        (content_type, content_encoding) = mimetypes.guess_type(file_path.decode('utf-8'))
        response_headers = [(':status', '200'), ('content-length', str(filesize)), ('server', 'twisted-h2')]
        if content_type:
            response_headers.append(('content-type', content_type))
        if content_encoding:
            response_headers.append(('content-encoding', content_encoding))
        self.conn.send_headers(stream_id, response_headers)
        self.transport.write(self.conn.data_to_send())
        f = open(file_path, 'rb')
        # NOTE(review): _send_file is a plain generator here, which has no
        # addErrback — this presumably relies on a decorator (e.g. Twisted's
        # inlineCallbacks) that is not visible in this copy.  TODO confirm.
        d = self._send_file(f, stream_id)
        d.addErrback(functools.partial(close_file, f))

    def windowUpdated(self, event):
        """Wake writers blocked on flow control for one stream or the connection."""
        stream_id = event.stream_id
        if (stream_id and (stream_id in self._flow_control_deferreds)):
            d = self._flow_control_deferreds.pop(stream_id)
            d.callback(event.delta)
        elif (not stream_id):
            # Connection-level update: wake every waiting stream.
            for d in self._flow_control_deferreds.values():
                d.callback(event.delta)
            self._flow_control_deferreds = {}
        return

    def _send_file(self, file, stream_id):
        """Stream *file* on *stream_id*, yielding whenever flow control blocks."""
        keep_reading = True
        while keep_reading:
            while (not self.conn.remote_flow_control_window(stream_id)):
                (yield self.wait_for_flow_control(stream_id))
            chunk_size = min(self.conn.remote_flow_control_window(stream_id), READ_CHUNK_SIZE)
            data = file.read(chunk_size)
            # A short read means EOF: end the stream with this chunk.
            keep_reading = (len(data) == chunk_size)
            self.conn.send_data(stream_id, data, (not keep_reading))
            self.transport.write(self.conn.data_to_send())
            if (not keep_reading):
                break
        file.close()

    def wait_for_flow_control(self, stream_id):
        """Return a Deferred fired when *stream_id*'s window reopens."""
        d = Deferred()
        self._flow_control_deferreds[stream_id] = d
        return d
class ImportNodeTest(resources.SysPathSetup, unittest.TestCase):
    """Tests for astroid Import/ImportFrom nodes: resolution, real_name and as_string."""

    def setUp(self) -> None:
        super().setUp()
        # Two fixture modules containing assorted import statements.
        self.module = resources.build_file('data/module.py', 'data.module')
        self.module2 = resources.build_file('data/module2.py', 'data.module2')

    def test_import_self_resolve(self) -> None:
        """An aliased ``import os as myos`` resolves to the os module."""
        myos = next(self.module2.igetattr('myos'))
        self.assertTrue(isinstance(myos, nodes.Module), myos)
        self.assertEqual(myos.name, 'os')
        self.assertEqual(myos.qname(), 'os')
        self.assertEqual(myos.pytype(), 'builtins.module')

    def test_from_self_resolve(self) -> None:
        """``from ... import`` names resolve to their defining module."""
        namenode = next(self.module.igetattr('NameNode'))
        self.assertTrue(isinstance(namenode, nodes.ClassDef), namenode)
        self.assertEqual(namenode.root().name, 'astroid.nodes.node_classes')
        self.assertEqual(namenode.qname(), 'astroid.nodes.node_classes.Name')
        self.assertEqual(namenode.pytype(), 'builtins.type')
        abspath = next(self.module2.igetattr('abspath'))
        self.assertTrue(isinstance(abspath, nodes.FunctionDef), abspath)
        self.assertEqual(abspath.root().name, 'os.path')
        self.assertEqual(abspath.pytype(), 'builtins.function')
        if (sys.platform != 'win32'):
            # qname check skipped on Windows (guard kept from upstream).
            self.assertEqual(abspath.qname(), 'os.path.abspath')

    def test_real_name(self) -> None:
        """real_name maps an alias back to the originally imported name."""
        from_ = self.module['NameNode']
        self.assertEqual(from_.real_name('NameNode'), 'Name')
        imp_ = self.module['os']
        self.assertEqual(imp_.real_name('os'), 'os')
        self.assertRaises(AttributeInferenceError, imp_.real_name, 'os.path')
        imp_ = self.module['NameNode']
        self.assertEqual(imp_.real_name('NameNode'), 'Name')
        self.assertRaises(AttributeInferenceError, imp_.real_name, 'Name')
        imp_ = self.module2['YO']
        self.assertEqual(imp_.real_name('YO'), 'YO')
        self.assertRaises(AttributeInferenceError, imp_.real_name, 'data')

    def test_as_string(self) -> None:
        """as_string round-trips plain, aliased and relative imports."""
        ast = self.module['modutils']
        self.assertEqual(ast.as_string(), 'from astroid import modutils')
        ast = self.module['NameNode']
        self.assertEqual(ast.as_string(), 'from astroid.nodes.node_classes import Name as NameNode')
        ast = self.module['os']
        self.assertEqual(ast.as_string(), 'import os.path')
        code = 'from . import here\nfrom .. import door\nfrom .store import bread\nfrom ..cave import wine\n\n'
        ast = abuilder.string_build(code)
        self.assertMultiLineEqual(ast.as_string(), code)

    def test_bad_import_inference(self) -> None:
        """Inference tolerates an unresolvable fallback import branch."""
        code = '\n try:\n from pickle import PickleError\n except ImportError:\n from nonexistent import PickleError\n\n try:\n pass\n except PickleError:\n pass\n '
        module = builder.parse(code)
        handler_type = module.body[1].handlers[0].type
        excs = list(nodes.unpack_infer(handler_type))
        self.assertIsInstance(excs[0], nodes.ClassDef)
        self.assertEqual(excs[0].name, 'PickleError')
        self.assertIs(excs[(- 1)], util.Uninferable)

    def test_absolute_import(self) -> None:
        """'email' must resolve to the stdlib package, not a local data/email.py."""
        module = resources.build_file('data/absimport.py')
        ctx = InferenceContext()
        ctx.lookupname = 'message'
        next(module['message'].infer(ctx))
        ctx.lookupname = 'email'
        m = next(module['email'].infer(ctx))
        self.assertFalse(m.file.startswith(os.path.join('data', 'email.py')))

    def test_more_absolute_import(self) -> None:
        module = resources.build_file('data/module1abs/__init__.py', 'data.module1abs')
        self.assertIn('sys', module.locals)

    # Names whose conditional imports are exercised by the two tests below.
    _pickle_names = ('dump',)

    def test_conditional(self) -> None:
        module = resources.build_file('data/conditional_import/__init__.py')
        ctx = InferenceContext()
        for name in self._pickle_names:
            ctx.lookupname = name
            some = list(module[name].infer(ctx))
            assert (Uninferable not in some), name

    def test_conditional_import(self) -> None:
        module = resources.build_file('data/conditional.py')
        ctx = InferenceContext()
        for name in self._pickle_names:
            ctx.lookupname = name
            some = list(module[name].infer(ctx))
            assert (Uninferable not in some), name
_cache(maxsize=1000, typed=False)
def eight_band_strain_hamiltonian(kx, ky, kz, Ev0, Ec0, exx, ezz, me_eff, gamma1, gamma2, gamma3, a0, Delta, ac, av, b, Ep):
    """Eigen-energies of the eight-band k.p Hamiltonian (cb, hh, lh, so for
    both spins) including biaxial strain along [001].

    See Stanko Tomic et al., Phys. Rev. B 73, 125348 (2006); rows and columns
    1 and 6 of the ten-band Hamiltonian are removed.

    Returns:
        numpy array of the eight eigen-energies sorted in ascending order.
    """
    science_reference('k.p Hamiltonian', 'Stanko Tomic et al., Electronic structure of InyGa1yAs1xNxGaAs(N) quantum dots by ten-band k.p theory. Phys. Rev. B 73, 125348 (2006)')
    av = abs(av)
    Eg = (Ec0 - Ev0)
    sqrt2 = np.sqrt(2)
    sqrt3 = np.sqrt(3)
    sqrt6 = np.sqrt(6)
    sqrt3o2 = np.sqrt((3 / 2))
    m0 = constants.electron_mass
    # Kane interband momentum parameter and renormalised band parameters.
    P0 = np.sqrt(((Ep * (constants.hbar ** 2)) / (2 * m0)))
    gc = ((1 / (me_eff / constants.electron_mass)) - ((Ep / 3) * ((2 / Eg) + (1 / (Eg + Delta)))))
    g1 = (gamma1 - (Ep / ((3 * Eg) + Delta)))
    g2 = (gamma2 - (Ep / ((6 * Eg) + (2 * Delta))))
    g3 = (gamma3 - (Ep / ((6 * Eg) + (2 * Delta))))
    Ck = ((constants.hbar ** 2) / (2 * m0))
    # k-dependent matrix elements.
    Ok = (lambda kx, ky, kz: ((Ck * gc) * (((kx ** 2) + (ky ** 2)) + (kz ** 2))))
    Pk = (lambda kx, ky, kz: ((Ck * g1) * (((kx ** 2) + (ky ** 2)) + (kz ** 2))))
    Qk = (lambda kx, ky, kz: ((Ck * g2) * (((kx ** 2) + (ky ** 2)) - (2 * (kz ** 2)))))
    Rk = (lambda kx, ky, kz: ((Ck * sqrt3) * ((g2 * ((kx ** 2) - (ky ** 2))) - ((((2 * 1j) * g3) * kx) * ky))))
    Sk = (lambda kx, ky, kz: ((((Ck * sqrt6) * g3) * (kx - (1j * ky))) * kz))
    Tk = (lambda kx, ky, kz: (((1 / sqrt6) * P0) * (kx + (1j * ky))))
    Uk = (lambda kx, ky, kz: (((1 / sqrt3) * P0) * kz))
    # Complex conjugate partners.
    Rkc = (lambda kx, ky, kz: ((Ck * sqrt3) * ((g2 * ((kx ** 2) - (ky ** 2))) + ((((2 * 1j) * g3) * kx) * ky))))
    Skc = (lambda kx, ky, kz: ((((Ck * sqrt6) * g3) * (kx + (1j * ky))) * kz))
    Tkc = (lambda kx, ky, kz: (((1 / sqrt6) * P0) * (kx - (1j * ky))))
    # Strain contributions (deformation potentials ac, av, b).
    Oe = (lambda exx, eyy, ezz: (ac * ((exx + eyy) + ezz)))
    Pe = (lambda exx, eyy, ezz: ((- av) * ((exx + eyy) + ezz)))
    Qe = (lambda exx, eyy, ezz: (((- b) / 2) * ((exx + eyy) - (2 * ezz))))
    # Combined k + strain elements.
    O = (lambda kx, ky, kz, exx, eyy, ezz: (Ok(kx, ky, kz) + Oe(exx, eyy, ezz)))
    P = (lambda kx, ky, kz, exx, eyy, ezz: (Pk(kx, ky, kz) + Pe(exx, eyy, ezz)))
    Q = (lambda kx, ky, kz, exx, eyy, ezz: (Qk(kx, ky, kz) + Qe(exx, eyy, ezz)))
    R = (lambda kx, ky, kz, exx, eyy, ezz: Rk(kx, ky, kz))
    S = (lambda kx, ky, kz, exx, eyy, ezz: Sk(kx, ky, kz))
    T = (lambda kx, ky, kz, exx, eyy, ezz: Tk(kx, ky, kz))
    U = (lambda kx, ky, kz, exx, eyy, ezz: Uk(kx, ky, kz))
    Rc = (lambda kx, ky, kz, exx, eyy, ezz: Rkc(kx, ky, kz))
    Sc = (lambda kx, ky, kz, exx, eyy, ezz: Skc(kx, ky, kz))
    Tc = (lambda kx, ky, kz, exx, eyy, ezz: Tkc(kx, ky, kz))
    # Band-edge diagonal terms.
    Ecb = (lambda kx, ky, kz, exx, eyy, ezz: (Ec0 + O(kx, ky, kz, exx, eyy, ezz)))
    Ehh = (lambda kx, ky, kz, exx, eyy, ezz: (Ev0 - (P(kx, ky, kz, exx, eyy, ezz) + Q(kx, ky, kz, exx, eyy, ezz))))
    Elh = (lambda kx, ky, kz, exx, eyy, ezz: (Ev0 - (P(kx, ky, kz, exx, eyy, ezz) - Q(kx, ky, kz, exx, eyy, ezz))))
    Eso = (lambda kx, ky, kz, exx, eyy, ezz: (Ev0 - (P(kx, ky, kz, exx, eyy, ezz) + Delta)))
    # Biaxial strain along [001]: in-plane components are equal.
    eyy = exx
    p = P(kx, ky, kz, exx, eyy, ezz)
    q = Q(kx, ky, kz, exx, eyy, ezz)
    r = R(kx, ky, kz, exx, eyy, ezz)
    s = S(kx, ky, kz, exx, eyy, ezz)
    t = T(kx, ky, kz, exx, eyy, ezz)
    u = U(kx, ky, kz, exx, eyy, ezz)
    rc = Rc(kx, ky, kz, exx, eyy, ezz)
    sc = Sc(kx, ky, kz, exx, eyy, ezz)
    tc = Tc(kx, ky, kz, exx, eyy, ezz)
    cb = Ecb(kx, ky, kz, exx, eyy, ezz)
    hh = Ehh(kx, ky, kz, exx, eyy, ezz)
    lh = Elh(kx, ky, kz, exx, eyy, ezz)
    so = Eso(kx, ky, kz, exx, eyy, ezz)
    # 8x8 Hamiltonian (two spin blocks coupled through T/U terms).
    H_ST = np.mat([[cb, ((- sqrt3) * t), (sqrt2 * u), (- u), 0, 0, (- tc), ((- sqrt2) * tc)], [((- sqrt3) * tc), hh, (sqrt2 * s), (- s), 0, 0, (- r), ((- sqrt2) * r)], [(sqrt2 * u), (sqrt2 * sc), lh, ((- sqrt2) * q), tc, r, 0, (sqrt3 * s)], [(- u), (- sc), ((- sqrt2) * q), so, (sqrt2 * tc), (sqrt2 * r), ((- sqrt3) * s), 0], [0, 0, t, (sqrt2 * t), cb, ((- sqrt3) * tc), (sqrt2 * u), (- u)], [0, 0, rc, (sqrt2 * rc), ((- sqrt3) * t), hh, (sqrt2 * sc), (- sc)], [(- t), (- rc), 0, ((- sqrt3) * sc), (sqrt2 * u), (sqrt2 * s), lh, ((- sqrt2) * q)], [((- sqrt2) * t), ((- sqrt2) * rc), (sqrt3 * sc), 0, (- u), (- s), ((- sqrt2) * q), so]])
    H_ST = H_ST.transpose()
    (E, Psi) = eig(H_ST)
    bands = np.array(sorted(E.real))
    return bands
class IdentityObservationsData(ground_truth_data.GroundTruthData):
    """Toy dataset whose observations are the factor values themselves."""

    def num_factors(self):
        # Ten latent factors.  NOTE(review): upstream these accessors are
        # @property-decorated; decorators are not visible in this copy — confirm.
        return 10

    def observation_shape(self):
        return 10

    def factors_num_values(self):
        # NOTE(review): declares one value per factor, yet sample_factors
        # draws integers in [1, 10] — looks inconsistent; confirm upstream.
        return ([1] * 10)

    def sample_factors(self, num, random_state):
        # random_integers is deprecated in NumPy (inclusive upper bound).
        return random_state.random_integers(10, size=(num, self.num_factors))

    def sample_observations_from_factors(self, factors, random_state):
        # Identity mapping: the observation *is* the factor vector.
        return factors
class ISC(EarthquakeCatalog):
    """Earthquake catalog backed by the ISC web service (QuakeML responses).

    NOTE(review): the service base URLs appear to have been stripped from
    this copy of the source — see the ``url = (' + '&'.join(p))``
    expressions below; confirm against upstream.
    """

    def __init__(self, catalog=None):
        # name -> pyrocko event, filled lazily by iter_event_names().
        self.events = {}

    def flush(self):
        """Drop all cached events."""
        self.events = {}

    def append_time_params(self, a, time_range):
        """Append start/end date-time query parameters for *time_range* via the callback *a*."""
        (date_start_s, tstart_s) = util.time_to_str(time_range[0], format='%Y-%m-%d %H:%M:%S').split()
        (date_end_s, tend_s) = util.time_to_str(time_range[1], format='%Y-%m-%d %H:%M:%S').split()
        date_start_s = date_start_s.split('-')
        date_end_s = date_end_s.split('-')
        a(('start_year=%s' % date_start_s[0]))
        a(('start_month=%s' % date_start_s[1]))
        a(('start_day=%s' % date_start_s[2]))
        a(('start_time=%s' % tstart_s))
        a(('end_year=%s' % date_end_s[0]))
        a(('end_month=%s' % date_end_s[1]))
        a(('end_day=%s' % date_end_s[2]))
        a(('end_time=%s' % tend_s))

    def iter_event_names(self, time_range=None, magmin=None, magmax=None, latmin=(- 90.0), latmax=90.0, lonmin=(- 180.0), lonmax=180.0):
        """Query reviewed events in the given window and yield their names (events are cached on self)."""
        p = []
        a = p.append
        a('out_format=CATQuakeML')
        a('request=REVIEWED')
        a('searchshape=RECT')
        self.append_time_params(a, time_range)
        if magmin:
            a(('min_mag=%g' % magmin))
        if magmax:
            a(('max_mag=%g' % magmax))
        a(('bot_lat=%g' % latmin))
        a(('top_lat=%g' % latmax))
        a(('left_lon=%g' % lonmin))
        a(('right_lon=%g' % lonmax))
        url = (' + '&'.join(p))
        logger.debug(('Opening URL: %s' % url))
        page = urlopen(url).read().decode()
        logger.debug(('Received page (%i bytes)' % len(page)))
        if ('The search could not be run due to problems' in page):
            logger.warning(('%s\nurl: %s' % (page, url)))
            return
        elif ('No events were found.' in page):
            logger.info('No events were found.')
            events = []
        else:
            try:
                data = quakeml.QuakeML.load_xml(string=page)
            except Exception:
                # Distinguish rate limiting from genuinely malformed XML.
                if (page[:500].find('Please try again in a few minutes') != (- 1)):
                    raise ISCBlocked(((((('Apparently, we have queried ISC too eagerly:\n' + ('-' * 79)) + '\n') + page) + '\n') + ('-' * 79)))
                else:
                    raise ISCError(((((("Couldn't parse XML results from ISC:\n" + ('-' * 79)) + '\n') + page) + '\n') + ('-' * 79)))
            events = data.get_pyrocko_events()
            for ev in events:
                self.events[ev.name] = ev
        for ev in events:
            if ((time_range[0] <= ev.time) and (ev.time <= time_range[1])):
                (yield ev.name)

    def get_event(self, name):
        """Return the cached event *name*, fetching a +/- 1 day window around its encoded time if needed."""
        if (name not in self.events):
            t = self._name_to_date(name)
            for name2 in self.iter_event_names(time_range=((t - ((24 * 60) * 60)), (t + ((24 * 60) * 60)))):
                if (name2 == name):
                    break
        return self.events[name]

    def get_phase_markers(self, time_range, station_codes, phases):
        """Fetch phase arrivals as pyrocko markers for the given stations ('global' or a list) and phase names."""
        p = []
        a = p.append
        a('out_format=QuakeML')
        a('request=STNARRIVALS')
        if (station_codes == 'global'):
            a('stnsearch=GLOBAL')
        else:
            a('stnsearch=STN')
            a(('sta_list=%s' % ','.join(station_codes)))
        a(('phaselist=%s' % ','.join(phases)))
        self.append_time_params(a, time_range)
        url = (' + '&'.join(p))
        logger.debug(('Opening URL: %s' % url))
        page = urlopen(url)
        page = page.read().decode()
        if ('No stations were found.' in page):
            logger.info('No stations were found.')
            return []
        logger.debug(('Received page (%i bytes)' % len(page)))
        data = quakeml.QuakeML.load_xml(string=page)
        markers = data.get_pyrocko_phase_markers()
        markers = self.replace_isc_codes(markers)
        return markers

    def replace_isc_codes(self, markers):
        """Normalise ISC NSLC codes in place: '--' location -> '', '???' channel -> '*'."""
        for m in markers:
            new_nslc_ids = []
            for (n, s, l_, c) in m.get_nslc_ids():
                l_ = l_.replace('--', '')
                c = c.replace('???', '*')
                new_nslc_ids.append((n, s, l_, c))
            m.nslc_ids = new_nslc_ids
        return markers

    def _name_to_date(self, name):
        # The last 23 characters of an ISC event name encode its timestamp.
        ds = name[(- 23):]
        t = util.str_to_time(ds, format='%Y-%m-%d_%H-%M-%S.3FRAC')
        return t
def _warn_incorrect_binary_bitness(exe_name):
    """Emit a UserWarning when a 64-bit target binary is driven from 32-bit Python.

    Only existing, absolute paths are inspected; the warning points at the
    caller (stacklevel=2).
    """
    # Guard clauses preserve the original short-circuit evaluation order.
    if not os.path.isabs(exe_name):
        return
    if not os.path.isfile(exe_name):
        return
    if handleprops.is64bitbinary(exe_name) and not is_x64_Python():
        warnings.warn('64-bit binary from 32-bit Python may work incorrectly (please use 64-bit Python instead)', UserWarning, stacklevel=2)
class TBRangeCharacter(DefaultCharacter):
    """Character typeclass for turn-based range combat with hit points."""

    def at_object_creation(self):
        """Initialise combat attributes: full HP at creation."""
        self.db.max_hp = 100
        self.db.hp = self.db.max_hp

    def at_before_move(self, destination):
        """Veto movement while in combat or when defeated.

        Returns:
            bool: True to allow the move, False to block it.
        """
        if is_in_combat(self):
            self.msg("You can't exit a room while in combat!")
            return False
        # Fix: was ``self.db.HP`` — attribute keys are case-sensitive, so the
        # upper-case name was never set and would compare None <= 0.
        if (self.db.hp <= 0):
            self.msg("You can't move, you've been defeated!")
            return False
        return True
def test_box_on_line():
    """is_on_same_line for axis-aligned and slanted quadrilateral boxes."""
    # Axis-aligned: a half-height offset still counts as the same line,
    # a much larger vertical gap does not.
    base = [0, 0, 1, 0, 1, 1, 0, 1]
    nearby = [2, 0.5, 3, 0.5, 3, 1.5, 2, 1.5]
    far_off = [4, 0.8, 5, 0.8, 5, 1.8, 4, 1.8]
    assert is_on_same_line(base, nearby, 0.5)
    assert not is_on_same_line(base, far_off, 0.5)
    # Slanted box: the threshold separates a borderline match from a miss.
    slanted = [0, 0, 1, 1, 1, 2, 0, 1]
    borderline = [2, 1.5, 3, 1.5, 3, 2.5, 2, 2.5]
    just_off = [2, 1.6, 3, 1.6, 3, 2.6, 2, 2.6]
    assert is_on_same_line(slanted, borderline, 0.5)
    assert not is_on_same_line(slanted, just_off, 0.5)
_fixtures(WebFixture, PopupAFixture)
def test_customising_dialog_buttons(web_fixture, popup_a_fixture):
    """Extra JS buttons registered via add_js_button appear in the popped-up dialog."""
    class PopupTestPanel(Div):
        def __init__(self, view):
            super().__init__(view)
            popup_a = self.add_child(PopupA(view, view.as_bookmark(), '#contents'))
            # Two custom dialog buttons under test.
            popup_a.add_js_button('Butt1')
            popup_a.add_js_button('Butt2')
            popup_contents = self.add_child(P(view, text='this is the content of the popup'))
            popup_contents.set_id('contents')
    wsgi_app = web_fixture.new_wsgi_app(enable_js=True, child_factory=PopupTestPanel.factory())
    web_fixture.reahl_server.set_app(wsgi_app)
    browser = web_fixture.driver_browser
    button1_xpath = XPath.button_labelled('Butt1')
    button2_xpath = XPath.button_labelled('Butt2')
    browser.open('/')
    browser.click(XPath.link().with_text('Home page'))
    browser.wait_for(popup_a_fixture.is_popped_up)
    assert browser.is_element_present(button1_xpath)
    assert browser.is_element_present(button2_xpath)
def get_payer_channel(channelidentifiers_to_channels: Dict[(ChannelID, NettingChannelState)], transfer_pair: MediationPairState) -> Optional[NettingChannelState]:
    """Look up the channel used by the payer side of *transfer_pair*.

    Returns None when the channel identifier is not known to the mapping.
    """
    channel_id = transfer_pair.payer_transfer.balance_proof.channel_identifier
    return channelidentifiers_to_channels.get(channel_id)
def test_vf_row_ground_2d(test_system_fixed_tilt):
    """vf_row_ground_2d: scalar value at the row's base and vector positions."""
    ts, _, _ = test_system_fixed_tilt
    # At fx=0 the view factor reduces to 0.5 * (1 - cos(tilt)).
    result = utils.vf_row_ground_2d(ts['surface_tilt'], ts['gcr'], 0.0)
    assert np.isclose(result, 0.5 * (1.0 - cosd(ts['surface_tilt'])))
    # Along the row, the view factor follows the ground angle phi.
    fractions = np.array([0.0, 0.5, 1.0])
    result = utils.vf_row_ground_2d(ts['surface_tilt'], ts['gcr'], fractions)
    angles = ground_angle(ts['surface_tilt'], ts['gcr'], fractions)
    assert np.allclose(result, 0.5 * (1 - cosd(angles - ts['surface_tilt'])))
def test_cof_list_input():
    """A list context value raises a Call carrying the whole list as groups."""
    with pytest.raises(Call) as excinfo:
        cof_func(name='blah', instruction_type=Call, context=Context({'key': ['b', 'c']}), context_key='key')
    raised = excinfo.value
    assert isinstance(raised, Call)
    assert raised.groups == ['b', 'c']
    # No success/failure groups were configured.
    assert not raised.success_group
    assert not raised.failure_group
    assert raised.original_config == ('key', ['b', 'c'])
class PointCloudField():
    """Data field that loads a point cloud stored in an ``.npz`` archive."""

    def __init__(self, file_name):
        # Name of the .npz file inside each model directory.
        self.file_name = file_name

    def load(self, model_path):
        """Load the point cloud for the model at *model_path*.

        Returns:
            dict with key ``'cloud'`` holding the (N, D) float32 points from
            the archive's ``points`` entry.
        """
        archive = np.load(os.path.join(model_path, self.file_name))
        cloud = archive['points'].astype(np.float32)
        return {'cloud': cloud}
.skipif((sys.version_info[0] < 3), reason='Python 3+ required for timezone support')
def test_flexible_datetime_with_timezone_that_has_colons():
    """%z parsing accepts UTC offsets containing colons (e.g. '+00:00:00')."""
    from datetime import timezone
    r = parse.parse('{dt:%Y-%m-%d %H:%M:%S %z}', '2023-11-21 13:23:27 +00:00:00')
    assert (r.named['dt'] == datetime(2023, 11, 21, 13, 23, 27, tzinfo=timezone.utc))
def create_qr_from_map(design, url, mode, error):
    """Encode *design* as a QR code whose payload begins with ``url + '/'``.

    The raw bit string from the design is converted according to *mode*
    ('binary' or alphanumeric), then the leading ``len(url) + 1`` characters
    are replaced by the URL prefix before building the code at the version
    the raw data dictated.
    """
    bits, version = get_raw_qr_data(design, error, mode)
    if mode == 'binary':
        payload = bitstring_to_bin(bits)
    else:
        payload = bitstring_to_alphanumeric(bits)
    prefix = url + '/'
    with_url = prefix + payload[len(prefix):]
    return pyqrcode.create(with_url, error=error, mode=mode, version=version)
def convert_xlnet_checkpoint_to_pytorch(tf_checkpoint_path, bert_config_file, pytorch_dump_folder_path, finetuning_task=None):
    """Convert a TensorFlow XLNet checkpoint into PyTorch weight + config files.

    The model head is chosen from *finetuning_task*: a GLUE task maps to
    sequence classification, a task containing 'squad' to question answering,
    anything else to the plain LM head.
    """
    config = XLNetConfig.from_json_file(bert_config_file)
    finetuning_task = (finetuning_task.lower() if (finetuning_task is not None) else '')
    if (finetuning_task in GLUE_TASKS_NUM_LABELS):
        print(f'Building PyTorch XLNetForSequenceClassification model from configuration: {config}')
        config.finetuning_task = finetuning_task
        config.num_labels = GLUE_TASKS_NUM_LABELS[finetuning_task]
        model = XLNetForSequenceClassification(config)
    elif ('squad' in finetuning_task):
        config.finetuning_task = finetuning_task
        model = XLNetForQuestionAnswering(config)
    else:
        model = XLNetLMHeadModel(config)
    # Copy the TF variables into the freshly built PyTorch model.
    load_tf_weights_in_xlnet(model, config, tf_checkpoint_path)
    pytorch_weights_dump_path = os.path.join(pytorch_dump_folder_path, WEIGHTS_NAME)
    pytorch_config_dump_path = os.path.join(pytorch_dump_folder_path, CONFIG_NAME)
    print(f'Save PyTorch model to {os.path.abspath(pytorch_weights_dump_path)}')
    torch.save(model.state_dict(), pytorch_weights_dump_path)
    print(f'Save configuration file to {os.path.abspath(pytorch_config_dump_path)}')
    with open(pytorch_config_dump_path, 'w', encoding='utf-8') as f:
        f.write(config.to_json_string())
def draw_sample(font_file):
    """Render a sample sheet for *font_file*.

    Shows the sample text at sizes 15..50 (step 5) with guide lines, a size
    label per row and the font path as a footer; returns the composited
    RGBA image.
    """
    HEIGHT = 500
    WIDTH = 800
    background = Image.new('RGBA', (WIDTH, HEIGHT), ImageColor.getrgb('white'))
    foreground = Image.new('RGBA', (WIDTH, HEIGHT), (255, 255, 255, 0))
    draw_b = ImageDraw.Draw(background)
    draw_f = ImageDraw.Draw(foreground)
    label_font = ImageFont.truetype(font_file, size=15)
    y = 0
    for size in range(15, 55, 5):
        sample_font = ImageFont.truetype(font_file, size=size)
        offset = (size * 0.7)
        y += offset
        # Guide lines for this row.
        draw_b.line(((0, (y + (size * 0.2))), (WIDTH, (y + (size * 0.2)))), LINE_COLOR, 1)
        draw_b.line(((0, (y + offset)), (WIDTH, (y + offset))), LINE_COLOR, 1)
        draw_f.text((0, y), str(size), TEXT_COLOR, font=label_font)
        draw_f.text((20, y), SAMPLE_TEXT, TEXT_COLOR, font=sample_font)
    # Footer with the font path (fixed coordinates — drawn once after the loop).
    draw_f.text((20, 480), font_file, TEXT_COLOR, font=label_font)
    return Image.alpha_composite(background, foreground)
class Repo(common.Common, unittest.TestCase):
    """End-to-end versioneer tests against a real, throwaway git repository.

    Each ``test_*`` method drives :meth:`run_test` with a different demo
    project layout.  ``run_test`` builds a fresh repo, installs versioneer,
    then walks the repo through a series of tag/dirty states (S1..S6) and,
    for each state, checks the computed version in several source-tree
    flavours (TA..TF, see :meth:`do_checks`).
    """

    def test_full(self):
        # Standard project, vendored versioneer.py, project at the repo root.
        self.run_test('test/demoapp', False, '.', False)

    def test_script_only(self):
        # Project with only a script, no package __init__.py to append to.
        self.run_test('test/demoapp-script-only', True, '.', False)

    def test_project_in_subdir(self):
        # Python project lives in a subdirectory of the repository.
        self.run_test('test/demoapp', False, 'project', False)

    def test_no_tag_prefix(self):
        # Override the configured tag prefix with an empty one.
        self.run_test('test/demoapp', False, '.', False, tag_prefix='')

    def test_pyproject(self):
        # PEP 518 flow: configure via pyproject.toml, no vendored versioneer.py.
        self.run_test('test/demoapp-pyproject', False, '.', True)

    def run_test(self, demoapp_dir, script_only, project_sub_dir, pep518, tag_prefix=None):
        """Copy *demoapp_dir* into a new git repo and exercise versioneer.

        Args:
            demoapp_dir: source tree of the demo project to copy.
            script_only: True when the project has no package to append to.
            project_sub_dir: where the python project lives inside the repo.
            pep518: True to configure via pyproject.toml and run
                ``python -m versioneer``; False to vendor versioneer.py.
            tag_prefix: override for the configured tag prefix (None keeps
                whatever the demo project's config declares).
        """
        self.testdir = tempfile.mkdtemp()
        if VERBOSE:
            print(('testdir: %s' % (self.testdir,)))
        if os.path.exists(self.testdir):
            self.rmtree(self.testdir)
        # Layout: testdir/demoapp is the git checkout; the python project may
        # live in a subdirectory of it.
        self.gitdir = os.path.join(self.testdir, 'demoapp')
        self.project_sub_dir = project_sub_dir
        self.projdir = os.path.join(self.testdir, self.gitdir, self.project_sub_dir)
        # A second, unrelated git repo used to verify that a stray GIT_DIR
        # environment variable does not confuse version computation.
        self.extra_git_dir = os.path.join(self.testdir, 'extra_git')
        os.mkdir(self.testdir)
        os.mkdir(self.extra_git_dir)
        self.git('init', workdir=self.extra_git_dir)
        shutil.copytree(demoapp_dir, self.projdir)
        setup_cfg_fn = self.project_file('setup.cfg')
        if os.path.exists(setup_cfg_fn):
            with open(setup_cfg_fn, 'r') as f:
                setup_cfg = f.read()
            # NOTE(review): the first replace() argument is an empty string —
            # the original placeholder (e.g. '@VCS@') appears to have been
            # lost in extraction; confirm against the original test file.
            setup_cfg = setup_cfg.replace('', 'git')
            tag_prefix_regex = 'tag_prefix = (.*)'
            if (tag_prefix is None):
                # Read the configured prefix so later tagging uses it.
                tag_prefix = re.search(tag_prefix_regex, setup_cfg).group(1)
            else:
                # Rewrite the config to the caller-requested prefix.
                setup_cfg = re.sub(tag_prefix_regex, f'tag_prefix = {tag_prefix}', setup_cfg)
            with open(setup_cfg_fn, 'w') as f:
                f.write(setup_cfg)
        if pep518:
            pyproject_path = Path(self.project_file('pyproject.toml'))
            # Point the build requirement at this working copy of versioneer.
            versioneer_source_root = Path(__file__).absolute().parent.parent.parent
            vsr = str(versioneer_source_root).replace('\\', '/')
            pyproject_toml = pyproject_path.read_text()
            # NOTE(review): empty replace() targets — placeholders likely lost
            # in extraction (see setup.cfg note above).
            pyproject_toml = pyproject_toml.replace('', f'file://{vsr}')
            pyproject_toml = pyproject_toml.replace('', 'git')
            tag_prefix_regex = 'tag_prefix = "(.*)"'
            if (tag_prefix is None):
                tag_prefix = re.search(tag_prefix_regex, pyproject_toml).group(1)
            else:
                pyproject_toml = re.sub(tag_prefix_regex, f'tag_prefix = "{tag_prefix}"', pyproject_toml)
            pyproject_path.write_text(pyproject_toml)
        else:
            shutil.copyfile('versioneer.py', self.project_file('versioneer.py'))
        self.git('init')
        self.git('add', '--all')
        self.git('commit', '-m', 'comment')
        # Before installation: one untagged commit.
        full = self.git('rev-parse', 'HEAD')
        v = self.python('setup.py', '--version')
        self.assertEqual(v, ('0+untagged.1.g%s' % full[:7]))
        v = self.python(self.project_file('setup.py'), '--version', workdir=self.testdir)
        self.assertEqual(v, ('0+untagged.1.g%s' % full[:7]))
        # Install versioneer into the project and check its console output.
        if pep518:
            out = self.python('-m', 'versioneer', 'install', '--no-vendor').splitlines()
        else:
            out = self.python('versioneer.py', 'setup').splitlines()
        self.assertEqual(out[0], 'creating src/demo/_version.py')
        init = os.path.join('src/demo', '__init__.py')
        if script_only:
            self.assertEqual(out[1], f" {init} doesn't exist, ok")
        else:
            self.assertEqual(out[1], f' appending to {init}')

        def remove_pyc(s):
            # Drop untracked .pyc/__pycache__ noise from `git status` output.
            return [f for f in s if (not (f.startswith('?? ') and (f.endswith('.pyc') or f.endswith('__pycache__/'))))]

        out = set(remove_pyc(self.git('status', '--porcelain').splitlines()))

        def pf(fn):
            # Path of *fn* relative to the repo root, POSIX-normalized.
            return posixpath.normpath(posixpath.join(self.project_sub_dir, fn))

        # NOTE(review): git porcelain normally emits two spaces after the
        # status letter ('A  path'); the single space here may be extraction
        # whitespace-collapse — confirm against the original file.
        expected = {('A %s' % pf('.gitattributes')), ('A %s' % pf('src/demo/_version.py'))}
        if (not script_only):
            expected.add(('M %s' % pf('src/demo/__init__.py')))
        self.assertEqual(out, expected)
        if (not script_only):
            # The installer must have appended the version boilerplate.
            with open(self.project_file('src/demo/__init__.py')) as fobj:
                i = fobj.read().splitlines()
            self.assertEqual(i[(- 2)], 'from . import _version')
            self.assertEqual(i[(- 1)], "__version__ = _version.get_versions()['version']")
        self.git('commit', '-m', 'add _version stuff')
        # A second install run must be a no-op.
        if pep518:
            out = self.python('-m', 'versioneer', 'install', '--no-vendor').splitlines()
        else:
            out = self.python('versioneer.py', 'setup').splitlines()
        self.assertEqual(out[0], 'creating src/demo/_version.py')
        if script_only:
            self.assertEqual(out[1], f" {init} doesn't exist, ok")
        else:
            self.assertEqual(out[1], f' {init} unmodified')
        out = set(remove_pyc(self.git('status', '--porcelain').splitlines()))
        self.assertEqual(out, set())

        UNABLE = 'unable to compute version'
        NOTAG = 'no suitable tags'
        # S1: two commits, no tags, clean tree.
        full = self.git('rev-parse', 'HEAD')
        short = ('0+untagged.2.g%s' % full[:7])
        self.do_checks('S1', {'TA': [short, full, False, None], 'TB': ['0+unknown', None, None, UNABLE], 'TC': [short, full, False, None], 'TD': ['0+unknown', full, False, NOTAG], 'TE': [short, full, False, None], 'TF': [short, full, False, None]})
        # S2: same, but with a dirty working tree.
        with open(self.project_file('setup.py'), 'a') as fobj:
            fobj.write('# dirty\n')
        full = self.git('rev-parse', 'HEAD')
        short = ('0+untagged.2.g%s.dirty' % full[:7])
        self.do_checks('S2', {'TA': [short, full, True, None], 'TB': ['0+unknown', None, None, UNABLE], 'TC': [short, full, True, None], 'TD': ['0+unknown', full, False, NOTAG], 'TE': [short, full, True, None], 'TF': [short, full, True, None]})
        # S3: commit the change and tag it; the 'aaa-999' tag must be ignored
        # because it does not match the tag prefix.
        self.git('add', self.project_file('setup.py'))
        self.git('commit', '-m', 'dirty')
        self.git('tag', f'{tag_prefix}1.0')
        self.git('tag', 'aaa-999')
        full = self.git('rev-parse', 'HEAD')
        short = '1.0'
        if VERBOSE:
            print(('FULL %s' % full))
        self.do_checks('S3', {'TA': [short, full, False, None], 'TB': ['0+unknown', None, None, UNABLE], 'TC': [short, full, False, None], 'TD': [short, full, False, None], 'TE': [short, full, False, None], 'TF': [short, full, False, None]})
        # S4: dirty tree sitting exactly on the tag.
        with open(self.project_file('setup.py'), 'a') as fobj:
            fobj.write('# dirty\n')
        full = self.git('rev-parse', 'HEAD')
        short = ('1.0+0.g%s.dirty' % full[:7])
        self.do_checks('S4', {'TA': [short, full, True, None], 'TB': ['0+unknown', None, None, UNABLE], 'TC': [short, full, True, None], 'TD': ['1.0', full, False, None], 'TE': [short, full, True, None], 'TF': [short, full, True, None]})
        # S5: one clean commit past the tag.
        self.git('add', self.project_file('setup.py'))
        self.git('commit', '-m', 'dirty')
        full = self.git('rev-parse', 'HEAD')
        short = ('1.0+1.g%s' % full[:7])
        self.do_checks('S5', {'TA': [short, full, False, None], 'TB': ['0+unknown', None, None, UNABLE], 'TC': [short, full, False, None], 'TD': ['0+unknown', full, False, NOTAG], 'TE': [short, full, False, None], 'TF': [short, full, False, None]})
        # S6: one commit past the tag plus a dirty tree.
        with open(self.project_file('setup.py'), 'a') as fobj:
            fobj.write('# more dirty\n')
        full = self.git('rev-parse', 'HEAD')
        short = ('1.0+1.g%s.dirty' % full[:7])
        self.do_checks('S6', {'TA': [short, full, True, None], 'TB': ['0+unknown', None, None, UNABLE], 'TC': [short, full, True, None], 'TD': ['0+unknown', full, False, NOTAG], 'TE': [short, full, True, None], 'TF': [short, full, True, None]})

    def do_checks(self, state, exps):
        """Check the version in six source-tree flavours for one repo state.

        TA: the git checkout itself (twice; once with GIT_DIR pointing at an
            unrelated repo, which must not change the result).
        TB: a copy with .git removed and no version info at all.
        TC: a .git-less copy whose directory name carries the version.
        TD: a `git archive` export (keyword-substituted _version.py).
        TE: an sdist built with `setup.py sdist`.
        TF: an sdist built with `python -m build` (only when a
            pyproject.toml exists).
        exps maps each flavour to [version, full-revisionid, dirty, error].
        """
        if os.path.exists(self.subpath('out')):
            self.rmtree(self.subpath('out'))
        # TA: checkout
        self.check_version(self.projdir, state, 'TA', exps['TA'])
        # A stray GIT_DIR in the environment must not leak into the checks.
        GIT_DIR = os.path.join(self.extra_git_dir, '.git')
        with mock.patch.dict(os.environ, {'GIT_DIR': GIT_DIR}):
            self.check_version(self.projdir, state, 'TA', exps['TA'])
        # TB: unpacked setup.py tarball without .git
        target = self.subpath('out/demoapp-TB')
        shutil.copytree(self.projdir, target)
        if os.path.exists(os.path.join(target, '.git')):
            self.rmtree(os.path.join(target, '.git'))
        self.check_version(target, state, 'TB', exps['TB'])
        # TC: like TB but the parent directory name encodes the version
        target = self.subpath('out/demo-1.1')
        shutil.copytree(self.projdir, target)
        if os.path.exists(os.path.join(target, '.git')):
            self.rmtree(os.path.join(target, '.git'))
        self.check_version(target, state, 'TC', ['1.1', None, False, None])
        # TD: git archive export (expanded-keyword _version.py)
        target = self.subpath('out/TD/demoapp-TD')
        self.git('archive', '--format=tar', '--prefix=demoapp-TD/', '--output=../demo.tar', 'HEAD')
        os.mkdir(self.subpath('out/TD'))
        with tarfile.TarFile(self.subpath('demo.tar')) as t:
            t.extractall(path=self.subpath('out/TD'))
        self.check_version(os.path.join(target, self.project_sub_dir), state, 'TD', exps['TD'])
        # TE: unpacked setup.py sdist tarball
        dist_path = os.path.join(self.projdir, 'dist')
        if os.path.exists(dist_path):
            self.rmtree(dist_path)
        self.python('setup.py', 'sdist', '--formats=tar')
        files = os.listdir(dist_path)
        self.assertTrue((len(files) == 1), files)
        distfile = files[0]
        self.assertEqual(distfile, ('demo-%s.tar' % exps['TE'][0]))
        fn = os.path.join(dist_path, distfile)
        os.mkdir(self.subpath('out/TE'))
        with tarfile.TarFile(fn) as t:
            t.extractall(path=self.subpath('out/TE'))
        target = self.subpath(('out/TE/demo-%s' % exps['TE'][0]))
        self.assertTrue(os.path.isdir(target))
        self.check_version(target, state, 'TE', exps['TE'])
        # TF: sdist built via PEP 517 `python -m build`, only when configured
        pyproject_path = (Path(self.projdir) / 'pyproject.toml')
        if (not pyproject_path.exists()):
            return
        dist_path = (Path(self.projdir) / 'dist')
        if dist_path.exists():
            self.rmtree(dist_path)
        self.python('-m', 'build', '--sdist', '--no-isolation')
        files = os.listdir(dist_path)
        self.assertTrue((len(files) == 1), files)
        distfile = files[0]
        self.assertEqual(distfile, ('demo-%s.tar.gz' % exps['TF'][0]))
        fn = os.path.join(dist_path, distfile)
        os.mkdir(self.subpath('out/TF'))
        with tarfile.open(fn) as t:
            t.extractall(path=self.subpath('out/TF'))
        target = self.subpath(('out/TF/demo-%s' % exps['TF'][0]))
        self.assertTrue(os.path.isdir(target))
        self.check_version(target, state, 'TF', exps['TF'])

    def check_version(self, workdir, state, tree, exps):
        """Assert the version reported by setup.py and by the built demo app.

        exps unpacks to (expected version, expected full revision id,
        expected dirty flag, expected error message).
        """
        (exp_version, exp_full, exp_dirty, exp_error) = exps
        if VERBOSE:
            print(('== starting %s %s' % (state, tree)))
        if VERBOSE:
            print(self.python('setup.py', 'version', workdir=workdir))
        # RA1: `setup.py --version` run from inside the tree.
        v = self.python('setup.py', '--version', workdir=workdir)
        self.compare(v, exp_version, state, tree, 'RA1')
        self.assertPEP440(v, state, tree, 'RA1')
        # RA2: same, but invoked with an absolute path from outside the tree.
        v = self.python(os.path.join(workdir, 'setup.py'), '--version', workdir=self.testdir)
        self.compare(v, exp_version, state, tree, 'RA2')
        self.assertPEP440(v, state, tree, 'RA2')
        # RB: build the project and query the installed runtime version info.
        if os.path.exists(os.path.join(workdir, 'build')):
            self.rmtree(os.path.join(workdir, 'build'))
        self.python('setup.py', 'build', '--build-lib=build/lib', '--build-scripts=build/lib', workdir=workdir)
        build_lib = os.path.join(workdir, 'build', 'lib')
        out = self.python('rundemo', '--version', workdir=build_lib)
        # rundemo prints `key: value` lines describing get_versions().
        data = dict((line.split(':', 1) for line in out.splitlines()))
        self.compare(data['__version__'], exp_version, state, tree, 'RB')
        self.assertPEP440(data['__version__'], state, tree, 'RB')
        self.compare(data['version'], exp_version, state, tree, 'RB')
        self.compare(data['dirty'], str(exp_dirty), state, tree, 'RB')
        self.compare(data['full-revisionid'], str(exp_full), state, tree, 'RB')
        self.compare(data['error'], str(exp_error), state, tree, 'RB')

    def compare(self, got, expected, state, tree, runtime):
        """assertEqual with a state/tree/runtime-labelled failure message."""
        where = '/'.join([state, tree, runtime])
        self.assertEqual(got, expected, ("%s: got '%s' != expected '%s'" % (where, got, expected)))
        if VERBOSE:
            print((' good %s' % where))

    def assertPEP440(self, got, state, tree, runtime):
        """Assert *got* parses as a canonical (non-legacy) PEP 440 version."""
        where = '/'.join([state, tree, runtime])
        pv = parse_version(got)
        # A LegacyVersion instance means the string was not PEP 440 at all.
        self.assertFalse(('Legacy' in pv.__class__.__name__), ("%s: '%s' was not pep440-compatible" % (where, got)))
        # Normalization must be a no-op, i.e. the string is already canonical.
        self.assertEqual(str(pv), got, ("%s: '%s' pep440-normalized to '%s'" % (where, got, str(pv))))
def _click_through_rate_input_check(input: torch.Tensor, weights: Union[(torch.Tensor, float, int)], *, num_tasks: int) -> None:
    """Validate the shapes of ``input`` and ``weights`` for a CTR update.

    Rules enforced:
      * ``input`` is 1-D (single task) or 2-D (multi-task);
      * a tensor ``weights`` must match ``input``'s shape exactly;
      * with ``num_tasks > 1`` the first dimension must equal ``num_tasks``.

    Raises:
        ValueError: on any shape violation.
    """
    if input.ndim not in (1, 2):
        raise ValueError(f'`input` should be a one or two dimensional tensor, got shape {input.shape}.')
    if isinstance(weights, torch.Tensor) and weights.shape != input.shape:
        raise ValueError(f'tensor `weights` should have the same shape as tensor `input`, got shapes {weights.shape} and {input.shape}, respectively.')
    if num_tasks == 1:
        if len(input.shape) > 1:
            raise ValueError(f'`num_tasks = 1`, `input` is expected to be one-dimensional tensor, but got shape ({input.shape}).')
    elif len(input.shape) == 1 or input.shape[0] != num_tasks:
        raise ValueError(f"`num_tasks = {num_tasks}`, `input`'s shape is expected to be ({num_tasks}, num_samples), but got shape ({input.shape}).")
def convert_batchnorm_parameters(model: torch.nn.Module, bn: Union[(torch.nn.BatchNorm1d, torch.nn.BatchNorm2d)]): with utils.in_eval_mode(model), torch.no_grad(): gamma = bn.weight beta = bn.bias running_mean = bn.running_mean inv_sigma = torch.rsqrt((bn.running_var + bn.eps)) weight = (gamma * inv_sigma) bias = (beta - (running_mean * weight)) bn.eps = 0 bn.track_running_stats = False bn.weight.copy_(weight.clone().detach()) bn.bias.copy_(bias.clone().detach()) bn.running_mean = torch.zeros(bn.running_mean.shape, device=bn.running_mean.device, dtype=bn.running_mean.dtype) bn.running_var = torch.ones(bn.running_var.shape, device=bn.running_var.device, dtype=bn.running_var.dtype)
.fast  # NOTE(review): truncated decorator — presumably `@pytest.mark.fast`; confirm against the original file
def test_spectrum_get_methods(verbose=True, plot=True, close_plots=True, *args, **kwargs):
    """Smoke-test the read accessors of a loaded Spectrum object.

    Loads a bundled N2(C) SPECAIR spectrum and exercises get_name, get/
    get_radiance_noslit, wavelength/wavenumber conversion, get_power,
    get_integral, conditions, variables, equilibrium/optically-thin flags,
    and the slit accessors after apply_slit().
    """
    from radis.test.utils import getTestFile
    from radis.tools.database import load_spec
    from radis.tools.slit import get_FWHM
    if (plot and close_plots):
        import matplotlib.pyplot as plt
        plt.close('all')
    s = load_spec(getTestFile('N2C_specair_380nm.spec'), binary=False)
    if verbose:
        print(s)
    # dir() is called only to make sure attribute listing does not crash.
    dir(s)
    assert (s.get_name() == 'N2C_specair_380nm')
    # get('radiance_noslit')[1] must match the dedicated accessor.
    assert all((s.get_radiance_noslit(Iunit='W/m2/sr/nm') == s.get('radiance_noslit', Iunit='W/m2/sr/nm')[1]))
    # Vacuum wavelength converted to wavenumber must round-trip.
    assert all((nm2cm(s.get_wavelength(medium='vacuum')) == s.get_wavenumber()))
    assert np.isclose(s.get_power(unit='W/cm2/sr'), 2632.)
    assert (s.get_waveunit() == 'nm')
    # Total power equals the integral of the spectral radiance.
    assert np.isclose(s.get_power(unit='W/cm2/sr'), s.get_integral('radiance_noslit', wunit='nm_vac', Iunit='W/cm2/sr/nm'))
    assert (s.get_conditions()['Tgas'] == 1500)
    assert (len(s.get_vars()) == 2)
    assert (s.is_at_equilibrium() == False)
    assert (s.is_optically_thin() == False)
    # Slit accessors: FWHM of the applied slit should be recovered to within
    # one wavelength step.
    s.apply_slit(0.5)
    (wslit, Islit) = s.get_slit()
    wstep = np.diff(wslit)[0]
    assert np.isclose(get_FWHM(*s.get_slit()), 0.5, atol=(1.1 * wstep))
    if plot:
        s.plot_slit()
    if verbose:
        print('Tested Spectrum methods:')
        print('...print(Spectrum)')
        print('.. get_name()')
        print('.. get_radiance_noslit() vs get()')
        print('.. get_wavelength() vs get_wavenumber')
        print('.. get_power()')
        print('.. get_waveunit()')
        print('.. get_power() vs get_integral()')
        print('.. get_conditions()')
        print('.. get_vars()')
        print('.. is_at_equilibrium()')
        print('.. is_optically_thin()')
        print('.. get_slit()')
class Edges():
    """Collects Graphviz edge statements and writes them out sorted."""

    def __init__(self):
        # Rendered edge statements; write() sorts them lexicographically.
        self.edges = []

    def e(self, source, target, label, color, italicize=False, weight=1):
        """Record one directed edge with an HTML-like label."""
        quoted_label = f'<<i>{label}</i>>' if italicize else f'<{label}>'
        self.edges.append(f'''{source} -> {target} [ label={quoted_label}, color="{color}", fontcolor="{color}", weight={weight}, ] ''')

    def write(self, f):
        """Sort the accumulated edges in place and write them to *f*."""
        self.edges.sort()
        f.write(''.join(self.edges))
def sum_regularizer(regularizer_list, scope=None):
    """Combine several regularizers into one that sums their penalties.

    Args:
        regularizer_list: regularizer callables; ``None`` entries are ignored.
        scope: optional name scope for the summation op.

    Returns:
        A callable ``sum_reg(weights)``, or ``None`` when no usable
        regularizer was supplied.
    """
    active = [reg for reg in regularizer_list if reg is not None]
    if not active:
        return None

    def sum_reg(weights):
        """Apply every regularizer to *weights* and add the penalties."""
        with ops.name_scope(scope, 'sum_regularizer', [weights]) as name:
            # Individual regularizers may return None; skip those terms.
            terms = [t for t in (reg(weights) for reg in active) if t is not None]
            if not terms:
                return None
            return math_ops.add_n(terms, name=name)

    return sum_reg
def read_config(filename, fail):
    """Read a newline-separated device list from the pypilot config dir.

    Args:
        filename: file name relative to the module-level ``pypilot_dir``.
        fail: value returned when the file is missing or unreadable.

    Returns:
        A list of stripped lines from the file, or *fail* on any error.
    """
    path = pypilot_dir + filename
    if not os.path.exists(path):
        return fail
    try:
        # `with` guarantees the handle is closed even when reading fails —
        # the original opened the file manually and leaked it on error paths.
        with open(path, 'r') as f:
            return [line.strip() for line in f]
    except Exception:
        # Best-effort: report and fall back, matching the original behavior.
        print(_('error reading'), path)
        return fail
class PresetStartingArea(PresetTab, Ui_PresetStartingArea, NodeListHelper):
    """Preset-editor tab for choosing which starting locations are allowed.

    Builds one checkbox per region/area/node from the game's starting
    location list and offers "quick fill" buttons for common selections.
    """

    starting_area_quick_fill_default: QtWidgets.QPushButton
    # Checkbox lookup tables, keyed by region name / area id / node id.
    _starting_location_for_region: dict[(str, QtWidgets.QCheckBox)]
    _starting_location_for_area: dict[(AreaIdentifier, QtWidgets.QCheckBox)]
    _starting_location_for_node: dict[(NodeIdentifier, QtWidgets.QCheckBox)]
    _num_quick_fill_buttons: int

    def __init__(self, editor: PresetEditor, game_description: GameDescription, window_manager: WindowManager):
        super().__init__(editor, game_description, window_manager)
        self.setupUi(self)
        self.starting_area_layout.setAlignment(QtCore.Qt.AlignmentFlag.AlignTop)
        # Build the full checkbox tree for every possible starting node.
        (self._starting_location_for_region, self._starting_location_for_area, self._starting_location_for_node) = self.create_node_list_selection(self.starting_locations_contents, self.starting_locations_layout, StartingLocationList.nodes_list(self.game_description.game), self._on_starting_area_check_changed)
        # NOTE(review): quick_fill_description is used here as a plain value,
        # so it was presumably decorated with @property in the original file
        # (decorator lost in extraction) — confirm.
        desc = self.startingarea_description.text().format(quick_fill_text=self.quick_fill_description)
        self.startingarea_description.setText(desc)
        self._num_quick_fill_buttons = 0
        self.create_quick_fill_buttons()

    def create_quick_fill_buttons(self):
        # Base implementation offers only the vanilla default; subclasses can
        # add game-specific buttons on top.
        self.starting_area_quick_fill_default = self._quick_fill_button('Default', self._starting_location_on_select_default)

    def _quick_fill_button(self, text: str, connection: Callable[([PresetStartingArea], None)]) -> QtWidgets.QPushButton:
        """Create a quick-fill button labelled *text* wired to *connection*."""
        self._num_quick_fill_buttons += 1
        button = QtWidgets.QPushButton(text)
        self.starting_area_quick_fill_layout.addWidget(button)
        button.clicked.connect(connection)
        return button

    # NOTE(review): takes `cls` — likely decorated @classmethod (and possibly
    # @override) in the original file; decorator lost in extraction.
    def tab_title(cls) -> str:
        return 'Starting Area'

    # NOTE(review): same as above — likely a @classmethod.
    def uses_patches_tab(cls) -> bool:
        return True

    # NOTE(review): referenced without call parentheses below — likely a
    # @property in the original file.
    def game_enum(self) -> RandovaniaGame:
        return self.game_description.game

    # NOTE(review): likely a @property (see __init__).
    def quick_fill_description(self) -> str:
        default_name = self.game_description.region_list.correct_area_identifier_name(self.game_description.starting_location)
        return f'Default: Just {default_name}, the vanilla location.'

    def _on_starting_area_check_changed(self, areas, checked: bool):
        # Persist checkbox toggles into the preset configuration.
        with self._editor as editor:
            editor.set_configuration_field('starting_location', editor.configuration.starting_location.ensure_has_locations(areas, checked))

    def _starting_location_on_select_default(self):
        # Quick fill: reset the selection to only the vanilla starting spot.
        with self._editor as editor:
            editor.set_configuration_field('starting_location', editor.configuration.starting_location.with_elements([self.game_description.starting_location], self.game_enum))

    def on_preset_changed(self, preset: Preset):
        # Re-sync every checkbox with the preset's current selection.
        self.update_node_list(preset.configuration.starting_location.locations, False, self._starting_location_for_region, self._starting_location_for_area, self._starting_location_for_node)
_auth  # NOTE(review): truncated decorator — probably an auth check such as `@admin_auth`; confirm against the original file
def pull_asset(request):
    """Django view for syncing cloud assets into the CMDB.

    POST with ``test_auth``: validate the submitted Aliyun credentials.
    POST with ``conf_ids``: run a sync for each referenced credential config.
    Otherwise (GET): render the pull-asset management page.
    """
    if (request.method == 'POST'):
        test_auth = request.POST.get('test_auth')
        conf_ids = request.POST.get('conf_ids')
        if test_auth:
            # Credential check only — nothing is persisted.
            access_id = request.POST.get('access_id')
            access_key = request.POST.get('access_key')
            cloud_region = request.POST.get('cloud_region')
            ali = AliAPI(access_id, access_key, cloud_region)
            error_msg = ali.test_auth()
            return JsonResponse({'code': 200, 'msg': error_msg})
        if conf_ids:
            # conf_ids arrives as the string form of a Python list; literal_eval
            # parses it without executing code, but the ids are still
            # client-supplied — PullAssetConf.objects.get below will raise on
            # unknown ids.
            conf_ids = literal_eval(conf_ids)
            for conf_id in conf_ids:
                conf_obj = PullAssetConf.objects.get(id=conf_id)
                ali = AliAPI(conf_obj.access_id, conf_obj.access_key, conf_obj.cloud_region)
                try:
                    ali.sync_to_cmdb(conf_obj)
                except Exception as e:
                    # NOTE(review): the '!'-only messages look like non-ASCII
                    # text that was lost in extraction — confirm the originals.
                    return JsonResponse({'code': 500, 'msg': f'!{e}'})
            return JsonResponse({'code': 200, 'msg': '!'})
    # GET: the template context is passed via locals(), so the variable names
    # below are part of the page contract — do not rename them.
    cloud_names = PullAssetConf.cloud_names
    pull_asset_confs = PullAssetConf.objects.all()
    users = UserProfile.objects.values_list('id', 'username')
    return render(request, 'assets/pull_asset.html', locals())
class CatalogNestedSerializer(CatalogSerializer):
    """Catalog serializer that also nests the serialized child elements."""

    elements = serializers.SerializerMethodField()

    class Meta(CatalogSerializer.Meta):
        fields = (*CatalogSerializer.Meta.fields, 'elements')

    def get_elements(self, obj):
        """Return the nested representation of every catalog element.

        Returns a list: the original implementation yielded a generator,
        which the JSON renderer cannot serialize. A list is also iterable,
        so existing consumers keep working.
        """
        return [SectionNestedSerializer(element, context=self.context).data for element in obj.elements]
def test_L3_ifc_view_index():
    """An indexed interface view attribute must connect to the out port."""
    dut = CaseArrayBits32IfcInComp.DUT()
    dut.elaborate()
    dut.apply(StructuralRTLIRGenL3Pass(gen_connections(dut)))
    connections = dut.get_metadata(StructuralRTLIRGenL2Pass.connections)
    comp = CurComp(dut, 's')
    # Expect exactly one connection: s.in_[1].foo -> s.out
    expected = [(InterfaceAttr(InterfaceViewIndex(CurCompAttr(comp, 'in_'), 1), 'foo'), CurCompAttr(comp, 'out'))]
    assert connections == expected
def test_token_network_registry_max_token_networks(deploy_client, token_network_registry_address, contract_manager):
    """get_max_token_networks() must report UINT256_MAX - 1."""
    metadata = ProxyManagerMetadata(token_network_registry_deployed_at=GENESIS_BLOCK_NUMBER, filters_start_at=GENESIS_BLOCK_NUMBER)
    manager = ProxyManager(rpc_client=deploy_client, contract_manager=contract_manager, metadata=metadata)
    # Query against a confirmed block so the proxy state is stable.
    confirmed_block = deploy_client.get_confirmed_blockhash()
    registry = manager.token_network_registry(to_canonical_address(token_network_registry_address), block_identifier=confirmed_block)
    max_networks = registry.get_max_token_networks(block_identifier=BLOCK_ID_LATEST)
    assert max_networks == UINT256_MAX - 1
class LRUCache(object):
    """Least-recently-used cache with O(1) get/put.

    The original tracked recency in a plain list (``remove``/``insert(0)``/
    ``pop``), making every operation O(capacity). An OrderedDict provides the
    same semantics in O(1): the front of the dict is the most recently used
    entry, the back is the eviction candidate.
    """

    def __init__(self, capacity):
        # capacity: maximum number of entries before eviction kicks in.
        self.capacity = capacity
        # Maps key -> value; insertion order doubles as the recency order.
        self.cache = OrderedDict()

    def updateQueue(self, key):
        """Mark *key* as most recently used (kept for API compatibility)."""
        self.cache.move_to_end(key, last=False)

    def get(self, key):
        """Return the cached value for *key*, or -1 when absent."""
        if key not in self.cache:
            return (- 1)
        self.updateQueue(key)
        return self.cache[key]

    def put(self, key, value):
        """Insert or overwrite *key*; evict the LRU entry on overflow."""
        if (key not in self.cache) and (len(self.cache) == self.capacity):
            # Back of the dict = least recently used.
            self.cache.popitem(last=True)
        self.cache[key] = value
        self.cache.move_to_end(key, last=False)
.parametrize('store_graph', [False, True])  # NOTE(review): truncated decorator — presumably `@pytest.mark.parametrize`; confirm
def test_tracker_candidate_graph(test_real_objects, store_graph):
    """The tracker keeps candidate-graph edges iff storing was requested."""
    tracker = full_tracker_example(test_real_objects, store_candidate_graph=store_graph)
    assert (tracker.store_candidate_graph == store_graph)
    edges = tracker.candidate_graph_edges()
    # Edges must exist exactly when the graph was stored.
    assert (bool(edges) == store_graph), f'Found {len(edges)} edges in candidate graph.'
class Effect2056(BaseEffect):
    """Passive skill effect: per-level boost to the thermal resistance bonus
    of all fitted Shield Resistance Amplifier modules."""

    type = 'passive'

    def handler(fit, skill, context, projectionRange, **kwargs):
        # Total bonus scales linearly with the trained skill level.
        bonus = skill.getModifiedItemAttr('hardeningBonus') * skill.level

        def is_shield_resistance_amplifier(mod):
            return mod.item.group.name == 'Shield Resistance Amplifier'

        fit.modules.filteredItemBoost(is_shield_resistance_amplifier, 'thermalDamageResistanceBonus', bonus, **kwargs)
class TCN_GCN_unit_7(nn.Module):
    """Graph convolution followed by temporal convolution with a residual
    connection (variant 7)."""

    def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
        super(TCN_GCN_unit_7, self).__init__()
        self.gcn1 = unit_gtcn_7(in_channels, out_channels, A)
        self.tcn1 = unit_tcn(out_channels, out_channels, stride=stride)
        self.relu = nn.ReLU()
        # Residual path: disabled, identity, or a 1x1 temporal conv that
        # matches channel count and stride.
        if not residual:
            self.residual = lambda x: 0
        elif in_channels == out_channels and stride == 1:
            self.residual = lambda x: x
        else:
            self.residual = unit_tcn(in_channels, out_channels, kernel_size=1, stride=stride)

    def forward(self, x):
        out = self.gcn1(x)
        out = self.tcn1(out)
        return self.relu(out + self.residual(x))
def add_defaults(cfg: DictConfig) -> None:
    """Fill in dataclass defaults for top-level FairseqConfig fields typed Any.

    For each field (task, model, and registry-backed groups) whose value is a
    name or a partial config, look up the matching dataclass and merge its
    defaults into *cfg* in place.
    """
    from fairseq.registry import REGISTRIES
    from fairseq.tasks import TASK_DATACLASS_REGISTRY
    from fairseq.models import ARCH_MODEL_NAME_REGISTRY, MODEL_DATACLASS_REGISTRY
    from fairseq.dataclass.utils import merge_with_parent
    from typing import Any

    # Allow adding keys that are not yet present in the config tree.
    OmegaConf.set_struct(cfg, False)
    for (k, v) in FairseqConfig.__dataclass_fields__.items():
        field_cfg = cfg.get(k)
        # Only fields declared as `Any` carry a dynamically-selected config.
        if ((field_cfg is not None) and (v.type == Any)):
            dc = None
            if isinstance(field_cfg, str):
                # A bare string selects a named config, e.g. a task name.
                field_cfg = DictConfig({'_name': field_cfg})
                # NOTE(review): this self-assignment is a no-op. Upstream
                # fairseq assigns `cfg.__dict__['_parent']` here to attach the
                # new node to the config tree — likely a copy/transcription
                # bug; confirm against the original file before fixing.
                field_cfg.__dict__['_parent'] = field_cfg.__dict__['_parent']
            name = field_cfg.get('_name')
            if (k == 'task'):
                dc = TASK_DATACLASS_REGISTRY.get(name)
            elif (k == 'model'):
                # Architecture names alias to model names before lookup.
                name = ARCH_MODEL_NAME_REGISTRY.get(name, name)
                dc = MODEL_DATACLASS_REGISTRY.get(name)
            elif (k in REGISTRIES):
                dc = REGISTRIES[k]['dataclass_registry'].get(name)
            if (dc is not None):
                cfg[k] = merge_with_parent(dc, field_cfg)
class LossFunction(nn.Module):
    """Angular prototypical (AngleProto) loss with a learnable affine on the
    cosine-similarity matrix: ``s = w * cos + b``."""

    def __init__(self, gpu, init_w=10.0, init_b=(- 5.0), **kwargs):
        """gpu: CUDA device index used for the label tensor in forward()."""
        super(LossFunction, self).__init__()
        self.gpu = gpu
        # Learnable scale/offset applied to the similarity matrix.
        self.w = nn.Parameter(torch.tensor(init_w))
        self.b = nn.Parameter(torch.tensor(init_b))
        self.w.requires_grad = True
        self.b.requires_grad = True
        self.cce = nn.CrossEntropyLoss()
        print('Initialised AngleProto')

    def forward(self, out_anchor, out_positive):
        """Cross-entropy over scaled anchor/positive cosine similarities.

        The diagonal (matching anchor/positive pairs) is the target class.
        """
        stepsize = out_anchor.size()[0]
        cos_sim_matrix = F.cosine_similarity(out_positive.unsqueeze((- 1)), out_anchor.unsqueeze((- 1)).transpose(0, 2))
        # Keep the scale strictly positive. The original called
        # torch.clamp(self.w, 1e-06) and discarded the result, so the clamp
        # never took effect; clamp the parameter in place instead.
        with torch.no_grad():
            self.w.clamp_(min=1e-06)
        cos_sim_matrix = ((cos_sim_matrix * self.w) + self.b)
        label = torch.from_numpy(np.asarray(range(0, stepsize))).cuda(self.gpu)
        loss = self.cce(cos_sim_matrix, label)
        return loss
def file_based_input_fn_builder(input_file, seq_length, is_training, drop_remainder):
    """Build an Estimator ``input_fn`` reading classification TFRecords.

    Args:
        input_file: TFRecord file with serialized tf.Examples.
        seq_length: fixed token-sequence length of each example.
        is_training: when True, repeat forever and shuffle.
        drop_remainder: drop the final partial batch (needed on TPU).
    """
    name_to_features = {
        'input_ids': tf.FixedLenFeature([seq_length], tf.int64),
        'input_mask': tf.FixedLenFeature([seq_length], tf.int64),
        'segment_ids': tf.FixedLenFeature([seq_length], tf.int64),
        'label_ids': tf.FixedLenFeature([], tf.int64),
    }

    def _decode_record(record, name_to_features):
        """Parse one serialized example, downcasting int64 features to int32
        (the only integer dtype the TPU supports)."""
        example = tf.parse_single_example(record, name_to_features)
        for feature_name in list(example.keys()):
            value = example[feature_name]
            if value.dtype == tf.int64:
                value = tf.to_int32(value)
            example[feature_name] = value
        return example

    def input_fn(params):
        """The actual input function handed to the Estimator."""
        batch_size = params['batch_size']
        dataset = tf.data.TFRecordDataset(input_file)
        if is_training:
            # Training reads the data forever, lightly shuffled.
            dataset = dataset.repeat()
            dataset = dataset.shuffle(buffer_size=100)
        dataset = dataset.apply(tf.contrib.data.map_and_batch((lambda record: _decode_record(record, name_to_features)), batch_size=batch_size, drop_remainder=drop_remainder))
        return dataset

    return input_fn
class DataModuleFromConfig(pl.LightningDataModule):
    """LightningDataModule whose train/val/test/predict datasets are built
    from instantiate_from_config() dicts.

    Dataloader methods are attached in __init__ only for the splits that were
    actually configured, so Lightning sees exactly the available loaders.
    """

    def __init__(self, batch_size, train=None, validation=None, test=None, predict=None, wrap=False, num_workers=None, shuffle_test_loader=False, use_worker_init_fn=False, shuffle_val_dataloader=False):
        super().__init__()
        self.batch_size = batch_size
        self.dataset_configs = dict()
        # Default worker count heuristic: twice the batch size.
        self.num_workers = (num_workers if (num_workers is not None) else (batch_size * 2))
        self.use_worker_init_fn = use_worker_init_fn
        if (train is not None):
            self.dataset_configs['train'] = train
            self.train_dataloader = self._train_dataloader
        if (validation is not None):
            self.dataset_configs['validation'] = validation
            self.val_dataloader = partial(self._val_dataloader, shuffle=shuffle_val_dataloader)
        if (test is not None):
            self.dataset_configs['test'] = test
            self.test_dataloader = partial(self._test_dataloader, shuffle=shuffle_test_loader)
        if (predict is not None):
            self.dataset_configs['predict'] = predict
            self.predict_dataloader = self._predict_dataloader
        self.wrap = wrap

    def prepare_data(self):
        # Instantiate each dataset once for its download/preparation side
        # effects; the instances are rebuilt (per process) in setup().
        for data_cfg in self.dataset_configs.values():
            instantiate_from_config(data_cfg)

    def setup(self, stage=None):
        self.datasets = dict(((k, instantiate_from_config(self.dataset_configs[k])) for k in self.dataset_configs))
        if self.wrap:
            # Optionally wrap plain datasets in the project's adapter type.
            for k in self.datasets:
                self.datasets[k] = WrappedDataset(self.datasets[k])

    def _train_dataloader(self):
        # Iterable datasets manage their own ordering, so shuffling is off
        # for them and the custom worker seeding is used instead.
        is_iterable_dataset = isinstance(self.datasets['train'], Txt2ImgIterableBaseDataset)
        if (is_iterable_dataset or self.use_worker_init_fn):
            init_fn = worker_init_fn
        else:
            init_fn = None
        return DataLoader(self.datasets['train'], batch_size=self.batch_size, num_workers=self.num_workers, shuffle=(False if is_iterable_dataset else True), worker_init_fn=init_fn)

    def _val_dataloader(self, shuffle=False):
        if (isinstance(self.datasets['validation'], Txt2ImgIterableBaseDataset) or self.use_worker_init_fn):
            init_fn = worker_init_fn
        else:
            init_fn = None
        return DataLoader(self.datasets['validation'], batch_size=self.batch_size, num_workers=self.num_workers, worker_init_fn=init_fn, shuffle=shuffle)

    def _test_dataloader(self, shuffle=False):
        # NOTE(review): probes datasets['train'] rather than ['test'] — this
        # matches upstream latent-diffusion, but looks like a copy/paste slip;
        # confirm before relying on it with iterable test sets.
        is_iterable_dataset = isinstance(self.datasets['train'], Txt2ImgIterableBaseDataset)
        if (is_iterable_dataset or self.use_worker_init_fn):
            init_fn = worker_init_fn
        else:
            init_fn = None
        # Never shuffle an iterable dataset.
        shuffle = (shuffle and (not is_iterable_dataset))
        return DataLoader(self.datasets['test'], batch_size=self.batch_size, num_workers=self.num_workers, worker_init_fn=init_fn, shuffle=shuffle)

    def _predict_dataloader(self, shuffle=False):
        if (isinstance(self.datasets['predict'], Txt2ImgIterableBaseDataset) or self.use_worker_init_fn):
            init_fn = worker_init_fn
        else:
            init_fn = None
        return DataLoader(self.datasets['predict'], batch_size=self.batch_size, num_workers=self.num_workers, worker_init_fn=init_fn)
def main():
    """CLI entry point: reserialize one JSON file, or all video JSON files
    found under a path when --all is given."""
    logging.basicConfig(level=logging.WARNING)
    parser = argparse.ArgumentParser()
    parser.add_argument('path', help='path to file(s) to reserialize')
    parser.add_argument('-a', '--all', action='store_true', help='reserialize all JSON files under path')
    args = parser.parse_args()
    if not args.all:
        pull_links_from_file(args.path)
        return
    # Only the video JSON files are reserialized; category files are skipped.
    (category_paths, video_paths) = get_json_files(args.path)
    for video_path in video_paths:
        pull_links_from_file(video_path)
def get_user_inputs(onnx_input_names, input_info, inputs, kwargs, device):
    """Collect the runtime values for every input the ONNX graph expects.

    Values are resolved per graph input name, preferring a keyword argument
    of the same name, then falling back to the flattened positional inputs
    (matched through input_info's recorded name order).

    Args:
        onnx_input_names: input names as they appear in the exported graph.
        input_info: captured signature info; .names gives the original
            positional order of the model inputs.
        inputs: the (possibly nested) positional arguments of the call.
        kwargs: the keyword arguments of the call.
        device: target device for converting primitive (non-tensor) values.

    Returns:
        List of values ordered like onnx_input_names.

    Raises:
        RuntimeError: when a graph input cannot be matched to any value.
    """

    def _expand_inputs(current_input, non_none_inputs):
        # Depth-first flatten of sequences/mappings into a value list;
        # None and bare strings are skipped entirely.
        if ((current_input is None) or isinstance(current_input, str)):
            return
        if isinstance(current_input, abc.Sequence):
            for inp in current_input:
                _expand_inputs(inp, non_none_inputs)
        elif isinstance(current_input, abc.Mapping):
            for (_, val) in current_input.items():
                _expand_inputs(val, non_none_inputs)
        else:
            non_none_inputs.append(current_input)

    non_none_inputs = []
    _expand_inputs(inputs, non_none_inputs)
    result = []
    for (input_idx, name) in enumerate(onnx_input_names):
        inp = None
        # 1) keyword argument with the same name wins.
        if ((name in kwargs) and (kwargs[name] is not None)):
            inp = kwargs[name]
        if (inp is None):
            # 2) positional fallback: realign the index via the recorded
            # signature order when the graph order differs from it.
            try:
                if (name != input_info.names[input_idx]):
                    input_idx = input_info.names.index(name)
                inp = non_none_inputs[input_idx]
            except (IndexError, ValueError):
                # Name not found or positional list too short — handled below.
                pass
        if (inp is not None):
            # Primitive python scalars are wrapped into tensors on *device*.
            if _io._PrimitiveType.is_primitive_type(inp):
                inp = _io._PrimitiveType.get_tensor(inp, device)
            result.append(inp)
        else:
            raise RuntimeError(f'Input is present in ONNX graph but not provided: {name}.')
    return result
class strtr():
    """Flashes an ANSI ASCII-art banner, alternating green and yellow.

    The art and timings are identical on both paths; the termux branch only
    pads with two extra blank lines at the top.
    """

    def st(self):
        """Clear the screen (Ux()) and flash the banner three times
        (green 0.7s, yellow 0.1s, green 0.5s), then return."""
        # while/break is effectively a single pass — kept from the original.
        while True:
            if (system == 'termux'):
                Ux()
                print("\x07\n\n\n\n\n\n\n\n\n\x1b[01;32m __ __ ____\n | \\/ |_ _/ ___| ___ _ \x1b[01;31m____ _____ \x1b[01;32m_ __\n | |\\/| | | | \\___ \\ / _ \\ '__\x1b[01;31m\\ \\ / / \x1b[01;32m_ \\ '__|\n | | | | |_| |___) | __/ | \x1b[01;31m\\ V / \x1b[01;32m __/ |\n |_| |_|\\__, |____/ \\___|_| \x1b[01;31m\\_/ \x1b[01;32m\\___|_|\n |___/\n\x1b[00m\n\n\n")
                sleep(0.7)
                Ux()
                print("\x07\n\n\n\n\n\n\n\n\n\x1b[01;33m __ __ ____\n | \\/ |_ _/ ___| ___ _ \x1b[01;31m____ _____ \x1b[01;33m_ __\n | |\\/| | | | \\___ \\ / _ \\ '__\x1b[01;31m\\ \\ / / \x1b[01;33m_ \\ '__|\n | | | | |_| |___) | __/ | \x1b[01;31m\\ V / \x1b[01;33m __/ |\n |_| |_|\\__, |____/ \\___|_| \x1b[01;31m\\_/ \x1b[01;33m\\___|_|\n |___/\n\x1b[00m\n\n\n")
                sleep(0.1)
                Ux()
                print("\x07\n\n\n\n\n\n\n\n\n\x1b[01;32m __ __ ____\n | \\/ |_ _/ ___| ___ _ \x1b[01;31m____ _____ \x1b[01;32m_ __\n | |\\/| | | | \\___ \\ / _ \\ '__\x1b[01;31m\\ \\ / / \x1b[01;32m_ \\ '__|\n | | | | |_| |___) | __/ | \x1b[01;31m\\ V / \x1b[01;32m __/ |\n |_| |_|\\__, |____/ \\___|_| \x1b[01;31m\\_/ \x1b[01;32m\\___|_|\n |___/\n\x1b[00m\n\n\n")
                sleep(0.5)
                break
            else:
                Ux()
                print("\x07\n\n\n\n\n\n\n\x1b[01;32m __ __ ____\n | \\/ |_ _/ ___| ___ _ \x1b[01;31m____ _____ \x1b[01;32m_ __\n | |\\/| | | | \\___ \\ / _ \\ '__\x1b[01;31m\\ \\ / / \x1b[01;32m_ \\ '__|\n | | | | |_| |___) | __/ | \x1b[01;31m\\ V / \x1b[01;32m __/ |\n |_| |_|\\__, |____/ \\___|_| \x1b[01;31m\\_/ \x1b[01;32m\\___|_|\n |___/\n\x1b[00m\n\n\n")
                sleep(0.7)
                Ux()
                print("\x07\n\n\n\n\n\n\n\x1b[01;33m __ __ ____\n | \\/ |_ _/ ___| ___ _ \x1b[01;31m____ _____ \x1b[01;33m_ __\n | |\\/| | | | \\___ \\ / _ \\ '__\x1b[01;31m\\ \\ / / \x1b[01;33m_ \\ '__|\n | | | | |_| |___) | __/ | \x1b[01;31m\\ V / \x1b[01;33m __/ |\n |_| |_|\\__, |____/ \\___|_| \x1b[01;31m\\_/ \x1b[01;33m\\___|_|\n |___/\n\x1b[00m\n\n\n")
                sleep(0.1)
                Ux()
                print("\x07\n\n\n\n\n\n\n\x1b[01;32m __ __ ____\n | \\/ |_ _/ ___| ___ _ \x1b[01;31m____ _____ \x1b[01;32m_ __\n | |\\/| | | | \\___ \\ / _ \\ '__\x1b[01;31m\\ \\ / / \x1b[01;32m_ \\ '__|\n | | | | |_| |___) | __/ | \x1b[01;31m\\ V / \x1b[01;32m __/ |\n |_| |_|\\__, |____/ \\___|_| \x1b[01;31m\\_/ \x1b[01;32m\\___|_|\n |___/\n\x1b[00m\n\n\n")
                sleep(0.5)
                break
class TestTruncatedNormalLowerTau(BaseTestDistributionRandom):
    """TruncatedNormal parameterized via tau must map to the sigma-based RV op."""
    pymc_dist = pm.TruncatedNormal
    # Lower truncation only; precision (tau) given instead of sigma.
    (lower, upper, mu, tau) = ((- 2.0), np.inf, 0, 1.0)
    # Convert the precision to the sigma the underlying RV op expects.
    (tau, sigma) = get_tau_sigma(tau=tau, sigma=None)
    pymc_dist_params = {'mu': mu, 'tau': tau, 'lower': lower}
    expected_rv_op_params = {'mu': mu, 'sigma': sigma, 'lower': lower, 'upper': upper}
    checks_to_run = ['check_pymc_params_match_rv_op']
class Cfengine3Lexer(RegexLexer):
    """Pygments lexer for CFEngine3 policy files."""

    name = 'CFEngine3'
    # NOTE(review): the url literal was truncated in the source
    # (unterminated quote); restored to the upstream pygments value --
    # confirm against the project's pygments version.
    url = 'http://cfengine.org/'
    aliases = ['cfengine3', 'cf3']
    filenames = ['*.cf']
    mimetypes = []
    version_added = '1.5'

    tokens = {
        'root': [
            # Comments run to end of line.
            (r'#.*?\n', Comment),
            (r'(body)(\s+)(\S+)(\s+)(control)',
             bygroups(Keyword, Whitespace, Keyword, Whitespace, Keyword)),
            # body/bundle with an argument list -> enter 'arglist' state.
            (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()',
             bygroups(Keyword, Whitespace, Keyword, Whitespace,
                      Name.Function, Punctuation),
             'arglist'),
            (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)',
             bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Function)),
            # Typed variable assignment: "name" string => ...
            (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)',
             bygroups(Punctuation, Name.Variable, Punctuation, Whitespace,
                      Keyword.Type, Whitespace, Operator, Whitespace)),
            (r'(\S+)(\s*)(=>)(\s*)',
             bygroups(Keyword.Reserved, Whitespace, Operator, Text)),
            # Double-quoted strings get their own state.
            (r'"', String, 'string'),
            (r'(\w+)(\()', bygroups(Name.Function, Punctuation)),
            # Class expressions terminated by '::'.
            (r'([\w.!&|()]+)(::)', bygroups(Name.Class, Punctuation)),
            (r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)),
            (r'[{(][^)}]+[})]', Name.Variable),
            (r'[(){},;]', Punctuation),
            (r'=>', Operator),
            (r'->', Operator),
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer),
            (r'\w+', Name.Function),
            (r'\s+', Whitespace),
        ],
        'string': [
            # ${...} / $(...) interpolation.
            (r'\$[{(]', String.Interpol, 'interpol'),
            (r'\\.', String.Escape),
            (r'"', String, '#pop'),
            (r'\n', String),
            (r'.', String),
        ],
        'interpol': [
            (r'\$[{(]', String.Interpol, '#push'),
            (r'[})]', String.Interpol, '#pop'),
            (r'[^${()}]+', String.Interpol),
        ],
        'arglist': [
            (r'\)', Punctuation, '#pop'),
            (r',', Punctuation),
            (r'\w+', Name.Variable),
            (r'\s+', Whitespace),
        ],
    }
# NOTE(review): the skip conditions below appeared as bare tuples (the
# '@unittest.skipUnless' prefix had been stripped, leaving no-op
# expressions); restored as decorators -- confirm against upstream.
@unittest.skipUnless(qssp.have_backend(), 'backend qssp not available')
class QSSPTestCase(unittest.TestCase):
    """End-to-end build tests for the QSSP Green's function backends."""

    def setUp(self):
        # Fresh store directory per test.
        self.tmpdir = tempfile.mkdtemp(prefix='pyrocko.qssp')

    def tearDown(self):
        shutil.rmtree(self.tmpdir)

    @unittest.skipUnless('qssp.2010' in qssp.have_backend(),
                         'backend qssp.2010 not available')
    def test_qssp_build_2010(self):
        """Build a displacement store with the 2010 backend and process it."""
        qssp.init(self.tmpdir, '2010', config_params=dict(
            source_depth_max=10000.0,
            distance_min=500000.0,
            distance_max=600000.0))
        store = gf.store.Store(self.tmpdir, 'r')
        store.make_travel_time_tables()
        qssp.build(self.tmpdir)
        engine = gf.LocalEngine(store_dirs=[self.tmpdir])
        source = gf.DCSource(lat=0.0, lon=0.0, depth=10000.0, magnitude=6.0)
        targets = [
            gf.Target(
                quantity='displacement',
                codes=('', 'STA', '', comp),
                lat=0.0, lon=0.0,
                north_shift=500000.0,
                east_shift=100000.0)
            for comp in 'NEZ']
        engine.process(source, targets)

    @unittest.skipUnless('qssp.2017' in qssp.have_backend(),
                         'backend qssp.2017 not available')
    def test_qssp_build_2017_rotational(self):
        """Build a rotational store with the 2017 backend and process it."""
        qssp.init(self.tmpdir, '2017', config_params=dict(
            stored_quantity='rotation_displacement',
            component_scheme='rotational8',
            ncomponents=8,
            source_depth_max=10000.0,
            distance_min=500000.0,
            distance_max=600000.0))
        store = gf.store.Store(self.tmpdir, 'r')
        store.make_travel_time_tables()
        qssp.build(self.tmpdir)
        del store
        engine = gf.LocalEngine(store_dirs=[self.tmpdir])
        source = gf.DCSource(lat=0.0, lon=0.0, depth=10000.0, magnitude=6.0)
        targets = [
            gf.Target(
                quantity='rotation_displacement',
                codes=('', 'ROT', '', comp),
                lat=0.0, lon=0.0,
                north_shift=500000.0,
                east_shift=100000.0)
            for comp in 'NEZ']
        engine.process(source, targets)

    @unittest.skipUnless('qssp.2020' in qssp.have_backend(),
                         'backend qssp.2020 not available')
    def test_qssp_build_2020_rotational(self):
        """Build a rotational store with the 2020 backend and process it."""
        qssp.init(self.tmpdir, '2020', config_params=dict(
            stored_quantity='rotation_displacement',
            component_scheme='rotational8',
            ncomponents=8,
            source_depth_max=10000.0,
            distance_min=500000.0,
            distance_max=600000.0))
        store = gf.store.Store(self.tmpdir, 'r')
        store.make_travel_time_tables()
        qssp.build(self.tmpdir)
        del store
        engine = gf.LocalEngine(store_dirs=[self.tmpdir])
        source = gf.DCSource(lat=0.0, lon=0.0, depth=10000.0, magnitude=6.0)
        targets = [
            gf.Target(
                quantity='rotation_displacement',
                codes=('', 'ROT', '', comp),
                lat=0.0, lon=0.0,
                north_shift=500000.0,
                east_shift=100000.0)
            for comp in 'NEZ']
        engine.process(source, targets)
def test_select_eof(select_app, monkeypatch):
    """EOF on the first select prompt must make cmd2 prompt again."""
    # First read raises EOFError, the retry returns choice 2.
    input_mock = mock.MagicMock(name='read_input', side_effect=[EOFError, 2])
    monkeypatch.setattr('cmd2.Cmd.read_input', input_mock)

    food = 'fish'
    out, err = run_cmd(select_app, 'eat {}'.format(food))

    prompt = 'Sauce? '
    # The same prompt is shown twice: once before EOF, once for the retry.
    input_mock.assert_has_calls([mock.call(prompt)] * 2)
    assert input_mock.call_count == 2
class RawRecipeSearcher(RecipeSearcher):
    """Searcher that scans a fixed provider sequence in order."""

    def __init__(self, recipe: Sequence[Provider]):
        self.recipe = recipe

    def search_candidates(self, search_offset: int, request: Request) -> Iterable[SearchResult]:
        # Yield each remaining provider together with the offset of the
        # *next* entry, so the caller can resume the scan after it.
        position = search_offset
        for provider in islice(self.recipe, search_offset, None):
            position += 1
            yield provider.apply_provider, position

    def clear_cache(self):
        # Nothing is cached for a raw recipe.
        pass

    def get_max_offset(self) -> int:
        return len(self.recipe)
def get_state_dict(net_type: str='alex', version: str='0.1'):
    """Load the pretrained linear-layer checkpoint and normalize its keys.

    Strips the ``lin`` and ``model.`` prefixes from every parameter name
    so the weights can be loaded into the current module layout.

    NOTE(review): ``net_type`` and ``version`` are currently unused --
    the path is hard-coded to the 'alex' checkpoint. Confirm intent.
    """
    # Deserialize onto CPU when no CUDA device is available.
    device = None if torch.cuda.is_available() else torch.device('cpu')
    old_state_dict = torch.load('pretrained_models/alex.pth', map_location=device)

    new_state_dict = OrderedDict()
    for key, val in old_state_dict.items():
        new_state_dict[key.replace('lin', '').replace('model.', '')] = val
    return new_state_dict
# NOTE(review): the two marks below appeared with their '@pytest.mark'
# prefix stripped; restored -- confirm against upstream pytask tests.
@pytest.mark.end_to_end()
@pytest.mark.xfail(sys.platform == 'win32', reason='Decoding issues in Gitlab Actions.')
def test_execute_tasks_via_functional_api(tmp_path):
    """Run pytask.build() on tasks defined via the functional API in a
    standalone script and check the produced file.

    NOTE(review): the embedded script's indentation was flattened in the
    source (function bodies at the same level as their ``def``), which
    would be a SyntaxError after dedent; restored to valid Python.
    """
    source = """
    import sys
    from pathlib import Path
    from typing_extensions import Annotated
    from pytask import PathNode
    import pytask
    from pytask import PythonNode

    node_text = PythonNode()

    def create_text() -> Annotated[int, node_text]:
        return "This is the text."

    node_file = PathNode(path=Path("file.txt"))

    def create_file(content: Annotated[str, node_text]) -> Annotated[str, node_file]:
        return content

    if __name__ == "__main__":
        session = pytask.build(tasks=[create_file, create_text])
        assert len(session.tasks) == 2
        assert len(session.dag.nodes) == 5
        sys.exit(session.exit_code)
    """
    tmp_path.joinpath('task_module.py').write_text(textwrap.dedent(source))

    # Execute the script in a subprocess so pytask.build runs end to end.
    result = subprocess.run(
        ('python', tmp_path.joinpath('task_module.py').as_posix()), check=False)

    assert result.returncode == ExitCode.OK
    assert tmp_path.joinpath('file.txt').read_text() == 'This is the text.'
def identity_block(input_tensor, kernel_size, filters, stage, block, trainable=True):
    """ResNet identity shortcut block: 1x1 -> kxk -> 1x1 convs plus skip.

    ``filters`` supplies the channel counts of the three convolutions;
    layer names follow the Keras ResNet convention
    res{stage}{block}_branch2{a,b,c}.
    """
    filters1, filters2, filters3 = filters
    # Channel axis depends on the backend's image dim ordering.
    bn_axis = 3 if K.image_dim_ordering() == 'tf' else 1

    conv_base = 'res' + str(stage) + block + '_branch'
    bn_base = 'bn' + str(stage) + block + '_branch'

    x = Convolution2D(filters1, (1, 1), name=conv_base + '2a',
                      trainable=trainable)(input_tensor)
    x = FixedBatchNormalization(axis=bn_axis, name=bn_base + '2a')(x)
    x = Activation('relu')(x)

    x = Convolution2D(filters2, (kernel_size, kernel_size), padding='same',
                      name=conv_base + '2b', trainable=trainable)(x)
    x = FixedBatchNormalization(axis=bn_axis, name=bn_base + '2b')(x)
    x = Activation('relu')(x)

    x = Convolution2D(filters3, (1, 1), name=conv_base + '2c',
                      trainable=trainable)(x)
    x = FixedBatchNormalization(axis=bn_axis, name=bn_base + '2c')(x)

    # Shortcut: add the block input back in before the final ReLU.
    x = Add()([x, input_tensor])
    x = Activation('relu')(x)
    return x
class _Actors(VersionBase):
    """OpenSCENARIO 'Actors' element: the entity references affected by
    a maneuver, plus the selectTriggeringEntities flag."""

    def __init__(self, selectTriggeringEntities=False):
        # List of EntityRef wrappers added via add_actor().
        self.actors = []
        self.select = convert_bool(selectTriggeringEntities)

    def __eq__(self, other):
        if isinstance(other, _Actors):
            if (self.get_attributes() == other.get_attributes()
                    and self.actors == other.actors):
                return True
        return False

    # NOTE(review): restored @staticmethod -- the method takes only
    # ``element`` (no self) and is used as an alternate constructor;
    # the decorator appears to have been stripped. Confirm upstream.
    @staticmethod
    def parse(element):
        """Build an _Actors instance from an 'Actors' XML element."""
        trigent = convert_bool(element.attrib['selectTriggeringEntities'])
        actors = _Actors(trigent)
        entrefs = element.findall('EntityRef')
        for ent in entrefs:
            entityref = EntityRef.parse(ent)
            actors.add_actor(entityref.entity)
        return actors

    def add_actor(self, entity):
        """Append an entity reference; returns self for chaining."""
        self.actors.append(EntityRef(entity))
        return self

    def get_attributes(self):
        """Return the XML attributes of the Actors element."""
        return {'selectTriggeringEntities': get_bool_string(self.select)}

    def get_element(self):
        """Serialize to an ElementTree 'Actors' element."""
        if len(self.actors) == 0:
            # NOTE(review): this only instantiates a Warning object and
            # discards it -- no warning is emitted. Probably intended
            # warnings.warn(...); left unchanged to preserve behavior.
            Warning('No Actors are defined')
        element = ET.Element('Actors', attrib=self.get_attributes())
        for ent in self.actors:
            element.append(ent.get_element())
        return element
def get_subnets_info(regions):
    """Map each availability zone to its subnet ID and a security group.

    For every region, the first security group returned by the regional
    EC2 client is attached to all of that region's subnets.
    """
    subnet_info = OrderedDict()
    for region in regions:
        # boto3 clients are lazy: no API call is made at construction.
        client = boto3.client(
            'ec2',
            region_name=region,
            aws_access_key_id=config.AWS_ACCESS_KEY,
            aws_secret_access_key=config.AWS_ACCESS_SECRET)
        client.region = region
        security_group = client.describe_security_groups()['SecurityGroups'][0]['GroupId']
        for subnet in client.describe_subnets()['Subnets']:
            subnet_info[subnet['AvailabilityZone']] = dict(
                SubnetID=subnet['SubnetId'], Groups=security_group)
    return subnet_info
# NOTE(review): the decorators below appeared with their '@click.' prefix
# stripped (bare call expressions); restored as click decorators --
# confirm the module imports click and matches upstream hatch.
@click.command(short_help='Remove build artifacts')
@click.argument('location', required=False)
@click.option('--target', '-t', 'targets', multiple=True, help='The target with which to remove artifacts, overriding project defaults. This may be selected multiple times e.g. `-t sdist -t wheel`')
@click.option('--hooks-only', is_flag=True, help='Whether or not to only remove artifacts from build hooks [env var: `HATCH_BUILD_HOOKS_ONLY`]')
@click.option('--no-hooks', is_flag=True, help='Whether or not to ignore artifacts from build hooks [env var: `HATCH_BUILD_NO_HOOKS`]')
@click.option('--ext', is_flag=True, help='Whether or not to only remove artifacts from build hooks for distributing binary Python packages, such as compiled extensions. Equivalent to `--hooks-only -t wheel`')
@click.pass_context
def clean(ctx, location, targets, hooks_only, no_hooks, ext):
    """Remove build artifacts by delegating to `build --clean-only`."""
    from hatch.cli.build import build

    ctx.invoke(build, clean_only=True, location=location, targets=targets,
               hooks_only=hooks_only, no_hooks=no_hooks, ext=ext)
class PythonLSPServer(MethodDispatcher):
    """JSON-RPC method dispatcher implementing the Language Server
    Protocol for Python.

    ``m_*`` methods are LSP endpoints resolved by ``MethodDispatcher``
    (double underscores map to '/', e.g. ``m_text_document__did_open``
    handles ``textDocument/didOpen``).  Most handlers delegate to pluggy
    hooks via :meth:`_hook`.
    """

    def __init__(self, rx, tx, check_parent_process=False, consumer=None, *, endpoint_cls=None):
        self.workspace = None
        self.config = None
        self.root_uri = None
        self.watching_thread = None
        # Maps workspace-folder URI -> Workspace.
        self.workspaces = {}
        self.uri_workspace_mapper = {}

        self._check_parent_process = check_parent_process

        if rx is not None:
            self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
        else:
            self._jsonrpc_stream_reader = None

        if tx is not None:
            self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
        else:
            self._jsonrpc_stream_writer = None

        endpoint_cls = endpoint_cls or Endpoint

        # If a custom consumer is supplied, messages go there instead of
        # the JSON-RPC stream writer.
        if consumer is None:
            self._endpoint = endpoint_cls(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS)
        else:
            self._endpoint = endpoint_cls(self, consumer, max_workers=MAX_WORKERS)

        self._dispatchers = []
        self._shutdown = False

    def start(self):
        """Entry point for the server: block reading the rx stream."""
        self._jsonrpc_stream_reader.listen(self._endpoint.consume)

    def consume(self, message):
        """Entry point for consumer-based server. Alternative to stream listening."""
        self._endpoint.consume(message)

    def __getitem__(self, item):
        """Override MethodDispatcher lookup to respect shutdown state
        and to fall back to plugin-provided dispatchers."""
        # The LSP spec requires errors for non-exit requests after shutdown.
        if self._shutdown and item != 'exit':
            log.debug('Ignoring non-exit method during shutdown: %s', item)
            item = 'invalid_request_after_shutdown'
        try:
            return super().__getitem__(item)
        except KeyError:
            # Fallback through extra dispatchers registered at initialize.
            for dispatcher in self._dispatchers:
                try:
                    return dispatcher[item]
                except KeyError:
                    continue
            raise KeyError()

    def m_shutdown(self, **_kwargs):
        for workspace in self.workspaces.values():
            workspace.close()
        self._shutdown = True

    def m_invalid_request_after_shutdown(self, **_kwargs):
        return {
            'error': {
                'code': lsp.ErrorCodes.InvalidRequest,
                'message': 'Requests after shutdown are not valid',
            }
        }

    def m_exit(self, **_kwargs):
        self._endpoint.shutdown()
        if self._jsonrpc_stream_reader is not None:
            self._jsonrpc_stream_reader.close()
        if self._jsonrpc_stream_writer is not None:
            self._jsonrpc_stream_writer.close()

    def _match_uri_to_workspace(self, uri):
        """Return the workspace owning `uri`, defaulting to the root one."""
        workspace_uri = _utils.match_uri_to_workspace(uri, self.workspaces)
        return self.workspaces.get(workspace_uri, self.workspace)

    def _hook(self, hook_name, doc_uri=None, **kwargs):
        """Call hook_name and return a list of results from all registered handlers."""
        workspace = self._match_uri_to_workspace(doc_uri)
        doc = workspace.get_document(doc_uri) if doc_uri else None
        hook_handlers = self.config.plugin_manager.subset_hook_caller(hook_name, self.config.disabled_plugins)
        return hook_handlers(config=self.config, workspace=workspace, document=doc, **kwargs)

    def capabilities(self):
        """Advertise the server's LSP capabilities to the client."""
        server_capabilities = {
            'codeActionProvider': True,
            'codeLensProvider': {'resolveProvider': False},
            'completionProvider': {'resolveProvider': True, 'triggerCharacters': ['.']},
            'documentFormattingProvider': True,
            'documentHighlightProvider': True,
            'documentRangeFormattingProvider': True,
            'documentSymbolProvider': True,
            'definitionProvider': True,
            'executeCommandProvider': {'commands': flatten(self._hook('pylsp_commands'))},
            'hoverProvider': True,
            'referencesProvider': True,
            'renameProvider': True,
            'foldingRangeProvider': True,
            'signatureHelpProvider': {'triggerCharacters': ['(', ',', '=']},
            'textDocumentSync': {
                'change': lsp.TextDocumentSyncKind.INCREMENTAL,
                'save': {'includeText': True},
                'openClose': True,
            },
            'notebookDocumentSync': {'notebookSelector': [{'cells': [{'language': 'python'}]}]},
            'workspace': {'workspaceFolders': {'supported': True, 'changeNotifications': True}},
            'experimental': merge(self._hook('pylsp_experimental_capabilities')),
        }
        log.info('Server capabilities: %s', server_capabilities)
        return server_capabilities

    def m_initialize(self, processId=None, rootUri=None, rootPath=None,
                     initializationOptions=None, workspaceFolders=None, **_kwargs):
        log.debug('Language server initialized with %s %s %s %s',
                  processId, rootUri, rootPath, initializationOptions)
        if rootUri is None:
            rootUri = uris.from_fs_path(rootPath) if rootPath is not None else ''
        # Re-initialization: drop the previous root workspace.
        self.workspaces.pop(self.root_uri, None)
        self.root_uri = rootUri
        self.config = config.Config(rootUri, initializationOptions or {}, processId, _kwargs.get('capabilities', {}))
        self.workspace = Workspace(rootUri, self._endpoint, self.config)
        self.workspaces[rootUri] = self.workspace
        if workspaceFolders:
            for folder in workspaceFolders:
                uri = folder['uri']
                if uri == rootUri:
                    # The root workspace was already created above.
                    continue
                workspace_config = config.Config(uri, self.config._init_opts, self.config._process_id, self.config._capabilities)
                workspace_config.update(self.config._settings)
                self.workspaces[uri] = Workspace(uri, self._endpoint, workspace_config)

        self._dispatchers = self._hook('pylsp_dispatchers')
        self._hook('pylsp_initialize')

        if self._check_parent_process and processId is not None and self.watching_thread is None:

            def watch_parent_process(pid):
                # Exit when the given pid is not alive.
                if not _utils.is_process_alive(pid):
                    log.info('parent process %s is not alive, exiting!', pid)
                    self.m_exit()
                else:
                    threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start()

            self.watching_thread = threading.Thread(target=watch_parent_process, args=(processId,))
            self.watching_thread.daemon = True
            self.watching_thread.start()

        return {
            'capabilities': self.capabilities(),
            'serverInfo': {'name': 'pylsp', 'version': __version__},
        }

    def m_initialized(self, **_kwargs):
        self._hook('pylsp_initialized')

    def code_actions(self, doc_uri: str, range: Dict, context: Dict):
        return flatten(self._hook('pylsp_code_actions', doc_uri, range=range, context=context))

    def code_lens(self, doc_uri):
        return flatten(self._hook('pylsp_code_lens', doc_uri))

    def completions(self, doc_uri, position):
        workspace = self._match_uri_to_workspace(doc_uri)
        document = workspace.get_document(doc_uri)
        ignored_names = None
        if isinstance(document, Cell):
            # For notebook cells, names defined in other cells must not
            # be suggested again.
            notebook_document = workspace.get_maybe_document(document.notebook_uri)
            ignored_names = notebook_document.jedi_names(doc_uri)
        completions = self._hook('pylsp_completions', doc_uri, position=position, ignored_names=ignored_names)
        return {'isIncomplete': False, 'items': flatten(completions)}

    def completion_item_resolve(self, completion_item):
        doc_uri = completion_item.get('data', {}).get('doc_uri', None)
        return self._hook('pylsp_completion_item_resolve', doc_uri, completion_item=completion_item)

    def definitions(self, doc_uri, position):
        return flatten(self._hook('pylsp_definitions', doc_uri, position=position))

    def document_symbols(self, doc_uri):
        return flatten(self._hook('pylsp_document_symbols', doc_uri))

    def document_did_save(self, doc_uri):
        return self._hook('pylsp_document_did_save', doc_uri)

    def execute_command(self, command, arguments):
        return self._hook('pylsp_execute_command', command=command, arguments=arguments)

    def format_document(self, doc_uri, options):
        # Deferred: the endpoint runs the returned callable in a worker.
        return lambda: self._hook('pylsp_format_document', doc_uri, options=options)

    def format_range(self, doc_uri, range, options):
        return self._hook('pylsp_format_range', doc_uri, range=range, options=options)

    def highlight(self, doc_uri, position):
        return flatten(self._hook('pylsp_document_highlight', doc_uri, position=position)) or None

    def hover(self, doc_uri, position):
        return self._hook('pylsp_hover', doc_uri, position=position) or {'contents': ''}

    # NOTE(review): restored the '@' on this decorator -- it appeared as
    # a bare call whose returned decorator was discarded; matches the
    # upstream pylsp debounced lint.
    @_utils.debounce(LINT_DEBOUNCE_S, keyed_by='doc_uri')
    def lint(self, doc_uri, is_saved):
        workspace = self._match_uri_to_workspace(doc_uri)
        document_object = workspace.documents.get(doc_uri, None)
        if isinstance(document_object, Document):
            self._lint_text_document(doc_uri, workspace, is_saved=is_saved)
        elif isinstance(document_object, Notebook):
            self._lint_notebook_document(document_object, workspace)

    def _lint_text_document(self, doc_uri, workspace, is_saved):
        workspace.publish_diagnostics(doc_uri, flatten(self._hook('pylsp_lint', doc_uri, is_saved=is_saved)))

    def _lint_notebook_document(self, notebook_document, workspace):
        """Lint a notebook by concatenating all cells into one temporary
        document, linting it, and mapping diagnostics back to cells."""
        random_uri = str(uuid.uuid4())

        # Track each cell's line span within the combined source.
        cell_list: List[Dict[str, Any]] = []

        offset = 0
        total_source = ''
        for cell in notebook_document.cells:
            cell_uri = cell['document']
            cell_document = workspace.get_cell_document(cell_uri)

            num_lines = cell_document.line_count

            data = {
                'uri': cell_uri,
                'line_start': offset,
                'line_end': offset + num_lines - 1,
                'source': cell_document.source,
            }

            cell_list.append(data)
            if offset == 0:
                total_source = cell_document.source
            else:
                total_source += '\n' + cell_document.source

            offset += num_lines

        workspace.put_document(random_uri, total_source)
        try:
            document_diagnostics = flatten(self._hook('pylsp_lint', random_uri, is_saved=True))

            # Re-base each diagnostic's line numbers onto its owning cell.
            for cell in cell_list:
                cell_diagnostics = []
                for diagnostic in document_diagnostics:
                    start_line = diagnostic['range']['start']['line']
                    end_line = diagnostic['range']['end']['line']

                    if start_line > cell['line_end'] or end_line < cell['line_start']:
                        continue
                    diagnostic['range']['start']['line'] = start_line - cell['line_start']
                    diagnostic['range']['end']['line'] = end_line - cell['line_start']
                    cell_diagnostics.append(diagnostic)

                workspace.publish_diagnostics(cell['uri'], cell_diagnostics)
        finally:
            workspace.rm_document(random_uri)

    def references(self, doc_uri, position, exclude_declaration):
        return flatten(self._hook('pylsp_references', doc_uri, position=position, exclude_declaration=exclude_declaration))

    def rename(self, doc_uri, position, new_name):
        return self._hook('pylsp_rename', doc_uri, position=position, new_name=new_name)

    def signature_help(self, doc_uri, position):
        return self._hook('pylsp_signature_help', doc_uri, position=position)

    def folding(self, doc_uri):
        return flatten(self._hook('pylsp_folding_range', doc_uri))

    def m_completion_item__resolve(self, **completionItem):
        return self.completion_item_resolve(completionItem)

    def m_notebook_document__did_open(self, notebookDocument=None, cellTextDocuments=None, **_kwargs):
        workspace = self._match_uri_to_workspace(notebookDocument['uri'])
        workspace.put_notebook_document(
            notebookDocument['uri'],
            notebookDocument['notebookType'],
            cells=notebookDocument['cells'],
            version=notebookDocument.get('version'),
            metadata=notebookDocument.get('metadata'),
        )
        for cell in cellTextDocuments or []:
            workspace.put_cell_document(cell['uri'], notebookDocument['uri'], cell['languageId'], cell['text'], version=cell.get('version'))
        self.lint(notebookDocument['uri'], is_saved=True)

    def m_notebook_document__did_close(self, notebookDocument=None, cellTextDocuments=None, **_kwargs):
        workspace = self._match_uri_to_workspace(notebookDocument['uri'])
        for cell in cellTextDocuments or []:
            # Clear diagnostics before removing each cell document.
            workspace.publish_diagnostics(cell['uri'], [])
            workspace.rm_document(cell['uri'])
        workspace.rm_document(notebookDocument['uri'])

    def m_notebook_document__did_change(self, notebookDocument=None, change=None, **_kwargs):
        """Apply structure/metadata/text changes to a notebook document."""
        workspace = self._match_uri_to_workspace(notebookDocument['uri'])

        if change.get('metadata'):
            workspace.update_notebook_metadata(notebookDocument['uri'], change.get('metadata'))

        cells = change.get('cells')
        if cells:
            # Cells added or removed.
            structure = cells.get('structure')
            if structure:
                notebook_cell_array_change = structure['array']
                start = notebook_cell_array_change['start']
                cell_delete_count = notebook_cell_array_change['deleteCount']
                if cell_delete_count == 0:
                    # Cells were added.
                    for cell_document in structure['didOpen']:
                        workspace.put_cell_document(
                            cell_document['uri'],
                            notebookDocument['uri'],
                            cell_document['languageId'],
                            cell_document['text'],
                            cell_document.get('version'),
                        )
                    workspace.add_notebook_cells(notebookDocument['uri'], notebook_cell_array_change['cells'], start)
                else:
                    # Cells were removed.
                    for cell_document in structure['didClose']:
                        workspace.rm_document(cell_document['uri'])
                        workspace.publish_diagnostics(cell_document['uri'], [])
                    workspace.remove_notebook_cells(notebookDocument['uri'], start, cell_delete_count)

            data = cells.get('data')
            if data:
                for cell in data:
                    # Changes to cell data (kind, metadata) are currently
                    # ignored.
                    pass

            text_content = cells.get('textContent')
            if text_content:
                for cell in text_content:
                    cell_uri = cell['document']['uri']
                    # Even though the protocol says a list of changes can
                    # be returned, only one is ever sent.
                    workspace.update_document(cell_uri, cell['changes'][0])

        self.lint(notebookDocument['uri'], is_saved=True)

    def m_text_document__did_close(self, textDocument=None, **_kwargs):
        workspace = self._match_uri_to_workspace(textDocument['uri'])
        workspace.publish_diagnostics(textDocument['uri'], [])
        workspace.rm_document(textDocument['uri'])

    def m_text_document__did_open(self, textDocument=None, **_kwargs):
        workspace = self._match_uri_to_workspace(textDocument['uri'])
        workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version'))
        self._hook('pylsp_document_did_open', textDocument['uri'])
        self.lint(textDocument['uri'], is_saved=True)

    def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs):
        workspace = self._match_uri_to_workspace(textDocument['uri'])
        for change in contentChanges:
            workspace.update_document(textDocument['uri'], change, version=textDocument.get('version'))
        self.lint(textDocument['uri'], is_saved=False)

    def m_text_document__did_save(self, textDocument=None, **_kwargs):
        self.lint(textDocument['uri'], is_saved=True)
        self.document_did_save(textDocument['uri'])

    def m_text_document__code_action(self, textDocument=None, range=None, context=None, **_kwargs):
        return self.code_actions(textDocument['uri'], range, context)

    def m_text_document__code_lens(self, textDocument=None, **_kwargs):
        return self.code_lens(textDocument['uri'])

    def _cell_document__completion(self, cellDocument, position=None, **_kwargs):
        """Complete inside a notebook cell by completing against the
        concatenated notebook source and re-mapping positions."""
        workspace = self._match_uri_to_workspace(cellDocument.notebook_uri)
        notebookDocument = workspace.get_maybe_document(cellDocument.notebook_uri)
        if notebookDocument is None:
            raise ValueError('Invalid notebook document')

        cell_data = notebookDocument.cell_data()

        # Concatenate all cells to get the whole notebook's source.
        total_source = '\n'.join(data['source'] for data in cell_data.values())
        with workspace.temp_document(total_source) as temp_uri:
            # Shift the requested position into the combined document.
            if position is not None:
                position['line'] += cell_data[cellDocument.uri]['line_start']

            completions = self.completions(temp_uri, position)

            # Point resolve data back to the cell, not the temp doc.
            for item in completions.get('items', []):
                if item.get('data', {}).get('doc_uri') == temp_uri:
                    item['data']['doc_uri'] = cellDocument.uri

            return completions

    def m_text_document__completion(self, textDocument=None, position=None, **_kwargs):
        workspace = self._match_uri_to_workspace(textDocument['uri'])
        document = workspace.get_document(textDocument['uri'])
        if isinstance(document, Cell):
            return self._cell_document__completion(document, position, **_kwargs)
        return self.completions(textDocument['uri'], position)

    def _cell_document__definition(self, cellDocument, position=None, **_kwargs):
        """Find definitions from inside a notebook cell via a combined
        temp document, mapping result locations back onto cells."""
        workspace = self._match_uri_to_workspace(cellDocument.notebook_uri)
        notebookDocument = workspace.get_maybe_document(cellDocument.notebook_uri)
        if notebookDocument is None:
            raise ValueError('Invalid notebook document')

        cell_data = notebookDocument.cell_data()

        # Concatenate all cells to get the whole notebook's source.
        total_source = '\n'.join(data['source'] for data in cell_data.values())
        with workspace.temp_document(total_source) as temp_uri:
            # Shift the requested position into the combined document.
            if position is not None:
                position['line'] += cell_data[cellDocument.uri]['line_start']

            definitions = self.definitions(temp_uri, position)

            # Translate temp-document locations back to cell coordinates.
            for definition in definitions:
                if definition['uri'] == temp_uri:
                    for cell_uri, data in cell_data.items():
                        if data['line_start'] <= definition['range']['start']['line'] <= data['line_end']:
                            definition['uri'] = cell_uri
                            definition['range']['start']['line'] -= data['line_start']
                            definition['range']['end']['line'] -= data['line_start']
                            break

            return definitions

    def m_text_document__definition(self, textDocument=None, position=None, **_kwargs):
        workspace = self._match_uri_to_workspace(textDocument['uri'])
        document = workspace.get_document(textDocument['uri'])
        if isinstance(document, Cell):
            return self._cell_document__definition(document, position, **_kwargs)
        return self.definitions(textDocument['uri'], position)

    def m_text_document__document_highlight(self, textDocument=None, position=None, **_kwargs):
        return self.highlight(textDocument['uri'], position)

    def m_text_document__hover(self, textDocument=None, position=None, **_kwargs):
        return self.hover(textDocument['uri'], position)

    def m_text_document__document_symbol(self, textDocument=None, **_kwargs):
        return self.document_symbols(textDocument['uri'])

    def m_text_document__formatting(self, textDocument=None, options=None, **_kwargs):
        return self.format_document(textDocument['uri'], options)

    def m_text_document__rename(self, textDocument=None, position=None, newName=None, **_kwargs):
        return self.rename(textDocument['uri'], position, newName)

    def m_text_document__folding_range(self, textDocument=None, **_kwargs):
        return self.folding(textDocument['uri'])

    def m_text_document__range_formatting(self, textDocument=None, range=None, options=None, **_kwargs):
        return self.format_range(textDocument['uri'], range, options)

    def m_text_document__references(self, textDocument=None, position=None, context=None, **_kwargs):
        exclude_declaration = not context['includeDeclaration']
        return self.references(textDocument['uri'], position, exclude_declaration)

    def m_text_document__signature_help(self, textDocument=None, position=None, **_kwargs):
        return self.signature_help(textDocument['uri'], position)

    def m_workspace__did_change_configuration(self, settings=None):
        if self.config is not None:
            self.config.update((settings or {}).get('pylsp', {}))
        for workspace in self.workspaces.values():
            workspace.update_config(settings)
            self._hook('pylsp_workspace_configuration_changed')
            # Re-lint all open documents with the new configuration.
            for doc_uri in workspace.documents:
                self.lint(doc_uri, is_saved=False)

    def m_workspace__did_change_workspace_folders(self, event=None, **_kwargs):
        if event is None:
            return
        added = event.get('added', [])
        removed = event.get('removed', [])

        for removed_info in removed:
            if 'uri' in removed_info:
                removed_uri = removed_info['uri']
                self.workspaces.pop(removed_uri, None)

        for added_info in added:
            if 'uri' in added_info:
                added_uri = added_info['uri']
                workspace_config = config.Config(added_uri, self.config._init_opts, self.config._process_id, self.config._capabilities)
                workspace_config.update(self.config._settings)
                self.workspaces[added_uri] = Workspace(added_uri, self._endpoint, workspace_config)

        root_workspace_removed = any(removed_info['uri'] == self.root_uri for removed_info in removed)
        workspace_added = len(added) > 0 and 'uri' in added[0]
        if root_workspace_removed and workspace_added:
            # Promote the first added folder to be the new root.
            added_uri = added[0]['uri']
            self.root_uri = added_uri
            new_root_workspace = self.workspaces[added_uri]
            self.config = new_root_workspace._config
            self.workspace = new_root_workspace
        elif root_workspace_removed:
            # Note: Removing the root workspace can only happen when the client
            # is closing the server; promote any remaining workspace.
            if self.workspaces:
                log.debug('Root workspace deleted!')
                available_workspaces = sorted(self.workspaces)
                first_workspace = available_workspaces[0]
                new_root_workspace = self.workspaces[first_workspace]
                self.root_uri = first_workspace
                self.config = new_root_workspace._config
                self.workspace = new_root_workspace

        # Migrate documents that are on the root workspace and have a better
        # match now.
        doc_uris = list(self.workspace._docs.keys())
        for uri in doc_uris:
            doc = self.workspace._docs.pop(uri)
            new_workspace = self._match_uri_to_workspace(uri)
            new_workspace._docs[uri] = doc

    def m_workspace__did_change_watched_files(self, changes=None, **_kwargs):
        changed_py_files = set()
        config_changed = False
        for d in changes or []:
            if d['uri'].endswith(PYTHON_FILE_EXTENSIONS):
                changed_py_files.add(d['uri'])
            elif d['uri'].endswith(CONFIG_FILEs):
                config_changed = True

        if config_changed:
            self.config.settings.cache_clear()
        elif not changed_py_files:
            # Only externally changed python files and lint configs may
            # result in changed diagnostics.
            return

        for workspace in self.workspaces.values():
            for doc_uri in workspace.documents:
                # Changes in doc_uri are already handled by
                # m_text_document__did_save.
                if doc_uri not in changed_py_files:
                    self.lint(doc_uri, is_saved=False)

    def m_workspace__execute_command(self, command=None, arguments=None):
        return self.execute_command(command, arguments)
def test_bad_alphabets() -> None:
    """Fsm construction must reject malformed alphabets."""
    # Charclasses in an alphabet may not overlap.
    with pytest.raises(ValueError, match='has overlaps'):
        Fsm(
            alphabet={Charclass('a'), Charclass('ab')},
            states={0},
            initial=0,
            finals=(),
            map={0: {Charclass('a'): 0, Charclass('ab'): 0}},
        )

    # The alphabet must cover the whole character space exactly once.
    with pytest.raises(ValueError, match='not a proper partition'):
        Fsm(
            alphabet={Charclass('a')},
            states={0},
            initial=0,
            finals=(),
            map={0: {Charclass('a'): 0}},
        )

    with pytest.raises(ValueError, match='not a proper partition'):
        Fsm(
            alphabet={~Charclass('b')},
            states={0},
            initial=0,
            finals=(),
            map={0: {~Charclass('b'): 0}},
        )

    with pytest.raises(ValueError, match='not a proper partition'):
        Fsm(
            alphabet={Charclass('a'), ~Charclass('ab')},
            states={0},
            initial=0,
            finals=(),
            map={0: {Charclass('a'): 0, ~Charclass('ab'): 0}},
        )
class Visualizer():
    """Result visualization helper: writes an HTML page of intermediate
    images during training, appends to a textual loss log, and dumps
    per-label images into separate directories at test time."""

    def __init__(self, opt):
        """Set up output directories and the loss log based on `opt`."""
        self.use_html = (opt.isTrain and (not opt.no_html))
        self.win_size = opt.display_winsize
        self.name = opt.name
        self.opt = opt
        # Tracks whether the current epoch's images were already saved.
        self.saved = False
        if self.use_html:
            self.web_dir = os.path.join(opt.checkpoints_dir, opt.name, 'web')
            self.img_dir = os.path.join(self.web_dir, 'images')
            print(((strftime('%Y-%m-%d %H:%M:%S', time.localtime()) + '] ') + ('create web directory %s...' % self.web_dir)))
            util.mkdirs([self.web_dir, self.img_dir])
        if opt.isTrain:
            self.log_name = os.path.join(opt.checkpoints_dir, opt.name, 'loss_log.txt')
            with open(self.log_name, 'a') as log_file:
                now = time.strftime('%c')
                log_file.write((' Training Loss (%s) \n' % now))
        if (not opt.isTrain):
            # Test mode: one output directory per result label.
            self.real_F1_dir = os.path.join(opt.save_root_path, opt.real_F1_path, 'Image')
            self.Boundary_dir = os.path.join(opt.save_root_path, opt.Boundary_path, 'Image')
            self.Boundary_transformed_dir = os.path.join(opt.save_root_path, opt.Boundary_transformed_path, 'Image')
            self.fake_F2_dir = os.path.join(opt.save_root_path, opt.fake_F2_path, 'Image')
            if (not os.path.exists(self.real_F1_dir)):
                os.makedirs(self.real_F1_dir)
            if (not os.path.exists(self.Boundary_dir)):
                os.makedirs(self.Boundary_dir)
            if (not os.path.exists(self.Boundary_transformed_dir)):
                os.makedirs(self.Boundary_transformed_dir)
            if (not os.path.exists(self.fake_F2_dir)):
                os.makedirs(self.fake_F2_dir)

    def reset(self):
        """Mark that the current epoch's results are not yet saved."""
        self.saved = False

    def display_current_results(self, visuals, epoch, save_result, transformer=False):
        """Save current images to disk and regenerate the HTML index.

        `visuals` is iterated as a sequence of {label: image} dicts.
        """
        if (self.use_html and (save_result or (not self.saved))):
            self.saved = True
            # Save each labeled image for this epoch.
            for visual in visuals:
                for (label, image_numpy) in visual.items():
                    img_path = os.path.join(self.img_dir, ('epoch%.3d_%s.png' % (epoch, label)))
                    util.save_image(image_numpy, img_path)
            # 'reflesh' is the (misspelled) auto-refresh parameter of the
            # html.HTML helper -- kept as-is to match its signature.
            webpage = html.HTML(self.web_dir, ('Experiment name = %s' % self.name), reflesh=1)
            if (not transformer):
                epoch_step = (- 1)
            else:
                epoch_step = (- self.opt.update_html_freq)
            # Walk epochs newest-first and add one image row per visual.
            for n in range(epoch, 0, epoch_step):
                webpage.add_header(('epoch [%d]' % n))
                for visual in visuals:
                    ims = []
                    txts = []
                    links = []
                    for (label, image_numpy) in visual.items():
                        img_path = ('epoch%.3d_%s.png' % (n, label))
                        ims.append(img_path)
                        txts.append(label)
                        links.append(img_path)
                    # NOTE(review): `label` here is the last label of the
                    # inner loop; presumably each visual dict groups
                    # images of one kind -- confirm.
                    if (('channel' in label) or ('edge' in label)):
                        webpage.add_images(ims, txts, links, width=992, vertical=True)
                    else:
                        webpage.add_images(ims, txts, links, width=self.win_size)
            webpage.save()

    def print_current_errors(self, epoch, i, errors, t, transformer=False):
        """Print the current losses to stdout and append to the log file."""
        if (not transformer):
            message = ('(epoch: %d, iters: %d, time: %.3f) ' % (epoch, i, t))
        else:
            message = ('(max iters: %d, iters: %d, time: %.3f) ' % (epoch, i, t))
        for (k, v) in errors.items():
            message += ('%s: %.6f ' % (k, v))
        print(((strftime('%Y-%m-%d %H:%M:%S', time.localtime()) + '] ') + message))
        with open(self.log_name, 'a') as log_file:
            log_file.write(('%s\n' % message))

    def save_images(self, webpage, visuals, image_path, vertical=False):
        """Save `visuals` into the webpage's image dir and add one row."""
        image_dir = webpage.get_image_dir()
        short_path = ntpath.basename(image_path[0])
        name = os.path.splitext(short_path)[0]
        webpage.add_header(name)
        ims = []
        txts = []
        links = []
        for (label, image_numpy) in visuals.items():
            image_name = ('%s_%s.png' % (name, label))
            save_path = os.path.join(image_dir, image_name)
            util.save_image(image_numpy, save_path)
            ims.append(image_name)
            txts.append(label)
            links.append(image_name)
        webpage.add_images(ims, txts, links, width=self.win_size, vertical=vertical)

    def save_images_split(self, visuals, image_path):
        """Save each labeled image into its dedicated test-output dir.

        NOTE(review): a label other than the four handled below would
        leave `save_path` unbound (UnboundLocalError) -- assumes
        `visuals` only ever contains these keys; confirm at call sites.
        """
        name = ntpath.basename(image_path[0])
        for (label, image_numpy) in visuals.items():
            if (label == 'real_F1'):
                save_path = os.path.join(self.real_F1_dir, name)
            elif (label == 'boundary_map'):
                save_path = os.path.join(self.Boundary_dir, name)
            elif (label == 'boundary_map_transformed'):
                save_path = os.path.join(self.Boundary_transformed_dir, name)
            elif (label == 'fake_F2'):
                save_path = os.path.join(self.fake_F2_dir, name)
            util.save_image(image_numpy, save_path)