code
stringlengths
281
23.7M
class TestHandler():
    """Tests of generic BaseHandler subclass behaviour: __slots__ hygiene and repr()."""

    def test_slot_behaviour(self):
        # Minimal concrete subclass: BaseHandler is abstract via check_update.
        class SubclassHandler(BaseHandler):
            __slots__ = ()

            def __init__(self):
                super().__init__((lambda x: None))

            def check_update(self, update: object):
                pass

        inst = SubclassHandler()
        # Every declared slot must actually be set on the instance.
        for attr in inst.__slots__:
            assert (getattr(inst, attr, 'err') != 'err'), f"got extra slot '{attr}'"
        # No slot name may be declared twice anywhere in the MRO.
        assert (len(mro_slots(inst)) == len(set(mro_slots(inst)))), 'duplicate slot'

    def test_repr(self):
        async def some_func():
            return None

        class SubclassHandler(BaseHandler):
            __slots__ = ()

            def __init__(self):
                super().__init__(callback=some_func)

            def check_update(self, update: object):
                pass

        sh = SubclassHandler()
        # repr uses the callback's __qualname__ when available.
        assert (repr(sh) == 'SubclassHandler[callback=TestHandler.test_repr.<locals>.some_func]')

    def test_repr_no_qualname(self):
        # A callable instance has no __qualname__, so repr falls back to repr(callback).
        class ClassBasedCallback():
            async def __call__(self, *args, **kwargs):
                pass

            def __repr__(self):
                return 'Repr of ClassBasedCallback'

        class SubclassHandler(BaseHandler):
            __slots__ = ()

            def __init__(self):
                super().__init__(callback=ClassBasedCallback())

            def check_update(self, update: object):
                pass

        sh = SubclassHandler()
        assert (repr(sh) == 'SubclassHandler[callback=Repr of ClassBasedCallback]')
def test_get_imgformat_png_when_setting_png(qapp, settings, item):
    """With storage format forced to 'png', get_imgformat returns 'png'."""
    settings.setValue('Items/image_storage_format', 'png')
    # Opaque image, large enough that a size heuristic could otherwise apply.
    fake_image = MagicMock(
        hasAlphaChannel=MagicMock(return_value=False),
        height=MagicMock(return_value=1600),
        width=MagicMock(return_value=1020),
    )
    result = item.get_imgformat(fake_image)
    assert result == 'png'
class EventLocation(models.Model):
    """A venue where events take place, optionally tied to a Calendar."""

    # Deleting the calendar cascades to its locations; the FK itself is optional.
    calendar = models.ForeignKey(Calendar, related_name='locations', null=True, blank=True, on_delete=models.CASCADE)
    name = models.CharField(max_length=255)
    address = models.CharField(blank=True, null=True, max_length=255)
    url = models.URLField('URL', blank=True, null=True)

    class Meta():
        ordering = ('name',)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # NOTE(review): assumes self.calendar is set; with calendar=None this
        # raises AttributeError — confirm callers never hit that case.
        return reverse('events:eventlist_location', kwargs={'calendar_slug': self.calendar.slug, 'pk': self.pk})
class Sup_MCL_Loss(nn.Module):
    """Supervised multi-view contrastive loss across `number_net` networks.

    Each network's representation is projected into a shared `feat_dim` space
    by its own Embed head, then the SupMCL contrast module computes the four
    loss terms.
    """

    def __init__(self, args):
        super().__init__()
        self.args = args
        # One projection head per network: rep_dim[i] -> feat_dim.
        self.embed_list = nn.ModuleList(
            [Embed(args.rep_dim[i], args.feat_dim) for i in range(args.number_net)]
        )
        self.contrast = SupMCL(args)

    def forward(self, embeddings, labels):
        """Project each view and return (vcl, soft_vcl, icl, soft_icl) losses.

        Fix: the original overwrote `embeddings[i]` in place, mutating the
        caller's list as a side effect; a local list is built instead.
        """
        projected = [
            self.embed_list[i](embeddings[i]) for i in range(self.args.number_net)
        ]
        (vcl_loss, soft_vcl_loss, icl_loss, soft_icl_loss) = self.contrast(projected, labels)
        return (vcl_loss, soft_vcl_loss, icl_loss, soft_icl_loss)
# Fix: the decorator was garbled to a bare `.parametrize(...)` (the
# `@pytest.mark` prefix was lost); restored so the test is collected.
@pytest.mark.parametrize('improved_sampling', [True, False])
def test_super_H(improved_sampling):
    """mcsolve given a Liouvillian must match mcsolve given the bare Hamiltonian."""
    size = 10
    ntraj = 1000
    a = qutip.destroy(size)
    H = qutip.num(size)
    state = qutip.basis(size, (size - 1))
    times = np.linspace(0, 1.0, 100)
    # Single thermal decay collapse channel.
    coupling = 0.5
    n_th = 0.05
    c_ops = (np.sqrt((coupling * (n_th + 1))) * a)
    e_ops = [qutip.num(size)]
    # Reference: Hamiltonian passed directly.
    mc_expected = mcsolve(H, state, times, c_ops, e_ops, ntraj=ntraj, target_tol=0.1, options={'map': 'serial'})
    # Same evolution with H passed as a superoperator.
    mc = mcsolve(qutip.liouvillian(H), state, times, c_ops, e_ops, ntraj=ntraj, target_tol=0.1, options={'map': 'serial', 'improved_sampling': improved_sampling})
    # Loose tolerance absorbs Monte-Carlo sampling noise at ntraj=1000.
    np.testing.assert_allclose(mc_expected.expect[0], mc.expect[0], atol=0.5)
def _parse_path_args(path: str, llm_app_cls=LLMApp) -> List[LLMApp]:
    """Load one or more app configs from a YAML file or a directory tree.

    Args:
        path: Path to a YAML file, or a directory searched recursively.
        llm_app_cls: Class whose ``parse_yaml(file)`` builds an app instance.

    Returns:
        A one-element list for a file; all parsed YAML files for a directory.

    Raises:
        ValueError: If ``path`` does not exist, or is neither a regular file
            nor a directory.
    """
    # Fix: explicit raise instead of `assert` — asserts are stripped under
    # `python -O`, silently skipping validation; ValueError matches the
    # function's other invalid-path branch.
    if not os.path.exists(path):
        raise ValueError(f'Could not load model from {path}, as it does not exist.')
    if os.path.isfile(path):
        with open(path, 'r') as f:
            return [llm_app_cls.parse_yaml(f)]
    elif os.path.isdir(path):
        apps = []
        for (root, _dirs, files) in os.walk(path):
            for p in files:
                if _is_yaml_file(p):
                    with open(os.path.join(root, p), 'r') as f:
                        apps.append(llm_app_cls.parse_yaml(f))
        return apps
    else:
        raise ValueError(f'Could not load model from {path}, as it is not a file or directory.')
def spec_to_mel_torch(spec, n_fft, num_mels, sampling_rate, fmin, fmax):
    """Convert a linear-frequency spectrogram to a normalized mel spectrogram.

    The mel filterbank is cached in the module-level ``mel_basis`` dict so it
    is only built once per (fmax, dtype, device) combination.
    """
    global mel_basis
    # Cache key includes spec's dtype/device so tensors on different devices
    # never reuse an incompatible filterbank.
    dtype_device = ((str(spec.dtype) + '_') + str(spec.device))
    fmax_dtype_device = ((str(fmax) + '_') + dtype_device)
    if (fmax_dtype_device not in mel_basis):
        # NOTE(review): positional librosa_mel_fn arguments match librosa < 0.10;
        # newer librosa requires keyword arguments — confirm the pinned version.
        mel = librosa_mel_fn(sampling_rate, n_fft, num_mels, fmin, fmax)
        mel_basis[fmax_dtype_device] = torch.from_numpy(mel).to(dtype=spec.dtype, device=spec.device)
    spec = torch.matmul(mel_basis[fmax_dtype_device], spec)
    spec = spectral_normalize_torch(spec)
    return spec
# Fix: the `@pytask.mark` prefixes were garbled away, both on this test's
# decorator and inside the generated task sources (which would not even
# parse as written); restored so the tasks are valid pytask modules.
@pytask.mark.end_to_end()
def test_dry_run_w_subsequent_task(runner, tmp_path):
    """After a successful run, --dry-run reports both tasks as would-be-executed."""
    source = """
    import pytask

    @pytask.mark.depends_on("out.txt")
    @pytask.mark.produces("out_2.txt")
    def task_example(depends_on, produces):
        produces.touch()
    """
    tmp_path.joinpath('task_example_second.py').write_text(textwrap.dedent(source))
    source = """
    import pytask

    @pytask.mark.produces("out.txt")
    def task_example(produces):
        produces.touch()
    """
    tmp_path.joinpath('task_example_first.py').write_text(textwrap.dedent(source))
    result = runner.invoke(cli, [tmp_path.as_posix()])
    assert (result.exit_code == ExitCode.OK)
    assert ('2 Succeeded' in result.output)
    # Modify the first task module so both tasks are out of date, then dry-run:
    # nothing executes, but both are reported as runnable.
    tmp_path.joinpath('task_example_first.py').write_text(textwrap.dedent((source + '\n')))
    result = runner.invoke(cli, ['--dry-run', tmp_path.as_posix()])
    assert (result.exit_code == ExitCode.OK)
    assert ('2 Would be executed' in result.output)
def delete_sensor_from_config(sensor_mac):
    """Drop ``sensor_mac`` from the global SENSORS map and rewrite the YAML file.

    A missing MAC is logged at debug level and the file is left untouched.
    """
    global SENSORS
    LOGGER.info(f'Deleting sensor from config: {sensor_mac}')
    try:
        SENSORS.pop(sensor_mac)
    except KeyError:
        LOGGER.debug(f'{sensor_mac} not found in SENSORS')
    else:
        # Persist only after a successful removal.
        write_yaml_file(os.path.join(CONFIG_PATH, SENSORS_CONFIG_FILE), SENSORS)
class TextItem():
    """A run of text with font properties, surrounding whitespace, and a
    break-allowed flag. Instances are read-only via accessor methods."""

    def __init__(self, text, font_props=None, *, ws_before='', ws_after='', allow_break=True):
        # Validate text first; the error message names the offending field.
        if not isinstance(text, str):
            raise TypeError('TextItem text must be str.')
        # Default the font, then type-check whatever we ended up with.
        if font_props is None:
            font_props = textmodule.FontProps()
        if not isinstance(font_props, textmodule.FontProps):
            raise TypeError('TextItem font_props must be a FontProps object.')
        self._text = text
        self._font_props = font_props
        self._allow_break = bool(allow_break)
        self._ws_before = ws_before
        self._ws_after = ws_after

    def text(self):
        """The text content."""
        return self._text

    def font_props(self):
        """The FontProps used to render this item."""
        return self._font_props

    def ws_before(self):
        """Whitespace preceding the text."""
        return self._ws_before

    def ws_after(self):
        """Whitespace following the text."""
        return self._ws_after

    def allow_break(self):
        """Whether a line break is permitted at this item."""
        return self._allow_break
def compute_rect_vertices(fromp, to, radius):
    """Return the four corners of a rectangle of half-width ``radius`` around
    the segment fromp->to, formatted as 'x,y' pairs joined by ';'."""
    (x1, y1) = fromp
    (x2, y2) = to
    if abs(y1 - y2) < 1e-06:
        # Near-horizontal segment: the offset is purely vertical.
        dx, dy = 0, radius
    else:
        # Perpendicular offset of length `radius` to the segment direction.
        slope = (x1 - x2) / (y1 - y2)
        dx = radius * 1.0 / ((slope ** 2 + 1) ** 0.5)
        dy = (radius ** 2 - dx ** 2) ** 0.5
        if (x1 - x2) * (y1 - y2) > 0:
            dy = -dy
    corners = [
        (x1 + dx, y1 + dy),
        (x2 + dx, y2 + dy),
        (x2 - dx, y2 - dy),
        (x1 - dx, y1 - dy),
    ]
    return ';'.join(','.join(map(str, corner)) for corner in corners)
def Print_info(fun):
    """Decorator: print ``fun``'s result and return it.

    A str result is printed as-is; a list result is printed line by line with
    embedded newlines stripped; other truthy results are returned silently.

    Fix: the original called ``fun`` twice per invocation (once for printing,
    once more for the return value), duplicating any side effects. The single
    result is now reused. ``functools.wraps`` preserves the wrapped metadata.
    """
    import functools

    @functools.wraps(fun)
    def work(*args, **kwargs):
        res = fun(*args, **kwargs)
        if res:
            if isinstance(res, str):
                print(res)
            elif isinstance(res, list):
                for line in res:
                    print(line.replace('\n', ''))
        return res
    return work
def _remove_variable_info_from_output(data: str, path: Any) -> str:
    """Normalize captured CLI output: scrub platform names, version strings,
    and the root path so snapshots are stable across machines.

    ``path`` is unused here; presumably kept for caller signature
    compatibility — confirm before removing.
    """
    lines = data.splitlines()
    # Everything between the first line and the 'Root:' line is the info
    # block; it is flattened into one line before scrubbing.
    index_root = next((i for (i, line) in enumerate(lines) if line.startswith('Root:')))
    new_info_line = ''.join(lines[1:index_root])
    for platform in ('linux', 'win32', 'darwin'):
        new_info_line = new_info_line.replace(platform, '<platform>')
    # PEP 440 version strings (shared VERSION_PATTERN) become '<version>'.
    pattern = re.compile(version.VERSION_PATTERN, flags=(re.IGNORECASE | re.VERBOSE))
    new_info_line = re.sub(pattern=pattern, repl='<version>', string=new_info_line)
    index_collected = next((i for (i, line) in enumerate(lines) if line.startswith('Collected')))
    new_root_line = 'Root: <path>'
    # Keep: header line, scrubbed info, placeholder root, then the body
    # from 'Collected...' onward.
    new_lines = [lines[0], new_info_line, new_root_line, *lines[index_collected:]]
    return '\n'.join(new_lines)
def test_naturaldelta() -> None:
    """naturaldelta formats a large delta in years, localized per active locale."""
    seconds = ((((1234 * 365) * 24) * 60) * 60)
    assert (humanize.naturaldelta(seconds) == '1,234 years')
    try:
        humanize.i18n.activate('fr_FR')
        assert (humanize.naturaldelta(seconds) == '1 234 ans')
        humanize.i18n.activate('es_ES')
        # NOTE(review): 'anos' (not 'años') looks like a lost diacritic from
        # extraction — verify against the es_ES catalog before changing.
        assert (humanize.naturaldelta(seconds) == '1,234 anos')
    except FileNotFoundError:
        # Compiled .mo catalogs are not committed; skip when absent.
        pytest.skip('Generate .mo with scripts/generate-translation-binaries.sh')
    finally:
        humanize.i18n.deactivate()
        # Deactivation must restore the default English formatting.
        assert (humanize.naturaldelta(seconds) == '1,234 years')
class TestRequestInterception(BaseTestCase):
    """Page.setRequestInterception behaviour: continue_/abort/respond paths.

    Fix: the `@unittest.skip(...)` decorators below had been reduced to bare
    string expressions by extraction, and one method definition was truncated
    mid-name (`test_request_interception_amend_ pass`), leaving the class
    unparseable. The decorators are restored and the truncated method is
    completed as `test_request_interception_amend_http_headers` —
    NOTE(review): confirm that name against upstream.
    """

    async def test_request_interception(self):
        (await self.page.setRequestInterception(True))

        async def request_check(req):
            self.assertIn('empty', req.url)
            self.assertTrue(req.headers.get('user-agent'))
            self.assertEqual(req.method, 'GET')
            self.assertIsNone(req.postData)
            self.assertTrue(req.isNavigationRequest())
            self.assertEqual(req.resourceType, 'document')
            self.assertEqual(req.frame, self.page.mainFrame)
            self.assertEqual(req.frame.url, 'about:blank')
            (await req.continue_())

        self.page.on('request', (lambda req: asyncio.ensure_future(request_check(req))))
        res = (await self.page.goto((self.url + 'empty')))
        self.assertEqual(res.status, 200)

    async def test_referer_header(self):
        (await self.page.setRequestInterception(True))
        requests = list()

        async def set_request(req):
            requests.append(req)
            (await req.continue_())

        self.page.on('request', (lambda req: asyncio.ensure_future(set_request(req))))
        (await self.page.goto((self.url + 'static/one-style.html')))
        # requests[1] is the stylesheet sub-request; its referer is the page.
        self.assertIn('/one-style.css', requests[1].url)
        self.assertIn('/one-style.html', requests[1].headers['referer'])

    async def test_response_with_cookie(self):
        (await self.page.goto((self.url + 'empty')))
        (await self.page.setCookie({'name': 'foo', 'value': 'bar'}))
        (await self.page.setRequestInterception(True))

        async def continue_(req):
            (await req.continue_())

        self.page.on('request', (lambda r: asyncio.ensure_future(continue_(r))))
        response = (await self.page.reload())
        self.assertEqual(response.status, 200)

    async def test_request_interception_stop(self):
        (await self.page.setRequestInterception(True))
        self.page.once('request', (lambda req: asyncio.ensure_future(req.continue_())))
        (await self.page.goto((self.url + 'empty')))
        # Disabling interception must allow subsequent navigation unhandled.
        (await self.page.setRequestInterception(False))
        (await self.page.goto((self.url + 'empty')))

    async def test_request_interception_custom_header(self):
        (await self.page.setExtraHTTPHeaders({'foo': 'bar'}))
        (await self.page.setRequestInterception(True))

        async def request_check(req):
            self.assertEqual(req.headers['foo'], 'bar')
            (await req.continue_())

        self.page.on('request', (lambda req: asyncio.ensure_future(request_check(req))))
        res = (await self.page.goto((self.url + 'empty')))
        self.assertEqual(res.status, 200)

    async def test_request_interception_custom_referer_header(self):
        (await self.page.goto((self.url + 'empty')))
        (await self.page.setExtraHTTPHeaders({'referer': (self.url + 'empty')}))
        (await self.page.setRequestInterception(True))

        async def request_check(req):
            self.assertEqual(req.headers['referer'], (self.url + 'empty'))
            (await req.continue_())

        self.page.on('request', (lambda req: asyncio.ensure_future(request_check(req))))
        res = (await self.page.goto((self.url + 'empty')))
        self.assertEqual(res.status, 200)

    async def test_request_interception_abort(self):
        (await self.page.setRequestInterception(True))

        async def request_check(req):
            # Abort only the stylesheet; the document itself must still load.
            if req.url.endswith('.css'):
                (await req.abort())
            else:
                (await req.continue_())

        failedRequests = []
        self.page.on('request', (lambda req: asyncio.ensure_future(request_check(req))))
        self.page.on('requestfailed', (lambda e: failedRequests.append(e)))
        res = (await self.page.goto((self.url + 'static/one-style.html')))
        self.assertTrue(res.ok)
        self.assertIsNone(res.request.failure())
        self.assertEqual(len(failedRequests), 1)

    async def test_request_interception_custom_error_code(self):
        (await self.page.setRequestInterception(True))

        async def request_check(req):
            (await req.abort('internetdisconnected'))

        self.page.on('request', (lambda req: asyncio.ensure_future(request_check(req))))
        failedRequests = []
        self.page.on('requestfailed', (lambda req: failedRequests.append(req)))
        with self.assertRaises(PageError):
            (await self.page.goto((self.url + 'empty')))
        self.assertEqual(len(failedRequests), 1)
        failedRequest = failedRequests[0]
        self.assertEqual(failedRequest.failure()['errorText'], 'net::ERR_INTERNET_DISCONNECTED')

    @unittest.skip('Need server-side implementation')
    async def test_request_interception_amend_http_headers(self):
        pass

    async def test_request_interception_abort_main(self):
        (await self.page.setRequestInterception(True))

        async def request_check(req):
            (await req.abort())

        self.page.on('request', (lambda req: asyncio.ensure_future(request_check(req))))
        with self.assertRaises(PageError) as cm:
            (await self.page.goto((self.url + 'empty')))
        self.assertIn('net::ERR_FAILED', cm.exception.args[0])

    async def test_request_interception_redirects(self):
        (await self.page.setRequestInterception(True))
        requests = []

        async def check(req):
            (await req.continue_())
            requests.append(req)

        self.page.on('request', (lambda req: asyncio.ensure_future(check(req))))
        response = (await self.page.goto((self.url + 'redirect1')))
        self.assertEqual(response.status, 200)

    async def test_redirect_for_subresource(self):
        (await self.page.setRequestInterception(True))
        requests = list()

        async def check(req):
            (await req.continue_())
            requests.append(req)

        self.page.on('request', (lambda req: asyncio.ensure_future(check(req))))
        response = (await self.page.goto((self.url + 'one-style.html')))
        self.assertEqual(response.status, 200)
        self.assertIn('one-style.html', response.url)
        self.assertEqual(len(requests), 5)
        self.assertEqual(requests[0].resourceType, 'document')
        self.assertEqual(requests[1].resourceType, 'stylesheet')
        # The stylesheet request carries the full redirect chain.
        redirectChain = requests[1].redirectChain
        self.assertEqual(len(redirectChain), 3)
        self.assertIn('/one-style.css', redirectChain[0].url)
        self.assertIn('/three-style.css', redirectChain[2].url)

    @unittest.skip('This test is not implemented')
    async def test_request_interception_abort_redirects(self):
        pass

    @unittest.skip('This test is not implemented')
    async def test_request_interception_equal_requests(self):
        pass

    async def test_request_interception_data_url(self):
        (await self.page.setRequestInterception(True))
        requests = []

        async def check(req):
            requests.append(req)
            (await req.continue_())

        self.page.on('request', (lambda req: asyncio.ensure_future(check(req))))
        dataURL = 'data:text/html,<div>yo</div>'
        response = (await self.page.goto(dataURL))
        self.assertEqual(response.status, 200)
        self.assertEqual(len(requests), 1)
        self.assertEqual(requests[0].url, dataURL)

    async def test_request_interception_abort_data_url(self):
        (await self.page.setRequestInterception(True))

        async def request_check(req):
            (await req.abort())

        self.page.on('request', (lambda req: asyncio.ensure_future(request_check(req))))
        with self.assertRaises(PageError) as cm:
            (await self.page.goto('data:text/html,No way!'))
        self.assertIn('net::ERR_FAILED', cm.exception.args[0])

    async def test_request_interception_with_hash(self):
        (await self.page.setRequestInterception(True))
        requests = []

        async def check(req):
            requests.append(req)
            (await req.continue_())

        self.page.on('request', (lambda req: asyncio.ensure_future(check(req))))
        # The URL fragment is never sent on the wire.
        response = (await self.page.goto((self.url + 'empty#hash')))
        self.assertEqual(response.status, 200)
        self.assertEqual(response.url, (self.url + 'empty'))
        self.assertEqual(len(requests), 1)
        self.assertEqual(requests[0].url, (self.url + 'empty'))

    async def test_request_interception_encoded_server(self):
        (await self.page.setRequestInterception(True))

        async def check(req):
            (await req.continue_())

        self.page.on('request', (lambda req: asyncio.ensure_future(check(req))))
        response = (await self.page.goto((self.url + 'non existing page')))
        self.assertEqual(response.status, 404)

    @unittest.skip('Need server-side implementation')
    async def test_request_interception_badly_encoded_server(self):
        pass

    @unittest.skip('Need server-side implementation')
    async def test_request_interception_encoded_server_2(self):
        pass

    @unittest.skip('This test is not implemented')
    async def test_request_interception_invalid_interception_id(self):
        pass

    async def test_request_interception_disabled(self):
        error = None

        async def check(req):
            try:
                (await req.continue_())
            except Exception as e:
                nonlocal error
                error = e

        self.page.on('request', (lambda req: asyncio.ensure_future(check(req))))
        (await self.page.goto((self.url + 'empty')))
        self.assertIsNotNone(error)
        self.assertIn('Request interception is not enabled', error.args[0])

    async def test_request_interception_with_file_url(self):
        (await self.page.setRequestInterception(True))
        urls = []

        async def set_urls(req):
            urls.append(req.url.split('/').pop())
            (await req.continue_())

        self.page.on('request', (lambda req: asyncio.ensure_future(set_urls(req))))

        def pathToFileURL(path: Path):
            pathName = str(path).replace('\\', '/')
            if (not pathName.startswith('/')):
                pathName = '/{}'.format(pathName)
            return 'file://{}'.format(pathName)

        target = ((Path(__file__).parent / 'static') / 'one-style.html')
        (await self.page.goto(pathToFileURL(target)))
        self.assertEqual(len(urls), 2)
        self.assertIn('one-style.html', urls)
        self.assertIn('one-style.css', urls)

    async def test_request_respond(self):
        (await self.page.setRequestInterception(True))

        async def interception(req):
            (await req.respond({'status': 201, 'headers': {'foo': 'bar'}, 'body': 'intercepted'}))

        self.page.on('request', (lambda req: asyncio.ensure_future(interception(req))))
        response = (await self.page.goto((self.url + 'empty')))
        self.assertEqual(response.status, 201)
        self.assertEqual(response.headers['foo'], 'bar')
        body = (await self.page.evaluate('() => document.body.textContent'))
        self.assertEqual(body, 'intercepted')

    @unittest.skip('Sending binary object is not implemented')
    async def test_request_respond_bytes(self):
        pass
def _get_actual_assertions_names() -> list[str]:
    """Return the assert* method names Django's TestCase adds beyond unittest's."""
    from unittest import TestCase as DefaultTestCase

    from django.test import TestCase as DjangoTestCase

    def is_assert(name) -> bool:
        # Only camelCase assert* names (no snake_case aliases).
        return name.startswith('assert') and '_' not in name

    instance = DjangoTestCase('run')
    stdlib_asserts = {
        name for name, _member in inspect.getmembers(DefaultTestCase) if is_assert(name)
    }
    return [
        name
        for name, _member in inspect.getmembers(instance)
        if is_assert(name) and name not in stdlib_asserts
    ]
def test_sparse_vs_pad():
    """The sparse (adjacency-list) and padded GGNN implementations must agree.

    Builds a small 3-graph batch, constructs both networks from the same seed,
    and compares the padded node-feature outputs element-wise.
    """
    node_feats = torch.tensor([[1, 2, 3], [4, 5, 6], [7, 8, 9], [9, 10, 11], [11, 11.1, 12.4], [18, 11.1, 22.4], [24, 15.31, 18.4], [16, 10.1, 17.4]])
    # Node -> graph assignment: nodes 0-2 in graph 0, 3-4 in graph 1, 5-7 in graph 2.
    graph_ids = torch.tensor([0, 0, 0, 1, 1, 2, 2, 2])
    # Two edge types, each as [source_indices, destination_indices].
    edges = {'asingle': torch.tensor([[0, 1, 7, 6, 3, 4], [1, 2, 6, 5, 4, 3]]), 'bdouble': torch.tensor([[2, 1], [0, 0]])}
    graph = graph_as_adj_list.DirectedGraphAsAdjList(node_feats, edges, graph_ids)
    (atom_feats, adj_mat, *_) = graph.return_padded_repr()
    # Mask of real (non-padding) nodes, needed by the padded network.
    nodes_on = torch.any((atom_feats != 0), dim=(- 1))
    net_params = ggnn_base.GGNNParams(3, ['asingle', 'bdouble'], 3)
    # Re-seed before each construction so both nets get identical weights.
    torch.manual_seed(43)
    ggnn_sparse_net = ggnn_sparse.GGNNSparse(net_params)
    out_sparse = ggnn_sparse_net(graph)
    (node_feats_out_sparse, *_) = out_sparse.return_padded_repr()
    torch.manual_seed(43)
    ggnn_padded_net = ggnn_pad.GGNNPad(net_params)
    node_feats_out_pad = ggnn_padded_net(atom_feats, adj_mat, nodes_on)
    np.testing.assert_array_almost_equal(node_feats_out_sparse.cpu().detach().numpy(), node_feats_out_pad.cpu().detach().numpy())
class PageRendererMixin():
    """Mixin serializing a page dict (and its related objects) to XML."""

    def render_page(self, xml, page):
        # Emit each page element at most once, even if referenced repeatedly.
        if (page['uri'] not in self.uris):
            self.uris.add(page['uri'])
            xml.startElement('page', {'dc:uri': page['uri']})
            self.render_text_element(xml, 'uri_prefix', {}, page['uri_prefix'])
            self.render_text_element(xml, 'uri_path', {}, page['uri_path'])
            self.render_text_element(xml, 'dc:comment', {}, page['comment'])
            # Attribute is referenced by URI only; its full rendering happens below.
            if (page['attribute'] is None):
                self.render_text_element(xml, 'attribute', {}, None)
            else:
                self.render_text_element(xml, 'attribute', {'dc:uri': page['attribute']['uri']}, None)
            self.render_text_element(xml, 'is_collection', {}, page['is_collection'])
            # One title/help/verbose_name element per configured language.
            for (lang_code, lang_string, lang_field) in get_languages():
                self.render_text_element(xml, 'title', {'lang': lang_code}, page[('title_%s' % lang_code)])
                self.render_text_element(xml, 'help', {'lang': lang_code}, page[('help_%s' % lang_code)])
                self.render_text_element(xml, 'verbose_name', {'lang': lang_code}, page[('verbose_name_%s' % lang_code)])
            xml.startElement('questionsets', {})
            for page_questionset in page['page_questionsets']:
                self.render_text_element(xml, 'questionset', {'dc:uri': page_questionset['questionset']['uri'], 'order': str(page_questionset['order'])}, None)
            xml.endElement('questionsets')
            xml.startElement('questions', {})
            for page_question in page['page_questions']:
                self.render_text_element(xml, 'question', {'dc:uri': page_question['question']['uri'], 'order': str(page_question['order'])}, None)
            xml.endElement('questions')
            xml.startElement('conditions', {})
            for condition in page['conditions']:
                self.render_text_element(xml, 'condition', {'dc:uri': condition['uri']}, None)
            xml.endElement('conditions')
            xml.endElement('page')
        # Related objects are rendered after the page element, gated by
        # context flags (questionsets/questions default to enabled).
        if self.context.get('attributes'):
            if (page['attribute'] is not None):
                self.render_attribute(xml, page['attribute'])
        if self.context.get('conditions'):
            for condition in page['conditions']:
                self.render_condition(xml, condition)
        if self.context.get('questionsets', True):
            for page_questionset in page['page_questionsets']:
                self.render_questionset(xml, page_questionset['questionset'])
        if self.context.get('questions', True):
            for page_question in page['page_questions']:
                self.render_question(xml, page_question['question'])
class TestBitrate(unittest.TestCase):
    """file_info.bitrate for wav/aiff inputs, pathlib paths, and empty files."""

    def _assert_bitrate(self, source, expected):
        # Shared assertion helper for every bitrate case.
        self.assertEqual(expected, file_info.bitrate(source))

    def test_wav(self):
        self._assert_bitrate(INPUT_FILE, 706000.0)

    def test_wav_pathlib(self):
        # pathlib.Path inputs must behave the same as str paths.
        self._assert_bitrate(Path(INPUT_FILE), 706000.0)

    def test_aiff(self):
        self._assert_bitrate(INPUT_FILE2, 768000.0)

    def test_empty(self):
        # An empty file has no determinable bitrate.
        self._assert_bitrate(EMPTY_FILE, None)
class _BaseHeaderFooter(BlockItemContainer):
    """Base behaviour shared by header and footer objects for a section.

    Fix: the `@property` decorators in this class were stripped in extraction
    — the orphaned `_linked_to_previous.setter` line and the uncalled uses of
    `self._has_definition` / `self._definition` / `self._prior_headerfooter`
    in `_get_or_add_definition` show these must be properties. They are
    restored here; NOTE(review) confirm the exact set against upstream.
    """

    def __init__(self, sectPr: CT_SectPr, document_part: DocumentPart, header_footer_index: WD_HEADER_FOOTER):
        self._sectPr = sectPr
        self._document_part = document_part
        self._hdrftr_index = header_footer_index

    @property
    def is_linked_to_previous(self) -> bool:
        # "Linked to previous" is simply the absence of an explicit definition.
        return (not self._has_definition)

    @is_linked_to_previous.setter
    def is_linked_to_previous(self, value: bool) -> None:
        new_state = bool(value)
        if (new_state == self.is_linked_to_previous):
            return
        if (new_state is True):
            self._drop_definition()
        else:
            self._add_definition()

    @property
    def part(self) -> (HeaderPart | FooterPart):
        # A definition is created on first access if none exists.
        return self._get_or_add_definition()

    def _add_definition(self) -> (HeaderPart | FooterPart):
        raise NotImplementedError('must be implemented by each subclass')

    @property
    def _definition(self) -> (HeaderPart | FooterPart):
        raise NotImplementedError('must be implemented by each subclass')

    def _drop_definition(self) -> None:
        raise NotImplementedError('must be implemented by each subclass')

    @property
    def _element(self):
        return self._get_or_add_definition().element

    def _get_or_add_definition(self) -> (HeaderPart | FooterPart):
        # Precedence: own definition, then inherit from the prior
        # header/footer, else create a fresh definition.
        if self._has_definition:
            return self._definition
        prior_headerfooter = self._prior_headerfooter
        if prior_headerfooter:
            return prior_headerfooter._get_or_add_definition()
        return self._add_definition()

    @property
    def _has_definition(self) -> bool:
        raise NotImplementedError('must be implemented by each subclass')

    @property
    def _prior_headerfooter(self) -> ((_Header | _Footer) | None):
        raise NotImplementedError('must be implemented by each subclass')
class ResBottleneckBlock(nn.Module):
    """Residual bottleneck block: relu(x + F(x)), with a 1x1 conv projection
    on the skip path whenever channel count or stride changes."""

    def __init__(self, w_in, w_out, stride, bn_norm, bm=1.0, gw=1, se_r=None):
        super(ResBottleneckBlock, self).__init__()
        self.construct(w_in, w_out, stride, bn_norm, bm, gw, se_r)

    def _add_skip_proj(self, w_in, w_out, stride, bn_norm):
        # Bias-free 1x1 conv + norm so the skip path matches F(x)'s shape.
        self.proj = nn.Conv2d(w_in, w_out, kernel_size=1, stride=stride, padding=0, bias=False)
        self.bn = get_norm(bn_norm, w_out)

    def construct(self, w_in, w_out, stride, bn_norm, bm, gw, se_r):
        # A projection is only needed when input/output shapes differ.
        self.proj_block = ((w_in != w_out) or (stride != 1))
        if self.proj_block:
            self._add_skip_proj(w_in, w_out, stride, bn_norm)
        self.f = BottleneckTransform(w_in, w_out, stride, bn_norm, bm, gw, se_r)
        self.relu = nn.ReLU(regnet_cfg.MEM.RELU_INPLACE)

    def forward(self, x):
        if self.proj_block:
            x = (self.bn(self.proj(x)) + self.f(x))
        else:
            x = (x + self.f(x))
        x = self.relu(x)
        return x
class macros():
    """Textual ``${...}`` placeholders substituted into a Role at scheduling time."""

    img_root = '${img_root}'
    base_img_root = '${base_img_root}'
    app_id = '${app_id}'
    replica_id = '${replica_id}'
    rank0_env = '${rank0_env}'

    # NOTE(review): `Values` uses bare annotations and `asdict(self)`, which
    # only work on a dataclass — the `@dataclass` decorator appears to have
    # been lost in extraction; confirm against the original module.
    class Values():
        img_root: str
        app_id: str
        replica_id: str
        rank0_env: str
        base_img_root: str = 'DEPRECATED'  # retained for backwards compatibility

        def apply(self, role: 'Role') -> 'Role':
            """Return a copy of ``role`` with macros substituted in args and env."""
            # Deep-copy so the caller's Role is never mutated.
            role = copy.deepcopy(role)
            role.args = [self.substitute(arg) for arg in role.args]
            role.env = {key: self.substitute(arg) for (key, arg) in role.env.items()}
            return role

        def substitute(self, arg: str) -> str:
            """Substitute known macros in ``arg``; unknown ${...} tokens are kept."""
            return Template(arg).safe_substitute(**asdict(self))
def test_sieve_band(tmpdir, runner):
    """`rio calc` with a sieve expression writes a single-band uint8 raster."""
    outfile = str(tmpdir.join('out.tif'))
    cli_args = ['calc', '(sieve (band 1 1) 42)', 'tests/data/shade.tif', outfile]
    result = runner.invoke(main_group, cli_args, catch_exceptions=False)
    assert result.exit_code == 0
    # Inspect the produced dataset: one band, byte dtype.
    with rasterio.open(outfile) as src:
        assert src.count == 1
        assert src.meta['dtype'] == 'uint8'
class TestLiterals(unittest.TestCase):
    """Encoding and index-assignment behaviour of the literals table."""

    def test_format_str_literal(self) -> None:
        # Length-prefixed encoding: one byte for lengths <= 127, then two
        # 7-bit groups (high byte has the continuation bit set).
        assert (format_str_literal('') == b'\x00')
        assert (format_str_literal('xyz') == b'\x03xyz')
        assert (format_str_literal(('x' * 127)) == (b'\x7f' + (b'x' * 127)))
        assert (format_str_literal(('x' * 128)) == (b'\x81\x00' + (b'x' * 128)))
        assert (format_str_literal(('x' * 131)) == (b'\x81\x03' + (b'x' * 131)))

    def test_encode_str_values(self) -> None:
        # Each chunk starts with a count byte; a trailing b'' terminates.
        assert (_encode_str_values({}) == [b''])
        assert (_encode_str_values({'foo': 0}) == [b'\x01\x03foo', b''])
        assert (_encode_str_values({'foo': 0, 'b': 1}) == [b'\x02\x03foo\x01b', b''])
        # Long values are split into their own chunk.
        assert (_encode_str_values({'foo': 0, ('x' * 70): 1}) == [b'\x01\x03foo', (bytes([1, 70]) + (b'x' * 70)), b''])
        assert (_encode_str_values({('y' * 100): 0}) == [(bytes([1, 100]) + (b'y' * 100)), b''])

    def test_encode_bytes_values(self) -> None:
        # Same chunking scheme as str values, applied to bytes keys.
        assert (_encode_bytes_values({}) == [b''])
        assert (_encode_bytes_values({b'foo': 0}) == [b'\x01\x03foo', b''])
        assert (_encode_bytes_values({b'foo': 0, b'b': 1}) == [b'\x02\x03foo\x01b', b''])
        assert (_encode_bytes_values({b'foo': 0, (b'x' * 70): 1}) == [b'\x01\x03foo', (bytes([1, 70]) + (b'x' * 70)), b''])
        assert (_encode_bytes_values({(b'y' * 100): 0}) == [(bytes([1, 100]) + (b'y' * 100)), b''])

    def test_encode_int_values(self) -> None:
        # Ints are encoded as ASCII digits, NUL-separated within a chunk.
        assert (_encode_int_values({}) == [b''])
        assert (_encode_int_values({123: 0}) == [b'\x01123', b''])
        assert (_encode_int_values({123: 0, 9: 1}) == [b'\x02123\x009', b''])
        assert (_encode_int_values({123: 0, 45: 1, (5 * (10 ** 70)): 2}) == [b'\x02123\x0045', (b'\x015' + (b'0' * 70)), b''])
        assert (_encode_int_values({(6 * (10 ** 100)): 0}) == [(b'\x016' + (b'0' * 100)), b''])

    def test_simple_literal_index(self) -> None:
        # None/False/True always occupy indexes 0-2; recorded literals follow.
        lit = Literals()
        lit.record_literal(1)
        lit.record_literal('y')
        lit.record_literal(True)
        lit.record_literal(None)
        lit.record_literal(False)
        assert (lit.literal_index(None) == 0)
        assert (lit.literal_index(False) == 1)
        assert (lit.literal_index(True) == 2)
        assert (lit.literal_index('y') == 3)
        assert (lit.literal_index(1) == 4)

    def test_tuple_literal(self) -> None:
        # Nested tuples are recorded recursively; indexes continue after the
        # scalar literals they contain.
        lit = Literals()
        lit.record_literal((1, 'y', None, (b'a', 'b')))
        lit.record_literal((b'a', 'b'))
        lit.record_literal(())
        assert (lit.literal_index((b'a', 'b')) == 7)
        assert (lit.literal_index((1, 'y', None, (b'a', 'b'))) == 8)
        assert (lit.literal_index(()) == 9)
        # NOTE(review): leftover debug print — consider removing.
        print(lit.encoded_tuple_values())
        # Encoded form: for each tuple, its length followed by element indexes.
        assert (lit.encoded_tuple_values() == ['3', '2', '5', '4', '4', '6', '3', '0', '7', '0'])
def select_2(train_embs, one_test_emb, downstream_train_examples, one_test_example, tag, given_context, phase2_selection):
    """Build a few-shot prompt for one test example, selecting as many training
    exemplars as fit within the token budget (3800 tokens).

    Phase-2 selection is either 'similar' (cosine similarity to the test
    embedding) or 'random'. The final prompt and the chosen exemplar ids are
    also dumped to a JSON file under ``{args.output_dir}/{tag}/prompts``.

    NOTE(review): this body was reconstructed from a whitespace-mangled
    source; the newlines inside the triple-quoted f-strings are presumed —
    confirm against the original.
    """
    cos = nn.CosineSimilarity(dim=1, eps=1e-06)
    if (not os.path.isdir(f'{args.output_dir}/{tag}/prompts')):
        os.makedirs(f'{args.output_dir}/{tag}/prompts', exist_ok=True)
    prompt_string = f'''{conversion(table_prompt)}
'''
    prev_prompt_string = f'''{conversion(table_prompt)}
'''
    # Rank candidate exemplars: by similarity to the test embedding, or randomly.
    if (phase2_selection in ['similar']):
        test_e_reshape = one_test_emb.reshape(1, (- 1))
        scores = cos(test_e_reshape, train_embs).numpy()
        sorted_indices = np.argsort(scores)
    elif (phase2_selection in ['random']):
        sorted_indices = np.random.permutation(range(len(downstream_train_examples)))
    # Greedily add exemplars (best first, i.e. iterating argsort from the end)
    # until the prompt would exceed the token budget.
    selected_indices = []
    num_indices = len(sorted_indices)
    count = 1
    for idx in range((num_indices - 1), (- 1), (- 1)):
        prev_prompt_string += get_instance(count, downstream_train_examples[sorted_indices[idx]])
        # Tentatively append the test example's own turn to measure total length.
        cur_prompt_string = (prev_prompt_string + f'''Example #{(count + 1)}
''')
        last_slot_values = given_context
        cur_prompt_string += f'''[context] {conversion(', '.join({f'{slot}: {value}' for (slot, value) in last_slot_values.items()}))}
'''
        last_sys_utt = one_test_example['dialog']['sys'][(- 1)]
        if (last_sys_utt == 'none'):
            last_sys_utt = ''
        cur_prompt_string += f'''[system] {last_sys_utt}
'''
        cur_prompt_string += f'''Q: [user] {one_test_example['dialog']['usr'][(- 1)]}
'''
        cur_prompt_string += 'SQL: SELECT * FROM'
        length = len(tokenizer_for_length(cur_prompt_string)['input_ids'])
        if (length > 3800):
            break
        selected_indices.append(idx)
        count += 1
    # Re-sort the kept exemplars by similarity score (sanity-checked below:
    # for 'similar' selection the greedy order already matches).
    indices_scores = []
    for idx in selected_indices:
        indices_scores.append([idx, cos(train_embs[sorted_indices[idx]].reshape(1, (- 1)), one_test_emb.reshape(1, (- 1))).item()])
    indices_scores = sorted(indices_scores, key=(lambda x: x[1]), reverse=True)
    new_selected_indices = [x[0] for x in indices_scores]
    if (phase2_selection in ['similar']):
        assert (new_selected_indices == selected_indices), f'new_selected_indices={new_selected_indices}, selected_indices={selected_indices}'
    selected_indices = new_selected_indices
    # Build the final prompt, least-similar exemplar first, and record which
    # training examples were used.
    select_num = len(selected_indices)
    count = 0
    second_phase_selected_indices = []
    for idx in range((select_num - 1), (- 1), (- 1)):
        prompt_string += get_instance(count, downstream_train_examples[sorted_indices[selected_indices[idx]]])
        second_phase_selected_indices.append([sorted_indices[selected_indices[idx]].item(), downstream_train_examples[sorted_indices[selected_indices[idx]]]['id']])
        count += 1
    prompt_string += f'''Example #{count}
'''
    last_slot_values = given_context
    prompt_string += f'''[context] {conversion(', '.join({f'{slot}: {value}' for (slot, value) in last_slot_values.items()}))}
'''
    last_sys_utt = one_test_example['dialog']['sys'][(- 1)]
    if (last_sys_utt == 'none'):
        last_sys_utt = ''
    prompt_string += f'''[system] {last_sys_utt}
'''
    prompt_string += f'''Q: [user] {one_test_example['dialog']['usr'][(- 1)]}
'''
    prompt_string += 'SQL: SELECT * FROM'
    # The final prompt must respect the same budget used during selection.
    assert (len(tokenizer_for_length(prompt_string)['input_ids']) <= 3800)
    print('select_2, prompt example num: ', len(second_phase_selected_indices))
    with open(f"{args.output_dir}/{tag}/prompts/{one_test_example['name'].replace('.', '')}_{one_test_example['id']}.json", 'w') as f:
        json.dump([[one_test_example['name'].replace('.', ''), one_test_example['id'], second_phase_selected_indices], prompt_string], f, indent=4)
    return prompt_string
class decoder(nn.Module):
    """DCGAN-style decoder with U-Net skip connections.

    Mirrors a 5-stage encoder: each upsampling stage consumes the previous
    stage's output concatenated with the matching encoder skip feature map.
    """

    def __init__(self, dim, nc=1):
        super(decoder, self).__init__()
        self.dim = dim
        nf = 64
        # 1x1 latent vector -> first spatial feature map.
        self.upc1 = nn.Sequential(nn.ConvTranspose2d(dim, (nf * 8), 4, 1, 0), nn.BatchNorm2d((nf * 8)), nn.LeakyReLU(0.2, inplace=True))
        # Input channels are doubled by the skip concatenation at each stage.
        self.upc2 = dcgan_upconv(((nf * 8) * 2), (nf * 4))
        self.upc3 = dcgan_upconv(((nf * 4) * 2), (nf * 2))
        self.upc4 = dcgan_upconv(((nf * 2) * 2), nf)
        # Final stage maps to `nc` output channels, squashed to [0, 1].
        self.upc5 = nn.Sequential(nn.ConvTranspose2d((nf * 2), nc, 4, 2, 1), nn.Sigmoid())

    def forward(self, input):
        # `input` is (latent_vector, skips) where skips[0..3] come from the
        # encoder, shallowest first — consumed here in reverse order.
        (vec, skip) = input
        d1 = self.upc1(vec.view((- 1), self.dim, 1, 1))
        d2 = self.upc2(torch.cat([d1, skip[3]], 1))
        d3 = self.upc3(torch.cat([d2, skip[2]], 1))
        d4 = self.upc4(torch.cat([d3, skip[1]], 1))
        output = self.upc5(torch.cat([d4, skip[0]], 1))
        return output
def test_connection_create_table():
    """Exercise ``Connection.create_table`` validation and request building.

    Walks through: missing attribute definitions / key schema (ValueError),
    a BotoCoreError being wrapped as TableError, and the exact low-level
    request parameters produced for GSIs, LSIs, stream specification and
    PAY_PER_REQUEST billing.
    """
    conn = Connection(REGION)
    kwargs = {'read_capacity_units': 1, 'write_capacity_units': 1}
    # attribute_definitions missing -> rejected before any request is made
    with pytest.raises(ValueError):
        conn.create_table(TEST_TABLE_NAME, **kwargs)
    kwargs['attribute_definitions'] = [{'attribute_name': 'key1', 'attribute_type': 'S'}, {'attribute_name': 'key2', 'attribute_type': 'S'}]
    # key_schema still missing -> also rejected
    with pytest.raises(ValueError):
        conn.create_table(TEST_TABLE_NAME, **kwargs)
    kwargs['key_schema'] = [{'attribute_name': 'key1', 'key_type': 'hash'}, {'attribute_name': 'key2', 'key_type': 'range'}]
    # expected low-level request body once the table spec is complete
    params = {'TableName': TEST_TABLE_NAME, 'ProvisionedThroughput': {'WriteCapacityUnits': 1, 'ReadCapacityUnits': 1}, 'AttributeDefinitions': [{'AttributeType': 'S', 'AttributeName': 'key1'}, {'AttributeType': 'S', 'AttributeName': 'key2'}], 'KeySchema': [{'KeyType': 'HASH', 'AttributeName': 'key1'}, {'KeyType': 'RANGE', 'AttributeName': 'key2'}]}
    # botocore-level failures surface as TableError
    with patch(PATCH_METHOD) as req:
        req.side_effect = BotoCoreError
        with pytest.raises(TableError):
            conn.create_table(TEST_TABLE_NAME, **kwargs)
    with patch(PATCH_METHOD) as req:
        req.return_value = None
        conn.create_table(TEST_TABLE_NAME, **kwargs)
        assert (req.call_args[0][1] == params)
    # global secondary index: snake_case kwargs are converted to AWS casing
    kwargs['global_secondary_indexes'] = [{'index_name': 'alt-index', 'key_schema': [{'KeyType': 'HASH', 'AttributeName': 'AltKey'}], 'projection': {'ProjectionType': 'KEYS_ONLY'}, 'provisioned_throughput': {'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}}]
    params['GlobalSecondaryIndexes'] = [{'IndexName': 'alt-index', 'Projection': {'ProjectionType': 'KEYS_ONLY'}, 'KeySchema': [{'AttributeName': 'AltKey', 'KeyType': 'HASH'}], 'ProvisionedThroughput': {'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}}]
    with patch(PATCH_METHOD) as req:
        req.return_value = None
        conn.create_table(TEST_TABLE_NAME, **kwargs)
        assert (req.call_args[0][1]['GlobalSecondaryIndexes'][0]['KeySchema'][0]['KeyType'] == 'HASH')
        assert (req.call_args[0][1] == params)
    del kwargs['global_secondary_indexes']
    del params['GlobalSecondaryIndexes']
    # local secondary index: no ProvisionedThroughput in the request body
    kwargs['local_secondary_indexes'] = [{'index_name': 'alt-index', 'projection': {'ProjectionType': 'KEYS_ONLY'}, 'key_schema': [{'AttributeName': 'AltKey', 'KeyType': 'HASH'}], 'provisioned_throughput': {'ReadCapacityUnits': 1, 'WriteCapacityUnits': 1}}]
    params['LocalSecondaryIndexes'] = [{'Projection': {'ProjectionType': 'KEYS_ONLY'}, 'KeySchema': [{'KeyType': 'HASH', 'AttributeName': 'AltKey'}], 'IndexName': 'alt-index'}]
    with patch(PATCH_METHOD) as req:
        req.return_value = None
        conn.create_table(TEST_TABLE_NAME, **kwargs)
        assert (req.call_args[0][1] == params)
    # stream specification is passed through with AWS casing
    kwargs['stream_specification'] = {'stream_enabled': True, 'stream_view_type': 'NEW_IMAGE'}
    params['StreamSpecification'] = {'StreamEnabled': True, 'StreamViewType': 'NEW_IMAGE'}
    with patch(PATCH_METHOD) as req:
        req.return_value = None
        conn.create_table(TEST_TABLE_NAME, **kwargs)
        assert (req.call_args[0][1] == params)
    # on-demand billing removes ProvisionedThroughput from the request
    kwargs['billing_mode'] = PAY_PER_REQUEST_BILLING_MODE
    params['BillingMode'] = PAY_PER_REQUEST_BILLING_MODE
    del params['ProvisionedThroughput']
    with patch(PATCH_METHOD) as req:
        req.return_value = None
        conn.create_table(TEST_TABLE_NAME, **kwargs)
        assert (req.call_args[0][1] == params)
def gen_basic_test():
    """Return a basic processor assembly test program.

    ``csrr`` reads 5 from the manager into x1, ``addi`` adds 4, and
    ``csrw ... > 9`` asserts that 9 is delivered back to the manager;
    the ``nop`` runs pad the pipeline between dependent instructions.
    """
    return '\n\n csrr x1, mngr2proc, < 5\n nop\n nop\n nop\n nop\n nop\n nop\n nop\n nop\n addi x3, x1, 0x0004\n nop\n nop\n nop\n nop\n nop\n nop\n nop\n nop\n csrw proc2mngr, x3 > 9\n nop\n nop\n nop\n nop\n nop\n nop\n nop\n nop\n '
class W_ImpPropertyAccessor(W_ImpPropertyFunction):
    """Callable accessor for an impersonator property.

    Looks up ``self.descriptor`` on the target object; if the property is
    absent, falls back to the optional ``fail`` value, and raises a
    Scheme-level error when neither is available.
    """
    errorname = 'impersonator-property-accessor'
    # NOTE(review): this looks like the argument list of a call-signature
    # decorator for ``call`` that lost its ``@`` during extraction — confirm
    # against the original source.
    _call_method([values.W_Object, default(values.W_Object)])

    def call(self, obj, fail):
        v = lookup_property(obj, self.descriptor)
        # (the original repeated this check twice; the duplicate was dead code)
        if v:
            return v
        # fall back to the caller-supplied failure value, if any
        if fail:
            return fail
        raise SchemeException('missing impersonator property')
# NOTE(review): the leading ``.parametrize`` is a truncated decorator,
# presumably ``@pytest.mark.parametrize`` — restore the full form.
.parametrize('access', [_access(actions=['pull', 'push']), _access(actions=['pull', '*']), _access(actions=['*', 'push']), _access(actions=['*']), _access(actions=['pull', '*', 'push'])])
def test_token_with_access(access, initialized_db):
    """A token granting repo access yields exactly one identity grant whose
    role reflects the strongest action: '*' -> admin, 'push' -> write,
    'pull' -> read."""
    token = _token(_token_data(access=access))
    identity = _parse_token(token)
    assert (identity.id == TEST_USER.username), ('should be %s, but was %s' % (TEST_USER.username, identity.id))
    assert (len(identity.provides) == 1)
    # provides entries are tuples; the role is the fourth element
    role = list(identity.provides)[0][3]
    if ('*' in access[0]['actions']):
        assert (role == 'admin')
    elif ('push' in access[0]['actions']):
        assert (role == 'write')
    elif ('pull' in access[0]['actions']):
        assert (role == 'read')
class SearchEngineUrl(BaseType):
    """A search-engine URL template.

    The value must contain a recognized placeholder ({}, {0}, {semiquoted},
    {unquoted} or {quoted}) for the search term, and must otherwise be a
    valid Python format string.
    """

    def to_py(self, value: _StrUnset) -> _StrUnsetNone:
        self._basic_py_validation(value, str)
        if isinstance(value, usertypes.Unset):
            return value
        elif (not value):
            # empty string means "unset"
            return None
        # require at least one recognized search-term placeholder
        if (not re.search('{(|0|semiquoted|unquoted|quoted)}', value)):
            raise configexc.ValidationError(value, 'must contain "{}"')
        try:
            # dry-run the format call to reject any unknown {...} fields
            format_keys = {'quoted': '', 'unquoted': '', 'semiquoted': ''}
            value.format('', **format_keys)
        except (KeyError, IndexError):
            raise configexc.ValidationError(value, 'may not contain {...} (use {{ and }} for literal {/})')
        except ValueError as e:
            raise configexc.ValidationError(value, str(e))
        return value
def test_widgetbox_start_opened(manager_nospawn, minimal_conf_noscreen):
    """WidgetBox(start_opened=True) should expose its child widgets in the
    bar immediately, without requiring a toggle."""
    config = minimal_conf_noscreen
    tbox = TextBox(text='Text Box')
    widget_box = WidgetBox(widgets=[tbox], start_opened=True)
    config.screens = [libqtile.config.Screen(top=libqtile.bar.Bar([widget_box], 10))]
    manager_nospawn.start(config)
    topbar = manager_nospawn.c.bar['top']
    # both the box itself and its contained textbox must be present
    widgets = [w['name'] for w in topbar.info()['widgets']]
    assert (widgets == ['widgetbox', 'textbox'])
def main():
    """Transcribe a single wav file with a wav2vec 2.0 model.

    Loads audio features, the target dictionary and the model from CLI
    arguments, decodes with a Viterbi decoder, and prints the
    letter-level post-processed hypothesis.
    """
    args = parser.parse_args()
    sample = dict()
    net_input = dict()
    feature = get_feature(args.wav_path)
    target_dict = Dictionary.load(args.target_dict_path)
    model = load_model(args.w2v_path, target_dict)
    model[0].eval()
    generator = W2lViterbiDecoder(target_dict)
    net_input['source'] = feature.unsqueeze(0)
    # single utterance: no frames are padded
    padding_mask = torch.BoolTensor(net_input['source'].size(1)).fill_(False).unsqueeze(0)
    net_input['padding_mask'] = padding_mask
    sample['net_input'] = net_input
    with torch.no_grad():
        hypo = generator.generate(model, sample, prefix_tokens=None)
    # take the best hypothesis of the first (only) sample
    hyp_pieces = target_dict.string(hypo[0][0]['tokens'].int().cpu())
    print(post_process(hyp_pieces, 'letter'))
class KnowValues(unittest.TestCase):
    """PBC DFT checks: k-point sampled results vs. equivalent supercells.

    The ``xtest_``-prefixed cases are intentionally disabled (not picked up
    by unittest discovery).
    """

    def xtest_gamma(self):
        # Gamma-point RKS on the primitive cell (disabled).
        cell = make_primitive_cell(([17] * 3))
        mf = pbcdft.RKS(cell)
        mf.xc = 'lda,vwn'
        e1 = mf.scf()
        self.assertAlmostEqual(e1, (- 10.), 8)

    def xtest_kpt_222(self):
        # 2x2x2 k-point KRKS on the primitive cell (disabled).
        cell = make_primitive_cell(([17] * 3))
        abs_kpts = cell.make_kpts([2, 2, 2], wrap_around=True)
        kmf = pbcdft.KRKS(cell, abs_kpts)
        kmf.xc = 'lda,vwn'
        e1 = kmf.scf()
        self.assertAlmostEqual(e1, (- 11.), 8)

    def test_kpt_vs_supercell(self):
        # A k-point calculation must match the same system unfolded into a
        # supercell (energy per primitive cell), here to 5 decimal places.
        n = 11
        nk = (3, 1, 1)
        # odd meshes only, so the Gamma point is contained in the sampling
        assert all(((np.array(nk) % 2) == np.array([1, 1, 1])))
        cell = make_primitive_cell(([n] * 3))
        abs_kpts = cell.make_kpts(nk, wrap_around=True)
        kmf = pbcdft.KRKS(cell, abs_kpts)
        kmf.xc = 'lda,vwn'
        ekpt = kmf.scf()
        supcell = pyscf.pbc.tools.super_cell(cell, nk)
        supcell.build()
        mf = pbcdft.RKS(supcell)
        mf.xc = 'lda,vwn'
        # normalize the supercell energy back to one primitive cell
        esup = (mf.scf() / np.prod(nk))
        self.assertAlmostEqual(ekpt, esup, 5)
# NOTE(review): ``(frozen=True)`` is the argument list of a decorator that
# lost its name during extraction (presumably ``@dataclass(frozen=True)``) —
# restore the full decorator.
(frozen=True)
class AnnotatedTypesCheck(CustomCheck):
    """Custom check that validates values against a predicate (in the style
    of the ``annotated_types`` metadata protocol)."""

    def can_assign(self, value: Value, ctx: CanAssignContext) -> CanAssign:
        # Check every member of a union independently.
        for subval in flatten_values(value):
            original_subval = subval
            if isinstance(subval, AnnotatedValue):
                # if the value already carries this (or compatible) metadata,
                # it trivially satisfies the check
                if any((((ext == self) or self.is_compatible_metadata(ext)) for ext in subval.get_custom_check_of_type(AnnotatedTypesCheck))):
                    continue
                subval = unannotate(subval)
            if isinstance(subval, AnyValue):
                # Any is always assignable
                continue
            if isinstance(subval, KnownValue):
                # literal values can be checked directly against the predicate
                try:
                    result = self.predicate(subval.val)
                except Exception as e:
                    return CanAssignError(f'Failed to check {subval.val} against predicate {self}', children=[CanAssignError(repr(e))])
                else:
                    if (not result):
                        return CanAssignError(f'Value {subval.val} does not match predicate {self}')
            else:
                # non-literal values are delegated to the overridable hook
                can_assign = self.can_assign_non_literal(original_subval)
                if isinstance(can_assign, CanAssignError):
                    return can_assign
        return {}

    def predicate(self, value: Any) -> bool:
        """Return True if a concrete value satisfies this check (abstract)."""
        raise NotImplementedError

    def is_compatible_metadata(self, metadata: 'AnnotatedTypesCheck') -> bool:
        """Hook: whether another check's metadata already implies this one."""
        return False

    def can_assign_non_literal(self, value: Value) -> CanAssign:
        """Hook: decide assignability for values without a known literal."""
        return CanAssignError(f'Cannot determine whether {value} fulfills predicate {self}')
class PathPlanner():
    """Waypoint planner for a MAV, selectable between fixed waypoint lists
    and RRT-based planners (straight-line or Dubins paths)."""

    def __init__(self, app, planner_flag='rrt_dubins', show_planner=True):
        # planner_flag: one of 'simple_straight', 'simple_dubins',
        # 'rrt_straight', 'rrt_dubins'
        self.waypoints = MsgWaypoints()
        if (planner_flag == 'rrt_straight'):
            self.rrt_straight_line = RRTStraightLine(app=app, show_planner=show_planner)
        if (planner_flag == 'rrt_dubins'):
            self.rrt_dubins = RRTDubins(app=app, show_planner=show_planner)
        self._planner_flag = planner_flag

    def update(self, world_map, state, radius):
        """Plan and return waypoints through ``world_map`` from the current
        ``state``; ``radius`` is the turn radius used by the RRT planners."""
        print('planning...')
        if (self._planner_flag == 'simple_straight'):
            # fixed rectangular fillet path at 100 m altitude
            Va = 25
            self.waypoints.type = 'fillet'
            self.waypoints.add(np.array([[0, 0, (- 100)]]).T, Va, np.inf, np.inf, 0, 0)
            self.waypoints.add(np.array([[1000, 0, (- 100)]]).T, Va, np.inf, np.inf, 0, 0)
            self.waypoints.add(np.array([[0, 1000, (- 100)]]).T, Va, np.inf, np.inf, 0, 0)
            self.waypoints.add(np.array([[1000, 1000, (- 100)]]).T, Va, np.inf, np.inf, 0, 0)
        elif (self._planner_flag == 'simple_dubins'):
            # fixed Dubins path with prescribed course angles
            Va = 25
            self.waypoints.type = 'dubins'
            self.waypoints.add(np.array([[0, 0, (- 100)]]).T, Va, np.radians(0), np.inf, 0, 0)
            self.waypoints.add(np.array([[1000, 0, (- 100)]]).T, Va, np.radians(45), np.inf, 0, 0)
            self.waypoints.add(np.array([[0, 1000, (- 100)]]).T, Va, np.radians(45), np.inf, 0, 0)
            self.waypoints.add(np.array([[1000, 1000, (- 100)]]).T, Va, np.radians((- 135)), np.inf, 0, 0)
        elif (self._planner_flag == 'rrt_straight'):
            desired_airspeed = 25
            desired_altitude = 100
            # NED frame: altitude is the negated down coordinate
            start_pose = np.array([[state.north], [state.east], [(- desired_altitude)]])
            # alternate between the far corner and the origin depending on
            # which half of the map we are currently in
            if (np.linalg.norm(start_pose[0:2]) < (world_map.city_width / 2)):
                end_pose = np.array([[world_map.city_width], [world_map.city_width], [(- desired_altitude)]])
            else:
                end_pose = np.array([[0], [0], [(- desired_altitude)]])
            self.waypoints = self.rrt_straight_line.update(start_pose, end_pose, desired_airspeed, world_map, radius)
        elif (self._planner_flag == 'rrt_dubins'):
            desired_airspeed = 25
            desired_altitude = 100
            # Dubins poses carry the course angle chi as a fourth component
            start_pose = np.array([[state.north], [state.east], [(- desired_altitude)], [state.chi]])
            if (np.linalg.norm(start_pose[0:2]) < (world_map.city_width / 2)):
                end_pose = np.array([[world_map.city_width], [world_map.city_width], [(- desired_altitude)], [state.chi]])
            else:
                end_pose = np.array([[0], [0], [(- desired_altitude)], [state.chi]])
            self.waypoints = self.rrt_dubins.update(start_pose, end_pose, desired_airspeed, world_map, radius)
        else:
            print('Error in Path Planner: Undefined planner type.')
        # force the follower to re-read the freshly planned waypoints
        self.waypoints.plot_updated = False
        print('...done planning.')
        return self.waypoints
class Volume(WorldObject):
    """A volumetric (3D texture) world object."""

    def _wgpu_get_pick_info(self, pick_value):
        # Decode the shader's packed pick value into voxel coordinates.
        tex = self.geometry.grid
        if hasattr(tex, 'texture'):
            # geometry.grid may be a view object wrapping the actual texture
            tex = tex.texture
        # pick value packs a 20-bit wobject id and three 14-bit texcoords
        values = unpack_bitfield(pick_value, wobject_id=20, x=14, y=14, z=14)
        texcoords_encoded = (values['x'], values['y'], values['z'])
        size = tex.size
        # map the 14-bit encodings (0..16383) to continuous voxel space
        (x, y, z) = [(((v / 16383) * s) - 0.5) for (v, s) in zip(texcoords_encoded, size)]
        # nearest voxel index, plus the fractional offset within that voxel
        (ix, iy, iz) = (int((x + 0.5)), int((y + 0.5)), int((z + 0.5)))
        return {'index': (ix, iy, iz), 'voxel_coord': ((x - ix), (y - iy), (z - iz))}
class TorchFM(nn.Module):
    """Order-2 Factorization Machine (Rendle, 2010).

    Combines a linear term with pairwise feature interactions factorized
    through a low-rank matrix ``V`` of shape (n, k).
    """

    def __init__(self, config):
        # config must provide: use_gpu (bool), FM_n (feature count),
        # FM_k (factorization rank).
        super().__init__()
        self.gpu = config.use_gpu
        self.n = config.FM_n
        self.k = config.FM_k
        self.V = nn.Parameter(torch.randn(self.n, self.k), requires_grad=True)
        self.lin = nn.Linear(self.n, 1)
        if self.gpu:
            # Move the whole module. The previous ``self.V = self.V.cuda()``
            # replaced the registered nn.Parameter with a plain tensor copy,
            # so V vanished from parameters() and was never trained.
            self.cuda()

    def forward(self, x):
        # Pairwise interactions via the identity
        # sum_{i<j} <v_i, v_j> x_i x_j = 0.5 * ((xV)^2 - (x^2)(V^2)).sum(dim)
        out_1 = torch.matmul(x, self.V).pow(2).sum(1, keepdim=True)
        out_2 = torch.matmul(x.pow(2), self.V.pow(2)).sum(1, keepdim=True)
        out_inter = (0.5 * (out_1 - out_2))
        out_lin = self.lin(x)
        out = (out_inter + out_lin)
        # shape: (batch,)
        return out.squeeze((- 1))
def test_KanrenRelationSub_multiout():
    """KanrenRelationSub must return replacements for *all* outputs of a
    multi-output node, even when the graph only uses one of them."""

    class MyMultiOutOp(Op):
        def make_node(self, *inputs):
            # an Op with two outputs of the same type
            outputs = [MyType()(), MyType()()]
            return Apply(self, list(inputs), outputs)

        def perform(self, node, inputs, outputs):
            outputs[0] = np.array(inputs[0])
            outputs[1] = np.array(inputs[0])
    x = MyVariable('x')
    y = MyVariable('y')
    multi_op = MyMultiOutOp()
    (o1, o2) = multi_op(x, y)
    # only o1 is an output of the FunctionGraph
    fgraph = FunctionGraph([x, y], [o1], clone=False)

    def relation(in_lv, out_lv):
        # identity relation: each term rewrites to itself
        return eq(in_lv, out_lv)
    res = KanrenRelationSub(relation).transform(fgraph, fgraph.outputs[0].owner)
    # both outputs of the node must appear in the replacement list
    assert (res == [o1, o2])
class MarkedIntensityHomogenuosPoisson(Intensity):
    """Homogeneous Poisson intensity with one constant rate per mark dimension."""

    def __init__(self, dim=1):
        self.dim = dim
        # per-dimension rates, filled in lazily via initialize()
        self.lam = [None for _ in range(dim)]

    def initialize(self, lam, dim=1):
        """Set the constant rate for mark dimension ``dim``."""
        self.lam[dim] = lam

    def getValue(self, t, inds):
        """Return the intensity at time ``t`` for the requested marks.

        The process is homogeneous, so the stored rates are returned
        independently of ``t``.
        """
        return [self.lam[k] for k in range(len(inds))]

    def getUpperBound(self, from_t, to_t, inds):
        """Return an upper bound of the intensity on [from_t, to_t].

        A constant intensity is its own tight upper bound on any interval.
        """
        return [self.lam[k] for k in range(len(inds))]
class TestCompositorNodeCopy(unittest.TestCase):
    """``CompositorNode.copy`` must copy the node's data and child nodes."""

    def setUp(self):
        # build a node with two required and one optional child, then copy it
        self.node = CompositorNode(MagicMock())
        self.node.add_required_nodes([MagicMock(), MagicMock()])
        self.node.add_optional_nodes([MagicMock()])
        self.node_copy = self.node.copy()

    def test_node_data_is_copied(self):
        assert (self.node_copy.data is not self.node.data)

    def test_node_data_required_nodes_are_copies(self):
        # children must be distinct objects, not shared references
        for (req1, req2) in zip(self.node.required_nodes, self.node_copy.required_nodes):
            assert (req1 is not req2)

    def test_node_data_optional_nodes_are_copies(self):
        for (req1, req2) in zip(self.node.optional_nodes, self.node_copy.optional_nodes):
            assert (req1 is not req2)
class F31Handler(BaseHandler):
    """Kickstart syntax handler for Fedora 31.

    Purely declarative: maps each kickstart command name to the newest
    command class valid for F31 (class-name prefixes record the release in
    which each command's syntax last changed), and each data-object name to
    its matching data class.
    """
    version = F31
    # command name -> command implementation class
    commandMap = {'auth': commands.authconfig.F28_Authconfig, 'authconfig': commands.authconfig.F28_Authconfig, 'authselect': commands.authselect.F28_Authselect, 'autopart': commands.autopart.F29_AutoPart, 'autostep': commands.autostep.FC3_AutoStep, 'bootloader': commands.bootloader.F29_Bootloader, 'btrfs': commands.btrfs.F23_BTRFS, 'cdrom': commands.cdrom.FC3_Cdrom, 'clearpart': commands.clearpart.F28_ClearPart, 'cmdline': commands.displaymode.F26_DisplayMode, 'device': commands.device.F24_Device, 'deviceprobe': commands.deviceprobe.F29_DeviceProbe, 'dmraid': commands.dmraid.F24_DmRaid, 'driverdisk': commands.driverdisk.F14_DriverDisk, 'module': commands.module.F31_Module, 'eula': commands.eula.F20_Eula, 'fcoe': commands.fcoe.F28_Fcoe, 'firewall': commands.firewall.F28_Firewall, 'firstboot': commands.firstboot.FC3_Firstboot, 'graphical': commands.displaymode.F26_DisplayMode, 'group': commands.group.F12_Group, 'halt': commands.reboot.F23_Reboot, 'harddrive': commands.harddrive.FC3_HardDrive, 'hmc': commands.hmc.F28_Hmc, 'ignoredisk': commands.ignoredisk.F29_IgnoreDisk, 'install': commands.install.F29_Install, 'iscsi': commands.iscsi.F17_Iscsi, 'iscsiname': commands.iscsiname.FC6_IscsiName, 'keyboard': commands.keyboard.F18_Keyboard, 'lang': commands.lang.F19_Lang, 'liveimg': commands.liveimg.F19_Liveimg, 'logging': commands.logging.FC6_Logging, 'logvol': commands.logvol.F29_LogVol, 'mediacheck': commands.mediacheck.FC4_MediaCheck, 'method': commands.method.F28_Method, 'mount': commands.mount.F27_Mount, 'multipath': commands.multipath.F24_MultiPath, 'network': commands.network.F27_Network, 'nfs': commands.nfs.FC6_NFS, 'nvdimm': commands.nvdimm.F28_Nvdimm, 'ostreesetup': commands.ostreesetup.F21_OSTreeSetup, 'part': commands.partition.F29_Partition, 'partition': commands.partition.F29_Partition, 'poweroff': commands.reboot.F23_Reboot, 'raid': commands.raid.F29_Raid, 'realm': commands.realm.F19_Realm, 'reboot': commands.reboot.F23_Reboot, 'repo': commands.repo.F30_Repo, 'reqpart': commands.reqpart.F23_ReqPart, 'rescue': commands.rescue.F10_Rescue, 'rootpw': commands.rootpw.F18_RootPw, 'selinux': commands.selinux.FC3_SELinux, 'services': commands.services.FC6_Services, 'shutdown': commands.reboot.F23_Reboot, 'skipx': commands.skipx.FC3_SkipX, 'snapshot': commands.snapshot.F26_Snapshot, 'sshpw': commands.sshpw.F24_SshPw, 'sshkey': commands.sshkey.F22_SshKey, 'text': commands.displaymode.F26_DisplayMode, 'timezone': commands.timezone.F25_Timezone, 'updates': commands.updates.F7_Updates, 'url': commands.url.F30_Url, 'user': commands.user.F24_User, 'vnc': commands.vnc.F9_Vnc, 'volgroup': commands.volgroup.F21_VolGroup, 'xconfig': commands.xconfig.F14_XConfig, 'zerombr': commands.zerombr.F9_ZeroMbr, 'zfcp': commands.zfcp.F14_ZFCP}
    # data object name -> data implementation class
    dataMap = {'BTRFSData': commands.btrfs.F23_BTRFSData, 'DriverDiskData': commands.driverdisk.F14_DriverDiskData, 'DeviceData': commands.device.F8_DeviceData, 'DmRaidData': commands.dmraid.FC6_DmRaidData, 'ModuleData': commands.module.F31_ModuleData, 'FcoeData': commands.fcoe.F28_FcoeData, 'GroupData': commands.group.F12_GroupData, 'IscsiData': commands.iscsi.F17_IscsiData, 'LogVolData': commands.logvol.F29_LogVolData, 'MountData': commands.mount.F27_MountData, 'MultiPathData': commands.multipath.FC6_MultiPathData, 'NetworkData': commands.network.F27_NetworkData, 'NvdimmData': commands.nvdimm.F28_NvdimmData, 'PartData': commands.partition.F29_PartData, 'RaidData': commands.raid.F29_RaidData, 'RepoData': commands.repo.F30_RepoData, 'SnapshotData': commands.snapshot.F26_SnapshotData, 'SshPwData': commands.sshpw.F24_SshPwData, 'SshKeyData': commands.sshkey.F22_SshKeyData, 'UserData': commands.user.F19_UserData, 'VolGroupData': commands.volgroup.F21_VolGroupData, 'ZFCPData': commands.zfcp.F14_ZFCPData}
class TestEPICL1bReader():
    """Tests for the DSCOVR EPIC L1b HDF5 reader."""

    def _setup_h5(self, setup_hdf5_file):
        # Create a reader instance with the test HDF5 file loaded.
        from satpy.readers import load_reader
        test_reader = load_reader(self.reader_configs)
        loadables = test_reader.select_files_from_pathnames([setup_hdf5_file])
        test_reader.create_filehandlers(loadables)
        return test_reader

    def setup_method(self):
        from satpy._config import config_search_paths
        self.yaml_file = 'epic_l1b_h5.yaml'
        self.filename_test = os.path.join(tempfile.gettempdir(), 'epic_1b__03.h5')
        self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))

    def test_times(self, setup_hdf5_file):
        # start/end times are read from the file metadata
        from datetime import datetime
        test_reader = self._setup_h5(setup_hdf5_file)
        assert (test_reader.start_time == datetime(2015, 6, 13, 12, 0, 37))
        assert (test_reader.end_time == datetime(2015, 6, 13, 12, 5, 1))

    def test_counts_calibration(self, setup_hdf5_file):
        # 'counts' calibration returns the raw band data unchanged
        from satpy.tests.utils import make_dsq
        test_reader = self._setup_h5(setup_hdf5_file)
        ds = test_reader.load([make_dsq(name='B317', calibration='counts')])
        np.testing.assert_allclose(ds['B317'].data, b317_data)

    def test_refl_calibration(self, setup_hdf5_file):
        # reflectance = counts * calibration coefficient * 100 (percent)
        from satpy.tests.utils import make_dsq
        test_reader = self._setup_h5(setup_hdf5_file)
        ds = test_reader.load([make_dsq(name='B317', calibration='reflectance')])
        np.testing.assert_allclose(ds['B317'].data, ((b317_data * CALIB_COEFS['B317']) * 100.0), rtol=1e-05)

    def test_bad_calibration(self, setup_hdf5_file):
        # unknown calibration names must raise
        from satpy.tests.utils import make_dsq
        test_reader = self._setup_h5(setup_hdf5_file)
        with pytest.raises(KeyError):
            test_reader.load([make_dsq(name='B317', calibration='potatoes')])

    def test_load_ancillary(self, setup_hdf5_file):
        # ancillary datasets (angles, lat/lon, earth mask) must load as-is
        from satpy.tests.utils import make_dsq
        test_reader = self._setup_h5(setup_hdf5_file)
        ds = test_reader.load([make_dsq(name='solar_zenith_angle'), make_dsq(name='satellite_azimuth_angle'), make_dsq(name='latitude'), make_dsq(name='longitude'), make_dsq(name='earth_mask')])
        np.testing.assert_allclose(ds['solar_zenith_angle'].data, sza_data)
        np.testing.assert_allclose(ds['satellite_azimuth_angle'].data, vaa_data)
        np.testing.assert_allclose(ds['latitude'].data, lat_data)
        np.testing.assert_allclose(ds['longitude'].data, lon_data)
        np.testing.assert_allclose(ds['earth_mask'].data, mas_data)
class ShellInfo_startupScript(QtWidgets.QVBoxLayout):
    """Config widget for the shell startup script.

    Offers three mutually exclusive modes via radio buttons: use the system
    $PYTHONSTARTUP, point at a script file (single-line value), or embed
    startup code directly (multi-line value). The stored value is a plain
    string; a value containing a newline is interpreted as code.
    """
    # On OS X the system default option is hidden entirely.
    DISABLE_SYSTEM_DEFAULT = (sys.platform == 'darwin')
    # Sentinel stored when "use system default" is selected.
    SYSTEM_VALUE = '$PYTHONSTARTUP'
    # Marker line splitting code that runs before/after GUI integration.
    RUN_AFTER_GUI_TEXT = '# AFTER_GUI - code below runs after integrating the GUI\n'

    def __init__(self, parent):
        QtWidgets.QVBoxLayout.__init__(self)
        # single-line editor for the "file" mode
        self._edit1 = QtWidgets.QLineEdit(parent)
        self._edit1.textEdited.connect(self.onEditChanged)
        if sys.platform.startswith('win'):
            self._edit1.setPlaceholderText('C:\\path\\to\\script.py')
        else:
            self._edit1.setPlaceholderText('/path/to/script.py')
        # multi-line editor for the "code" mode
        self._edit2 = QtWidgets.QTextEdit(parent)
        self._edit2.zoomOut(1)
        self._edit2.setMaximumHeight(80)
        self._edit2.setMinimumWidth(200)
        self._edit2.textChanged.connect(self.onEditChanged)
        self.setSpacing(1)
        self.addWidget(self._edit1)
        self.addWidget(self._edit2)
        # radio buttons selecting the mode
        t = translate('shell', 'Use system default')
        self._radio_system = QtWidgets.QRadioButton(t, parent)
        self._radio_system.toggled.connect(self.onCheckChanged)
        self.addWidget(self._radio_system)
        if self.DISABLE_SYSTEM_DEFAULT:
            self._radio_system.hide()
        t = translate('shell', 'File to run at startup')
        self._radio_file = QtWidgets.QRadioButton(t, parent)
        self._radio_file.toggled.connect(self.onCheckChanged)
        self.addWidget(self._radio_file)
        t = translate('shell', 'Code to run at startup')
        self._radio_code = QtWidgets.QRadioButton(t, parent)
        self._radio_code.toggled.connect(self.onCheckChanged)
        self.addWidget(self._radio_code)
        # current value plus per-mode remembered values
        self._value = ''
        self._valueFile = ''
        self._valueCode = '\n'

    def onEditChanged(self):
        # keep the mode-specific cache and the current value in sync
        if self._radio_file.isChecked():
            self._value = self._valueFile = self._edit1.text().strip()
        elif self._radio_code.isChecked():
            # trailing newline marks the value as "code" (multi-line)
            self._value = self._valueCode = (self._edit2.toPlainText().strip() + '\n')

    def onCheckChanged(self, state):
        # switching modes restores that mode's remembered value
        if self._radio_system.isChecked():
            self.setWidgetText(self.SYSTEM_VALUE)
        elif self._radio_file.isChecked():
            self.setWidgetText(self._valueFile)
        elif self._radio_code.isChecked():
            self.setWidgetText(self._valueCode)

    def setTheText(self, value):
        """Set the stored value and initialize the widgets accordingly."""
        self.setWidgetText(value, True)
        self._value = value

    def setWidgetText(self, value, init=False):
        # Update editors/radios to show ``value``; ``init`` also selects the
        # radio button matching the value's kind.
        self._value = value
        if ((value == self.SYSTEM_VALUE) and (not self.DISABLE_SYSTEM_DEFAULT)):
            # system default: show the resolved $PYTHONSTARTUP read-only
            if init:
                self._radio_system.setChecked(True)
            pp = os.environ.get('PYTHONSTARTUP', '').strip()
            if pp:
                value = ('$PYTHONSTARTUP: "%s"' % pp)
            else:
                value = '$PYTHONSTARTUP: None'
            self._edit1.setReadOnly(True)
            self._edit1.show()
            self._edit2.hide()
            self._edit1.setText(value)
        elif ('\n' not in value):
            # single-line -> file path mode
            if init:
                self._radio_file.setChecked(True)
            self._edit1.setReadOnly(False)
            self._edit1.show()
            self._edit2.hide()
            self._edit1.setText(value)
        else:
            # multi-line -> embedded code mode
            if init:
                self._radio_code.setChecked(True)
            self._edit1.hide()
            self._edit2.show()
            if (not value.strip()):
                value = self.RUN_AFTER_GUI_TEXT
            self._edit2.setText(value)

    def getTheText(self):
        """Return the currently stored value."""
        return self._value
class _Commenter():
    """Rewrite possibly-broken Python source into something parseable.

    Replaces the statement containing a given line with ``pass`` (padding the
    following lines of the statement), completes ``try`` blocks that lack a
    handler, and tracks per-line length changes so offsets into the original
    text can be mapped into the commented text.
    """

    def __init__(self, code):
        self.code = code
        self.lines = self.code.split('\n')
        self.lines.append('\n')
        # origs[i]: original line index of current line i (shifts on insert);
        # diffs[i]: accumulated length change attributed to original line i
        self.origs = list(range((len(self.lines) + 1)))
        self.diffs = ([0] * (len(self.lines) + 1))

    def comment(self, lineno):
        """Stub out the logical statement containing ``lineno`` with ``pass``."""
        start = (_logical_start(self.lines, lineno, check_prev=True) - 1)
        end = self._get_stmt_end(start)
        indents = _get_line_indents(self.lines[start])
        if (0 < start):
            last_lineno = self._last_non_blank((start - 1))
            last_line = self.lines[last_lineno]
            if last_line.rstrip().endswith(':'):
                # the statement opens a new block; the pass must be indented
                indents = (_get_line_indents(last_line) + 4)
        self._set(start, ((' ' * indents) + 'pass'))
        # pad continuation lines so line numbering stays stable
        for line in range((start + 1), (end + 1)):
            self._set(line, self.lines[start])
        self._fix_incomplete_try_blocks(lineno, indents)

    def transferred_offset(self, offset):
        """Map an offset in the original code to the commented code."""
        lineno = self.code.count('\n', 0, offset)
        diff = sum(self.diffs[:lineno])
        return (offset + diff)

    def _last_non_blank(self, start):
        # index of the nearest non-blank line at or before ``start``
        while ((start > 0) and (self.lines[start].strip() == '')):
            start -= 1
        return start

    def _get_block_end(self, lineno):
        # last line still indented at least as deep as ``lineno``
        end_line = lineno
        base_indents = _get_line_indents(self.lines[lineno])
        for i in range((lineno + 1), len(self.lines)):
            if (_get_line_indents(self.lines[i]) >= base_indents):
                end_line = i
            else:
                break
        return end_line

    def _get_stmt_end(self, lineno):
        # last line of the statement starting at ``lineno`` (continuations
        # are more deeply indented)
        base_indents = _get_line_indents(self.lines[lineno])
        for i in range((lineno + 1), len(self.lines)):
            if (_get_line_indents(self.lines[i]) <= base_indents):
                return (i - 1)
        return lineno

    def _fix_incomplete_try_blocks(self, lineno, indents):
        # walk outward through enclosing blocks; any ``try:`` without a
        # handler after the commented region gets ``finally: pass`` appended
        block_start = lineno
        last_indents = indents
        while (block_start > 0):
            block_start = (codeanalyze.get_block_start(ArrayLinesAdapter(self.lines), block_start) - 1)
            if self.lines[block_start].strip().startswith('try:'):
                indents = _get_line_indents(self.lines[block_start])
                if (indents > last_indents):
                    continue
                last_indents = indents
                block_end = self._find_matching_deindent(block_start)
                line = self.lines[block_end].strip()
                if (not (line.startswith('finally:') or line.startswith('except ') or line.startswith('except:'))):
                    self._insert(block_end, ((' ' * indents) + 'finally:'))
                    self._insert((block_end + 1), ((' ' * indents) + '    pass'))

    def _find_matching_deindent(self, line_number):
        # first following line at the same or shallower indentation,
        # ignoring blanks and comments
        indents = _get_line_indents(self.lines[line_number])
        current_line = (line_number + 1)
        while (current_line < len(self.lines)):
            line = self.lines[current_line]
            if ((not line.strip().startswith('#')) and (not (line.strip() == ''))):
                if (_get_line_indents(self.lines[current_line]) <= indents):
                    return current_line
            current_line += 1
        return (len(self.lines) - 1)

    def _set(self, lineno, line):
        # replace a line, recording the length delta for offset mapping
        self.diffs[self.origs[lineno]] += (len(line) - len(self.lines[lineno]))
        self.lines[lineno] = line

    def _insert(self, lineno, line):
        # insert a line, attributing its length (+ newline) to the original
        self.diffs[self.origs[lineno]] += (len(line) + 1)
        self.origs.insert(lineno, self.origs[lineno])
        self.lines.insert(lineno, line)
class Solution(object):
    """LeetCode 238: product of array except self, O(n) time, O(1) extra space."""

    def productExceptSelf(self, nums):
        """Return out where out[i] is the product of all nums except nums[i]."""
        size = len(nums)
        out = [1] * size
        # forward pass: out[i] accumulates the product of everything before i
        for idx in range(1, size):
            out[idx] = out[idx - 1] * nums[idx - 1]
        # backward pass: fold in the product of everything after i
        suffix = 1
        for idx in reversed(range(size)):
            out[idx] *= suffix
            suffix *= nums[idx]
        return out
class Effect5958(BaseEffect):
    """Gallente Cruiser bonus: boosts medium hybrid turret tracking speed."""
    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        # NOTE(review): defined without ``self`` — presumably invoked as an
        # unbound/static handler by the effect framework; confirm.
        fit.modules.filteredItemBoost((lambda mod: mod.item.requiresSkill('Medium Hybrid Turret')), 'trackingSpeed', ship.getModifiedItemAttr('shipBonusGC'), skill='Gallente Cruiser', **kwargs)
def test_extras_conflicts_all_extras(tester: CommandTester, mocker: MockerFixture) -> None:
    """Combining ``--extras`` with ``--all-extras`` must abort with an error."""
    assert isinstance(tester.command, InstallerCommand)
    # stub out the actual install; only the option validation is under test
    mocker.patch.object(tester.command.installer, 'run', return_value=0)
    tester.execute('--extras foo --all-extras')
    assert (tester.status_code == 1)
    assert (tester.io.fetch_error() == 'You cannot specify explicit `--extras` while installing using `--all-extras`.\n')
class ScenarioWrapperParameter(Parameter):
    """Parameter that delegates to one of several child parameters, chosen by
    the active index of a scenario.

    ``parameters`` must contain exactly one parameter per scenario member.
    """

    def __init__(self, model, scenario, parameters, **kwargs):
        super().__init__(model, **kwargs)
        if (scenario.size != len(parameters)):
            raise ValueError('The number of parameters must equal the size of the scenario.')
        self.scenario = scenario
        self.parameters = []
        for p in parameters:
            # register as children so they are set up/reset with this parameter
            self.children.add(p)
            self.parameters.append(p)
        self._scenario_index = None

    def setup(self):
        super().setup()
        # cache the position of our scenario among the model's scenarios
        self._scenario_index = self.model.scenarios.get_scenario_index(self.scenario)

    def value(self, ts, scenario_index):
        # pick the child matching this combination's index in our scenario
        parameter = self.parameters[scenario_index.indices[self._scenario_index]]
        return parameter.get_value(scenario_index)

    @classmethod
    def load(cls, model, data):
        # Alternate constructor from a JSON-like dict; takes ``cls``, so the
        # @classmethod decorator is required (it was missing).
        scenario = model.scenarios[data.pop('scenario')]
        parameters = [load_parameter(model, p) for p in data.pop('parameters')]
        return cls(model, scenario, parameters, **data)
class Declaration(object):
    """A parsed C declaration: declarator, type, and optional storage class."""

    def __init__(self):
        self.declarator = None
        self.type = Type()
        self.storage = None

    def __repr__(self):
        # storage is only shown when set; dict preserves insertion order
        fields = {'declarator': self.declarator, 'type': self.type}
        if self.storage:
            fields['storage'] = self.storage
        parts = ['%s=%r' % pair for pair in fields.items()]
        return 'Declaration(%s)' % ', '.join(parts)
class ScopeSorter():
    """Sort test items honoring ordering marks within a configurable scope.

    When the group scope is narrower than the overall scope, items are first
    sorted within each group (module and/or class) and then the groups
    themselves are ordered.
    """

    def __init__(self, settings: Settings, items: List[Item], rel_marks: List[RelativeMark[Item]], dep_marks: List[RelativeMark[Item]], session_scope: bool=False) -> None:
        self.settings = settings
        self.items = items
        # outside session scope, only marks referring to our items matter
        if session_scope:
            self.rel_marks = rel_marks
            self.dep_marks = dep_marks
        else:
            self.rel_marks = filter_marks(rel_marks, self.items)
            self.dep_marks = filter_marks(dep_marks, self.items)

    def sort_items(self) -> List[Item]:
        """Entry point: return the items in their final order."""
        if (self.settings.group_scope.value < self.settings.scope.value):
            if (self.settings.scope == Scope.SESSION):
                sorted_list = self.sort_in_session_scope()
            else:
                sorted_list = self.sort_in_module_scope()
        else:
            # group scope is not narrower: sort everything as one group
            sorted_list = self.sort_items_in_scope(self.items, Scope.SESSION).items
        return sorted_list

    def sort_in_session_scope(self) -> List[Item]:
        # sort per-module (optionally per-class inside each module), then
        # order the module groups themselves
        sorted_list = []
        module_items = module_item_groups(self.items)
        if (self.settings.group_scope == Scope.CLASS):
            module_groups = self.sort_class_groups(module_items)
        else:
            module_groups = [self.sort_items_in_scope(item, Scope.MODULE) for item in module_items.values()]
        sorter = GroupSorter(Scope.MODULE, module_groups, self.rel_marks, self.dep_marks)
        for group in sorter.sorted_groups()[1]:
            sorted_list.extend(group.items)
        return sorted_list

    def sort_in_module_scope(self) -> List[Item]:
        # sort per-class, then order the class groups
        sorted_list = []
        class_items = class_item_groups(self.items)
        class_groups = [self.sort_items_in_scope(item, Scope.CLASS) for item in class_items.values()]
        sorter = GroupSorter(Scope.CLASS, class_groups, self.rel_marks, self.dep_marks)
        for group in sorter.sorted_groups()[1]:
            sorted_list.extend(group.items)
        return sorted_list

    def sort_class_groups(self, module_items: Dict[(str, List[Item])]) -> List[ItemGroup]:
        # For each module: sort its classes internally, then order the class
        # groups and merge them into one group per module.
        module_groups = []
        for module_item in module_items.values():
            class_items = class_item_groups(module_item)
            class_groups = [self.sort_items_in_scope(item, Scope.CLASS) for item in class_items.values()]
            module_group = ItemGroup()
            sorter = GroupSorter(Scope.CLASS, class_groups, self.rel_marks, self.dep_marks)
            (group_order, class_groups) = sorter.sorted_groups()
            module_group.extend(class_groups, group_order)
            module_groups.append(module_group)
        return module_groups

    def sort_items_in_scope(self, items: List[Item], scope: Scope) -> ItemGroup:
        # Sort one flat list: numbered ordering first, then iteratively apply
        # relative and dependency marks until no further progress is made.
        item_list = ItemList(items, self.settings, scope, self.rel_marks, self.dep_marks)
        for item in items:
            item_list.collect_markers(item)
        sorted_list = item_list.sort_numbered_items()
        still_left = 0
        length = item_list.number_of_rel_groups()
        while (length and (still_left != length)):
            still_left = length
            item_list.handle_rel_marks(sorted_list)
            item_list.handle_dep_marks(sorted_list)
            length = item_list.number_of_rel_groups()
        if length:
            # some marks could not be satisfied (e.g. cyclic ordering)
            item_list.print_unhandled_items()
        return ItemGroup(sorted_list, item_list.group_order())
class TestBoolOp(TestNameCheckVisitorBase):
    """Inference tests for ``and``/``or`` over narrowed conditional values."""

    # NOTE(review): ``_passes()`` looks like a decorator (presumably
    # ``@assert_passes()``) whose ``@`` was lost in extraction — restore it.
    _passes()
    def test(self):
        def capybara(x):
            if x:
                cond = str(x)
                cond2 = True
            else:
                cond = None
                cond2 = None
            assert_is_value(cond, MultiValuedValue([TypedValue(str), KnownValue(None)]))
            assert_is_value(cond2, MultiValuedValue([KnownValue(True), KnownValue(None)]))
            # ``and`` keeps falsy left operands, ``or`` keeps truthy ones
            assert_is_value((cond and 1), MultiValuedValue([TypedValue(str), KnownValue(None), KnownValue(1)]), skip_annotated=True)
            assert_is_value((cond2 and 1), MultiValuedValue([KnownValue(None), KnownValue(1)]), skip_annotated=True)
            assert_is_value((cond or 1), MultiValuedValue([TypedValue(str), KnownValue(1)]), skip_annotated=True)
            assert_is_value((cond2 or 1), MultiValuedValue([KnownValue(True), KnownValue(1)]), skip_annotated=True)

        def hutia(x=None):
            assert_is_value(x, (AnyValue(AnySource.unannotated) | KnownValue(None)))
            assert_is_value((x or 1), (AnyValue(AnySource.unannotated) | KnownValue(1)), skip_annotated=True)
            y = (x or 1)
            assert_is_value(y, (AnyValue(AnySource.unannotated) | KnownValue(1)), skip_annotated=True)
            assert_is_value(((True if x else False) or None), (KnownValue(True) | KnownValue(None)))
class Mark(object):
    """A position in a YAML stream (name, index, line, column), carrying the
    surrounding buffer so error messages can render a pointed excerpt."""

    def __init__(self, name, index, line, column, buffer, pointer):
        self.name = name
        self.index = index
        self.line = line
        self.column = column
        self.buffer = buffer
        self.pointer = pointer

    def get_snippet(self, indent=4, max_length=75):
        """Return an indented excerpt of the current line with a ``^`` marker
        under the position, or None when no buffer is available."""
        if (self.buffer is None):
            return None
        head = ''
        start = self.pointer
        # walk back to the start of the line, truncating with ' ... ' once
        # the excerpt would exceed roughly half of max_length
        while ((start > 0) and (self.buffer[(start - 1)] not in u'\x00\r\n\x85\u2028\u2029')):
            start -= 1
            if ((self.pointer - start) > ((max_length / 2) - 1)):
                head = ' ... '
                start += 5
                break
        tail = ''
        end = self.pointer
        # walk forward to the end of the line, symmetric truncation
        while ((end < len(self.buffer)) and (self.buffer[end] not in u'\x00\r\n\x85\u2028\u2029')):
            end += 1
            if ((end - self.pointer) > ((max_length / 2) - 1)):
                tail = ' ... '
                end -= 5
                break
        # Keep the excerpt as text. The previous ``.encode('utf-8')`` turned
        # it into bytes, which raises TypeError when concatenated with the
        # surrounding str on Python 3.
        snippet = self.buffer[start:end]
        return (((((((' ' * indent) + head) + snippet) + tail) + '\n') + (' ' * (((indent + self.pointer) - start) + len(head)))) + '^')

    def __str__(self):
        snippet = self.get_snippet()
        where = (' in "%s", line %d, column %d' % (self.name, (self.line + 1), (self.column + 1)))
        if (snippet is not None):
            where += (':\n' + snippet)
        return where
class Migration(migrations.Migration):
    """Switch ``BotSetting.data`` and ``DeletedMessage.embeds`` to Django's
    native ``models.JSONField``."""

    dependencies = [('api', '0076_merge__1941')]

    operations = [
        migrations.AlterField(model_name='botsetting', name='data', field=models.JSONField(help_text='The actual settings of this setting.')),
        migrations.AlterField(model_name='deletedmessage', name='embeds', field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(validators=[]), blank=True, help_text='Embeds attached to this message.', size=None))
    ]
# NOTE(review): the bare call below looks like a stripped registration
# decorator (e.g. ``@register_criterion(...)``) — confirm against the original.
_criterion('latency_augmented_label_smoothed_cross_entropy')
class LatencyAugmentedLabelSmoothedCrossEntropyCriterion(LabelSmoothedCrossEntropyCriterion):
    """Label-smoothed cross entropy plus latency penalties (average and
    variance of attention delay) for simultaneous translation."""

    def __init__(self, args, task):
        super().__init__(args, task)
        self.eps = args.label_smoothing
        # Weights and statistic types for the average/variance latency penalties.
        self.latency_weight_avg = args.latency_weight_avg
        self.latency_weight_avg_type = args.latency_weight_avg_type
        self.latency_weight_var = args.latency_weight_var
        self.latency_weight_var_type = args.latency_weight_var_type
        self.mass_preservation = args.mass_preservation
        self.average_method = args.average_method
        self.latency_train = LatencyTraining(self.latency_weight_avg, self.latency_weight_var, self.latency_weight_avg_type, self.latency_weight_var_type, self.mass_preservation, self.average_method)

    # NOTE(review): no ``self`` parameter and no ``@staticmethod`` decorator —
    # presumably the decorator was stripped; confirm against the original.
    def add_args(parser):
        """Add criterion-specific command-line arguments."""
        super(LatencyAugmentedLabelSmoothedCrossEntropyCriterion, LatencyAugmentedLabelSmoothedCrossEntropyCriterion).add_args(parser)
        parser.add_argument('--latency-weight-avg', default=0.0, type=float, metavar='D', help='Average loss weight')
        parser.add_argument('--latency-weight-var', default=0.0, type=float, metavar='D', help='Variance loss weight')
        parser.add_argument('--latency-weight-avg-type', default='differentiable_average_lagging', help='Statistics for Average loss type')
        parser.add_argument('--latency-weight-var-type', default='variance_delay', help='Statistics for variance loss type')
        parser.add_argument('--average-method', default='weighted_average', help='Average loss type')

    def compute_loss(self, model, net_output, sample, reduce=True):
        """Base label-smoothed loss plus the latency penalty derived from the
        decoder's attention (``alpha``) over the source."""
        (loss, nll_loss) = super().compute_loss(model, net_output, sample, reduce)
        attn_list = [item['alpha'] for item in net_output[(- 1)]['attn_list']]
        target_padding_mask = model.get_targets(sample, net_output).eq(self.padding_idx)
        source_padding_mask = net_output[(- 1)].get('encoder_padding_mask', None)
        latency_loss = self.latency_train.loss(attn_list, source_padding_mask, target_padding_mask)
        loss += latency_loss
        return (loss, nll_loss)
def update_mopidy_config(output: str) -> None:
    """Regenerate mopidy's configuration via the privileged helper script and
    restart mopidy.

    ``output`` selects the audio output; ``"pulse"`` is resolved to ``"cava"``
    (when the cava feed is enabled and the binary is installed) or
    ``"regular"``. No-op inside Docker, where the config is fixed.
    """
    if settings.DOCKER:
        return
    if output == 'pulse':
        cava_usable = storage.get('feed_cava') and shutil.which('cava')
        output = 'cava' if cava_usable else 'regular'
    # Credentials forwarded to the config template, in the order the
    # helper script expects them.
    credentials = [
        storage.get('spotify_username'),
        storage.get('spotify_password'),
        storage.get('spotify_mopidy_client_id'),
        storage.get('spotify_mopidy_client_secret'),
        storage.get('soundcloud_auth_token'),
        storage.get('jamendo_client_id'),
    ]
    subprocess.call(
        ['sudo', '/usr/local/sbin/raveberry/update_mopidy_config', output, *credentials]
    )
    restart_mopidy()
def simulation_ordered_grouped_low_depth_terms_with_info(hamiltonian, input_ordering=None, external_potential_at_end=False):
    """Order the terms of a fermionic Hamiltonian into low-depth swap-network
    layers.

    Returns a tuple ``(ordered_terms, ordered_indices,
    ordered_is_hopping_operator)`` where ``ordered_indices[i]`` is the set of
    qubit indices term i acts on and ``ordered_is_hopping_operator[i]`` flags
    hopping terms. If ``external_potential_at_end`` is set, the single-mode
    number-operator terms are emitted as one final layer instead of inside
    the network.
    """
    n_qubits = count_qubits(hamiltonian)
    hamiltonian = normal_ordered(hamiltonian)
    ordered_terms = []
    ordered_indices = []
    ordered_is_hopping_operator = []
    # EAFP: a non-iterable input_ordering (e.g. None) means identity ordering.
    try:
        input_ordering = list(input_ordering)
    except TypeError:
        input_ordering = list(range(n_qubits))
    # The swap network is done once the ordering has been fully reversed.
    final_ordering = list(reversed(input_ordering))
    parity = 0
    while (input_ordering != final_ordering):
        # NOTE(review): this loop relies on stagger_with_info advancing
        # input_ordering in place; otherwise it would not terminate — confirm.
        results = stagger_with_info(hamiltonian, input_ordering, parity, external_potential_at_end)
        (terms_in_layer, indices_in_layer, is_hopping_operator_in_layer) = results
        ordered_terms.extend(terms_in_layer)
        ordered_indices.extend(indices_in_layer)
        ordered_is_hopping_operator.extend(is_hopping_operator_in_layer)
        # Alternate between even- and odd-staggered layers.
        parity = (1 - parity)
    if external_potential_at_end:
        # Collect the diagonal (number operator) terms a^dag_q a_q as a
        # single trailing layer.
        terms_in_final_layer = []
        indices_in_final_layer = []
        is_hopping_operator_in_final_layer = []
        for qubit in range(n_qubits):
            coeff = hamiltonian.terms.get(((qubit, 1), (qubit, 0)), 0.0)
            if coeff:
                terms_in_final_layer.append(FermionOperator(((qubit, 1), (qubit, 0)), coeff))
                indices_in_final_layer.append(set((qubit,)))
                is_hopping_operator_in_final_layer.append(False)
        ordered_terms.extend(terms_in_final_layer)
        ordered_indices.extend(indices_in_final_layer)
        ordered_is_hopping_operator.extend(is_hopping_operator_in_final_layer)
    return (ordered_terms, ordered_indices, ordered_is_hopping_operator)
class AssetFinder(object):
    """Maps sids/symbols to Asset objects backed by an asset SQL database.

    Holds per-instance caches for assets, asset types, ordered futures
    chains, and asset lifetimes. All lookups go through the reflected
    SQLAlchemy tables bound in ``__init__``.
    """

    # NOTE(review): the parenthesized keyword expression below is not valid
    # Python on its own; it looks like the argument list of a stripped
    # decorator (e.g. ``@preprocess(engine=...)``) that belonged on
    # ``__init__`` — confirm against the original source.
    (engine=coerce_string_to_eng(require_exists=True))
    def __init__(self, engine, future_chain_predicates=CHAIN_PREDICATES):
        self.engine = engine
        metadata = sa.MetaData(bind=engine)
        # Reflect only the known asset-db tables and expose each as an attribute.
        metadata.reflect(only=asset_db_table_names)
        for table_name in asset_db_table_names:
            setattr(self, table_name, metadata.tables[table_name])
        check_version_info(engine, self.version_info, ASSET_DB_VERSION)
        self._asset_cache = {}
        self._asset_type_cache = {}
        self._caches = (self._asset_cache, self._asset_type_cache)
        self._future_chain_predicates = (future_chain_predicates if (future_chain_predicates is not None) else {})
        self._ordered_contracts = {}
        self._asset_lifetimes = {}

    # NOTE(review): this and several methods below are accessed elsewhere
    # without call parentheses (e.g. ``self.exchange_info[...]``), which
    # suggests stripped ``@property``/``@lazyval`` decorators — confirm.
    def exchange_info(self):
        """Map exchange name -> ExchangeInfo for all known exchanges."""
        es = sa.select(self.exchanges.c).execute().fetchall()
        return {name: ExchangeInfo(name, canonical_name, country_code) for (name, canonical_name, country_code) in es}

    def symbol_ownership_map(self):
        """Merge the per-country symbol ownership maps into a single map."""
        out = {}
        for mappings in self.symbol_ownership_maps_by_country_code.values():
            for (key, ownership_periods) in mappings.items():
                out.setdefault(key, []).extend(ownership_periods)
        return out

    def symbol_ownership_maps_by_country_code(self):
        """Ownership maps keyed by (company_symbol, share_class_symbol),
        grouped by the country code of the equity's exchange."""
        sid_to_country_code = dict(sa.select((self.equities.c.sid, self.exchanges.c.country_code)).where((self.equities.c.exchange == self.exchanges.c.exchange)).execute().fetchall())
        return build_grouped_ownership_map(table=self.equity_symbol_mappings, key_from_row=(lambda row: (row.company_symbol, row.share_class_symbol)), value_from_row=(lambda row: row.symbol), group_key=(lambda row: sid_to_country_code[row.sid]))

    def country_codes(self):
        """All country codes with at least one equity."""
        return tuple(self.symbol_ownership_maps_by_country_code)

    # NOTE(review): no ``self`` parameter — presumably a stripped
    # ``@staticmethod``; confirm.
    def _fuzzify_symbol_ownership_map(ownership_map):
        """Re-key an ownership map by the concatenated (delimiter-free) symbol."""
        fuzzy_mappings = {}
        for ((cs, scs), owners) in iteritems(ownership_map):
            fuzzy_owners = fuzzy_mappings.setdefault((cs + scs), [])
            fuzzy_owners.extend(owners)
            fuzzy_owners.sort()
        return fuzzy_mappings

    def fuzzy_symbol_ownership_map(self):
        return self._fuzzify_symbol_ownership_map(self.symbol_ownership_map)

    def fuzzy_symbol_ownership_maps_by_country_code(self):
        return valmap(self._fuzzify_symbol_ownership_map, self.symbol_ownership_maps_by_country_code)

    def equity_supplementary_map(self):
        """Ownership map keyed by (field, value) over supplementary mappings."""
        return build_ownership_map(table=self.equity_supplementary_mappings, key_from_row=(lambda row: (row.field, row.value)), value_from_row=(lambda row: row.value))

    def equity_supplementary_map_by_sid(self):
        """Ownership map keyed by (field, sid) over supplementary mappings."""
        return build_ownership_map(table=self.equity_supplementary_mappings, key_from_row=(lambda row: (row.field, row.sid)), value_from_row=(lambda row: row.value))

    def lookup_asset_types(self, sids):
        """Retrieve asset types for a list of sids, caching the results.

        Returns a dict sid -> type string ('equity'/'future') or None for
        sids not present in the asset router.
        """
        found = {}
        missing = set()
        for sid in sids:
            try:
                found[sid] = self._asset_type_cache[sid]
            except KeyError:
                missing.add(sid)
        if (not missing):
            return found
        router_cols = self.asset_router.c
        # Query in chunks to stay under SQLite's variable limit.
        for assets in group_into_chunks(missing):
            query = sa.select((router_cols.sid, router_cols.asset_type)).where(self.asset_router.c.sid.in_(map(int, assets)))
            for (sid, type_) in query.execute().fetchall():
                missing.remove(sid)
                found[sid] = self._asset_type_cache[sid] = type_
        # Anything still missing is simply unknown; cache the negative result.
        for sid in missing:
            found[sid] = self._asset_type_cache[sid] = None
        return found

    def group_by_type(self, sids):
        """Invert lookup_asset_types: type -> set of sids."""
        return invert(self.lookup_asset_types(sids))

    def retrieve_asset(self, sid, default_none=False):
        """Retrieve one asset by sid; raises SidsNotFound unless default_none."""
        try:
            asset = self._asset_cache[sid]
            if ((asset is None) and (not default_none)):
                raise SidsNotFound(sids=[sid])
            return asset
        except KeyError:
            return self.retrieve_all((sid,), default_none=default_none)[0]

    def retrieve_all(self, sids, default_none=False):
        """Retrieve assets for ``sids``, preserving input order.

        Unknown sids raise SidsNotFound unless ``default_none``, in which
        case None is returned (and cached) for them.
        """
        sids = list(sids)
        (hits, missing, failures) = ({}, set(), [])
        for sid in sids:
            try:
                asset = self._asset_cache[sid]
                if ((not default_none) and (asset is None)):
                    raise SidsNotFound(sids=[sid])
                hits[sid] = asset
            except KeyError:
                missing.add(sid)
        if (not missing):
            return [hits[sid] for sid in sids]
        update_hits = hits.update
        type_to_assets = self.group_by_type(missing)
        # Sids with no known type are failures; cache them as None.
        failures = {failure: None for failure in type_to_assets.pop(None, ())}
        update_hits(failures)
        self._asset_cache.update(failures)
        if (failures and (not default_none)):
            raise SidsNotFound(sids=list(failures))
        update_hits(self.retrieve_equities(type_to_assets.pop('equity', ())))
        update_hits(self.retrieve_futures_contracts(type_to_assets.pop('future', ())))
        if type_to_assets:
            raise AssertionError(('Found asset types: %s' % list(type_to_assets.keys())))
        return [hits[sid] for sid in sids]

    def retrieve_equities(self, sids):
        return self._retrieve_assets(sids, self.equities, Equity)

    def _retrieve_equity(self, sid):
        return self.retrieve_equities((sid,))[sid]

    def retrieve_futures_contracts(self, sids):
        return self._retrieve_assets(sids, self.futures_contracts, Future)

    # NOTE(review): no ``self`` — presumably stripped ``@staticmethod``; confirm.
    def _select_assets_by_sid(asset_tbl, sids):
        return sa.select([asset_tbl]).where(asset_tbl.c.sid.in_(map(int, sids)))

    # NOTE(review): no ``self`` — presumably stripped ``@staticmethod``; confirm.
    def _select_asset_by_symbol(asset_tbl, symbol):
        return sa.select([asset_tbl]).where((asset_tbl.c.symbol == symbol))

    def _select_most_recent_symbols_chunk(self, sid_group):
        """Select the most recent symbol mapping row per sid in the group."""
        cols = self.equity_symbol_mappings.c
        data_cols = ((cols.sid,) + tuple((cols[name] for name in symbol_columns)))
        to_select = (data_cols + (sa.func.max(cols.end_date),))
        return sa.select(to_select).where(cols.sid.in_(map(int, sid_group))).group_by(cols.sid)

    def _lookup_most_recent_symbols(self, sids):
        return {row.sid: {c: row[c] for c in symbol_columns} for row in concat((self.engine.execute(self._select_most_recent_symbols_chunk(sid_group)).fetchall() for sid_group in partition_all(SQLITE_MAX_VARIABLE_NUMBER, sids)))}

    def _retrieve_asset_dicts(self, sids, asset_tbl, querying_equities):
        """Yield constructor-kwarg dicts for each asset row, with exchange
        info resolved and (for equities) most-recent symbols merged in."""
        if (not sids):
            return
        if querying_equities:
            # Defaults bind the lookups once so they are not re-evaluated per row.
            def mkdict(row, exchanges=self.exchange_info, symbols=self._lookup_most_recent_symbols(sids)):
                d = dict(row)
                d['exchange_info'] = exchanges[d.pop('exchange')]
                return merge(d, symbols.get(row['sid'], {}))
        else:
            def mkdict(row, exchanges=self.exchange_info):
                d = dict(row)
                d['exchange_info'] = exchanges[d.pop('exchange')]
                return d
        for assets in group_into_chunks(sids):
            query = self._select_assets_by_sid(asset_tbl, assets)
            for row in query.execute().fetchall():
                (yield _convert_asset_timestamp_fields(mkdict(row)))

    def _retrieve_assets(self, sids, asset_tbl, asset_type):
        """Build and cache ``asset_type`` instances for ``sids``; raise the
        type-appropriate NotFound error for any sid with no row."""
        if (not sids):
            return {}
        cache = self._asset_cache
        hits = {}
        querying_equities = issubclass(asset_type, Equity)
        filter_kwargs = (_filter_equity_kwargs if querying_equities else _filter_future_kwargs)
        rows = self._retrieve_asset_dicts(sids, asset_tbl, querying_equities)
        for row in rows:
            sid = row['sid']
            asset = asset_type(**filter_kwargs(row))
            hits[sid] = cache[sid] = asset
        misses = tuple((set(sids) - viewkeys(hits)))
        if misses:
            if querying_equities:
                raise EquitiesNotFound(sids=misses)
            else:
                raise FutureContractsNotFound(sids=misses)
        return hits

    def _lookup_symbol_strict(self, ownership_map, multi_country, symbol, as_of_date):
        """Resolve a delimited symbol exactly; disambiguate by as_of_date
        ownership periods and, if multi_country, by country."""
        (company_symbol, share_class_symbol) = split_delimited_symbol(symbol)
        try:
            owners = ownership_map[(company_symbol, share_class_symbol)]
            assert owners, ('empty owners list for %r' % symbol)
        except KeyError:
            raise SymbolNotFound(symbol=symbol)
        if (not as_of_date):
            if (len(owners) == 1):
                return self.retrieve_asset(owners[0].sid)
            options = {self.retrieve_asset(owner.sid) for owner in owners}
            if multi_country:
                country_codes = map(attrgetter('country_code'), options)
                if (len(set(country_codes)) > 1):
                    raise SameSymbolUsedAcrossCountries(symbol=symbol, options=dict(zip(country_codes, options)))
            raise MultipleSymbolsFound(symbol=symbol, options=options)
        options = []
        country_codes = []
        for (start, end, sid, _) in owners:
            if (start <= as_of_date < end):
                asset = self.retrieve_asset(sid)
                # When scoped to one country, the first in-period owner wins.
                if (not multi_country):
                    return asset
                else:
                    options.append(asset)
                    country_codes.append(asset.country_code)
        if (not options):
            raise SymbolNotFound(symbol=symbol)
        if (len(options) == 1):
            return options[0]
        raise SameSymbolUsedAcrossCountries(symbol=symbol, options=dict(zip(country_codes, options)))

    def _lookup_symbol_fuzzy(self, ownership_map, multi_country, symbol, as_of_date):
        """Resolve a symbol ignoring share-class delimiters, preferring exact
        delimited matches when ambiguous."""
        symbol = symbol.upper()
        (company_symbol, share_class_symbol) = split_delimited_symbol(symbol)
        try:
            owners = ownership_map[(company_symbol + share_class_symbol)]
            assert owners, ('empty owners list for %r' % symbol)
        except KeyError:
            raise SymbolNotFound(symbol=symbol)
        if (not as_of_date):
            if (len(owners) == 1):
                return self.retrieve_asset(owners[0].sid)
            options = []
            for (_, _, sid, sym) in owners:
                if (sym == symbol):
                    options.append(self.retrieve_asset(sid))
            if (len(options) == 1):
                return options[0]
            raise MultipleSymbolsFoundForFuzzySymbol(symbol=symbol, options=self.retrieve_all((owner.sid for owner in owners)))
        options = {}
        for (start, end, sid, sym) in owners:
            if (start <= as_of_date < end):
                options[sid] = sym
        if (not options):
            raise SymbolNotFound(symbol=symbol)
        sid_keys = list(options.keys())
        if (len(options) == 1):
            return self.retrieve_asset(sid_keys[0])
        # Multiple in-period owners: fall back to exact delimited matches.
        exact_options = []
        for (sid, sym) in options.items():
            if ((company_symbol, share_class_symbol) == split_delimited_symbol(sym)):
                asset = self.retrieve_asset(sid)
                if (not multi_country):
                    return asset
                else:
                    exact_options.append(asset)
        if (len(exact_options) == 1):
            return exact_options[0]
        raise MultipleSymbolsFoundForFuzzySymbol(symbol=symbol, options=self.retrieve_all((owner.sid for owner in owners)))

    def _choose_fuzzy_symbol_ownership_map(self, country_code):
        if (country_code is None):
            return self.fuzzy_symbol_ownership_map
        return self.fuzzy_symbol_ownership_maps_by_country_code.get(country_code)

    def _choose_symbol_ownership_map(self, country_code):
        if (country_code is None):
            return self.symbol_ownership_map
        return self.symbol_ownership_maps_by_country_code.get(country_code)

    def lookup_symbol(self, symbol, as_of_date, fuzzy=False, country_code=None):
        """Resolve a ticker symbol to an Asset as of ``as_of_date``.

        ``fuzzy`` ignores share-class delimiters; ``country_code`` scopes
        the lookup to one country (None searches all).
        """
        if (symbol is None):
            raise TypeError(('Cannot lookup asset for symbol of None for as of date %s.' % as_of_date))
        if fuzzy:
            f = self._lookup_symbol_fuzzy
            mapping = self._choose_fuzzy_symbol_ownership_map(country_code)
        else:
            f = self._lookup_symbol_strict
            mapping = self._choose_symbol_ownership_map(country_code)
        if (mapping is None):
            raise SymbolNotFound(symbol=symbol)
        return f(mapping, (country_code is None), symbol, as_of_date)

    def lookup_symbols(self, symbols, as_of_date, fuzzy=False, country_code=None):
        """Resolve many symbols at once, memoizing duplicate inputs."""
        if (not symbols):
            return []
        multi_country = (country_code is None)
        if fuzzy:
            f = self._lookup_symbol_fuzzy
            mapping = self._choose_fuzzy_symbol_ownership_map(country_code)
        else:
            f = self._lookup_symbol_strict
            mapping = self._choose_symbol_ownership_map(country_code)
        if (mapping is None):
            raise SymbolNotFound(symbol=symbols[0])
        memo = {}
        out = []
        append_output = out.append
        for sym in symbols:
            if (sym in memo):
                append_output(memo[sym])
            else:
                equity = memo[sym] = f(mapping, multi_country, sym, as_of_date)
                append_output(equity)
        return out

    def lookup_future_symbol(self, symbol):
        """Resolve a futures contract by its exact symbol."""
        data = self._select_asset_by_symbol(self.futures_contracts, symbol).execute().fetchone()
        if (not data):
            raise SymbolNotFound(symbol=symbol)
        return self.retrieve_asset(data['sid'])

    def lookup_by_supplementary_field(self, field_name, value, as_of_date):
        """Find the equity owning supplementary (field, value) as of a date."""
        try:
            owners = self.equity_supplementary_map[(field_name, value)]
            assert owners, ('empty owners list for %r' % (field_name, value))
        except KeyError:
            raise ValueNotFoundForField(field=field_name, value=value)
        if (not as_of_date):
            if (len(owners) > 1):
                raise MultipleValuesFoundForField(field=field_name, value=value, options=set(map(compose(self.retrieve_asset, attrgetter('sid')), owners)))
            return self.retrieve_asset(owners[0].sid)
        for (start, end, sid, _) in owners:
            if (start <= as_of_date < end):
                return self.retrieve_asset(sid)
        raise ValueNotFoundForField(field=field_name, value=value)

    def get_supplementary_field(self, sid, field_name, as_of_date):
        """Get the value of a supplementary field for a sid as of a date."""
        try:
            periods = self.equity_supplementary_map_by_sid[(field_name, sid)]
            assert periods, ('empty periods list for %r' % (field_name, sid))
        except KeyError:
            raise NoValueForSid(field=field_name, sid=sid)
        if (not as_of_date):
            if (len(periods) > 1):
                raise MultipleValuesFoundForSid(field=field_name, sid=sid, options={p.value for p in periods})
            return periods[0].value
        for (start, end, _, value) in periods:
            if (start <= as_of_date < end):
                return value
        raise NoValueForSid(field=field_name, sid=sid)

    def _get_contract_sids(self, root_symbol):
        """Sids of all contracts for a root symbol with a real start date,
        ordered by sid."""
        fc_cols = self.futures_contracts.c
        return [r.sid for r in list(sa.select((fc_cols.sid,)).where(((fc_cols.root_symbol == root_symbol) & (fc_cols.start_date != pd.NaT.value))).order_by(fc_cols.sid).execute().fetchall())]

    def _get_root_symbol_exchange(self, root_symbol):
        fc_cols = self.futures_root_symbols.c
        fields = (fc_cols.exchange,)
        exchange = sa.select(fields).where((fc_cols.root_symbol == root_symbol)).execute().scalar()
        if (exchange is not None):
            return exchange
        else:
            raise SymbolNotFound(symbol=root_symbol)

    def get_ordered_contracts(self, root_symbol):
        """Cached OrderedContracts chain for a futures root symbol."""
        try:
            return self._ordered_contracts[root_symbol]
        except KeyError:
            contract_sids = self._get_contract_sids(root_symbol)
            contracts = deque(self.retrieve_all(contract_sids))
            chain_predicate = self._future_chain_predicates.get(root_symbol, None)
            oc = OrderedContracts(root_symbol, contracts, chain_predicate)
            self._ordered_contracts[root_symbol] = oc
            return oc

    def create_continuous_future(self, root_symbol, offset, roll_style, adjustment):
        """Create (and cache) the unadjusted, mul- and add-adjusted
        ContinuousFuture for a root symbol; return the requested one."""
        if (adjustment not in ADJUSTMENT_STYLES):
            raise ValueError('Invalid adjustment style {!r}. Allowed adjustment styles are {}.'.format(adjustment, list(ADJUSTMENT_STYLES)))
        oc = self.get_ordered_contracts(root_symbol)
        exchange = self._get_root_symbol_exchange(root_symbol)
        sid = _encode_continuous_future_sid(root_symbol, offset, roll_style, None)
        # NOTE(review): the 'mul' sid is encoded with the tag 'div' — this
        # matches the code as given, but looks suspicious; confirm upstream.
        mul_sid = _encode_continuous_future_sid(root_symbol, offset, roll_style, 'div')
        add_sid = _encode_continuous_future_sid(root_symbol, offset, roll_style, 'add')
        cf_template = partial(ContinuousFuture, root_symbol=root_symbol, offset=offset, roll_style=roll_style, start_date=oc.start_date, end_date=oc.end_date, exchange_info=self.exchange_info[exchange])
        cf = cf_template(sid=sid)
        mul_cf = cf_template(sid=mul_sid, adjustment='mul')
        add_cf = cf_template(sid=add_sid, adjustment='add')
        self._asset_cache[cf.sid] = cf
        self._asset_cache[mul_cf.sid] = mul_cf
        self._asset_cache[add_cf.sid] = add_cf
        return {None: cf, 'mul': mul_cf, 'add': add_cf}[adjustment]

    # Factory for the three sid-listing properties below; deleted afterwards
    # so it does not leak into the public class namespace.
    def _make_sids(tblattr):
        def _(self):
            return tuple(map(itemgetter('sid'), sa.select((getattr(self, tblattr).c.sid,)).execute().fetchall()))
        return _
    sids = property(_make_sids('asset_router'), doc='All the sids in the asset finder.')
    equities_sids = property(_make_sids('equities'), doc='All of the sids for equities in the asset finder.')
    futures_sids = property(_make_sids('futures_contracts'), doc='All of the sids for futures consracts in the asset finder.')
    del _make_sids

    def _lookup_generic_scalar(self, obj, as_of_date, country_code, matches, missing):
        """Resolve one convertible and append it to matches or missing."""
        result = self._lookup_generic_scalar_helper(obj, as_of_date, country_code)
        if (result is not None):
            matches.append(result)
        else:
            missing.append(obj)

    def _lookup_generic_scalar_helper(self, obj, as_of_date, country_code):
        """Convert one Asset/sid/symbol to an Asset, or None if unresolvable."""
        if isinstance(obj, (Asset, ContinuousFuture)):
            return obj
        if isinstance(obj, Integral):
            try:
                return self.retrieve_asset(int(obj))
            except SidsNotFound:
                return None
        if isinstance(obj, string_types):
            # Try equities first, then futures.
            try:
                return self.lookup_symbol(symbol=obj, as_of_date=as_of_date, country_code=country_code)
            except SymbolNotFound:
                try:
                    return self.lookup_future_symbol(obj)
                except SymbolNotFound:
                    return None
        raise NotAssetConvertible(('Input was %s, not AssetConvertible.' % obj))

    def lookup_generic(self, obj, as_of_date, country_code):
        """Resolve a single convertible or an iterable of convertibles.

        Returns (asset, missing) for a scalar input or (assets, missing)
        for an iterable input.
        """
        matches = []
        missing = []
        if isinstance(obj, (AssetConvertible, ContinuousFuture)):
            self._lookup_generic_scalar(obj=obj, as_of_date=as_of_date, country_code=country_code, matches=matches, missing=missing)
            try:
                return (matches[0], missing)
            except IndexError:
                if hasattr(obj, '__int__'):
                    raise SidsNotFound(sids=[obj])
                else:
                    raise SymbolNotFound(symbol=obj)
        try:
            iterator = iter(obj)
        except TypeError:
            raise NotAssetConvertible('Input was not a AssetConvertible or iterable of AssetConvertible.')
        for obj in iterator:
            self._lookup_generic_scalar(obj=obj, as_of_date=as_of_date, country_code=country_code, matches=matches, missing=missing)
        return (matches, missing)

    def _compute_asset_lifetimes(self, country_codes):
        """Build a Lifetimes record (sid, start, end) for equities traded in
        the given countries; NaN dates become 0 / int max."""
        sids = starts = ends = []
        equities_cols = self.equities.c
        if country_codes:
            results = sa.select((equities_cols.sid, equities_cols.start_date, equities_cols.end_date)).where(((self.exchanges.c.exchange == equities_cols.exchange) & self.exchanges.c.country_code.in_(country_codes))).execute().fetchall()
            if results:
                (sids, starts, ends) = zip(*results)
        sid = np.array(sids, dtype='i8')
        start = np.array(starts, dtype='f8')
        end = np.array(ends, dtype='f8')
        start[np.isnan(start)] = 0
        end[np.isnan(end)] = np.iinfo(int).max
        return Lifetimes(sid, start.astype('i8'), end.astype('i8'))

    def lifetimes(self, dates, include_start_date, country_codes):
        """Boolean DataFrame (dates x sids) of whether each asset existed on
        each date; caches lifetimes per country-code set."""
        if isinstance(country_codes, string_types):
            raise TypeError('Got string {!r} instead of an iterable of strings in AssetFinder.lifetimes.'.format(country_codes))
        country_codes = frozenset(country_codes)
        lifetimes = self._asset_lifetimes.get(country_codes)
        if (lifetimes is None):
            self._asset_lifetimes[country_codes] = lifetimes = self._compute_asset_lifetimes(country_codes)
        raw_dates = as_column(dates.asi8)
        if include_start_date:
            mask = (lifetimes.start <= raw_dates)
        else:
            mask = (lifetimes.start < raw_dates)
        mask &= (raw_dates <= lifetimes.end)
        return pd.DataFrame(mask, index=dates, columns=lifetimes.sid)

    def equities_sids_for_country_code(self, country_code):
        """All equity sids whose exchange is in the given country."""
        sids = self._compute_asset_lifetimes([country_code]).sid
        return tuple(sids.tolist())
def _projects_params(q: Optional[str]=None, id: Optional[MultiInt]=None, not_id: Optional[MultiInt]=None, lat: Optional[float]=None, lng: Optional[float]=None, radius: int=500, featured: Optional[bool]=None, noteworthy: Optional[bool]=None, place_id: Optional[MultiInt]=None, site_id: Optional[int]=None, rule_details: Optional[bool]=None, type: Optional[MultiStr]=None, member_id: Optional[int]=None, has_params: Optional[bool]=None, has_posts: Optional[bool]=None):
def resnet_v2_152(inputs, num_classes=None, is_training=True, global_pool=True, output_stride=None, reuse=None, scope='resnet_v2_152'):
    """ResNet-152 (v2, preactivation) — unit layout 3/8/36/3.

    Thin wrapper that assembles the four bottleneck block groups and
    delegates to ``resnet_v2``; see that function for argument semantics.
    """
    def _block(name, base_depth, num_units, stride):
        # (num_units - 1) stride-1 bottleneck units, then one unit with the
        # group's output stride. Output depth is 4x the bottleneck depth.
        unit_args = [(base_depth * 4, base_depth, 1)] * (num_units - 1)
        unit_args.append((base_depth * 4, base_depth, stride))
        return resnet_utils.Block(name, bottleneck, unit_args)

    blocks = [
        _block('block1', 64, 3, 2),
        _block('block2', 128, 8, 2),
        _block('block3', 256, 36, 2),
        _block('block4', 512, 3, 1),
    ]
    return resnet_v2(inputs, blocks, num_classes, is_training=is_training, global_pool=global_pool, output_stride=output_stride, include_root_block=True, reuse=reuse, scope=scope)
def write_fst(lexicon, silprobs, sil_phone, sil_disambig, nonterminals=None, left_context_phones=None):
    """Print a lexicon FST with word-dependent silence probabilities to stdout,
    in OpenFst text format (tab-separated arc lines; costs are -log probs).

    States: 0 = start, 1 = "previous output was non-silence",
    2 = "previous output was silence"; word pronunciations branch off and
    rejoin these two context states.
    """
    # NOTE(review): siloverallprob is unpacked but never used here — confirm
    # whether that is intentional.
    (silbeginprob, silendcorrection, nonsilendcorrection, siloverallprob) = silprobs
    initial_sil_cost = (- math.log(silbeginprob))
    initial_non_sil_cost = (- math.log((1.0 - silbeginprob)))
    sil_end_correction_cost = (- math.log(silendcorrection))
    non_sil_end_correction_cost = (- math.log(nonsilendcorrection))
    start_state = 0
    non_sil_state = 1
    sil_state = 2
    next_state = 3  # next free state id
    # Start-state arcs: either skip initial silence (via the disambiguation
    # symbol) or emit it.
    print('{src}\t{dest}\t{phone}\t{word}\t{cost}'.format(src=start_state, dest=non_sil_state, phone=sil_disambig, word='<eps>', cost=initial_non_sil_cost))
    print('{src}\t{dest}\t{phone}\t{word}\t{cost}'.format(src=start_state, dest=sil_state, phone=sil_phone, word='<eps>', cost=initial_sil_cost))
    for (word, pronprob, wordsilprob, silwordcorrection, nonsilwordcorrection, pron) in lexicon:
        pron_cost = (- math.log(pronprob))
        word_to_sil_cost = (- math.log(wordsilprob))
        word_to_non_sil_cost = (- math.log((1.0 - wordsilprob)))
        sil_to_word_cost = (- math.log(silwordcorrection))
        non_sil_to_word_cost = (- math.log(nonsilwordcorrection))
        if (len(pron) == 0):
            # Empty pronunciation: represent it with a single epsilon phone.
            pron = ['<eps>']
        new_state = next_state
        next_state += 1
        # First phone of the pronunciation, entered from either context state;
        # the word label is emitted on this first arc.
        print('{src}\t{dest}\t{phone}\t{word}\t{cost}'.format(src=non_sil_state, dest=new_state, phone=pron[0], word=word, cost=(pron_cost + non_sil_to_word_cost)))
        print('{src}\t{dest}\t{phone}\t{word}\t{cost}'.format(src=sil_state, dest=new_state, phone=pron[0], word=word, cost=(pron_cost + sil_to_word_cost)))
        cur_state = new_state
        for i in range(1, len(pron)):
            # Mid-pronunciation arcs: epsilon word label, no explicit cost
            # (only four fields; the cost field defaults to 0).
            new_state = next_state
            next_state += 1
            print('{src}\t{dest}\t{phone}\t<eps>'.format(src=cur_state, dest=new_state, phone=pron[i]))
            cur_state = new_state
        # Word-final transitions back to the non-silence / silence states.
        print('{src}\t{dest}\t{phone}\t{word}\t{cost}'.format(src=cur_state, dest=non_sil_state, phone=sil_disambig, word='<eps>', cost=word_to_non_sil_cost))
        print('{src}\t{dest}\t{phone}\t{word}\t{cost}'.format(src=cur_state, dest=sil_state, phone=sil_phone, word='<eps>', cost=word_to_sil_cost))
    if (nonterminals is not None):
        # Grammar support: add arcs for user-defined nonterminal symbols.
        next_state = write_nonterminal_arcs(start_state, sil_state, non_sil_state, next_state, sil_phone, nonterminals, left_context_phones)
    # Final states, with the end-of-utterance correction costs.
    print('{src}\t{cost}'.format(src=sil_state, cost=sil_end_correction_cost))
    print('{src}\t{cost}'.format(src=non_sil_state, cost=non_sil_end_correction_cost))
def is_custom_op_loaded():
    """Return True if a custom-op backend is available.

    Checks, in order: the TensorRT plugin library, the ONNX Runtime custom-op
    library on disk, and finally whether we run under parrots (which ships
    the ops built in). Missing optional backends are silently skipped.
    """
    loaded = False
    try:
        from ..tensorrt import is_tensorrt_plugin_loaded
        loaded = is_tensorrt_plugin_loaded()
    except (ImportError, ModuleNotFoundError):
        pass
    if not loaded:
        try:
            from ..ops import get_onnxruntime_op_path
            loaded = os.path.exists(get_onnxruntime_op_path())
        except (ImportError, ModuleNotFoundError):
            pass
    return loaded or torch.__version__ == 'parrots'
class XceptionFinalBlock(nn.Module):
    """Final stage of Xception: two depthwise-separable 3x3 convolutions
    (1024 -> 1536 -> 2048), a ReLU, and 10x10 average pooling."""

    def __init__(self):
        super(XceptionFinalBlock, self).__init__()
        # First conv defers its activation; the second applies its own.
        self.conv1 = dws_conv3x3_block(in_channels=1024, out_channels=1536, activate=False)
        self.conv2 = dws_conv3x3_block(in_channels=1536, out_channels=2048, activate=True)
        self.activ = nn.ReLU(inplace=True)
        self.pool = nn.AvgPool2d(kernel_size=10, stride=1)

    def forward(self, x):
        y = self.conv1(x)
        y = self.conv2(y)
        y = self.activ(y)
        return self.pool(y)
def mn_decode(wlist):
    """Decode a mnemonic word list back into its hex string.

    Words are consumed three at a time; each triple encodes one 32-bit value
    rendered as 8 hex digits. Trailing words beyond a multiple of three are
    ignored, mirroring the encoder. Relies on the module-level ``wordlist``
    and modulus ``n``.
    """
    chunks = []
    words = iter(wlist)
    # zip over the same iterator yields consecutive non-overlapping triples.
    for first, second, third in zip(words, words, words):
        w1 = wordlist.index(first)
        w2 = wordlist.index(second) % n
        w3 = wordlist.index(third) % n
        value = w1 + n * ((w2 - w1) % n) + n * n * ((w3 - w2) % n)
        chunks.append('%08x' % value)
    return ''.join(chunks)
def test_complete_multiline_on_single_line(cmd2_app):
    """Tab-completing a multiline command typed on one line should offer
    every sport item, in the app's default sort order."""
    text = ''
    line = 'test_multiline {}'.format(text)
    endidx = len(line)
    begidx = endidx - len(text)

    expected = sorted(sport_item_strs, key=cmd2_app.default_sort_key)
    first_match = complete_tester(text, line, begidx, endidx, cmd2_app)

    assert first_match is not None
    assert cmd2_app.completion_matches == expected
# NOTE(review): the bare ``.cypress`` below looks like a stripped decorator
# (e.g. ``@pytest.mark.cypress``) — confirm against the original source.
.cypress
def test_cypress():
    """End-to-end test: download fixture data, then run the Cypress suite
    against the GUI via yarn."""
    # Fetch the e2e demo data and the CT/structure fixtures Cypress expects.
    pymedphys.zip_data_paths('metersetmap-gui-e2e-data.zip', extract_directory=utilities.HERE)
    pymedphys.zip_data_paths('dummy-ct-and-struct.zip', extract_directory=utilities.HERE.joinpath('cypress', 'fixtures'))
    utilities.run_test_commands_with_gui_process(['yarn', 'yarn cypress run'])
def read_yaml(config_name, config_path):
    """Load one named section from a YAML configuration file.

    Parameters
    ----------
    config_name : str
        Name of the section to read. It is looked up in upper-case form,
        matching how the original code indexed the parsed mapping.
    config_path : str
        Path to the YAML file.

    Returns the section's value.

    Raises
    ------
    ValueError
        If either argument is falsy.
    KeyError
        If the section is not present in the file.
    """
    if not (config_name and config_path):
        raise ValueError('both config_name and config_path are required')
    with open(config_path, 'r', encoding='utf-8') as f:
        conf = yaml.safe_load(f)
    # BUG FIX: the original checked membership with the raw name but indexed
    # with the upper-cased name, so the guarded access could still raise an
    # unguarded (and message-less) KeyError. Use a single key for both.
    key = config_name.upper()
    if key in conf:
        return conf[key]
    raise KeyError(f'section {key!r} not found in {config_path!r}')
def test_version_writes_github_actions_output(repo_with_git_flow_angular_commits, cli_runner, monkeypatch, tmp_path):
    """``version --patch`` should write released/version/tag entries to the
    file named by $GITHUB_OUTPUT."""
    output_file = tmp_path / 'action.out'
    monkeypatch.setenv('GITHUB_OUTPUT', str(output_file.resolve()))

    result = cli_runner.invoke(main, [version.name, '--patch', '--no-push'])
    assert result.exit_code == 0

    action_outputs = actions_output_to_dict(output_file.read_text(encoding='utf-8'))
    expected = {'released': 'true', 'version': '1.2.1', 'tag': 'v1.2.1'}
    for key, value in expected.items():
        assert key in action_outputs
        assert action_outputs[key] == value
class ExceptionHandlerWidget(QtWidgets.QGroupBox):
    """Widget that catches exceptions application-wide and shows their stack
    in a tree for interactive debugging.

    Uncaught exceptions come in via ``exceptionHandling.registerCallback``;
    *caught* exceptions can additionally be monitored via ``sys.settrace``
    (the ``systrace`` method).
    """

    # Emitted with (self, item) when a stack entry is clicked / double-clicked.
    sigStackItemClicked = QtCore.Signal(object, object)
    sigStackItemDblClicked = QtCore.Signal(object, object)
    # Internal: bounces exceptions raised on non-GUI threads onto the GUI thread.
    _threadException = QtCore.Signal(object)

    def __init__(self, parent=None):
        super().__init__(parent)
        self._setupUi()
        self.filterString = ''
        self._inSystrace = False  # re-entrancy guard for systrace()
        self._threadException.connect(self._threadExceptionHandler)

    def _setupUi(self):
        """Build the group box: control buttons, regex filter and stack tree."""
        self.setTitle('Exception Handling')
        self.layout = QtWidgets.QGridLayout(self)
        self.layout.setContentsMargins(0, 0, 0, 0)
        self.layout.setHorizontalSpacing(2)
        self.layout.setVerticalSpacing(0)
        self.clearExceptionBtn = QtWidgets.QPushButton('Clear Stack', self)
        self.clearExceptionBtn.setEnabled(False)
        self.layout.addWidget(self.clearExceptionBtn, 0, 6, 1, 1)
        self.catchAllExceptionsBtn = QtWidgets.QPushButton('Show All Exceptions', self)
        self.catchAllExceptionsBtn.setCheckable(True)
        self.layout.addWidget(self.catchAllExceptionsBtn, 0, 1, 1, 1)
        self.catchNextExceptionBtn = QtWidgets.QPushButton('Show Next Exception', self)
        self.catchNextExceptionBtn.setCheckable(True)
        self.layout.addWidget(self.catchNextExceptionBtn, 0, 0, 1, 1)
        self.onlyUncaughtCheck = QtWidgets.QCheckBox('Only Uncaught Exceptions', self)
        self.onlyUncaughtCheck.setChecked(True)
        self.layout.addWidget(self.onlyUncaughtCheck, 0, 4, 1, 1)
        self.stackTree = StackWidget(self)
        self.layout.addWidget(self.stackTree, 2, 0, 1, 7)
        self.runSelectedFrameCheck = QtWidgets.QCheckBox('Run commands in selected stack frame', self)
        self.runSelectedFrameCheck.setChecked(True)
        self.layout.addWidget(self.runSelectedFrameCheck, 3, 0, 1, 7)
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Policy.Expanding, QtWidgets.QSizePolicy.Policy.Minimum)
        self.layout.addItem(spacerItem, 0, 5, 1, 1)
        self.filterLabel = QtWidgets.QLabel('Filter (regex):', self)
        self.layout.addWidget(self.filterLabel, 0, 2, 1, 1)
        self.filterText = QtWidgets.QLineEdit(self)
        self.layout.addWidget(self.filterText, 0, 3, 1, 1)
        self.catchAllExceptionsBtn.toggled.connect(self.catchAllExceptions)
        self.catchNextExceptionBtn.toggled.connect(self.catchNextException)
        self.clearExceptionBtn.clicked.connect(self.clearExceptionClicked)
        self.stackTree.itemClicked.connect(self.stackItemClicked)
        self.stackTree.itemDoubleClicked.connect(self.stackItemDblClicked)
        self.onlyUncaughtCheck.toggled.connect(self.updateSysTrace)
        self.filterText.textChanged.connect(self._filterTextChanged)

    def setStack(self, frame=None):
        """Display *frame*'s stack in the tree."""
        self.clearExceptionBtn.setEnabled(True)
        self.stackTree.setStack(frame)

    def setException(self, exc=None, lastFrame=None):
        """Display an exception (optionally with the frame it surfaced in)."""
        self.clearExceptionBtn.setEnabled(True)
        self.stackTree.setException(exc, lastFrame=lastFrame)

    def selectedFrame(self):
        return self.stackTree.selectedFrame()

    def catchAllExceptions(self, catch=True):
        """Toggle catching of all exceptions; mutually exclusive with 'next'."""
        # SignalBlock prevents the programmatic setChecked from re-entering
        # this slot via the toggled signal.
        with SignalBlock(self.catchAllExceptionsBtn.toggled, self.catchAllExceptions):
            self.catchAllExceptionsBtn.setChecked(catch)
        if catch:
            with SignalBlock(self.catchNextExceptionBtn.toggled, self.catchNextException):
                self.catchNextExceptionBtn.setChecked(False)
            self.enableExceptionHandling()
        else:
            self.disableExceptionHandling()

    def catchNextException(self, catch=True):
        """Toggle catching of only the next exception; exclusive with 'all'."""
        with SignalBlock(self.catchNextExceptionBtn.toggled, self.catchNextException):
            self.catchNextExceptionBtn.setChecked(catch)
        if catch:
            with SignalBlock(self.catchAllExceptionsBtn.toggled, self.catchAllExceptions):
                self.catchAllExceptionsBtn.setChecked(False)
            self.enableExceptionHandling()
        else:
            self.disableExceptionHandling()

    def enableExceptionHandling(self):
        exceptionHandling.registerCallback(self.exceptionHandler)
        self.updateSysTrace()

    def disableExceptionHandling(self):
        exceptionHandling.unregisterCallback(self.exceptionHandler)
        self.updateSysTrace()

    def clearExceptionClicked(self):
        self.stackTree.clear()
        self.clearExceptionBtn.setEnabled(False)

    def updateSysTrace(self):
        """Install or remove our sys.settrace hook according to the UI state."""
        if (not self.catchNextExceptionBtn.isChecked()) and (not self.catchAllExceptionsBtn.isChecked()):
            if sys.gettrace() == self.systrace:
                self._disableSysTrace()
            return
        if self.onlyUncaughtCheck.isChecked():
            # Uncaught-only mode does not need a trace hook.
            if sys.gettrace() == self.systrace:
                self._disableSysTrace()
        elif sys.gettrace() not in (None, self.systrace):
            # Someone else (a debugger?) owns the trace hook; refuse.
            self.onlyUncaughtCheck.setChecked(False)
            raise Exception('sys.settrace is in use (are you using another debugger?); cannot monitor for caught exceptions.')
        else:
            self._enableSysTrace()

    def _enableSysTrace(self):
        sys.settrace(self.systrace)
        threading.settrace(self.systrace)
        # settrace_all_threads exists on Python 3.12+; also covers running threads.
        if hasattr(threading, 'settrace_all_threads'):
            threading.settrace_all_threads(self.systrace)

    def _disableSysTrace(self):
        sys.settrace(None)
        threading.settrace(None)
        if hasattr(threading, 'settrace_all_threads'):
            threading.settrace_all_threads(None)

    def exceptionHandler(self, excInfo, lastFrame=None):
        """Show an exception in the tree (marshalled onto the GUI thread)."""
        if isinstance(excInfo, Exception):
            exc = excInfo
        else:
            exc = excInfo.exc_value
        isGuiThread = QtCore.QThread.currentThread() == QtCore.QCoreApplication.instance().thread()
        if not isGuiThread:
            # Qt widgets may only be touched from the GUI thread; re-emit.
            self._threadException.emit((excInfo, lastFrame))
            return
        if self.catchNextExceptionBtn.isChecked():
            self.catchNextExceptionBtn.setChecked(False)
        elif not self.catchAllExceptionsBtn.isChecked():
            return
        self.setException(exc, lastFrame=lastFrame)

    def _threadExceptionHandler(self, args):
        self.exceptionHandler(*args)

    def systrace(self, frame, event, arg):
        """sys.settrace hook: surface caught exceptions through the widget."""
        if event != 'exception':
            return self.systrace
        if self._inSystrace:
            # Guard against recursion while we are already handling one.
            return self.systrace
        self._inSystrace = True
        try:
            if self.checkException(*arg):
                self.exceptionHandler(arg[1], lastFrame=frame)
        except Exception:
            print('Exception in systrace:')
            traceback.print_exc()
        finally:
            # BUG FIX: this previously assigned ``self.inSystrace`` (a typo),
            # leaving the real ``_inSystrace`` guard stuck at True so every
            # subsequent traced exception was silently ignored.
            self._inSystrace = False
        return self.systrace

    def checkException(self, excType, exc, tb):
        """Return True if this exception should be displayed.

        When a filter regex is set it alone decides; otherwise a list of
        known-noisy exception sites (numpy internals, MetaArray attribute
        probing, flowchart/eq.py) is suppressed.
        """
        filename = tb.tb_frame.f_code.co_filename
        function = tb.tb_frame.f_code.co_name
        filterStr = self.filterString
        if filterStr != '':
            if isinstance(exc, Exception):
                msg = traceback.format_exception_only(type(exc), exc)
            elif isinstance(exc, str):
                msg = exc
            else:
                msg = repr(exc)
            match = re.search(filterStr, '%s:%s:%s' % (filename, function, msg))
            return match is not None
        if excType is GeneratorExit or excType is StopIteration:
            return False
        if excType is AttributeError:
            if filename.endswith('numpy/core/fromnumeric.py') and function in ('all', '_wrapit', 'transpose', 'sum'):
                return False
            # NOTE(review): ``function in '_array2string'`` is a substring
            # test, not a tuple membership test — it also matches e.g.
            # 'array2str'.  Kept as-is; confirm intent before tightening.
            if filename.endswith('numpy/core/arrayprint.py') and function in '_array2string':
                return False
            if filename.endswith('MetaArray.py') and function == '__getattr__':
                # NOTE(review): ``name in exc`` assumes exc is a string here.
                for name in ('__array_interface__', '__array_struct__', '__array__'):
                    if name in exc:
                        return False
            if filename.endswith('flowchart/eq.py'):
                return False
        if excType is TypeError:
            if filename.endswith('numpy/lib/function_base.py') and function == 'iterable':
                return False
        return True

    def stackItemClicked(self, item):
        self.sigStackItemClicked.emit(self, item)

    def stackItemDblClicked(self, item):
        self.sigStackItemDblClicked.emit(self, item)

    def _filterTextChanged(self, value):
        self.filterString = str(value)
def get_activations(files, model, batch_size=50, dims=2048, cuda=False, verbose=False):
    """Compute Inception activations for a list of image files.

    Returns an ``(n_used, dims)`` array; trailing files that do not fill a
    whole batch are dropped (a warning is printed when this happens).
    """
    model.eval()
    n_files = len(files)
    if n_files % batch_size != 0:
        print('Warning: number of images is not a multiple of the batch size. Some samples are going to be ignored.')
    if batch_size > n_files:
        print('Warning: batch size is bigger than the data size. Setting batch size to data size')
        batch_size = n_files
    n_batches = n_files // batch_size
    pred_arr = np.empty((n_batches * batch_size, dims))
    for batch_idx in tqdm(range(n_batches)):
        if verbose:
            print('\rPropagating batch %d/%d' % (batch_idx + 1, n_batches), end='', flush=True)
        lo = batch_idx * batch_size
        hi = lo + batch_size
        stack = np.array([imread(str(f)).astype(np.float32) for f in files[lo:hi]])
        # HWC -> CHW, then scale to [0, 1].
        stack = stack.transpose((0, 3, 1, 2))
        stack /= 255
        batch = torch.from_numpy(stack).type(torch.FloatTensor)
        if cuda:
            batch = batch.cuda()
        pred = model(batch)[0]
        # If the model emits a spatial map, pool it down to 1x1 first.
        if pred.shape[2] != 1 or pred.shape[3] != 1:
            pred = adaptive_avg_pool2d(pred, output_size=(1, 1))
        pred_arr[lo:hi] = pred.cpu().data.numpy().reshape(batch_size, -1)
    if verbose:
        print(' done')
    return pred_arr
class BaseGlm(BaseEstimator):
    """Base class for penalized generalized linear models (sklearn-style).

    Subclasses provide ``_process_y`` and the fitting entry points; this base
    handles validation, preprocessing, initializer/solver setup and storing
    the fitted state.
    """

    def __init__(self, loss='lin_reg', penalty=None, constraint=None, standardize=True, fit_intercept=True, solver='default', lla=True, initializer='default', relaxed=False, inferencer=None):
        # NOTE(review): body is intentionally empty, yet other methods read
        # self.loss, self.penalty, ... — presumably an auto-generated or
        # subclass __init__ stores the hyper-parameters; confirm.
        pass

    def _estimator_type(self):
        # Delegates to the loss config (mirrors sklearn's regressor/classifier tag).
        loss_config = get_base_config(get_loss_config(self.loss))
        return loss_config._estimator_type

    def _is_tuner(self):
        raise NotImplementedError('Subclass should overwrite')

    def _validate_data(self, X, y, sample_weight=None, offsets=None, accept_sparse=True):
        """Validate and coerce X, y, sample_weight and offsets; returns the
        cleaned 4-tuple.  Raises ValueError on row-count mismatch."""
        X = check_array(X, accept_sparse=accept_sparse, dtype=FLOAT_DTYPES)
        if (sample_weight is not None):
            sample_weight = _check_sample_weight(sample_weight, X, dtype=X.dtype)
        offsets = _check_offsets(offsets, X=X, dtype=X.dtype, force_not_none=False, force_vector=False)
        y = _check_y(y, multi_output=True, y_numeric=False)
        # Collapse single-column 2D targets to a 1D vector.
        if ((y.ndim == 2) and (y.shape[1] == 1)):
            y = y.reshape((- 1))
        if (y.shape[0] != X.shape[0]):
            raise ValueError('X and y must have the same number of rows!')
        return (X, y, sample_weight, offsets)

    def preprocess(self, X, y, sample_weight=None, offsets=None, copy=True, check_input=True):
        """Standardize/center the data as configured; returns
        (pro_data dict, preprocessing output dict)."""
        if (sample_weight is not None):
            if copy:
                sample_weight = sample_weight.copy()
        if (offsets is not None):
            # offsets may not expose .copy() (e.g. a scalar) — fall back to deepcopy.
            if hasattr(offsets, 'copy'):
                offsets = offsets.copy()
            else:
                offsets = deepcopy(offsets)
        (X, out) = process_X(X, standardize=self.standardize, fit_intercept=self.fit_intercept, sample_weight=sample_weight, copy=copy, check_input=check_input, accept_sparse=True)
        (y, sample_weight, y_out) = self._process_y(X=X, y=y, sample_weight=sample_weight, copy=copy)
        out.update(y_out)
        pro_data = {'X': X, 'y': y, 'sample_weight': sample_weight, 'offsets': offsets}
        return (pro_data, out)

    def get_unflavored_tunable(self):
        raise NotImplementedError('Subclass should overwrite')

    def get_initializer(self, X, y, pre_pro_out, sample_weight=None, offsets=None):
        """Resolve the coefficient initializer for flavored (adaptive/non-convex)
        penalties; returns (init_data, init_est), both None when no flavor."""
        flavor_kind = get_flavor_kind(self.penalty)
        if (flavor_kind is not None):
            if is_str_and_matches(self.initializer, 'default'):
                # Default: adaptive penalties (and LLA-driven non-convex ones)
                # initialize from a fitted unflavored version of this model.
                if ((flavor_kind == 'adaptive') or ((flavor_kind in ['non_convex', 'mixed']) and self.lla)):
                    init_est = self.get_unflavored_tunable()
                    yes_pro_pro_init = True
                else:
                    init_data = None
                    init_est = None
                    # NOTE(review): yes_pro_pro_init is not set on this path;
                    # only safe because init_data is None below.
            elif is_str_and_matches(self.initializer, 'zero'):
                (coef_shape, intercept_shape) = get_shapes_from(X=X, y=y)
                coef = np.zeros(coef_shape)
                if (len(intercept_shape) == 0):
                    intercept = 0
                else:
                    intercept = np.zeros(intercept_shape)
                init_data = {'coef': coef, 'intercept': intercept}
                yes_pro_pro_init = False
                init_est = None
            elif isinstance(self.initializer, dict):
                # User supplied explicit init values; 'pre_pro' controls whether
                # they still need to be transformed into preprocessed space.
                init_data = self.initializer
                yes_pro_pro_init = init_data.get('pre_pro', True)
                init_est = None
            else:
                # Assume the initializer is an estimator (fitted or not).
                init_est = self.initializer
                yes_pro_pro_init = True
            if (init_est is not None):
                init_est = fit_if_unfitted(init_est, X=X, y=y, sample_weight=sample_weight, offsets=offsets)
                (coef, intercept) = get_coef_and_intercept(init_est, copy=True, error=True)
                init_data = {'coef': coef, 'intercept': intercept}
            if ((init_data is not None) and yes_pro_pro_init):
                init_data = process_init_data(init_data=init_data, pre_pro_out=pre_pro_out)
            if (flavor_kind in ['adaptive', 'mixed']):
                # NOTE(review): init_data can be None here when flavor is
                # 'mixed' and lla is False — this would raise; confirm intent.
                init_data['n_samples'] = X.shape[0]
            return (init_data, init_est)
        else:
            (init_data, init_est) = (None, None)
            return (init_data, init_est)

    def setup_and_prefit(self, X, y, sample_weight, offsets):
        """Run validation, inference pre-fit hook, preprocessing, initializer
        resolution and solver construction.  Returns everything needed to fit."""
        (X, y, sample_weight, offsets) = self._validate_data(X=X, y=y, sample_weight=sample_weight, offsets=offsets)
        raw_data = {'X': X, 'y': y, 'sample_weight': sample_weight, 'offsets': offsets}
        inferencer = self.run_prefit_inference(**raw_data)
        (pro_data, pre_pro_out) = self.preprocess(**raw_data, copy=True)
        (init_data, init_est) = self.get_initializer(X=X, y=y, pre_pro_out=pre_pro_out, sample_weight=sample_weight, offsets=offsets)
        pre_pro_out['init_est'] = init_est
        configs = {'loss': get_loss_config(self.loss), 'constraint': get_constraint_config(self.constraint), 'penalty': get_penalty_config(self.penalty)}
        flavor_kind = get_flavor_kind(configs['penalty'])
        if ((flavor_kind in ['non_convex', 'mixed']) and self.lla):
            # LLA algorithm wraps a sub-problem solver; self.lla may be a
            # bool (use default 1-step LLA) or a solver instance.
            if (type(self.lla) == bool):
                solver = LLAFixedInit(max_steps=1)
            else:
                solver = self.lla
            solver.set_sp_solver(get_solver(self.solver, **configs))
        else:
            solver = get_solver(self.solver, **configs)
        if solver.needs_fixed_init:
            solver.set_fixed_init(init_data)
        return (pro_data, raw_data, pre_pro_out, configs, solver, init_data, inferencer)

    def _get_solver_init(self, init_data):
        """Warm-start kwargs for the solver; only non-convex/mixed penalties
        solved directly (without LLA) take coef/intercept init values."""
        flavor_kind = get_flavor_kind(self.penalty)
        if ((flavor_kind in ['non_convex', 'mixed']) and (not self.lla)):
            if (init_data is None):
                return {}
            else:
                return {'coef_init': init_data.get('coef', None), 'intercept_init': init_data.get('intercept', None)}
        else:
            return None

    def _fit_from_configs(self, pro_data, raw_data, configs, solver, pre_pro_out, init_data=None):
        """Solve the optimization problem and store the fitted state."""
        solver_init = self._get_solver_init(init_data)
        solver.setup(fit_intercept=self.fit_intercept, **pro_data, **configs)
        solver_init = ({} if (solver_init is None) else solver_init)
        (fit_out, _, opt_info) = solver.solve(**solver_init)
        if self.relaxed:
            raise NotImplementedError('TODO add')
        self._set_fit(fit_out=fit_out, pre_pro_out=pre_pro_out, configs=configs, opt_info=opt_info)
        self.run_after_fit_inference(**raw_data)
        return self

    def _set_fit(self, fit_out, pre_pro_out, configs=None, opt_info=None):
        """Store coef_/intercept_ (de-processed back to the original data
        scale) plus label encoders, fitted configs and solver info."""
        coef = fit_out['coef']
        intercept = fit_out.get('intercept', None)
        (self.coef_, self.intercept_) = deprocess_fit(coef=coef, intercept=intercept, pre_pro_out=pre_pro_out, fit_intercept=self.fit_intercept)
        if (not self.fit_intercept):
            self.intercept_ = None
        if ('label_encoder' in pre_pro_out):
            self.label_encoder_ = pre_pro_out['label_encoder']
            self.classes_ = self.label_encoder_.classes_
        elif ('label_binarizer' in pre_pro_out):
            self.label_binarizer_ = pre_pro_out['label_binarizer']
            self.classes_ = self.label_binarizer_.classes_
        if (configs is not None):
            self.fit_loss_ = configs['loss']
            self.fit_penalty_ = configs['penalty']
            self.fit_constraint_ = configs['constraint']
        if (opt_info is not None):
            self.opt_info_ = opt_info
        init_est = pre_pro_out.get('init_est', None)
        if (init_est is not None):
            self.est_init_ = init_est
        return self

    def _set_fit_from(self, estimator, copy=False):
        """Copy every fitted attribute (trailing underscore) from *estimator*."""
        for (k, v) in estimator.__dict__.items():
            if k.endswith('_'):
                if copy:
                    self.__dict__[k] = deepcopy(v)
                else:
                    self.__dict__[k] = v
        return self

    def decision_function(self, X, offsets=None):
        """Return the linear predictor z = X @ coef_ (+ intercept_) (+ offsets)."""
        check_is_fitted(self)
        X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])
        z = safe_sparse_dot(X, self.coef_, dense_output=True)
        if (hasattr(self, 'intercept_') and (self.intercept_ is not None)):
            z += self.intercept_
        if (offsets is not None):
            z += offsets
        return z

    def _more_tags(self):
        return {'requires_y': True}

    def run_prefit_inference(self, X, y, sample_weight=None, offsets=None):
        """Run the inferencer's pre-fit hook on a deep copy (the template on
        self is never mutated); returns the copy or None."""
        if (self.inferencer is not None):
            inferencer = deepcopy(self.inferencer)
            inferencer.pre_fit(estimator=self, X=X, y=y, sample_weight=sample_weight, offsets=offsets)
            return inferencer
        else:
            return None

    def run_after_fit_inference(self, X, y, sample_weight=None, offsets=None):
        """Run the fitted inferencer's after-fit hook, if one exists."""
        if (self.inferencer_ is not None):
            self.inferencer_.after_fit(estimator=self, X=X, y=y, sample_weight=sample_weight, offsets=offsets)
        return self

    def _process_y(self, X, y, sample_weight=None, copy=True):
        raise NotImplementedError
class _ParameterGuardMeta(type): def __getitem__(self, params: Tuple[(str, object)]) -> Any: if ((not isinstance(params, tuple)) or (len(params) != 2)): raise TypeError(f'{self.__name__}[...] should be instantiated with two arguments (a variable name and a type).') if (not isinstance(params[0], str)): raise TypeError(f'The first argument to {self.__name__} must be a string') return self(params[0], params[1])
def _find_args_with_product_annotation(func: Callable[..., Any]) -> list[str]:
    """Return the parameter names of *func* whose ``Annotated`` metadata
    contains a ``ProductType`` marker."""
    annotations = get_annotations(func, eval_str=True)
    found: list[str] = []
    for param, annotation in annotations.items():
        if get_origin(annotation) is not Annotated:
            continue
        if any(isinstance(item, ProductType) for item in annotation.__metadata__):
            found.append(param)
    return found
class GetMessages():
    async def get_messages(self: 'pyrogram.Client', chat_id: Union[int, str], message_ids: Union[int, Iterable[int]] = None, reply_to_message_ids: Union[int, Iterable[int]] = None, replies: int = 1) -> Union['types.Message', List['types.Message']]:
        """Fetch one or more messages from a chat.

        Exactly one of *message_ids* / *reply_to_message_ids* must be given;
        a single int yields a single Message (or None), an iterable yields a
        list.  *replies* < 0 means "as many reply levels as possible".
        """
        if message_ids:
            ids, ids_type = message_ids, raw.types.InputMessageID
        elif reply_to_message_ids:
            ids, ids_type = reply_to_message_ids, raw.types.InputMessageReplyTo
        else:
            raise ValueError('No argument supplied. Either pass message_ids or reply_to_message_ids')

        peer = await self.resolve_peer(chat_id)

        wants_list = not isinstance(ids, int)
        id_list = list(ids) if wants_list else [ids]
        wrapped = [ids_type(id=i) for i in id_list]

        if replies < 0:
            replies = (1 << 31) - 1  # effectively unlimited reply depth

        # Channels use their own GetMessages RPC.
        if isinstance(peer, raw.types.InputPeerChannel):
            rpc = raw.functions.channels.GetMessages(channel=peer, id=wrapped)
        else:
            rpc = raw.functions.messages.GetMessages(id=wrapped)
        r = await self.invoke(rpc, sleep_threshold=-1)

        messages = await utils.parse_messages(self, r, replies=replies)
        if not wants_list:
            return messages[0] if messages else None
        return messages
class DatasetFingerprintExtractor(object):
    """Computes a dataset 'fingerprint': per-case shapes after nonzero
    cropping, spacings, and foreground intensity statistics per channel.
    The result is cached in ``dataset_fingerprint.json``.
    """

    def __init__(self, dataset_name_or_id: Union[str, int], num_processes: int = 8, verbose: bool = False):
        dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)
        self.verbose = verbose
        self.dataset_name = dataset_name
        self.input_folder = join(nnUNet_raw, dataset_name)
        self.num_processes = num_processes
        self.dataset_json = load_json(join(self.input_folder, 'dataset.json'))
        # BUG FIX: this was '.0' (i.e. 0.0), which made
        # num_foreground_samples_per_case zero so NO foreground intensities
        # were ever sampled.  Restored to 10e7 (1e8) total voxels across the
        # dataset, matching the attribute's name and downstream use.
        self.num_foreground_voxels_for_intensitystats = 10e7

    @staticmethod
    def collect_foreground_intensities(segmentation: np.ndarray, images: np.ndarray, seed: int = 1234, num_samples: int = 10000):
        """Sample *num_samples* foreground intensities per image channel and
        compute summary statistics over all foreground voxels.

        Foreground is where the first segmentation channel is > 0.  Marked
        @staticmethod because it is always invoked through the class.
        """
        assert len(images.shape) == 4
        assert len(segmentation.shape) == 4
        assert not np.any(np.isnan(segmentation)), 'Segmentation contains NaN values. grrrr.... :-('
        assert not np.any(np.isnan(images)), 'Images contains NaN values. grrrr.... :-('
        rs = np.random.RandomState(seed)
        intensities_per_channel = []
        intensity_statistics_per_channel = []
        foreground_mask = segmentation[0] > 0
        for i in range(len(images)):
            foreground_pixels = images[i][foreground_mask]
            num_fg = len(foreground_pixels)
            # Sampling with replacement keeps the per-case memory bounded.
            intensities_per_channel.append(
                rs.choice(foreground_pixels, num_samples, replace=True) if num_fg > 0 else [])
            intensity_statistics_per_channel.append({
                'mean': np.mean(foreground_pixels) if num_fg > 0 else np.nan,
                'median': np.median(foreground_pixels) if num_fg > 0 else np.nan,
                'min': np.min(foreground_pixels) if num_fg > 0 else np.nan,
                'max': np.max(foreground_pixels) if num_fg > 0 else np.nan,
                'percentile_99_5': np.percentile(foreground_pixels, 99.5) if num_fg > 0 else np.nan,
                'percentile_00_5': np.percentile(foreground_pixels, 0.5) if num_fg > 0 else np.nan,
            })
        return (intensities_per_channel, intensity_statistics_per_channel)

    @staticmethod
    def analyze_case(image_files: List[str], segmentation_file: str, reader_writer_class: Type[BaseReaderWriter],
                     num_samples: int = 10000):
        """Load one training case, crop to its nonzero region and return
        (shape_after_crop, spacing, fg intensities, fg stats, relative size)."""
        rw = reader_writer_class()
        images, properties_images = rw.read_images(image_files)
        segmentation, properties_seg = rw.read_seg(segmentation_file)
        data_cropped, seg_cropped, bbox = crop_to_nonzero(images, segmentation)
        foreground_intensities_per_channel, foreground_intensity_stats_per_channel = \
            DatasetFingerprintExtractor.collect_foreground_intensities(seg_cropped, data_cropped,
                                                                       num_samples=num_samples)
        spacing = properties_images['spacing']
        shape_before_crop = images.shape[1:]
        shape_after_crop = data_cropped.shape[1:]
        # How much of the original volume survived the crop (used later to
        # decide whether cropping is worthwhile at preprocessing time).
        relative_size_after_cropping = np.prod(shape_after_crop) / np.prod(shape_before_crop)
        return (shape_after_crop, spacing, foreground_intensities_per_channel,
                foreground_intensity_stats_per_channel, relative_size_after_cropping)

    def run(self, overwrite_existing: bool = False) -> dict:
        """Compute (or load the cached) dataset fingerprint dict."""
        preprocessed_output_folder = join(nnUNet_preprocessed, self.dataset_name)
        maybe_mkdir_p(preprocessed_output_folder)
        properties_file = join(preprocessed_output_folder, 'dataset_fingerprint.json')
        if (not isfile(properties_file)) or overwrite_existing:
            file_ending = self.dataset_json['file_ending']
            training_identifiers = get_identifiers_from_splitted_dataset_folder(
                join(self.input_folder, 'imagesTr'), file_ending)
            reader_writer_class = determine_reader_writer_from_dataset_json(
                self.dataset_json,
                join(self.input_folder, 'imagesTr', training_identifiers[0] + '_0000' + file_ending))
            training_images_per_case = create_lists_from_splitted_dataset_folder(
                join(self.input_folder, 'imagesTr'), file_ending)
            training_labels_per_case = [join(self.input_folder, 'labelsTr', i + file_ending)
                                        for i in training_identifiers]
            # Spread the global sample budget evenly over the training cases.
            num_foreground_samples_per_case = int(self.num_foreground_voxels_for_intensitystats //
                                                  len(training_identifiers))
            r = []
            with multiprocessing.get_context('spawn').Pool(self.num_processes) as p:
                for ti, tl in zip(training_images_per_case, training_labels_per_case):
                    r.append(p.starmap_async(DatasetFingerprintExtractor.analyze_case,
                                             ((ti, tl, reader_writer_class, num_foreground_samples_per_case),)))
                # Poll the async results so a dead worker is detected early
                # instead of hanging forever.
                remaining = list(range(len(training_images_per_case)))
                workers = [j for j in p._pool]
                with tqdm(desc=None, total=len(training_images_per_case), disable=self.verbose) as pbar:
                    while len(remaining) > 0:
                        all_alive = all([j.is_alive() for j in workers])
                        if not all_alive:
                            raise RuntimeError('Some background worker is 6 feet under. Yuck.')
                        done = [i for i in remaining if r[i].ready()]
                        for _ in done:
                            pbar.update()
                        remaining = [i for i in remaining if (i not in done)]
                        sleep(0.1)
            results = [i.get()[0] for i in r]
            shapes_after_crop = [r[0] for r in results]
            spacings = [r[1] for r in results]
            foreground_intensities_per_channel = [np.concatenate([r[2][i] for r in results])
                                                  for i in range(len(results[0][2]))]
            median_relative_size_after_cropping = np.median([r[4] for r in results], 0)
            num_channels = len(self.dataset_json['channel_names'].keys()
                               if 'channel_names' in self.dataset_json.keys()
                               else self.dataset_json['modality'].keys())
            intensity_statistics_per_channel = {}
            for i in range(num_channels):
                intensity_statistics_per_channel[i] = {
                    'mean': float(np.mean(foreground_intensities_per_channel[i])),
                    'median': float(np.median(foreground_intensities_per_channel[i])),
                    'std': float(np.std(foreground_intensities_per_channel[i])),
                    'min': float(np.min(foreground_intensities_per_channel[i])),
                    'max': float(np.max(foreground_intensities_per_channel[i])),
                    'percentile_99_5': float(np.percentile(foreground_intensities_per_channel[i], 99.5)),
                    'percentile_00_5': float(np.percentile(foreground_intensities_per_channel[i], 0.5)),
                }
            fingerprint = {'spacings': spacings,
                           'shapes_after_crop': shapes_after_crop,
                           'foreground_intensity_properties_per_channel': intensity_statistics_per_channel,
                           'median_relative_size_after_cropping': median_relative_size_after_cropping}
            try:
                save_json(fingerprint, properties_file)
            except Exception as e:
                # Never leave a half-written cache file behind.
                if isfile(properties_file):
                    os.remove(properties_file)
                raise e
        else:
            fingerprint = load_json(properties_file)
        return fingerprint
def valid_sensor_mac(sensor_mac):
    """Return True when *sensor_mac* looks like a real 8-character sensor MAC.

    Invalid MACs are best-effort unpaired from the dongle and their MQTT
    topics cleared, then False is returned.
    """
    # FIX: the original list contained the all-NUL entry twice; the duplicate
    # was removed (membership behavior is unchanged).
    invalid_mac_list = ['', '\x00\x00\x00\x00\x00\x00\x00\x00']
    if len(str(sensor_mac)) == 8 and sensor_mac not in invalid_mac_list:
        return True
    LOGGER.warning(f'Unpairing bad MAC: {sensor_mac}')
    try:
        WYZESENSE_DONGLE.Delete(sensor_mac)
        clear_topics(sensor_mac)
    except TimeoutError:
        # Dongle did not respond in time; the MAC is still reported invalid.
        pass
    return False
@register_canonicalize
@node_rewriter([Sum, Prod])
def local_op_of_op(fgraph, node):
    """Fuse nested reductions of the same kind (Sum(Sum(x)) / Prod(Prod(x)))
    into a single reduction over the combined axes.

    NOTE(review): the decorator lines were garbled in the source
    ('_canonicalize _rewriter([Sum, Prod])'); reconstructed as
    ``@register_canonicalize`` / ``@node_rewriter([Sum, Prod])`` — confirm
    against the surrounding rewriter registrations.
    """
    if isinstance(node.op, (Prod, Sum)):
        op_type = Sum if isinstance(node.op, Sum) else Prod
        (node_inps,) = node.inputs
        out_dtype = node.op.dtype
        # Only fuse when this node is the sole client of the inner reduction,
        # otherwise the inner result is still needed elsewhere.
        if len(fgraph.clients[node_inps]) == 1:
            if node_inps.owner and isinstance(node_inps.owner.op, node.op.__class__):
                # If either reduction covers all axes, the fusion reduces everything.
                if (node_inps.owner.op.axis is None) or (node.op.axis is None):
                    return [op_type(None, dtype=out_dtype)(node_inps.owner.inputs[0])]
                # Translate the outer op's axes into the coordinate space of
                # the inner op's input: each axis removed by the inner
                # reduction shifts the outer indices up by one.
                newaxis = list(node_inps.owner.op.axis)
                for i in node.op.axis:
                    new_i = i
                    for ii in node_inps.owner.op.axis:
                        if new_i >= ii:
                            new_i += 1
                    assert new_i not in newaxis
                    newaxis.append(new_i)
                assert len(newaxis) == len(list(node_inps.owner.op.axis) + list(node.op.axis))
                combined = op_type(newaxis, dtype=out_dtype)
                return [combined(node_inps.owner.inputs[0])]
class TextCapsBleu4Evaluator():
    """BLEU-4 evaluator for TextCaps captions, built on the coco-caption
    PTB tokenizer and Bleu scorer."""

    def __init__(self):
        # Imported lazily so the heavy coco-caption dependency is only
        # required when this evaluator is actually instantiated.
        from pycocoevalcap.tokenizer.ptbtokenizer import PTBTokenizer
        from pycocoevalcap.bleu.bleu import Bleu
        self.tokenizer = PTBTokenizer()
        self.scorer = Bleu(4)

    def eval_pred_list(self, pred_list):
        """Return the corpus-level BLEU-4 score for a list of entries, each
        providing 'gt_answers' (references) and 'pred_answer' (hypothesis)."""
        references = {}
        hypotheses = {}
        for idx, entry in enumerate(pred_list):
            references[idx] = [{'caption': gt} for gt in entry['gt_answers']]
            hypotheses[idx] = [{'caption': entry['pred_answer']}]
        references = self.tokenizer.tokenize(references)
        hypotheses = self.tokenizer.tokenize(hypotheses)
        score, _ = self.scorer.compute_score(references, hypotheses)
        # compute_score returns BLEU-1..4; index 3 is BLEU-4.
        return score[3]
def get_loader(image_root, gt_root, batchsize, trainsize, shuffle=True, num_workers=4, pin_memory=True, augmentation=False):
    """Build a DataLoader over a ``PolypDataset`` for the given image and
    ground-truth folders."""
    polyp_dataset = PolypDataset(image_root, gt_root, trainsize, augmentation)
    return data.DataLoader(
        dataset=polyp_dataset,
        batch_size=batchsize,
        shuffle=shuffle,
        num_workers=num_workers,
        pin_memory=pin_memory,
    )
# Primary SQLite result codes, mirroring the C-level SQLITE_* constants
# (0-28 plus the stepping codes ROW=100 and DONE=101).
SqliteErrorCode = enum.Enum(
    'SqliteErrorCode',
    [
        ('OK', 0),
        ('ERROR', 1),
        ('INTERNAL', 2),
        ('PERM', 3),
        ('ABORT', 4),
        ('BUSY', 5),
        ('LOCKED', 6),
        ('NOMEM', 7),
        ('READONLY', 8),
        ('INTERRUPT', 9),
        ('IOERR', 10),
        ('CORRUPT', 11),
        ('NOTFOUND', 12),
        ('FULL', 13),
        ('CANTOPEN', 14),
        ('PROTOCOL', 15),
        ('EMPTY', 16),
        ('SCHEMA', 17),
        ('TOOBIG', 18),
        ('CONSTRAINT', 19),
        ('MISMATCH', 20),
        ('MISUSE', 21),
        ('NOLFS', 22),
        ('AUTH', 23),
        ('FORMAT', 24),
        ('RANGE', 25),
        ('NOTADB', 26),
        ('NOTICE', 27),
        ('WARNING', 28),
        ('ROW', 100),
        ('DONE', 101),
    ],
)
def save_model(model, model_save_path, step):
    """Serialize *model*'s parameters (moved to CPU) and the training step.

    ``model_save_path`` may be a ``pathlib.Path`` or anything ``torch.save``
    accepts (string path or file-like object).
    """
    target = str(model_save_path) if isinstance(model_save_path, Path) else model_save_path
    # Move every tensor to CPU so the checkpoint loads on CPU-only machines.
    state = model.state_dict()
    for name in state:
        state[name] = state[name].cpu()
    torch.save({'step': step, 'model_state_dict': state}, target)
def scm(args):
    """Try to spawn a SYSTEM shell via SCM named-pipe impersonation when the
    'toSystem' flag is set."""
    # Explicit == True comparison kept: only the boolean True triggers the
    # escalation, matching the original gate.
    if args['toSystem'] != True:
        return
    printT('Try to spawn a system shell via scm & impersonation...')
    esc = Escalation()
    imp = Impersonate()
    status = esc.namedPipeImpersonationSystemViaSCM(ps=True, debug=False)
    imp.printCurrentThreadEffectiveToken()
    if status == True:
        # Fresh Impersonate instance executes cmd with the acquired token.
        imp = Impersonate()
        imp.executeCMDWithThreadEffectiveToken()
class AttrVI_ATTR_USB_BULK_IN_PIPE(RangeAttribute):
    """VI_ATTR_USB_BULK_IN_PIPE attribute declaration (USB RAW resources).

    Value range 129-143 is 0x81-0x8F, i.e. IN-direction USB endpoint
    addresses; -1 presumably marks 'not available' — confirm against the
    VISA attribute documentation.
    """

    # Resource classes this attribute applies to.
    resources = [(constants.InterfaceType.usb, 'RAW')]
    # No Python-friendly alias is exposed for this attribute.
    py_name = ''
    visa_name = 'VI_ATTR_USB_BULK_IN_PIPE'
    visa_type = 'ViInt16'
    default = NotAvailable
    # Readable, writable, and settable on a per-session (local) basis.
    (read, write, local) = (True, True, True)
    (min_value, max_value, values) = (129, 143, [(- 1)])
def _get_builtin_metadata():
    """Build detectron2-style metadata from the module-level ``categories``
    list: an identity id->contiguous-id map and names ordered by category id."""
    id_to_name = {entry['id']: entry['name'] for entry in categories}
    contiguous = {i: i for i in range(len(categories))}
    ordered_names = [id_to_name[cat_id] for cat_id in sorted(id_to_name)]
    return {
        'thing_dataset_id_to_contiguous_id': contiguous,
        'thing_classes': ordered_names,
    }
def test_two_loop_vars_one_accumulator() -> None:
    """AccumulationTable must record both enumerate() loop variables and the
    tracked accumulator, with 'N/A' placeholders for the initial row."""
    values = [10, 20, 30]
    sum_so_far = 0
    with AccumulationTable(['sum_so_far']) as table:
        for (index, item) in enumerate(values):
            sum_so_far = sum_so_far + item

    expected_loop_vars = {'index': ['N/A', 0, 1, 2], 'item': ['N/A', 10, 20, 30]}
    assert table.loop_variables == expected_loop_vars
    assert table.loop_accumulators == {'sum_so_far': [0, 10, 30, 60]}
class VanOverlappingPatchEmbedder(nn.Sequential):
    """Downsamples the input with an overlapping patchify convolution
    (padding = patch_size // 2) followed by batch normalization.

    Attribute names (``convolution``, ``normalization``) are part of the
    state-dict layout and must not change.
    """

    def __init__(self, in_channels: int, hidden_size: int, patch_size: int = 7, stride: int = 4):
        super().__init__()
        overlap_padding = patch_size // 2
        self.convolution = nn.Conv2d(
            in_channels,
            hidden_size,
            kernel_size=patch_size,
            stride=stride,
            padding=overlap_padding,
        )
        self.normalization = nn.BatchNorm2d(hidden_size)
class CosLrWarmupScheduler():
    """Cosine warmup scheduler: ramps every param group's lr from ~0 up to
    its base lr over ``total_iter`` calls to :meth:`step`.  Calls beyond
    ``total_iter`` leave the lr untouched."""

    def __init__(self, optimizer, total_iter):
        # FIX: was ``assert type(total_iter) is int`` — asserts are stripped
        # under ``python -O``, so validate with an explicit raise instead.
        # ``type(...) is int`` (not isinstance) is kept so bools are rejected.
        if type(total_iter) is not int:
            raise TypeError('total_iter must be an int')
        self.optimizer = optimizer
        self.total_iter = total_iter
        # Snapshot the base lr of every param group at construction time.
        self.base_lr = [group['lr'] for group in optimizer.param_groups]
        self.current_iter = 0

    def step(self):
        """Advance one iteration and rescale every param group's lr."""
        self.current_iter += 1
        if self.current_iter > self.total_iter:
            return
        # cos(pi + t*pi) sweeps -1 -> 1 as t goes 0 -> 1, so the scale goes
        # ~0 -> ~1; the 1.000001 offset keeps the first lr strictly positive.
        progress = self.current_iter / self.total_iter
        scale = (math.cos(math.pi + progress * math.pi) + 1.000001) * 0.5
        for i, group in enumerate(self.optimizer.param_groups):
            group['lr'] = scale * self.base_lr[i]
class EarlyStopping(Callback):
    """Stop training when the monitored metric stops improving, and restore
    the best weights at the end of training.

    NOTE(review): the field/__post_init__ layout implies this class is meant
    to be decorated with ``@dataclass``; the decorator is not visible in this
    chunk — confirm it exists at the definition site.
    """

    # Key in ``logs`` to monitor at each epoch end.
    early_stopping_metric: str
    # True if larger metric values are better.
    is_maximize: bool
    # Minimum change that counts as an improvement.
    tol: float = 0.0
    # Number of non-improving epochs tolerated before stopping.
    patience: int = 5

    def __post_init__(self):
        self.best_epoch = 0
        self.stopped_epoch = 0
        self.wait = 0
        self.best_weights = None
        # Start from +/- infinity so the first measured value always improves.
        self.best_loss = np.inf
        if self.is_maximize:
            self.best_loss = (- self.best_loss)
        super().__init__()

    def on_epoch_end(self, epoch, logs=None):
        """Track the best metric value; request a stop after ``patience``
        consecutive non-improving epochs."""
        current_loss = logs.get(self.early_stopping_metric)
        if (current_loss is None):
            # Metric absent from this epoch's logs: nothing to evaluate.
            return
        loss_change = (current_loss - self.best_loss)
        max_improved = (self.is_maximize and (loss_change > self.tol))
        min_improved = ((not self.is_maximize) and ((- loss_change) > self.tol))
        if (max_improved or min_improved):
            self.best_loss = current_loss
            self.best_epoch = epoch
            # NOTE(review): wait resets to 1 (not 0) on improvement — this
            # affects the effective patience window by one epoch; confirm.
            self.wait = 1
            # Deep copy so later training does not mutate the snapshot.
            self.best_weights = copy.deepcopy(self.trainer.network.state_dict())
        else:
            if (self.wait >= self.patience):
                self.stopped_epoch = epoch
                self.trainer._stop_training = True
            self.wait += 1

    def on_train_end(self, logs=None):
        """Publish best epoch/cost to the trainer, restore the best weights
        and print a summary."""
        self.trainer.best_epoch = self.best_epoch
        self.trainer.best_cost = self.best_loss
        if (self.best_weights is not None):
            self.trainer.network.load_state_dict(self.best_weights)
        if (self.stopped_epoch > 0):
            msg = f'''
Early stopping occurred at epoch {self.stopped_epoch}'''
            msg += (f' with best_epoch = {self.best_epoch} and ' + f'best_{self.early_stopping_metric} = {round(self.best_loss, 5)}')
            print(msg)
        else:
            # Training ran to max_epochs without triggering early stopping.
            msg = ((f'Stop training because you reached max_epochs = {self.trainer.max_epochs}' + f' with best_epoch = {self.best_epoch} and ') + f'best_{self.early_stopping_metric} = {round(self.best_loss, 5)}')
            print(msg)
        print('Best weights from best epoch are automatically used!')
def main(args):
    """WARP-Q entry point: score a CSV batch of reference/degraded wave pairs
    ('predict_csv' mode) or a single file pair, printing/saving raw and
    mapped WARP-Q scores (optionally with correlation plots against MOS)."""
    warpq = warpqMetric(args)
    warpq_rawScore = []
    warpq_mappedScore = []
    if (args['mode'] == 'predict_csv'):
        df = pd.read_csv(args['csv_input'], index_col=None)
        # Score every (Ref_Wave, Test_Wave) pair listed in the CSV.
        # (The 'sores' typo below is in the original progress label.)
        for (index, row) in tqdm(df.iterrows(), total=df.shape[0], desc='Compute quality sores...'):
            (rawScore, mappedScore) = warpq.evaluate(ref_path=row['Ref_Wave'], test_path=row['Test_Wave'])
            warpq_rawScore.append(rawScore)
            warpq_mappedScore.append(mappedScore)
        df['Raw WARP-Q Score'] = warpq_rawScore
        df['Mapped WARP-Q Score'] = warpq_mappedScore
        # Ensure the output directory exists before writing the CSV.
        if (not os.path.exists(os.path.dirname(args['csv_output']))):
            os.makedirs(os.path.dirname(args['csv_output']))
        df.to_csv(args['csv_output'], index=None)
        if args['getPlots']:
            # Pearson/Spearman correlations of raw scores vs. MOS, per codec.
            (pearson_coef, p_pearson) = pearsonr(df['Raw WARP-Q Score'], df['MOS'])
            (Spearmanr_coef, p_spearman) = spearmanr(df['Raw WARP-Q Score'], df['MOS'])
            sns.relplot(x='MOS', y='Raw WARP-Q Score', hue='Codec', palette='muted', data=df).fig.suptitle(((('Correlations: Pearsonr= ' + str(round(pearson_coef, 2))) + ', Spearman=') + str(round(Spearmanr_coef, 2))))
            # Same correlations for the mapped (MOS-aligned) scores.
            (pearson_coef, p_value) = pearsonr(df['Mapped WARP-Q Score'], df['MOS'])
            (Spearmanr_coef, p_spearman) = spearmanr(df['Mapped WARP-Q Score'], df['MOS'])
            sns.relplot(x='MOS', y='Mapped WARP-Q Score', hue='Codec', palette='muted', data=df).fig.suptitle(((('Correlations: Pearsonr= ' + str(round(pearson_coef, 2))) + ', Spearman=') + str(round(Spearmanr_coef, 2))))
        print(('\nResults are saved in ' + args['csv_output']))
    else:
        # Single-pair mode: score args['org'] against args['deg'] directly.
        print('Compute quality scores...')
        (warpq_rawScore, warpq_mappedScore) = warpq.evaluate(args['org'], args['deg'])
        print(('\nRaw WARP-Q score (lower rating means better quality): ' + str(warpq_rawScore)))
        print(('Mapped WARP-Q score (higher rating means better quality): ' + str(warpq_mappedScore)))
    # NOTE(review): placement of the final 'Done!' (function level vs inside
    # the else branch) is ambiguous in the collapsed source — confirm.
    print('Done!')
class Config():
    """Configuration object backed by a ``ConfigDict``.

    Loads config files in .py/.json/.yaml/.yml format, supports inheritance
    between files via the BASE_KEY (``_base_``-style) entry with DELETE_KEY to
    opt out of merging, predefined-variable substitution (``{{ fileDirname }}``
    etc.), attribute-style access, pretty-printing and dumping.

    NOTE(review): several methods below take no ``self``/``cls`` and read like
    ``@staticmethod``s, and ``dict``/``filename``/``text``/``pretty_text`` read
    like ``@property``s (``self.pretty_text`` is accessed without a call in
    ``dump``). The decorators appear to have been lost when this file was
    flattened -- confirm against the original source.
    """

    def _validate_py_syntax(filename):
        # Parse the file with ast purely to surface syntax errors early,
        # before it is imported as a module.
        with open(filename, 'r') as f:
            content = f.read()
        try:
            ast.parse(content)
        except SyntaxError as e:
            raise SyntaxError(f'There are syntax errors in config file (unknown): {e}')

    def _substitute_predefined_vars(filename, temp_config_name):
        # Replace ``{{ fileDirname }}``-style placeholders in the config text
        # with values derived from the config file's own path, writing the
        # substituted text to temp_config_name.
        file_dirname = osp.dirname(filename)
        file_basename = osp.basename(filename)
        file_basename_no_extension = osp.splitext(file_basename)[0]
        file_extname = osp.splitext(filename)[1]
        support_templates = dict(fileDirname=file_dirname, fileBasename=file_basename, fileBasenameNoExtension=file_basename_no_extension, fileExtname=file_extname)
        with open(filename, 'r') as f:
            config_file = f.read()
        for (key, value) in support_templates.items():
            regexp = (('\\{\\{\\s*' + str(key)) + '\\s*\\}\\}')
            # Normalise Windows path separators so the substituted value is
            # safe inside re.sub's replacement string.
            value = value.replace('\\', '/')
            config_file = re.sub(regexp, value, config_file)
        with open(temp_config_name, 'w') as tmp_config_file:
            tmp_config_file.write(config_file)

    def _file2dict(filename, use_predefined_variables=True):
        # Load one config file (recursively merging its bases) into a plain
        # dict plus the concatenated text of all contributing files.
        filename = osp.abspath(osp.expanduser(filename))
        from .path_utils import check_files_exist
        check_files_exist(filename)
        fileExtname = osp.splitext(filename)[1]
        if (fileExtname not in ['.py', '.json', '.yaml', '.yml']):
            raise IOError('Only py/yml/yaml/json type are supported now!')
        with tempfile.TemporaryDirectory() as temp_config_dir:
            temp_config_file = tempfile.NamedTemporaryFile(dir=temp_config_dir, suffix=fileExtname)
            if (platform.system() == 'Windows'):
                # On Windows the temp file cannot be reopened while held open.
                temp_config_file.close()
            temp_config_name = osp.basename(temp_config_file.name)
            if use_predefined_variables:
                Config._substitute_predefined_vars(filename, temp_config_file.name)
            else:
                shutil.copyfile(filename, temp_config_file.name)
            if filename.endswith('.py'):
                # Import the (substituted) copy as a throwaway module and
                # harvest its non-dunder globals as the config dict.
                temp_module_name = osp.splitext(temp_config_name)[0]
                sys.path.insert(0, temp_config_dir)
                Config._validate_py_syntax(filename)
                mod = import_module(temp_module_name)
                sys.path.pop(0)
                cfg_dict = {name: value for (name, value) in mod.__dict__.items() if (not name.startswith('__'))}
                # Drop the module so repeated loads do not hit the import cache.
                del sys.modules[temp_module_name]
            elif filename.endswith(('.yml', '.yaml', '.json')):
                import pyrl
                cfg_dict = pyrl.load(temp_config_file.name)
            temp_config_file.close()
        # cfg_text records the original (unsubstituted) file contents,
        # prefixed by the file path.
        cfg_text = (filename + '\n')
        with open(filename, 'r') as f:
            cfg_text += f.read()
        if (BASE_KEY in cfg_dict):
            # Recursively load every base file, require their keys to be
            # disjoint, then merge this file's dict on top of the bases.
            cfg_dir = osp.dirname(filename)
            base_filename = cfg_dict.pop(BASE_KEY)
            base_filename = (base_filename if isinstance(base_filename, list) else [base_filename])
            cfg_dict_list = list()
            cfg_text_list = list()
            for f in base_filename:
                (_cfg_dict, _cfg_text) = Config._file2dict(osp.join(cfg_dir, f))
                cfg_dict_list.append(_cfg_dict)
                cfg_text_list.append(_cfg_text)
            base_cfg_dict = dict()
            for c in cfg_dict_list:
                if (len((base_cfg_dict.keys() & c.keys())) > 0):
                    raise KeyError('Duplicate key is not allowed among bases')
                base_cfg_dict.update(c)
            base_cfg_dict = Config._merge_a_into_b(cfg_dict, base_cfg_dict)
            cfg_dict = base_cfg_dict
            cfg_text_list.append(cfg_text)
            cfg_text = '\n'.join(cfg_text_list)
        return (cfg_dict, cfg_text)

    def _merge_a_into_b(a, b, allow_list_keys=False):
        # Recursively merge dict ``a`` into (a copy of) ``b``; ``a`` wins.
        # With allow_list_keys, digit string keys index into list values.
        b = b.copy()
        for (k, v) in a.items():
            if (allow_list_keys and k.isdigit() and isinstance(b, list)):
                k = int(k)
                if (len(b) <= k):
                    raise KeyError(f'Index {k} exceeds the length of list {b}')
                b[k] = Config._merge_a_into_b(v, b[k], allow_list_keys)
            elif (isinstance(v, dict) and (k in b) and (not v.pop(DELETE_KEY, False))):
                allowed_types = ((dict, list) if allow_list_keys else dict)
                if (not isinstance(b[k], allowed_types)):
                    raise TypeError(f'{k}={v} in child config cannot inherit from base because {k} is a dict in the child config but is of type {type(b[k])} in base config. \nYou may set `{DELETE_KEY}=True` to ignore the base config')
                b[k] = Config._merge_a_into_b(v, b[k], allow_list_keys)
            else:
                # Plain override (also the DELETE_KEY=True path).
                b[k] = v
        return b

    def fromfile(filename, use_predefined_variables=True, import_custom_modules=True):
        # Public constructor: load a file into a Config instance, optionally
        # importing modules named in the 'custom_imports' config entry first.
        (cfg_dict, cfg_text) = Config._file2dict(filename, use_predefined_variables)
        if (import_custom_modules and cfg_dict.get('custom_imports', None)):
            from .module_utils import import_modules_from_strings
            import_modules_from_strings(**cfg_dict['custom_imports'])
        return Config(cfg_dict, cfg_text=cfg_text, filename=filename)

    def auto_argparser(description=None):
        # Build an ArgumentParser whose options mirror the keys of the config
        # file named by the first positional argument.
        partial_parser = ArgumentParser(description=description)
        partial_parser.add_argument('config', help='config file path')
        cfg_file = partial_parser.parse_known_args()[0].config
        cfg = Config.fromfile(cfg_file)
        parser = ArgumentParser(description=description)
        parser.add_argument('config', help='config file path')
        add_args(parser, cfg)
        return (parser, cfg)

    def __init__(self, cfg_dict=None, cfg_text=None, filename=None):
        if (cfg_dict is None):
            cfg_dict = dict()
        elif (not isinstance(cfg_dict, dict)):
            raise TypeError(f'cfg_dict must be a dict, but got {type(cfg_dict)}')
        for key in cfg_dict:
            if (key in RESERVED_KEYS):
                raise KeyError(f'{key} is reserved for config file')
        # Use object.__setattr__-style assignment to bypass our own
        # __setattr__, which forwards to the inner ConfigDict.
        super(Config, self).__setattr__('_cfg_dict', ConfigDict(cfg_dict))
        super(Config, self).__setattr__('_filename', filename)
        if cfg_text:
            text = cfg_text
        elif filename:
            with open(filename, 'r') as f:
                text = f.read()
        else:
            text = ''
        super(Config, self).__setattr__('_text', text)

    def dict(self):
        # Underlying ConfigDict (NOTE(review): likely a @property upstream).
        return self._cfg_dict

    def filename(self):
        # Source file path, or None (NOTE(review): likely a @property upstream).
        return self._filename

    def text(self):
        # Raw text of all contributing files (NOTE(review): likely a @property).
        return self._text

    def pretty_text(self):
        # Render the config as formatted Python source (yapf-pretty-printed).
        # NOTE(review): likely a @property upstream.
        indent = 4

        def _indent(s_, num_spaces):
            # Indent every line after the first by num_spaces.
            s = s_.split('\n')
            if (len(s) == 1):
                return s_
            first = s.pop(0)
            s = [((num_spaces * ' ') + line) for line in s]
            s = '\n'.join(s)
            s = ((first + '\n') + s)
            return s

        def _format_basic_types(k, v, use_mapping=False):
            # Scalars: repr strings with quotes, everything else via str().
            if isinstance(v, str):
                v_str = f"'{v}'"
            else:
                v_str = str(v)
            if use_mapping:
                k_str = (f"'{k}'" if isinstance(k, str) else str(k))
                attr_str = f'{k_str}: {v_str}'
            else:
                attr_str = f'{str(k)}={v_str}'
            attr_str = _indent(attr_str, indent)
            return attr_str

        def _format_list(k, v, use_mapping=False):
            # A list of dicts is rendered as a [dict(...), ...] literal;
            # any other list falls back to basic formatting.
            if all((isinstance(_, dict) for _ in v)):
                v_str = '[\n'
                v_str += '\n'.join((f'dict({_indent(_format_dict(v_), indent)}),' for v_ in v)).rstrip(',')
                if use_mapping:
                    k_str = (f"'{k}'" if isinstance(k, str) else str(k))
                    attr_str = f'{k_str}: {v_str}'
                else:
                    attr_str = f'{str(k)}={v_str}'
                attr_str = (_indent(attr_str, indent) + ']')
            else:
                attr_str = _format_basic_types(k, v, use_mapping)
            return attr_str

        def _contain_invalid_identifier(dict_str):
            # True when any key is not a valid Python identifier, which
            # forces dict-literal ({...}) instead of dict(...) syntax.
            contain_invalid_identifier = False
            for key_name in dict_str:
                contain_invalid_identifier |= (not str(key_name).isidentifier())
            return contain_invalid_identifier

        def _format_dict(input_dict, outest_level=False):
            r = ''
            s = []
            use_mapping = _contain_invalid_identifier(input_dict)
            if use_mapping:
                r += '{'
            for (idx, (k, v)) in enumerate(input_dict.items()):
                is_last = (idx >= (len(input_dict) - 1))
                end = ('' if (outest_level or is_last) else ',')
                if isinstance(v, dict):
                    v_str = ('\n' + _format_dict(v))
                    if use_mapping:
                        k_str = (f"'{k}'" if isinstance(k, str) else str(k))
                        attr_str = f'{k_str}: dict({v_str}'
                    else:
                        attr_str = f'{str(k)}=dict({v_str}'
                    attr_str = ((_indent(attr_str, indent) + ')') + end)
                elif isinstance(v, list):
                    attr_str = (_format_list(k, v, use_mapping) + end)
                elif callable(v):
                    # Callables are emitted as their source code; the print
                    # looks like leftover debugging -- confirm before removing.
                    print(v)
                    attr_str = inspect.getsource(v)
                else:
                    attr_str = (_format_basic_types(k, v, use_mapping) + end)
                s.append(attr_str)
            r += '\n'.join(s)
            if use_mapping:
                r += '}'
            return r

        cfg_dict = self._cfg_dict.to_dict()
        text = _format_dict(cfg_dict, outest_level=True)
        yapf_style = dict(based_on_style='pep8', blank_line_before_nested_class_or_def=True, split_before_expression_after_opening_paren=True)
        (text, _) = FormatCode(text, style_config=yapf_style, verify=True)
        return text

    def __repr__(self):
        return f'Config (path: {self.filename}): \n{self._cfg_dict.__repr__()}'

    def __len__(self):
        return len(self._cfg_dict)

    def __getattr__(self, name):
        # Delegate attribute access to the inner ConfigDict.
        return getattr(self._cfg_dict, name)

    def __getitem__(self, name):
        return self._cfg_dict.__getitem__(name)

    def __setattr__(self, name, value):
        if isinstance(value, dict):
            value = ConfigDict(value)
        self._cfg_dict.__setattr__(name, value)

    def __setitem__(self, name, value):
        if isinstance(value, dict):
            value = ConfigDict(value)
        self._cfg_dict.__setitem__(name, value)

    def __iter__(self):
        return iter(self._cfg_dict)

    def __getstate__(self):
        # Pickle support: persist the three hidden attributes directly.
        return (self._cfg_dict, self._filename, self._text)

    def __setstate__(self, state):
        (_cfg_dict, _filename, _text) = state
        super(Config, self).__setattr__('_cfg_dict', _cfg_dict)
        super(Config, self).__setattr__('_filename', _filename)
        super(Config, self).__setattr__('_text', _text)

    def dump(self, file=None):
        # Serialise the config: Python configs dump as pretty source, other
        # formats via the project's generic dump helper. Returns the text
        # when no file is given, otherwise writes to ``file``.
        cfg_dict = super(Config, self).__getattribute__('_cfg_dict').to_dict()
        if self.filename.endswith('.py'):
            if (file is None):
                return self.pretty_text
            else:
                with open(file, 'w') as f:
                    f.write(self.pretty_text)
        else:
            from ..file import dump
            if (file is None):
                file_format = self.filename.split('.')[(- 1)]
                return dump(cfg_dict, file_format=file_format)
            else:
                dump(cfg_dict, file)

    def merge_from_dict(self, options, allow_list_keys=True):
        # Merge dotted-key overrides (e.g. {'model.backbone.depth': 50})
        # into the config, expanding dots into nested ConfigDicts first.
        option_cfg_dict = {}
        for (full_key, v) in options.items():
            d = option_cfg_dict
            key_list = full_key.split('.')
            for subkey in key_list[:(- 1)]:
                d.setdefault(subkey, ConfigDict())
                d = d[subkey]
            subkey = key_list[(- 1)]
            d[subkey] = v
        cfg_dict = super(Config, self).__getattribute__('_cfg_dict')
        super(Config, self).__setattr__('_cfg_dict', Config._merge_a_into_b(option_cfg_dict, cfg_dict, allow_list_keys=allow_list_keys))
class TagModelToBaseTest(TestCase):
    """Checks that a custom tag base class is honoured by the generated
    tag model."""

    manage_models = [test_models.TagFieldOptionsModel]

    def test_custom_base_used(self):
        generated = test_models.CustomTagBaseTest.singletag.tag_model
        # The generated model must inherit both the custom base and the
        # stock TagModel, and carry the custom marker attribute.
        for expected_base in (test_models.CustomTagBase, tag_models.TagModel):
            self.assertTrue(issubclass(generated, expected_base))
        self.assertTrue(generated.is_custom)
def resolve_module_exports_from_url(url: str, max_depth: int, is_re_export: bool=False) -> set[str]:
    """Fetch a module over HTTP and collect its exported names, following
    re-exports recursively up to ``max_depth`` levels.

    Returns an empty set when the depth budget is exhausted or the fetch
    fails; partial results are still returned for resolvable re-exports.
    """
    if (max_depth == 0):
        logger.warning(f'Did not resolve all exports for {url} - max depth reached')
        return set()
    try:
        text = requests.get(url, timeout=5).text
    except requests.exceptions.ConnectionError as error:
        # BUG FIX: the original built the message with a plain (non-f) string,
        # logging the literal text '{error.errno}'. Since `error` is always
        # bound inside this handler, the `error is None` guard was dead code.
        logger.warning(f'Did not resolve exports for url {url} - {error.errno}')
        return set()
    (export_names, references) = resolve_module_exports_from_source(text, exclude_default=is_re_export)
    for ref in references:
        # BUG FIX: the original rebound `url` here, so each subsequent
        # reference was resolved relative to the previously *resolved* URL
        # instead of this module's URL.
        ref_url = _resolve_relative_url(url, ref)
        export_names.update(resolve_module_exports_from_url(ref_url, (max_depth - 1), is_re_export=True))
    return export_names
class Model2Model(PreTrainedEncoderDecoder):
    """Encoder-decoder wrapper where encoder and decoder are initialised
    from the same pretrained checkpoint (Bert checkpoints only)."""

    def __init__(self, *args, **kwargs):
        super(Model2Model, self).__init__(*args, **kwargs)
        self.tie_weights()

    def tie_weights(self):
        # No-op here; presumably a hook for sharing encoder/decoder weights
        # in subclasses -- confirm against the base class contract.
        pass

    def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs):
        # NOTE(review): reads like a @classmethod; the decorator may have been
        # lost when this file was flattened.
        name = pretrained_model_name_or_path
        # Accept only plain Bert checkpoints: the name must contain 'bert'
        # but not the roberta/distilbert variants.
        looks_like_bert = (('bert' in name)
                           and ('roberta' not in name)
                           and ('distilbert' not in name))
        if not looks_like_bert:
            raise ValueError('Only the Bert model is currently supported.')
        return super(Model2Model, cls).from_pretrained(
            *args,
            encoder_pretrained_model_name_or_path=name,
            decoder_pretrained_model_name_or_path=name,
            **kwargs)
class ShortReprMixin():
    """Mixin producing a compact ``module.Class(args)`` repr from the
    ``(name, value)`` pairs exposed by ``self._repr_items``.

    Values whose name starts with an underscore render as ``name=value``;
    all other values render positionally. Hosts must define ``_repr_items``.
    """

    def __repr__(self):
        return '{}{}({})'.format('{}.'.format(self.__class__.__module__), self.__class__.__name__, ', '.join((('{!r}'.format(value) if (not name.startswith('_')) else '{}={!r}'.format(name, value)) for (name, value) in self._repr_items)))

    def print_nested(self, stream=None, level=0, indent=' ', prefix='', first_level_indent=True, trailer='', print_doc=True, first_line_suffix=''):
        """Write this object's repr line to *stream* with nesting decoration.

        ``prefix`` + ``level * indent`` lead the line when
        ``first_level_indent`` is true; ``trailer`` and ``first_line_suffix``
        are appended before the newline. ``print_doc`` is unused in this
        implementation -- presumably consumed by richer overrides; confirm
        against subclasses.
        """
        if stream is None:
            # BUG FIX: the default of stream=None previously crashed with
            # AttributeError on stream.write; fall back to stdout.
            import sys
            stream = sys.stdout
        stream.write((((prefix + (level * indent)) if first_level_indent else '') + '{!r}{}{}\n'.format(self, trailer, first_line_suffix)))
class TestOCSPAcceptableResponses():
    """Behavioural tests for x509.OCSPAcceptableResponses: construction,
    equality, hashing, iteration and DER encoding."""

    def test_invalid_types(self):
        # Both a non-iterable and an iterable of non-OID items are rejected.
        for bad_value in (38, [38]):
            with pytest.raises(TypeError):
                x509.OCSPAcceptableResponses(bad_value)

    def test_eq(self):
        left = x509.OCSPAcceptableResponses([ObjectIdentifier('1.2.3')])
        right = x509.OCSPAcceptableResponses([ObjectIdentifier('1.2.3')])
        assert left == right

    def test_ne(self):
        left = x509.OCSPAcceptableResponses([ObjectIdentifier('1.2.3')])
        right = x509.OCSPAcceptableResponses([ObjectIdentifier('1.2.4')])
        assert left != right
        assert left != object()

    def test_repr(self):
        empty = x509.OCSPAcceptableResponses([])
        assert repr(empty) == '<OCSPAcceptableResponses(responses=[])>'

    def test_hash(self):
        first = x509.OCSPAcceptableResponses([ObjectIdentifier('1.2.3')])
        same = x509.OCSPAcceptableResponses([ObjectIdentifier('1.2.3')])
        other = x509.OCSPAcceptableResponses([ObjectIdentifier('1.2.4')])
        assert hash(first) == hash(same)
        assert hash(first) != hash(other)

    def test_iter(self):
        ext = x509.OCSPAcceptableResponses([ObjectIdentifier('1.2.3')])
        assert list(ext) == [ObjectIdentifier('1.2.3')]

    def test_public_bytes(self):
        # Empty sequence encodes to a bare DER SEQUENCE.
        assert x509.OCSPAcceptableResponses([]).public_bytes() == b'0\x00'
        # Single OID (id-pkix-ocsp-basic) round-trips to its known DER bytes.
        basic = x509.OCSPAcceptableResponses([ObjectIdentifier('1.3.6.1.5.5.7.48.1.1')])
        assert basic.public_bytes() == b'0\x0b\x06\t+\x06\x01\x05\x05\x070\x01\x01'
class DTensorEntry(Entry):
    """Checkpoint entry describing a DTensor: its shards, device mesh and
    per-dimension placement map."""

    shards: List[Shard]
    mesh: NestedList
    dim_map: List[List[int]]

    def __init__(self, shards: List[Shard], mesh: NestedList, dim_map: List[List[int]]) -> None:
        super().__init__(type='DTensor')
        self.shards = shards
        self.mesh = mesh
        self.dim_map = dim_map

    def from_yaml_obj(cls, yaml_obj: Any) -> 'DTensorEntry':
        # NOTE(review): reads like a @classmethod; decorator may have been
        # lost when this file was flattened.
        # Drop the discriminator if present; __init__ re-adds it as 'DTensor'.
        yaml_obj.pop('type', None)
        yaml_obj['shards'] = [Shard.from_yaml_obj(raw_shard) for raw_shard in yaml_obj['shards']]
        return cls(**yaml_obj)
def episodic_iterator(data, num_classes, transforms, forcecpu=False, use_hd=False):
    """Build an episodic data source for few-shot training.

    Returns a DataLoader over an EpisodicCPUDataset when the configured
    dataset device is the CPU (or *forcecpu* is set); otherwise returns a
    device-side EpisodicDataset directly.
    """
    on_cpu = (args.dataset_device == 'cpu') or forcecpu
    if not on_cpu:
        return EpisodicDataset(data, num_classes, transforms, use_hd=use_hd)
    cpu_dataset = EpisodicCPUDataset(data, num_classes, transforms, use_hd=use_hd)
    # Round the batch size down to a whole multiple of n_ways so every batch
    # holds complete episodes.
    episode_batch = (args.batch_size // args.n_ways) * args.n_ways
    return torch.utils.data.DataLoader(
        cpu_dataset,
        batch_size=episode_batch,
        shuffle=False,
        num_workers=min(8, os.cpu_count()))
class DepositfilesComFolder(SimpleDecrypter):
    """Folder decrypter for depositfiles.com: extracts the individual file
    links from a folder page using LINK_PATTERN."""

    __name__ = 'DepositfilesComFolder'
    __type__ = 'decrypter'
    __version__ = '0.07'
    __status__ = 'testing'

    # BUG FIX: the pattern string was corrupted in this file (an unterminated
    # quote swallowed the following attributes). Restored from the upstream
    # pyLoad plugin of the same name/version.
    # NOTE(review): confirm this regex against the project's history.
    __pattern__ = r'http://(?:www\.)?depositfiles\.com/folders/\w+'
    __config__ = [('enabled', 'bool', 'Activated', True),
                  ('use_premium', 'bool', 'Use premium account if available', True),
                  ('folder_per_package', 'Default;Yes;No', 'Create folder for each package', 'Default'),
                  ('max_wait', 'int', 'Reconnect if waiting time is greater than minutes', 10)]

    __description__ = 'Depositfiles.com folder decrypter plugin'
    __license__ = 'GPLv3'
    __authors__ = [('zoidberg', '')]

    # Regex capturing each file URL from the folder page HTML.
    LINK_PATTERN = r'<div class="progressName".*?>\s*<a href="(.+?)" title=".+?" target="_blank">'
class XIQueryDevice(rq.ReplyRequest):
    """XInput extension XIQueryDevice request (minor opcode 48).

    Queries the device identified by ``deviceid`` and receives a list of
    DeviceInfo structures in the reply. Presumably the usual XI2 wildcard
    device ids are accepted as well -- confirm against the X Input 2 spec.
    """
    # Request wire format: extension major opcode, minor opcode 48,
    # request length, 16-bit device id, 2 pad bytes.
    _request = rq.Struct(rq.Card8('opcode'), rq.Opcode(48), rq.RequestLength(), DEVICEID('deviceid'), rq.Pad(2))
    # Reply wire format: standard reply header, 16-bit count of devices,
    # 22 pad bytes, then the variable-length list of DeviceInfo structs.
    _reply = rq.Struct(rq.ReplyCode(), rq.Pad(1), rq.Card16('sequence_number'), rq.ReplyLength(), rq.LengthOf('devices', 2), rq.Pad(22), rq.List('devices', DeviceInfo))
class StoppableProcess(context.Process):
    """Process with a cooperative stop flag backed by a context Event."""

    def __init__(self):
        super().__init__()
        self._should_stop = context.Event()
        self._should_stop.clear()

    def join(self, timeout=0):
        """Wait up to *timeout* for the stop flag, request a stop if it was
        not already set, then delegate to Process.join without blocking."""
        self._should_stop.wait(timeout)
        if not self.should_stop():
            self.stop()
        return super().join(0)

    def stop(self):
        # Signal the run loop to exit cooperatively.
        self._should_stop.set()

    def should_stop(self):
        return self._should_stop.is_set()

    def __repr__(self):
        return f'<{self.__class__.__name__}(should_stop={self.should_stop()})>'