text
string
size
int64
token_count
int64
""" Content negotiation selects a appropriated parser and renderer for a HTTP request. """ from abc import ABCMeta, abstractmethod from .mimetypes import MimeType class ContentNegotiation(metaclass=ABCMeta): """ Base class for all content negotiations. """ @abstractmethod def select_parser(self, request, parsers): """ Selects the appropriated parser for the given request. :param request: The HTTP request. :param parsers: The lists of parsers. :return: The parser selected or none. """ pass @abstractmethod def select_renderer(self, request, renderers): """ Selects the appropriated renderer for the given request. :param request: The HTTP request. :param renderers: The lists of renderers. :return: The renderer selected or none. """ pass class DefaultContentNegotiation(ContentNegotiation): """ Selects a parser by request content type and a renderer by request accept. """ def select_parser(self, request, parsers): """ Selects the appropriated parser which matches to the request's content type. :param request: The HTTP request. :param parsers: The lists of parsers. :return: The parser selected or none. """ if not request.content_type: return parsers[0], parsers[0].mimetype mimetype = MimeType.parse(request.content_type) for parser in parsers: if mimetype.match(parser.mimetype): return parser, mimetype return None, None def select_renderer(self, request, renderers): """ Selects the appropriated parser which matches to the request's accept. :param request: The HTTP request. :param renderers: The lists of parsers. :return: The parser selected or none. """ if not len(request.accept_mimetypes): return renderers[0], renderers[0].mimetype for mimetype, quality in request.accept_mimetypes: accept_mimetype = MimeType.parse(mimetype) for renderer in renderers: if accept_mimetype.match(renderer.mimetype): return renderer, renderer.mimetype.replace(params=accept_mimetype.params) return None, None
2,354
632
import unittest import mock import six import codecs import os import json import logging import shutil import tarfile import io from io import BytesIO import uuid from docker_squash.squash import Squash from docker_squash.errors import SquashError, SquashUnnecessaryError from docker_squash.lib import common if not six.PY3: import docker_squash.lib.xtarfile class ImageHelper(object): @staticmethod def top_layer_path(tar): # tar_object.seek(0) reader = codecs.getreader("utf-8") if 'repositories' in tar.getnames(): repositories_member = tar.getmember('repositories') repositories = json.load( reader(tar.extractfile(repositories_member))) return repositories.popitem()[1].popitem()[1] if 'manifest.json' in tar.getnames(): manifest_member = tar.getmember('manifest.json') manifest = json.load(reader(tar.extractfile(manifest_member))) return manifest[0]["Layers"][-1].split("/")[0] class IntegSquash(unittest.TestCase): BUSYBOX_IMAGE = "busybox:1.34" log = logging.getLogger() handler = logging.StreamHandler() formatter = logging.Formatter( '%(asctime)s %(name)-12s %(levelname)-8s %(message)s') handler.setFormatter(formatter) log.addHandler(handler) log.setLevel(logging.DEBUG) docker = common.docker_client(log) @classmethod def build_image(cls, dockerfile): IntegSquash.image = IntegSquash.Image(dockerfile) IntegSquash.image.__enter__() @classmethod def cleanup_image(cls): IntegSquash.image.__exit__(None, None, None) class Image(object): def __init__(self, dockerfile): self.dockerfile = dockerfile self.docker = TestIntegSquash.docker self.name = "integ-%s" % uuid.uuid1() self.tag = "%s:latest" % self.name def __enter__(self): f = BytesIO(self.dockerfile.encode('utf-8')) for line in self.docker.build(fileobj=f, tag=self.tag, rm=True): try: print(json.loads(line.decode("utf-8"))["stream"].strip()) except: print(line) self.history = self.docker.history(self.tag) self.layers = [o['Id'] for o in self.history] self.metadata = self.docker.inspect_image(self.tag) self.tar = 
self._save_image() with tarfile.open(fileobj=self.tar, mode='r') as tar: self.tarnames = tar.getnames() return self def __exit__(self, exc_type, exc_val, exc_tb): if not os.getenv('CI'): self.docker.remove_image(image=self.tag, force=True) # Duplicated, I know... def _save_image(self): image = self.docker.get_image(self.tag) buf = BytesIO() for chunk in image: buf.write(chunk) buf.seek(0) # Rewind return buf def assertFileExistsInLayer(self, name, layer=-1): self.tar.seek(0) # Rewind reader = codecs.getreader("utf-8") with tarfile.open(fileobj=self.tar, mode='r') as tar: manifest_member = tar.getmember("manifest.json") manifest_file = tar.extractfile(manifest_member) manifest = json.load(reader(manifest_file)) layer_member = tar.getmember(manifest[0]["Layers"][layer]) layer_file = tar.extractfile(layer_member) with tarfile.open(fileobj=layer_file, mode='r') as layer_tar: assert name in layer_tar.getnames( ), "File '%s' was not found in layer files: %s" % (name, layer_tar.getnames()) class SquashedImage(object): def __init__(self, image, number_of_layers=None, output_path=None, load_image=True, numeric=False, tmp_dir=None, log=None, development=False, tag=True): self.image = image self.number_of_layers = number_of_layers self.docker = TestIntegSquash.docker self.log = log or TestIntegSquash.log if tag: self.tag = "%s:squashed" % self.image.name else: self.tag = None self.output_path = output_path self.load_image = load_image self.numeric = numeric self.tmp_dir = tmp_dir self.development = development def __enter__(self): from_layer = self.number_of_layers if self.number_of_layers and not self.numeric: from_layer = self.docker.history( self.image.tag)[self.number_of_layers]['Id'] squash = Squash( self.log, self.image.tag, self.docker, tag=self.tag, from_layer=from_layer, output_path=self.output_path, load_image=self.load_image, tmp_dir=self.tmp_dir, development=self.development) self.image_id = squash.run() if not self.output_path: self.history = 
self.docker.history(self.image_id) if self.tag: self.tar = self._save_image() with tarfile.open(fileobj=self.tar, mode='r') as tar: self.tarnames = tar.getnames() self.squashed_layer = self._squashed_layer() self.layers = [o['Id'] for o in self.docker.history(self.image_id)] self.metadata = self.docker.inspect_image(self.image_id) return self def __exit__(self, exc_type, exc_val, exc_tb): if not (os.getenv('CI') or self.output_path): self.docker.remove_image(image=self.image_id, force=True) def _save_image(self): image = self.docker.get_image(self.tag) buf = BytesIO() for chunk in image: buf.write(chunk) buf.seek(0) # Rewind return buf def _extract_file(self, name, tar_object): tar_object.seek(0) with tarfile.open(fileobj=tar_object, mode='r') as tar: member = tar.getmember(name) return tar.extractfile(member) def _squashed_layer(self): self.tar.seek(0) with tarfile.open(fileobj=self.tar, mode='r') as tar: self.squashed_layer_path = ImageHelper.top_layer_path(tar) return self._extract_file("%s/layer.tar" % self.squashed_layer_path, self.tar) def assertFileExists(self, name): self.squashed_layer.seek(0) # Rewind with tarfile.open(fileobj=self.squashed_layer, mode='r') as tar: assert name in tar.getnames( ), "File '%s' was not found in the squashed files: %s" % (name, tar.getnames()) def assertFileDoesNotExist(self, name): self.squashed_layer.seek(0) # Rewind with tarfile.open(fileobj=self.squashed_layer, mode='r') as tar: assert name not in tar.getnames( ), "File '%s' was found in the squashed layer files: %s" % (name, tar.getnames()) def assertFileIsNotHardLink(self, name): self.squashed_layer.seek(0) # Rewind with tarfile.open(fileobj=self.squashed_layer, mode='r') as tar: member = tar.getmember(name) assert member.islnk( ) == False, "File '%s' should not be a hard link, but it is" % name class Container(object): def __init__(self, image): self.image = image self.docker = TestIntegSquash.docker self.log = TestIntegSquash.log def __enter__(self): self.container = 
self.docker.create_container(image=self.image.tag) data = self.docker.export(self.container) self.content = BytesIO() for chunk in data: self.content.write(chunk) self.content.seek(0) # Rewind return self def __exit__(self, exc_type, exc_val, exc_tb): if not os.getenv('CI'): self.docker.remove_container(self.container, force=True) def assertFileExists(self, name): self.content.seek(0) # Rewind with tarfile.open(fileobj=self.content, mode='r') as tar: assert name in tar.getnames( ), "File %s was not found in the container files: %s" % (name, tar.getnames()) def assertFileDoesNotExist(self, name): self.content.seek(0) # Rewind with tarfile.open(fileobj=self.content, mode='r') as tar: assert name not in tar.getnames( ), "File %s was found in the container files: %s" % (name, tar.getnames()) class TestIntegSquash(IntegSquash): def test_all_files_should_be_in_squashed_layer(self): """ We squash all layers in RUN, all files should be in the resulting squashed layer. """ dockerfile = ''' FROM %s RUN touch /somefile_layer1 RUN touch /somefile_layer2 RUN touch /somefile_layer3 ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 3) as squashed_image: squashed_image.assertFileDoesNotExist('.wh.somefile_layer1') squashed_image.assertFileDoesNotExist('.wh.somefile_layer2') squashed_image.assertFileDoesNotExist('.wh.somefile_layer3') squashed_image.assertFileExists('somefile_layer1') squashed_image.assertFileExists('somefile_layer2') squashed_image.assertFileExists('somefile_layer3') with self.Container(squashed_image) as container: container.assertFileExists('somefile_layer1') container.assertFileExists('somefile_layer2') container.assertFileExists('somefile_layer3') # We should have two layers less in the image self.assertTrue( len(squashed_image.layers) == len(image.layers) - 2) def test_only_files_from_squashed_image_should_be_in_squashed_layer(self): """ We squash all layers in RUN, all files should be in the resulting 
squashed layer. """ dockerfile = ''' FROM %s RUN touch /somefile_layer1 RUN touch /somefile_layer2 RUN touch /somefile_layer3 ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2) as squashed_image: squashed_image.assertFileDoesNotExist('.wh.somefile_layer2') squashed_image.assertFileDoesNotExist('.wh.somefile_layer3') # This file should not be in the squashed layer squashed_image.assertFileDoesNotExist('somefile_layer1') # Nor a marker files for it squashed_image.assertFileDoesNotExist('.wh.somefile_layer1') squashed_image.assertFileExists('somefile_layer2') squashed_image.assertFileExists('somefile_layer3') with self.Container(squashed_image) as container: # This file should be in the container container.assertFileExists('somefile_layer1') container.assertFileExists('somefile_layer2') container.assertFileExists('somefile_layer3') # We should have two layers less in the image self.assertEqual( len(squashed_image.layers), len(image.layers) - 1) def test_there_should_be_a_marker_file_in_the_squashed_layer(self): """ Here we're testing that the squashed layer should contain a '.wh.somefile_layer1' file, because the file was not found in the squashed tar and it is present in the layers we do not squash. 
""" dockerfile = ''' FROM %s RUN touch /somefile_layer1 RUN rm /somefile_layer1 RUN touch /somefile_layer3 ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2) as squashed_image: squashed_image.assertFileDoesNotExist('somefile_layer1') squashed_image.assertFileExists('somefile_layer3') squashed_image.assertFileExists('.wh.somefile_layer1') squashed_image.assertFileIsNotHardLink('.wh.somefile_layer1') with self.Container(squashed_image) as container: container.assertFileExists('somefile_layer3') container.assertFileDoesNotExist('somefile_layer1') # We should have one layer less in the image self.assertEqual( len(squashed_image.layers), len(image.layers) - 1) def test_there_should_be_a_marker_file_in_the_squashed_layer_even_more_complex(self): dockerfile = ''' FROM %s RUN touch /somefile_layer1 RUN rm /somefile_layer1 RUN touch /somefile_layer2 RUN touch /somefile_layer3 RUN rm /somefile_layer2 RUN touch /somefile_layer4 ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2) as squashed_image: squashed_image.assertFileDoesNotExist('somefile_layer1') squashed_image.assertFileDoesNotExist('somefile_layer2') squashed_image.assertFileDoesNotExist('somefile_layer3') squashed_image.assertFileExists('somefile_layer4') squashed_image.assertFileDoesNotExist('.wh.somefile_layer1') squashed_image.assertFileExists('.wh.somefile_layer2') squashed_image.assertFileIsNotHardLink('.wh.somefile_layer2') squashed_image.assertFileDoesNotExist('.wh.somefile_layer3') squashed_image.assertFileDoesNotExist('.wh.somefile_layer4') with self.Container(squashed_image) as container: container.assertFileExists('somefile_layer3') container.assertFileExists('somefile_layer4') container.assertFileDoesNotExist('somefile_layer1') container.assertFileDoesNotExist('somefile_layer2') # We should have one layer less in the image self.assertEqual( len(squashed_image.layers), len(image.layers) - 1) 
def test_should_handle_removal_of_directories(self): dockerfile = ''' FROM %s RUN mkdir -p /some/dir/tree RUN touch /some/dir/tree/file1 RUN touch /some/dir/tree/file2 RUN touch /some/dir/file1 RUN touch /some/dir/file2 RUN rm -rf /some/dir/tree ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2) as squashed_image: squashed_image.assertFileDoesNotExist('some/dir/tree/file1') squashed_image.assertFileDoesNotExist('some/dir/tree/file2') squashed_image.assertFileDoesNotExist('some/dir/file1') squashed_image.assertFileExists('some/dir/file2') squashed_image.assertFileExists('some/dir/.wh.tree') squashed_image.assertFileIsNotHardLink('some/dir/.wh.tree') with self.Container(squashed_image) as container: container.assertFileExists('some/dir/file1') container.assertFileExists('some/dir/file2') container.assertFileDoesNotExist('some/dir/tree') container.assertFileDoesNotExist('some/dir/tree/file1') container.assertFileDoesNotExist('some/dir/tree/file2') # We should have one layer less in the image self.assertEqual( len(squashed_image.layers), len(image.layers) - 1) def test_should_skip_files_when_these_are_modified_and_removed_in_squashed_layer(self): dockerfile = ''' FROM %s RUN touch /file RUN chmod -R 777 /file RUN rm -rf /file ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2) as squashed_image: squashed_image.assertFileDoesNotExist('file') squashed_image.assertFileExists('.wh.file') squashed_image.assertFileIsNotHardLink('.wh.file') with self.Container(squashed_image) as container: container.assertFileDoesNotExist('file') # We should have one layer less in the image self.assertEqual( len(squashed_image.layers), len(image.layers) - 1) def test_should_skip_files_when_these_are_removed_and_modified_in_squashed_layer(self): dockerfile = ''' FROM %s RUN touch /file RUN chmod -R 777 /file RUN rm -rf /file RUN touch /file ''' % TestIntegSquash.BUSYBOX_IMAGE with 
self.Image(dockerfile) as image: with self.SquashedImage(image, 3) as squashed_image: squashed_image.assertFileExists('file') squashed_image.assertFileDoesNotExist('.wh.file') with self.Container(squashed_image) as container: container.assertFileExists('file') # We should have two layers less in the image self.assertEqual( len(squashed_image.layers), len(image.layers) - 2) def test_should_handle_multiple_changes_to_files_in_squashed_layers(self): dockerfile = ''' FROM %s RUN mkdir -p /some/dir/tree RUN touch /some/dir/tree/file1 RUN touch /some/dir/tree/file2 RUN touch /some/dir/file1 RUN touch /some/dir/file2 RUN chmod -R 777 /some RUN rm -rf /some/dir/tree ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2, numeric=True) as squashed_image: squashed_image.assertFileDoesNotExist('some/dir/tree/file1') squashed_image.assertFileDoesNotExist('some/dir/tree/file2') squashed_image.assertFileExists('some/dir/file1') squashed_image.assertFileExists('some/dir/file2') squashed_image.assertFileExists('some/dir/.wh.tree') squashed_image.assertFileIsNotHardLink('some/dir/.wh.tree') with self.Container(squashed_image) as container: container.assertFileExists('some/dir/file1') container.assertFileExists('some/dir/file2') container.assertFileDoesNotExist('some/dir/tree') container.assertFileDoesNotExist('some/dir/tree/file1') container.assertFileDoesNotExist('some/dir/tree/file2') # We should have one layer less in the image self.assertEqual( len(squashed_image.layers), len(image.layers) - 1) # https://github.com/goldmann/docker-squash/issues/97 def test_should_leave_whiteout_entries_as_is(self): dockerfile = ''' FROM %s RUN mkdir -p /opt/test.one RUN mkdir -p /opt/test.two RUN mkdir -p /opt/foo RUN touch /opt/test.one/file RUN touch /opt/test.two/file RUN touch /opt/foo/file RUN rm -rvf /opt/test*/* RUN rm -rvf /opt/foo/* ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with 
self.SquashedImage(image, 2, numeric=True) as squashed_image: squashed_image.assertFileDoesNotExist('opt/test.one/file') squashed_image.assertFileDoesNotExist('opt/test.two/file') squashed_image.assertFileDoesNotExist('opt/foo/file') squashed_image.assertFileExists('opt/test.one') squashed_image.assertFileExists('opt/test.two') squashed_image.assertFileExists('opt/foo') squashed_image.assertFileExists('opt/test.one/.wh.file') squashed_image.assertFileExists('opt/test.two/.wh.file') squashed_image.assertFileExists('opt/foo/.wh.file') with self.Container(squashed_image) as container: container.assertFileDoesNotExist('opt/test.one/file') container.assertFileDoesNotExist('opt/test.two/file') container.assertFileDoesNotExist('opt/foo/file') container.assertFileExists('opt/foo') container.assertFileExists('opt/test.one') container.assertFileExists('opt/test.two') # https://github.com/goldmann/docker-scripts/issues/28 def test_docker_version_in_metadata_should_be_set_after_squashing(self): dockerfile = ''' FROM %s RUN touch file RUN touch another_file ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2) as squashed_image: self.assertEqual( len(squashed_image.layers), len(image.layers) - 1) self.assertEqual( image.metadata['DockerVersion'], squashed_image.metadata['DockerVersion']) # https://github.com/goldmann/docker-scripts/issues/30 # https://github.com/goldmann/docker-scripts/pull/31 def test_files_in_squashed_tar_not_prefixed_wth_dot(self): dockerfile = ''' FROM %s RUN touch file RUN touch another_file ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2, output_path="image.tar"): with tarfile.open("image.tar", mode='r') as tar: all_files = tar.getnames() for name in all_files: self.assertFalse(name.startswith('.')) # https://github.com/goldmann/docker-scripts/issues/32 def test_version_file_exists_in_squashed_layer(self): dockerfile = ''' FROM %s RUN touch 
file RUN touch another_file ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2, output_path="image.tar"): with tarfile.open("image.tar", mode='r') as tar: squashed_layer_path = ImageHelper.top_layer_path(tar) all_files = tar.getnames() self.assertIn("%s/json" % squashed_layer_path, all_files) self.assertIn("%s/layer.tar" % squashed_layer_path, all_files) self.assertIn("%s/VERSION" % squashed_layer_path, all_files) # https://github.com/goldmann/docker-scripts/issues/33 def test_docker_size_in_metadata_should_be_upper_case(self): dockerfile = ''' FROM %s RUN touch file RUN touch another_file ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2) as squashed_image: self.assertEqual( len(squashed_image.layers), len(image.layers) - 1) self.assertIsInstance(image.metadata['Size'], int) with six.assertRaisesRegex(self, KeyError, "'size'"): self.assertEqual(image.metadata['size'], None) def test_handle_correctly_squashing_layers_without_data(self): dockerfile = ''' FROM %s ENV a=1 ENV b=2 ENV c=3 ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2) as squashed_image: self.assertEqual( len(squashed_image.layers), len(image.layers) - 1) image_data_layers = [ s for s in image.tarnames if "layer.tar" in s] squashed_image_data_layers = [ s for s in squashed_image.tarnames if "layer.tar" in s] if 'manifest.json' in image.tarnames: # For v2 # For V2 only layers with data contain layer.tar archives # In our test case we did not add any data, so the count should # be the same self.assertEqual(len(image_data_layers), len(squashed_image_data_layers)) else: # For v1 # V1 image contains as many layer.tar archives as the image has layers # We squashed 2 layers, so squashed image contains one layer less self.assertEqual(len(image_data_layers), len( squashed_image_data_layers) + 1) # This is an edge case where we try to squash 
last 2 layers # but these layers do not create any content on filesystem # https://github.com/goldmann/docker-scripts/issues/54 def test_should_squash_exactly_2_layers_without_data(self): dockerfile = ''' FROM %s CMD /bin/env LABEL foo bar ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2) as squashed_image: self.assertEqual( len(squashed_image.layers), len(image.layers) - 1) def test_should_squash_exactly_3_layers_with_data(self): dockerfile = ''' FROM %s RUN touch /abc CMD /bin/env LABEL foo bar ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 3) as squashed_image: self.assertEqual( len(squashed_image.layers), len(image.layers) - 2) def test_should_not_squash_if_only_one_layer_is_to_squash(self): dockerfile = ''' FROM %s RUN touch /abc CMD /bin/env LABEL foo bar ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.assertRaises(SquashUnnecessaryError) as cm: with self.SquashedImage(image, 1) as squashed_image: pass self.assertEqual( str(cm.exception), 'Single layer marked to squash, no squashing is required') # https://github.com/goldmann/docker-scripts/issues/52 # Test may be misleading, but squashing all layers makes sure we hit # at least one <missing> layer def test_should_squash_every_layer(self): dockerfile = ''' FROM %s RUN touch /tmp/test1 RUN touch /tmp/test2 CMD /bin/env LABEL foo bar ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image) as squashed_image: self.assertEqual( len(squashed_image.layers), 1) # https://github.com/goldmann/docker-scripts/issues/44 def test_remove_tmp_dir_after_failure(self): dockerfile = ''' FROM busybox:1.24.0 LABEL foo bar ''' tmp_dir = "/tmp/docker-squash-integ-tmp-dir" log = mock.Mock() shutil.rmtree(tmp_dir, ignore_errors=True) self.assertFalse(os.path.exists(tmp_dir)) with self.Image(dockerfile) as image: with 
six.assertRaisesRegex(self, SquashError, r"Cannot squash 20 layers, the .* image contains only \d layers"): with self.SquashedImage(image, 20, numeric=True, tmp_dir=tmp_dir, log=log): pass log.debug.assert_any_call( "Using /tmp/docker-squash-integ-tmp-dir as the temporary directory") log.debug.assert_any_call( "Cleaning up /tmp/docker-squash-integ-tmp-dir temporary directory") self.assertFalse(os.path.exists(tmp_dir)) def test_should_not_remove_tmp_dir_after_failure_if_development_mode_is_on(self): dockerfile = ''' FROM busybox:1.24.0 LABEL foo bar ''' tmp_dir = "/tmp/docker-squash-integ-tmp-dir" log = mock.Mock() shutil.rmtree(tmp_dir, ignore_errors=True) self.assertFalse(os.path.exists(tmp_dir)) with self.Image(dockerfile) as image: with six.assertRaisesRegex(self, SquashError, r"Cannot squash 20 layers, the .* image contains only \d layers"): with self.SquashedImage(image, 20, numeric=True, tmp_dir=tmp_dir, log=log, development=True): pass log.debug.assert_any_call( "Using /tmp/docker-squash-integ-tmp-dir as the temporary directory") self.assertTrue(os.path.exists(tmp_dir)) # https://github.com/goldmann/docker-squash/issues/80 def test_should_not_fail_with_hard_links(self): dockerfile = ''' FROM %s RUN touch /file && ln file link RUN rm file ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, None): pass # https://github.com/goldmann/docker-squash/issues/99 # TODO: try not to use centos:6.6 image - this slows down testsuite def test_should_not_fail_with_hard_links_to_files_gh_99(self): dockerfile = ''' FROM centos:7 RUN yum -y update bind-utils RUN yum clean all ''' with self.Image(dockerfile) as image: with self.SquashedImage(image, None): pass # https://github.com/goldmann/docker-squash/issues/66 def test_build_without_tag(self): dockerfile = ''' FROM %s RUN touch file ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, None, tag=False): pass # 
https://github.com/goldmann/docker-squash/issues/94 def test_should_squash_correctly_hardlinks(self): dockerfile = ''' FROM %s RUN mkdir -p /usr/libexec/git-core && \ echo foo > /usr/libexec/git-core/git-remote-ftp && \ ln /usr/libexec/git-core/git-remote-ftp \ /usr/libexec/git-core/git-remote-http CMD /bin/bash ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 3, numeric=True) as squashed_image: self.assertEqual( len(squashed_image.layers), len(image.layers) - 2) squashed_image.assertFileExists( 'usr/libexec/git-core/git-remote-ftp') squashed_image.assertFileExists( 'usr/libexec/git-core/git-remote-http') # https://github.com/goldmann/docker-squash/issues/104 def test_should_handle_symlinks_to_nonexisting_locations(self): dockerfile = ''' FROM %s RUN mkdir -p /var/log RUN touch /var/log/somelog RUN mv /var/log /var/log-removed && ln -sf /data/var/log /var/log RUN rm -rf /var/log-removed ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 3, numeric=True) as squashed_image: self.assertEqual( len(squashed_image.layers), len(image.layers) - 2) def test_should_squash_every_layer_from_an_image_from_docker_hub(self): dockerfile = ''' FROM python:3.5-alpine ''' with self.Image(dockerfile) as image: with self.SquashedImage(image) as squashed_image: self.assertEqual( len(squashed_image.layers), 1) # https://github.com/goldmann/docker-squash/issues/111 def test_correct_symlinks_squashing(self): dockerfile = ''' FROM %s RUN mkdir -p /zzz RUN ln -s /zzz /xxx ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image) as squashed_image: squashed_image.assertFileExists('zzz') squashed_image.assertFileExists('xxx') with self.Container(squashed_image) as container: container.assertFileExists('zzz') container.assertFileExists('xxx') # https://github.com/goldmann/docker-squash/issues/112 def 
test_should_add_broken_symlinks_back(self): dockerfile = ''' FROM %s RUN touch a RUN touch b RUN ln -s /zzz /xxx ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2, numeric=True) as squashed_image: squashed_image.assertFileExists('xxx') with self.Container(squashed_image) as container: container.assertFileExists('xxx') def test_should_add_hard_hard_link_back_if_target_exists_in_moved_files(self): dockerfile = ''' FROM %s RUN touch a RUN touch b RUN ln /a /link RUN touch c ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 3, numeric=True) as squashed_image: squashed_image.assertFileExists('link') squashed_image.assertFileExists('b') with self.Container(squashed_image) as container: container.assertFileExists('link') container.assertFileExists('b') container.assertFileExists('a') container.assertFileExists('c') # https://github.com/goldmann/docker-squash/issues/112 def test_should_add_sym_link_back_if_it_was_broken_before(self): dockerfile = ''' FROM %s RUN touch a RUN touch b RUN touch c RUN ln -s /a /link ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 3, numeric=True) as squashed_image: squashed_image.assertFileExists('link') squashed_image.assertFileExists('b') squashed_image.assertFileExists('c') with self.Container(squashed_image) as container: container.assertFileExists('link') container.assertFileExists('a') container.assertFileExists('b') container.assertFileExists('c') # https://github.com/goldmann/docker-squash/issues/116 def test_should_not_skip_sym_link(self): dockerfile = ''' FROM %s RUN mkdir /dir RUN touch /dir/a RUN touch /dir/b RUN mkdir /dir/dir RUN touch /dir/dir/file RUN mv /dir/dir /newdir RUN ln -s /newdir /dir/dir ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2, numeric=True) as squashed_image: with 
self.Container(squashed_image) as container: container.assertFileExists('dir') container.assertFileExists('dir/a') container.assertFileExists('dir/b') container.assertFileExists('dir/dir') container.assertFileExists('newdir/file') # https://github.com/goldmann/docker-squash/issues/118 def test_should_not_skip_hard_link(self): dockerfile = ''' FROM %s RUN mkdir /dir RUN touch /dir/a RUN touch /dir/b RUN mkdir /dir/dir RUN touch /dir/dir/file RUN mkdir /newdir RUN mv /dir/dir/file /newdir/file RUN ln /newdir/file /dir/dir/file ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2, numeric=True) as squashed_image: with self.Container(squashed_image) as container: container.assertFileExists('dir') container.assertFileExists('dir/a') container.assertFileExists('dir/b') container.assertFileExists('dir/dir') container.assertFileExists('newdir/file') # https://github.com/goldmann/docker-squash/issues/118 def test_should_not_add_hard_link_if_exists_in_other_squashed_layer(self): dockerfile = ''' FROM %s RUN echo "base" > file && ln file link RUN echo "first layer" > file && ln -f file link RUN echo "second layer" > file && ln -f file link ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 2, numeric=True) as squashed_image: with self.Container(squashed_image) as container: pass # https://github.com/goldmann/docker-squash/issues/120 def test_should_handle_symlinks_to_directory(self): dockerfile = ''' FROM %s RUN mkdir /tmp/dir RUN touch /tmp/dir/file RUN set -e ; cd / ; mkdir /data-template ; tar cf - ./tmp/dir/ | ( cd /data-template && tar xf - ) ; mkdir -p $( dirname /tmp/dir ) ; rm -rf /tmp/dir ; ln -sf /data/tmp/dir /tmp/dir ''' % TestIntegSquash.BUSYBOX_IMAGE with self.Image(dockerfile) as image: with self.SquashedImage(image, 3, numeric=True) as squashed_image: with self.Container(squashed_image) as container: container.assertFileExists('data-template') 
container.assertFileExists('data-template/tmp') container.assertFileExists('data-template/tmp/dir') container.assertFileExists('data-template/tmp/dir/file') container.assertFileExists('tmp/dir') container.assertFileDoesNotExist('tmp/dir/file') # https://github.com/goldmann/docker-squash/issues/122 def test_should_not_add_duplicate_files(self): dockerfile = ''' FROM {} RUN mkdir -p /etc/systemd/system/multi-user.target.wants RUN mkdir -p /etc/systemd/system/default.target.wants RUN touch /etc/systemd/system/multi-user.target.wants/remote-fs.target RUN touch /etc/systemd/system/default.target.wants/remote-fs.target # End of preparations, going to squash from here RUN find /etc/systemd/system/* '!' -name '*.wants' | xargs rm -rvf RUN rmdir -v /etc/systemd/system/multi-user.target.wants && mkdir /etc/systemd/system/container-ipa.target.wants && ln -s /etc/systemd/system/container-ipa.target.wants /etc/systemd/system/multi-user.target.wants RUN ln -s /etc/group /etc/systemd/system/default.target RUN ln -s /etc/group /etc/systemd/system/container-ipa.target.wants/ipa-server-configure-first.service RUN echo "/etc/systemd/system" > /etc/volume-data-list RUN set -e ; cd / ; mkdir /data-template ; cat /etc/volume-data-list | while read i ; do echo $i ; if [ -e $i ] ; then tar cf - .$i | ( cd /data-template && tar xf - ) ; fi ; mkdir -p $( dirname $i ) ; if [ "$i" == /var/log/ ] ; then mv /var/log /var/log-removed ; else rm -rf $i ; fi ; ln -sf /data$i $i ; done '''.format(TestIntegSquash.BUSYBOX_IMAGE) with self.Image(dockerfile) as image: with self.SquashedImage(image, 6, numeric=True, output_path="tox.tar") as squashed_image: with self.Container(squashed_image) as container: container.assertFileExists( 'data-template/etc/systemd/system/container-ipa.target.wants') container.assertFileExists( 'data-template/etc/systemd/system/default.target.wants') container.assertFileExists( 'data-template/etc/systemd/system/default.target') container.assertFileExists( 
'data-template/etc/systemd/system/multi-user.target.wants') container.assertFileExists( 'data-template/etc/systemd/system/container-ipa.target.wants/ipa-server-configure-first.service') container.assertFileExists('etc/systemd/system') # https://github.com/goldmann/docker-squash/issues/181 def test_should_not_add_marker_files_in_already_marked_directories(self): dockerfile = ''' FROM {} RUN mkdir -p /opt/testing/some/dir/structure RUN touch /opt/testing/some/dir/structure/1_base_file /opt/testing/some/dir/structure/2_base_file /opt/testing/some/dir/structure/3_base_file RUN rm /opt/testing/some/dir/structure/2_base_file /opt/testing/some/dir/structure/3_base_file RUN touch /opt/testing/some/dir/structure/new_file RUN rm -rf /opt/testing RUN rm -rf /opt '''.format(TestIntegSquash.BUSYBOX_IMAGE) with self.Image(dockerfile) as image: with self.SquashedImage(image, 4, numeric=True) as squashed_image: squashed_image.assertFileExists('.wh.opt') with self.Container(squashed_image) as container: container.assertFileDoesNotExist('/opt') # https://github.com/goldmann/docker-squash/issues/181 def test_should_not_add_marker_files_in_already_marked_directories_multiple_removal(self): dockerfile = ''' FROM {} RUN mkdir -p /opt/testing/some/dir/structure RUN touch /opt/testing/some/dir/structure/1_base_file /opt/testing/some/dir/structure/2_base_file /opt/testing/some/dir/structure/3_base_file RUN rm /opt/testing/some/dir/structure/2_base_file /opt/testing/some/dir/structure/3_base_file RUN touch /opt/testing/some/dir/structure/new_file RUN rm -rf /opt/testing '''.format(TestIntegSquash.BUSYBOX_IMAGE) with self.Image(dockerfile) as image: with self.SquashedImage(image, 3, numeric=True) as squashed_image: squashed_image.assertFileExists('opt/.wh.testing') with self.Container(squashed_image) as container: container.assertFileDoesNotExist('/opt/testing') # https://github.com/goldmann/docker-squash/issues/186 def test_should_handle_opaque_dirs(self): dockerfile = ''' FROM {} RUN mkdir 
-p /d1 && touch /d1/foobar RUN rm -rf /d1 && mkdir -p /d1 && touch /d1/foo '''.format(TestIntegSquash.BUSYBOX_IMAGE) with self.Image(dockerfile) as image: with self.SquashedImage(image, 2, numeric=True) as squashed_image: with self.Container(image) as container: container.assertFileExists('d1/foo') container.assertFileDoesNotExist('d1/foobar') with self.Container(squashed_image) as container: container.assertFileExists('d1/foo') container.assertFileDoesNotExist('d1/foobar') # https://github.com/goldmann/docker-squash/issues/186 # https://github.com/opencontainers/image-spec/blob/master/layer.md#whiteouts def test_should_handle_opaque_dirs_spec_example(self): dockerfile = ''' FROM {} RUN mkdir -p a/b/c && touch a/b/c/bar RUN rm -rf a RUN mkdir -p a/b/c && touch a/b/c/foo '''.format(TestIntegSquash.BUSYBOX_IMAGE) with self.Image(dockerfile) as image: image.assertFileExistsInLayer('a', -3) image.assertFileExistsInLayer('a/b', -3) image.assertFileExistsInLayer('a/b/c', -3) image.assertFileExistsInLayer('a/b/c/bar', -3) image.assertFileExistsInLayer('.wh.a', -2) image.assertFileExistsInLayer('a') image.assertFileExistsInLayer('a/b') image.assertFileExistsInLayer('a/b/c') image.assertFileExistsInLayer('a/b/c/foo') with self.SquashedImage(image, 3, numeric=True) as squashed_image: squashed_image.assertFileExists('a') squashed_image.assertFileExists('a/b') squashed_image.assertFileExists('a/b/c') squashed_image.assertFileExists('a/b/c/foo') squashed_image.assertFileDoesNotExist('a/b/c/bar') class NumericValues(IntegSquash): @classmethod def setUpClass(cls): dockerfile = ''' FROM busybox:1.24.0 RUN touch /tmp/test1 RUN touch /tmp/test2 CMD /bin/env LABEL foo bar ''' IntegSquash.build_image(dockerfile) @classmethod def tearDownClass(cls): IntegSquash.cleanup_image() def test_should_not_squash_more_layers_than_image_has(self): with six.assertRaisesRegex(self, SquashError, r"Cannot squash 20 layers, the .* image contains only \d layers"): with 
self.SquashedImage(NumericValues.image, 20, numeric=True): pass def test_should_not_squash_negative_number_of_layers(self): with six.assertRaisesRegex(self, SquashError, "Number of layers to squash cannot be less or equal 0, provided: -1"): with self.SquashedImage(NumericValues.image, -1, numeric=True): pass def test_should_not_squash_zero_number_of_layers(self): with six.assertRaisesRegex(self, SquashError, "Number of layers to squash cannot be less or equal 0, provided: 0"): with self.SquashedImage(NumericValues.image, 0, numeric=True): pass def test_should_not_squash_single_layer(self): with six.assertRaisesRegex(self, SquashUnnecessaryError, "Single layer marked to squash, no squashing is required"): with self.SquashedImage(NumericValues.image, 1, numeric=True): pass def test_should_squash_2_layers(self): with self.SquashedImage(NumericValues.image, 2, numeric=True) as squashed_image: i_h = NumericValues.image.history[0] s_h = squashed_image.history[0] for key in 'Comment', 'Size': self.assertEqual(i_h[key], s_h[key]) self.assertEqual(s_h['CreatedBy'], '') self.assertEqual( len(squashed_image.layers), len(NumericValues.image.layers) - 1) def test_should_squash_3_layers(self): with self.SquashedImage(NumericValues.image, 3, numeric=True) as squashed_image: i_h = NumericValues.image.history[0] s_h = squashed_image.history[0] for key in 'Comment', 'Size': self.assertEqual(i_h[key], s_h[key]) self.assertEqual(s_h['CreatedBy'], '') self.assertEqual( len(squashed_image.layers), len(NumericValues.image.layers) - 2) def test_should_squash_4_layers(self): with self.SquashedImage(NumericValues.image, 4, numeric=True) as squashed_image: i_h = NumericValues.image.history[0] s_h = squashed_image.history[0] for key in 'Comment', 'Size': self.assertEqual(i_h[key], s_h[key]) self.assertEqual(s_h['CreatedBy'], '') self.assertEqual( len(squashed_image.layers), len(NumericValues.image.layers) - 3) if __name__ == '__main__': unittest.main()
48,070
14,487
import torch
import math
import torch.nn as nn
import torch.nn.functional as F


class SpatialGate(nn.Module):
    """Spatial attention branch of BAM.

    Reduces channels with a 1x1 conv, applies a stack of dilated 3x3 convs,
    and collapses to a single-channel spatial gate that is broadcast back to
    the input's shape.

    Args:
        gate_channel: number of input channels.
        reduction_ratio: channel reduction factor for the bottleneck.
        dilation_conv_num: number of dilated 3x3 conv stages (default 2).
    """

    def __init__(self, gate_channel, reduction_ratio, dilation_conv_num=2):
        super(SpatialGate, self).__init__()
        # GENERALIZED: the original hard-coded dilation_val = [1, 2], which
        # raised IndexError whenever dilation_conv_num > 2.  Using
        # range(1, n + 1) keeps the default case ([1, 2]) byte-for-byte
        # identical while supporting any stage count.
        dilation_val = list(range(1, dilation_conv_num + 1))
        self.gate_s = nn.Sequential()
        # 1x1 bottleneck reduction.
        self.gate_s.add_module(
            'gate_s_conv_reduce0',
            nn.Conv2d(gate_channel, gate_channel // reduction_ratio, kernel_size=1))
        self.gate_s.add_module(
            'gate_s_bn_reduce0',
            nn.BatchNorm2d(gate_channel // reduction_ratio))
        self.gate_s.add_module('gate_s_relu_reduce0', nn.ReLU())
        for i in range(dilation_conv_num):
            # padding == dilation keeps the spatial size unchanged for a
            # 3x3 kernel.
            self.gate_s.add_module(
                'gate_s_conv_di_%d' % i,
                nn.Conv2d(gate_channel // reduction_ratio,
                          gate_channel // reduction_ratio,
                          kernel_size=3,
                          padding=dilation_val[i],
                          dilation=dilation_val[i]))
            self.gate_s.add_module(
                'gate_s_bn_di_%d' % i,
                nn.BatchNorm2d(gate_channel // reduction_ratio))
            self.gate_s.add_module('gate_s_relu_di_%d' % i, nn.ReLU())
        # Collapse to a single-channel attention map.
        self.gate_s.add_module(
            'gate_s_conv_final',
            nn.Conv2d(gate_channel // reduction_ratio, 1, kernel_size=1))

    def forward(self, in_tensor):
        """Return the (N, 1, H, W) gate expanded to ``in_tensor``'s shape."""
        return self.gate_s(in_tensor).expand_as(in_tensor)


class BAM(nn.Module):
    """Bottleneck Attention Module (spatial branch only).

    Output is ``(1 + sigmoid(spatial_gate(x))) * x`` — a residual-style
    attention scaling in the open interval (1, 2).
    """

    def __init__(self, gate_channel, reduction_ratio):
        super(BAM, self).__init__()
        self.spatial_att = SpatialGate(gate_channel, reduction_ratio)

    def forward(self, in_tensor):
        att = 1 + torch.sigmoid(self.spatial_att(in_tensor))
        return att * in_tensor
1,627
627
import cv2

# Haar cascade shipped with OpenCV for frontal-face detection.
faceCascade = cv2.CascadeClassifier(cv2.data.haarcascades + "haarcascade_frontalface_default.xml")

# capture = cv2.VideoCapture(0)  # uncomment to read from the webcam instead
capture = cv2.VideoCapture('Elon Musk 320.mp4')

while True:
    ok, frame = capture.read()
    if not ok:
        # BUG FIX: at end-of-stream read() returns (False, None) and the
        # original crashed inside cvtColor; stop cleanly instead.
        break
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    # faces = faceCascade.detectMultiScale(gray, 1.1, 4)
    faces = faceCascade.detectMultiScale(
        gray,
        scaleFactor=1.1,
        minNeighbors=5,
        # minSize=(30, 30),
        flags=cv2.CASCADE_SCALE_IMAGE
    )
    # Draw a green box around every detection.
    for (x, y, w, h) in faces:
        cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
    cv2.imshow('Image', frame)
    # BUG FIX: the original called waitKey(30 & 0xff), masking the *delay*
    # argument (a no-op, 30 & 0xff == 30) instead of the return value, so
    # ESC was missed on platforms where waitKey returns more than 8
    # significant bits.  The mask belongs on the result.
    keyboard = cv2.waitKey(30) & 0xff
    if keyboard == 27:  # ESC
        break

capture.release()
cv2.destroyAllWindows()
736
329
from setuptools import setup
import sys

# Hard requirement: cprofilev relies on language features introduced in 2.5.
if sys.version_info < (2,5):
    raise NotImplementedError(
        "Sorry, you need at least Python 2.5 to use cprofilev.")

VERSION = '1.0.4'

# Module docstring doubles as the PyPI long description (passed below).
__doc__ = """\
An easier way to use cProfile. Outputs a simpler html view of profiled stats.

Able to show stats while the code is still running!
"""

setup(
    name='CProfileV',
    version=VERSION,
    url='https://github.com/ymichael/cprofilev',
    author='Michael Yong',
    author_email='wrong92@gmail.com',
    # Single-module distribution (no package directory).
    py_modules=['cprofilev'],
    # Installs a `cprofilev` console command that calls cprofilev.main().
    entry_points="""
    [console_scripts]
    cprofilev = cprofilev:main
    """,
    install_requires=["bottle"],
    license='MIT',
    description='An easier way to use cProfile',
    long_description=__doc__,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Environment :: Web Environment',
        'Framework :: Bottle',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Topic :: Software Development :: Testing',
    ]
)
1,110
351
# standard libraries import os # third party libraries pass # first party libraries from . import (application, routing, static, surly, requests, responses, utils, cookies, exceptions, middleware, ) __where__ = os.path.dirname(os.path.abspath(__file__)) __all__ = ('Application', 'application', 'routing', 'Route', 'Routes', 'Request', 'Response', 'exceptions', 'surly', 'Url') Route = routing.Route Routes = routing.Routes Application = application.Application Url = surly.Url Request = requests.Request Response = responses.Response
566
158
"""CLI script: classify a single image with a CIFAR-10 CNN.

Usage: script.py -s <image> -a <activation> -o <optimizer>
Loads pre-trained weights from dnn/<activation>-<optimizer>2.hdf5 and prints
per-class probabilities as a list of single-entry dicts.
"""
import sys
# this is for extracting command line arguments.


def parse_activator(flag, value):
    # Returns (True, value) when *flag* is the -a flag, else (False, None).
    # NOTE(review): only flag[1] is inspected, so ANY token whose second
    # character is 'a' (e.g. a bare filename) is treated as -a — fragile.
    if flag[1] == 'a':
        return (True, value)
    else:
        return (False, None)
    pass


def parse_optimizer(flag, value):
    # Returns (True, value) when *flag* is the -o flag (same caveat as above).
    if flag[1] == 'o':
        return (True, value)
    else:
        return (False, None)
    pass


def parse_source(flag, value):
    # Returns (True, value) when *flag* is the -s flag (same caveat as above).
    if flag[1] == 's':
        return (True, value)
    else:
        return (False, None)
    pass


activator = ''
optimizer = ''
source = ''

# Arguments must come in flag/value pairs.
if len(sys.argv) == 1 or (len(sys.argv) - 1) % 2 != 0:
    raise ValueError("Usage: [-s image] [-a activator] [-o optimizer]")
else:
    # could this be done better?
    # sure, but this works for now...
    # NOTE(review): this loop visits EVERY index (values as well as flags),
    # relying on later real flags to overwrite accidental matches; argparse
    # would be the robust replacement.
    for i in range(1, len(sys.argv) - 1):
        flag = sys.argv[i]
        value = sys.argv[i + 1]
        isActivator, act = parse_activator(flag, value)
        if isActivator:
            if act != '-o':
                activator = act
            continue
        isOptimizer, opt = parse_optimizer(flag, value)
        if isOptimizer:
            optimizer = opt
            continue
        isSource, so = parse_source(flag, value)
        if isSource:
            source = so
            continue
        pass
    pass

# naive check to ensure no argument is left unfilled
if len(activator) == 0 or len(optimizer) == 0 or len(source) == 0 :
    raise ValueError("Usage: [-s image] [-a activator] [-o optimizer]")
# exit(0)

############# Classification Logic ##################
import pandas as pd
import io
import requests
import numpy as np
import os
import logging
import json
import shutil
from sklearn.model_selection import train_test_split
from sklearn import metrics
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation, Conv2D, MaxPooling2D, Dropout, Flatten
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.applications.vgg16 import VGG16
from PIL import Image, ImageFile, ImageEnhance
from matplotlib.pyplot import imshow
import requests
from io import BytesIO
import matplotlib.image as mpimg
import matplotlib.pyplot as plt

####### warning messages not printed #######
logging.disable(logging.WARNING)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

# class labels are as follows for the cifar10
# airplane : 0
# automobile : 1
# bird : 2
# cat : 3
# deer : 4
# dog : 5
# frog : 6
# horse : 7
# ship : 8
# truck : 9
class_labels = ['airplane','automobile','bird','cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
num_classes = 10

# Image preprocessing: resize to the network's 32x32 input, sharpen, and
# scale pixel values to [0, 1].
img = Image.open(source)
img = img.resize((32,32))
enhancer = ImageEnhance.Sharpness(img)
enhanced_im = enhancer.enhance(10.0)
enhanced_im.save('resized.jpg')
img_array = np.asarray(enhanced_im)
img_array = img_array / 255
# assumes the source image is RGB (3 channels) — TODO confirm; a grayscale
# or RGBA input would fail the reshape below.
input_shape = (32,32,3)
# reshape for model
# original model was trained with (32,32,3); add the batch dimension.
img_array = img_array.reshape((1,32,32,3))

# Architecture must match the one used when the .hdf5 weights were saved,
# otherwise load_weights below fails.
modelo = Sequential()
modelo.add(Conv2D(32, (3, 3), activation=activator, padding='same', input_shape=input_shape))
modelo.add(Conv2D(32, (3, 3), activation=activator, padding='same'))
modelo.add(Conv2D(32, (3, 3), activation=activator, padding='same'))
modelo.add(MaxPooling2D((3, 3)))
modelo.add(Dropout(0.2))
modelo.add(Conv2D(64, (3, 3), activation=activator, padding='same'))
modelo.add(Conv2D(64, (3, 3), activation=activator, padding='same'))
modelo.add(Conv2D(64, (3, 3), activation=activator, padding='same'))
modelo.add(MaxPooling2D((3, 3)))
modelo.add(Dropout(0.2))
modelo.add(Conv2D(128, (3, 3), activation=activator, padding='same'))
modelo.add(Conv2D(128, (3, 3), activation=activator, padding='same'))
modelo.add(MaxPooling2D((3, 3)))
modelo.add(Flatten())
modelo.add(Dense(128, activation=activator))
modelo.add(Dropout(0.2))
modelo.add(Dense(10, activation='softmax'))
modelo.compile(loss='categorical_crossentropy',optimizer=optimizer)

# validate the 'activator'
pass
# validate the 'optimizer'
pass

# Load weights based on activator and optimizer
# probably not needed as we are already passing the optimizer as a variable
# NOTE(review): the compile() calls inside the branches are redundant — the
# model was already compiled identically above.  An unknown activator only
# prints 'error' and continues with random weights; an unknown optimizer
# skips loading entirely.
if optimizer == 'adam':
    # compile with adam
    modelo.compile(loss='categorical_crossentropy',optimizer=optimizer)
    # activator
    if activator == 'relu':
        # load adam-relu
        modelo.load_weights('dnn/relu-adam2.hdf5')
    elif activator == 'sigmoid':
        # load sigmoid-adam
        modelo.load_weights('dnn/sigmoid-adam2.hdf5')
    elif activator == 'tanh':
        # load tanh-adam
        modelo.load_weights('dnn/tanh-adam2.hdf5')
    else:
        print('error')
elif optimizer == 'sgd':
    # compile with sgd
    modelo.compile(loss='categorical_crossentropy',optimizer=optimizer)
    if activator == 'relu':
        # load relu-sgd
        modelo.load_weights('dnn/relu-sgd2.hdf5')
    elif activator == 'sigmoid':
        # load sigmoid-sgd
        modelo.load_weights('dnn/sigmoid-sgd2.hdf5')
    elif activator == 'tanh':
        # load tanh-sgd
        modelo.load_weights('dnn/tanh-sgd2.hdf5')
    else:
        print('error')

# Get the classification
############# classification ##############
pred = modelo.predict(img_array)
pred = pred[0]
pred_class = class_labels[np.argmax(pred)]

############# JSON ###############
# classification = {k:v for k,v in zip(class_labels,pred)}
# One {label: probability} dict per class, in CIFAR-10 label order.
classification = [
    { class_labels[0] : pred[0] },
    { class_labels[1] : pred[1] },
    { class_labels[2] : pred[2] },
    { class_labels[3] : pred[3] },
    { class_labels[4] : pred[4] },
    { class_labels[5] : pred[5] },
    { class_labels[6] : pred[6] },
    { class_labels[7] : pred[7] },
    { class_labels[8] : pred[8] },
    { class_labels[9] : pred[9] },
]

########## output ################
print(classification)
5,947
2,158
# coding: utf-8
# Author: Leo BRUNEL
# Contact: contact@leobrunel.com

# Python modules
from PyQt5 import QtWidgets, QtCore, QtGui
from PyQt5.QtCore import pyqtSignal
import logging

# Wizard modules
from wizard.core import assets
from wizard.core import project
from wizard.vars import ressources

# Wizard gui modules
from wizard.gui import gui_utils
from wizard.gui import gui_server

logger = logging.getLogger(__name__)


class destination_manager(QtWidgets.QWidget):
    """Window listing every reference that points at one export.

    Lets the user remove selected references or bump them to the latest
    export version.  Row contents are resolved off the UI thread by
    ``fill_thread`` and applied back through a Qt signal.
    """

    def __init__(self, export_id, parent=None):
        super(destination_manager, self).__init__(parent)
        self.setWindowIcon(QtGui.QIcon(ressources._wizard_ico_))
        self.setWindowTitle(f"Wizard - Destination manager")
        # reference_row id -> custom_target_item currently shown in the tree.
        self.references_ids = dict()
        self.export_id = export_id
        self.fill_thread = fill_thread(self)
        self.build_ui()
        self.connect_functions()
        self.refresh()

    def build_ui(self):
        """Create the header label, the two-column list view and the
        remove/update buttons."""
        self.setMinimumSize(QtCore.QSize(800,500))
        self.main_layout = QtWidgets.QVBoxLayout()
        self.main_layout.setContentsMargins(0,0,0,0)
        self.main_layout.setSpacing(0)
        self.setLayout(self.main_layout)
        self.header = QtWidgets.QWidget()
        self.header.setObjectName('transparent_widget')
        self.header_layout = QtWidgets.QHBoxLayout()
        self.header_layout.setSpacing(6)
        self.header.setLayout(self.header_layout)
        self.main_layout.addWidget(self.header)
        self.header_label = QtWidgets.QLabel()
        self.header_layout.addWidget(self.header_label)
        self.content_widget = QtWidgets.QWidget()
        self.content_widget.setObjectName('dark_widget')
        self.content_layout = QtWidgets.QVBoxLayout()
        self.content_layout.setSpacing(6)
        self.content_widget.setLayout(self.content_layout)
        self.main_layout.addWidget(self.content_widget)
        # Tree widget styled as a flat two-column list (no indentation).
        self.list_view = QtWidgets.QTreeWidget()
        self.list_view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        self.list_view.setObjectName('tree_as_list_widget')
        self.list_view.setColumnCount(2)
        self.list_view.setHeaderLabels(['Destination', 'Referenced version'])
        self.list_view.header().resizeSection(0, 450)
        self.list_view.setIndentation(0)
        self.list_view.setAlternatingRowColors(True)
        self.list_view.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
        self.content_layout.addWidget(self.list_view)
        self.buttons_widget = QtWidgets.QWidget()
        self.buttons_widget.setObjectName('transparent_widget')
        self.buttons_layout = QtWidgets.QHBoxLayout()
        self.buttons_layout.setContentsMargins(0,0,0,0)
        self.buttons_layout.setSpacing(6)
        self.buttons_widget.setLayout(self.buttons_layout)
        self.content_layout.addWidget(self.buttons_widget)
        # Spacer pushes the two action buttons to the right edge.
        self.buttons_layout.addSpacerItem(QtWidgets.QSpacerItem(0,0, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed))
        self.remove_selection_button = QtWidgets.QPushButton()
        gui_utils.application_tooltip(self.remove_selection_button, "Remove selected references")
        self.remove_selection_button.setFixedSize(35,35)
        self.remove_selection_button.setIconSize(QtCore.QSize(25,25))
        self.remove_selection_button.setIcon(QtGui.QIcon(ressources._tool_archive_))
        self.buttons_layout.addWidget(self.remove_selection_button)
        self.update_button = QtWidgets.QPushButton()
        gui_utils.application_tooltip(self.update_button, "Update selected references")
        self.update_button.setFixedSize(35,35)
        self.update_button.setIconSize(QtCore.QSize(25,25))
        self.update_button.setIcon(QtGui.QIcon(ressources._tool_update_))
        self.buttons_layout.addWidget(self.update_button)

    def connect_functions(self):
        """Wire the worker-thread signal and the two buttons."""
        self.fill_thread.data_signal.connect(self.update_reference)
        self.remove_selection_button.clicked.connect(self.remove_selection)
        self.update_button.clicked.connect(self.update_selection)

    def refresh(self):
        """Sync the tree with the project DB: add new reference rows, drop
        deleted ones, then hand the rows to the worker thread for display
        data resolution."""
        self.header_label.setText(assets.instance_to_string(('export', self.export_id)))
        reference_rows = project.get_references_by_export(self.export_id)
        project_references_id = []
        for reference_row in reference_rows:
            project_references_id.append(reference_row['id'])
            if reference_row['id'] not in self.references_ids.keys():
                target_item = custom_target_item(reference_row, self.list_view.invisibleRootItem())
                self.references_ids[reference_row['id']] = target_item
        # Iterate a snapshot of the keys because removal mutates the dict.
        references_list_ids = list(self.references_ids.keys())
        for reference_id in references_list_ids:
            if reference_id not in project_references_id:
                self.remove_reference_item(reference_id)
        self.fill_thread.update_reference_rows(self.export_id, reference_rows)

    def remove_reference_item(self, reference_id):
        """Detach and forget the tree item for *reference_id*, if shown."""
        if reference_id in self.references_ids.keys():
            item = self.references_ids[reference_id]
            self.list_view.invisibleRootItem().removeChild(item)
            del self.references_ids[reference_id]

    def remove_selection(self):
        """Delete every selected reference from the project, then ask the
        team UI to refresh."""
        selected_items = self.list_view.selectedItems()
        for selected_item in selected_items:
            project.remove_reference(selected_item.reference_row['id'])
        gui_server.refresh_team_ui()

    def update_selection(self):
        """Point every selected reference at its latest export version."""
        selected_items = self.list_view.selectedItems()
        for selected_item in selected_items:
            reference_id = selected_item.reference_row['id']
            assets.set_reference_last_version(reference_id)
        gui_server.refresh_team_ui()

    def update_reference(self, data_tuple):
        # Slot for fill_thread.data_signal:
        # (reference_id, destination string, version name, up_to_date flag).
        if data_tuple[0] in self.references_ids.keys():
            self.references_ids[data_tuple[0]].update(data_tuple)


class custom_target_item(QtWidgets.QTreeWidgetItem):
    """One row of the destination list; keeps its source DB row around so
    button handlers can read the reference id back."""

    def __init__(self, reference_row, parent=None):
        super(custom_target_item, self).__init__(parent)
        self.reference_row = reference_row
        bold_font=QtGui.QFont()
        bold_font.setBold(True)
        self.setFont(0, bold_font)

    def update(self, data_tuple):
        """Fill both columns; green version text when up to date, orange
        otherwise."""
        self.setText(0, data_tuple[1])
        self.setText(1, data_tuple[2])
        if data_tuple[3]:
            self.setForeground(1, QtGui.QBrush(QtGui.QColor('#9ce87b')))
        else:
            self.setForeground(1, QtGui.QBrush(QtGui.QColor('#f79360')))


class fill_thread(QtCore.QThread):
    """Resolves display strings for reference rows off the UI thread and
    emits them one by one through ``data_signal``."""

    # (reference_id, work_env string, version name, up_to_date int)
    data_signal = pyqtSignal(tuple)

    def __init__(self, parent = None):
        super(fill_thread, self).__init__(parent)
        self.export_id = None
        self.references_rows = []
        self.running = False

    def update_reference_rows(self, export_id, reference_rows):
        """Store the rows to process and (re)start the thread."""
        self.references_rows = reference_rows
        self.export_id = export_id
        self.running = True
        self.start()

    def run(self):
        if self.running:
            default_export_version_id = project.get_default_export_version(self.export_id, 'id')
            for reference_row in self.references_rows:
                work_env_string = assets.instance_to_string(('work_env', reference_row['work_env_id']))
                export_version_row = project.get_export_version_data(reference_row['export_version_id'])
                # up_to_date == 1 iff the reference already points at the
                # default (latest) export version.
                if default_export_version_id != export_version_row['id']:
                    up_to_date = 0
                else:
                    up_to_date = 1
                self.data_signal.emit((reference_row['id'], work_env_string, export_version_row['name'], up_to_date))
7,658
2,381
from .BaseAgent import BaseAgent
import pandas as pd
import numpy as np
from itertools import islice


class SixMonthCycle_Agent(BaseAgent):
    """Seasonal trading agent: buy in Nov-Apr, sell in May-Oct, with each
    action additionally gated by a MACD crossover signal.

    Args:
        window_size: length of the price memory kept by BaseAgent.
        small: fast EMA span for MACD.
        large: slow EMA span for MACD (also the lookback used for the signal).
        signal: EMA span of the MACD signal line.
        up: relative threshold above the signal line that triggers "buy".
        down: relative threshold below the signal line that triggers "sell".
    """

    def __init__(self, window_size, small, large, signal, up, down):
        super().__init__(window_size)
        self.up = up
        self.down = down
        self.large = large
        self.small = small
        self.signal = signal
        self.window_size = window_size

    def get_macd_signal(self):
        """Return "buy", "sell" or "hold" by comparing the latest MACD value
        against its signal line, with the up/down thresholds as hysteresis."""
        # Last `large` prices from the memory deque.
        memory_slice = list(islice(self.memory, self.window_size - self.large, self.window_size))
        # FIX: the original wrapped the data in pd.DataFrame twice; once is
        # enough and equivalent.
        df_memory = pd.DataFrame(memory_slice)
        df_macd = df_memory.ewm(span=self.small, adjust=False).mean() \
            - df_memory.ewm(span=self.large, adjust=False).mean()
        signal = df_macd.ewm(span=self.signal, adjust=False).mean()[0][self.large - 1]
        macd = df_macd[0][self.large - 1]
        if macd >= (1 + self.up) * signal:
            return "buy"
        elif macd <= (1 - self.down) * signal:
            return "sell"
        else:
            return "hold"

    def step(self, price, date):
        """Record *price* and return an action for *date* ("YYYY-MM-DD"):
        1 = buy, -1 = sell, 0 = hold."""
        self.memory.append(price)
        if len(self.memory) < self.window_size:
            return 0
        month = int(date.split("-")[1])
        macd_signal = self.get_macd_signal()
        # BUG FIX: the original condition was
        #   month > 10 or month < 5 and macd_signal == "buy"
        # where `and` binds tighter than `or`, so Nov/Dec always bought
        # regardless of the MACD signal.  The seasonal window must be
        # parenthesized.
        # Buy in the November-April window.
        if (month > 10 or month < 5) and macd_signal == "buy":
            return 1
        # Sell in the May-October window.
        if (4 < month < 11) and macd_signal == "sell":
            return -1
        # Hold otherwise.
        return 0
1,591
518
# Read two timestamps expressed in minutes and print their difference as
# (hours, remaining minutes).  Values stay floats, matching the original
# output format exactly.
first = float(input())
second = float(input())

# FIX: the original bound the results to names `max` and `min`, shadowing
# the builtins; use descriptive names instead.
later = max(first, second)
earlier = min(first, second)

time = later - earlier
print(time // 60, time % 60)
116
54
import tensorflow as tf
import matplotlib.pyplot as plt

# NOTE(review): this uses the TensorFlow 1.x graph/session API
# (tf.placeholder, tf.Session); it will not run on TF 2.x without
# tf.compat.v1 — confirm the pinned tensorflow version.

# Toy dataset where y = x, so the optimal weight is W = 1.
X = [1, 2, 3]
Y = [1, 2, 3]

# Scalar weight fed manually for each point of the sweep below.
W = tf.placeholder(tf.float32)

# Bias-free linear model and its mean-squared-error cost.
hypothesis = X * W
cost = tf.reduce_mean(tf.square(hypothesis - Y))

sess = tf.Session()
sess.run(tf.global_variables_initializer())

# Sweep W over [-3.0, 4.9] in 0.1 steps and record the cost curve.
W_val = []
cost_val = []
for i in range(-30, 50):
    feed_W = i * 0.1
    curr_cost, curr_W = sess.run([cost, W], feed_dict={W: feed_W})
    W_val.append(curr_W)
    cost_val.append(curr_cost)

# Plot cost vs. W; the parabola's minimum should sit at W = 1.
plt.plot(W_val, cost_val)
plt.show()
485
219
""" .. module:: test_multisampling :synopsis: Test multisampling strategy .. moduleauthor:: David Eriksson <dme65@cornell.edu> """ from pySOT import Ackley, CandidateDYCORS, GeneticAlgorithm, \ MultiStartGradient, SyncStrategyNoConstraints, \ RBFInterpolant, CubicKernel, LinearTail, \ SymmetricLatinHypercube, MultiSampling from poap.controller import SerialController import numpy as np import os.path import logging def main(): if not os.path.exists("./logfiles"): os.makedirs("logfiles") if os.path.exists("./logfiles/test_multisampling.log"): os.remove("./logfiles/test_multisampling.log") logging.basicConfig(filename="./logfiles/test_multisampling.log", level=logging.INFO) print("\nNumber of threads: 1") print("Maximum number of evaluations: 500") print("Sampling method: CandidateDYCORS, Genetic Algorithm, Multi-Start Gradient") print("Experimental design: Latin Hypercube") print("Surrogate: Cubic RBF") nthreads = 1 maxeval = 500 nsamples = nthreads data = Ackley(dim=10) print(data.info) # Create a strategy and a controller sampling_method = [CandidateDYCORS(data=data, numcand=100*data.dim), GeneticAlgorithm(data=data), MultiStartGradient(data=data)] controller = SerialController(data.objfunction) controller.strategy = \ SyncStrategyNoConstraints( worker_id=0, data=data, maxeval=maxeval, nsamples=nsamples, response_surface=RBFInterpolant(kernel=CubicKernel, tail=LinearTail, maxp=maxeval), exp_design=SymmetricLatinHypercube(dim=data.dim, npts=2*(data.dim + 1)), sampling_method=MultiSampling(sampling_method, [0, 1, 0, 2])) result = controller.run() best, xbest = result.value, result.params[0] print('Best value: {0}'.format(best)) print('Best solution: {0}\n'.format( np.array_str(xbest, max_line_width=np.inf, precision=5, suppress_small=True))) if __name__ == '__main__': main()
2,078
689
import torch
import argparse

from selfplaylab.game.go import CaptureGoState, PixelCaptureGoState, GoState
from selfplaylab.game.gomoku import GoMokuState, GoMokuStateAugmented, TicTacToe, TicTacToeAugmented
from selfplaylab.game.nim import NimState
from selfplaylab.game.othello import OthelloState
from selfplaylab.play import play_game

parser = argparse.ArgumentParser(description="Self-play visualization.")
parser.add_argument("--game", type=str, help="Game to play")
parser.add_argument("--tag", type=str, help="Tag for experiment", default="")
args = parser.parse_args()
game = args.game

# Map the --game shorthand to its state class; unknown values are rejected.
_GAME_CLASSES = {
    "cg": CaptureGoState,
    "pxcg": PixelCaptureGoState,
    "nim": NimState,
    "oth": OthelloState,
}
if game not in _GAME_CLASSES:
    raise Exception("unknown game")
game_class = _GAME_CLASSES[game]

net = game_class.create_net(tag=args.tag)
options = {}
print(f"Loaded net {net.metadata['filename']} on cuda? {net.device}")


def temp_fn(mv):
    # Exploratory temperature for the first two moves, near-greedy after.
    return 1.0 if mv < 2 else 0.1


# Inference only: no gradients needed during self-play.
with torch.no_grad():
    game_states = play_game(
        net_evaluator=net.evaluate_sample,
        game_class=game_class,
        temperature=temp_fn,
        verbose=True,
    )
1,172
399
import re def try_parse_int(s, base=10, val=None): try: return int(s, base) except ValueError: return val def test_version(): """Test version string""" from coco2customvision import __version__ version_parts = re.split("[.-]", __version__) if __version__ != "UNKNOWN": assert 3 <= len(version_parts), "must have at least Major.minor.patch" assert all( not try_parse_int(i) is None for i in version_parts[:2] ), f"Version Major.minor must be 2 integers. Received {__version__}"
558
182
# -*- coding: utf-8 -*-
from Pages.PageObject import PageObject
import time


class ITProPage(PageObject):
    """Page object for the ITpro site; handles the popup window opened by
    clicking the article picture."""

    # Window handles of the original tab and the popup tab.
    firstHandle = ""
    secondHandle = ""

    def __init__(self, driver):
        PageObject.__init__(self, driver)

    def click_picture(self):
        """Click the article image, switch to the popup window it opens and
        wait for the popup's image to load.  Returns self for chaining."""
        self.firstHandle = self.driver.window_handles[0]
        picture =\
            self.waiting_element_by_xpath("//img[@alt=\"小江戸らぐ\"]")
        #self.driver.save_screenshot("C:\\home\\hirofumi\\koedo\\a.jpg")
        self.click(picture)
        # The popup is whichever handle is not the original window.
        for handle in self.driver.window_handles:
            if handle != self.firstHandle:
                self.secondHandle = handle
        # FIX: driver.switch_to_window() is deprecated and removed in
        # Selenium 4; switch_to.window() is the supported API.
        self.driver.switch_to.window(self.secondHandle)
        picture =\
            self.waiting_element_by_xpath("//img[@src=\"koedlug.jpg\"]")
        time.sleep(5)
        return self

    def quit(self):
        """Close the popup, return to the original window and quit the
        driver."""
        self.driver.switch_to.window(self.secondHandle)
        self.driver.close()
        self.driver.switch_to.window(self.firstHandle)
        self.driver.quit()

    def click_PC_button(self):
        """Click the 'PC' navigation button.  Returns self for chaining."""
        PC_button =\
            self.waiting_element_by_xpath("//img[@src=\"/images/n/itpro/2010/leaf/btn_pc.gif\"]")
        self.click(PC_button)
        return self
1,266
421
import requests
from bs4 import BeautifulSoup

# Fetch page 2 (80 items per page) of Naver Shopping results for "노트북".
response = requests.get('https://search.shopping.naver.com/search/all?pagingIndex=2&pagingSize=80&query=노트북')
soup = BeautifulSoup(response.text, "html.parser")

list_box = soup.find("ul", {"class": "list_basis"})
notebook_items = list_box.find_all('li', {"class": "basicList_item__2XT81"})

# One {'title': ..., 'price': ...} record per product card.
result = [
    {
        'title': item.find("div", {"class": "basicList_title__3P9Q7"}).find("a").string,
        'price': item.find("div", {"class": "basicList_price_area__1UXXR"})
                     .find("span", {"class": "price_num__2WUXn"}).text,
    }
    for item in notebook_items
]
print(result)

# notebook_list = soup.select_one('#__next > div > div.style_container__1YjHN > div.style_inner__18zZX > div.style_content_wrap__1PzEo > div.style_content__2T20F > ul > div > div:nth-child(1) > li > div > div.basicList_info_area__17Xyo > div.basicList_title__3P9Q7 > a').string
notebook_list = soup.select_one('#__next > div > div.style_container__1YjHN > div.style_inner__18zZX > div.style_content_wrap__1PzEo > div.style_content__2T20F > ul > div > div:nth-child(1) > li > div > div.basicList_info_area__17Xyo > div.basicList_title__3P9Q7 > a')
1,328
528
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Placeholder -- must be replaced with a real Quandl API key before running.
QUANDLKEY = '<Enter your Quandl APT key here>'
"""
Created on Fri Oct 5 23:24:35 2018

@author: jeff
"""
'''*************************************
#1. Import libraries and define key variables
'''
import pandas as pd
import numpy as np
import quandl
import matplotlib.pyplot as plt
from sklearn.metrics import classification_report,roc_curve, auc,confusion_matrix,f1_score
from sklearn.model_selection import train_test_split
from sklearn import tree
from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler
import pickle
import graphviz

#KPI keys
quandl.ApiConfig.api_key = QUANDLKEY

'''*************************************
#2. Definition of functions
'''
#2a.Download tickers
def download_tkr(tkr):
    """Download yearly financials (SHARADAR/SF1, MRY) and corporate events
    (SHARADAR/EVENTS) for one ticker, and label each financial year with
    whether an event (code '21') occurred in the FOLLOWING year.

    Returns (combined_pd, X, Y) where X are the financial feature columns and
    Y is the binarized next-year event indicator.
    """
    record_db_events_gp = pd.DataFrame()
    record_db_financials=quandl.get_table('SHARADAR/SF1', calendardate={'gte': '2008-12-31'}, ticker=tkr, dimension='MRY')
    record_db_financials['year'] = record_db_financials['reportperiod'].dt.year
    # year_1 = the NEXT year; used to join events that happen one year after the report
    record_db_financials['year_1'] = record_db_financials['year']+1
    record_db_events=quandl.get_table('SHARADAR/EVENTS', ticker=tkr)
    # keep only events whose code list contains '21'
    tmp_series = record_db_events['eventcodes'].str.contains('21')
    record_db_events= record_db_events[tmp_series]
    record_db_events['year'] = record_db_events.date.dt.year
    record_db_events= record_db_events.drop(['date'],axis=1)
    # count events per (ticker, year)
    record_db_events_gp = record_db_events.groupby(['ticker','year'],as_index=False).count()
    combined_pd = pd.merge(record_db_financials,record_db_events_gp,how ='left',left_on='year_1',right_on='year')
    #convert all events to 1 and NaN
    combined_pd.loc[combined_pd['eventcodes']>1,'eventcodes'] = 1
    # NOTE(review): X is sliced from record_db_financials while Y comes from the
    # merged frame -- assumes the merge preserves row order/count; verify.
    X = record_db_financials.iloc[:,6:-5]
    Y = combined_pd.iloc[:,-1]
    return combined_pd, X, Y

#tkr = 'AMZN'
#df_tmp = download_tkr(tkr)

#2b.Train tree
def train_tree(X,Y,ind):
    """Train a decision tree classifier for industry *ind*.

    Saves an ROC plot ('<ind>_DT.png'), prints the confusion matrix and
    classification report, pickles the model and its scaler, and returns
    (tree_clf, weighted_f1).
    """
    print('Decision Tree')
    #split the dataset into training set and testing set
    X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=.33, random_state=0)
    # minimum leaf size = 1% of the training set, to limit overfitting
    min_leaf_size = int(len(X_train) * 0.01)
    tree_clf = tree.DecisionTreeClassifier(min_samples_leaf=min_leaf_size)
    #preprocessing the data
    scaler = StandardScaler()
    scaler.fit(X_train)
    X_train = scaler.transform(X_train)
    X_test = scaler.transform(X_test)
    #fit the training data to the model
    tree_clf.fit(X_train,Y_train)
    ##metric 1: roc
    Y_score_tree = tree_clf.predict(X_test)
    fpr, tpr, thresholds = roc_curve(Y_test,Y_score_tree, pos_label=1)
    roc_auc = auc(fpr,tpr)
    lw=2
    plt.figure()
    plt.plot(fpr,tpr,color='darkorange',lw=lw,label='ROC curve (area = %0.2f)' %roc_auc)
    plt.plot([0,1],[0,1],color='navy',lw=lw,linestyle='--')
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.title('Receiver operating characteristic - Decision Tree '+ind)
    plt.legend(loc="lower right")
    plt.savefig(ind+'_DT.png')
    ##metric 2: Confusion matrix
    Y_pred_tree = tree_clf.predict(X_test)
    confusion_matrix_tree = confusion_matrix(Y_test, Y_pred_tree)
    print(confusion_matrix_tree)
    print(classification_report(Y_test, Y_pred_tree))
    #common standard to compare across models
    f1_clf = f1_score(Y_test, Y_pred_tree, average='weighted')
    ##save model
    f_tree = open(ind+'_tree_clf.pkl',"wb+")
    pickle.dump(tree_clf, f_tree)
    f_tree.close()
    f_tree_sc = open(ind+'_tree_scaler.pkl',"wb+")
    pickle.dump(scaler, f_tree_sc)
    f_tree_sc.close()
    return tree_clf,f1_clf

##2C Neural Network
#2Ci. Grid search that simulate the performance of different neural network design
def grid_search(X_train,X_test, Y_train,Y_test,num_training_sample):
    """Exhaustively try MLP architectures (depth 1-4, uniform layer width 1-7)
    and return the (list, tuple) hidden-layer spec with the best weighted F1
    on the test split. Architectures with more neurons than training samples
    are pruned.
    """
    best_f1 = 0
    best_hidden_layers_list = []
    best_hidden_layers_tuple = ()
    #various depth
    for depth in range(1,5):
        print('Depth = '+str(depth))
        for layer_size in range(1,8):
            neuron_cnt = 0
            hidden_layers_list = []
            i = 0
            # build a uniform architecture: `depth` layers of `layer_size` neurons
            while i<depth:
                hidden_layers_list.append(layer_size)
                neuron_cnt += layer_size
                i+=1
            #pruning - to avoid over-training
            if num_training_sample<neuron_cnt:
                break
            hidden_layers_tuple = tuple(hidden_layers_list)
            nn_clf = MLPClassifier(alpha=1e-5, hidden_layer_sizes=hidden_layers_tuple, random_state=1)
            nn_clf.fit(X_train,Y_train)
            Y_pred = nn_clf.predict(X_test)
            temp_f1 = f1_score(Y_test, Y_pred, average='weighted')
            if temp_f1 > best_f1:
                best_f1 = temp_f1
                best_hidden_layers_list = hidden_layers_list
                best_hidden_layers_tuple = hidden_layers_tuple
    print(best_hidden_layers_list)
    return best_hidden_layers_list,best_hidden_layers_tuple

#2Cii. Train Neural Network
def train_NN(X,Y,ind):
    """Train an MLP classifier for industry *ind* using grid_search() to pick
    the architecture. Saves an ROC plot ('<ind>_NN.png'), prints evaluation
    metrics, pickles the model and scaler, and returns (nn_clf, weighted_f1).
    """
    print('Neural Network')
    #split the dataset into training set and testing set
    X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=.33, random_state=0)
    #preprocessing the data
    scaler = StandardScaler()
    scaler.fit(X_train)
    X_train = scaler.transform(X_train)
    X_test = scaler.transform(X_test)
    num_training_sample = len(X_train)
    best_hidden_layers_list,best_hidden_layers_tuple = grid_search(X_train, X_test, Y_train, Y_test,num_training_sample)
    nn_clf = MLPClassifier(alpha=1e-5, hidden_layer_sizes=best_hidden_layers_tuple, random_state=1)
    #fit the training data to the model
    nn_clf.fit(X_train,Y_train)
    ##metric 1: roc
    Y_score_nn = nn_clf.predict(X_test)
    fpr, tpr, thresholds = roc_curve(Y_test,Y_score_nn, pos_label=1)
    roc_auc = auc(fpr,tpr)
    lw=2
    plt.figure()
    plt.plot(fpr,tpr,color='darkorange',lw=lw,label='ROC curve (area = %0.2f)' %roc_auc)
    plt.plot([0,1],[0,1],color='navy',lw=lw,linestyle='--')
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.title('Receiver operating characteristic - Neural Network '+ind)
    plt.legend(loc="lower right")
    #plt.show()
    plt.savefig(ind+'_NN.png')
    ##metric 2: Confusion matrix
    Y_pred_tree = nn_clf.predict(X_test)
    confusion_matrix_tree = confusion_matrix(Y_test, Y_pred_tree)
    print(confusion_matrix_tree)
    print(classification_report(Y_test, Y_pred_tree))
    #common standard to compare across models
    #f1_clf = f1_score(Y_test, Y_score_nn, average='binary')
    f1_clf = f1_score(Y_test, Y_score_nn, average='weighted')
    ##save model
    f_nn = open(ind+'_nn_clf_.pkl',"wb+")
    pickle.dump(nn_clf, f_nn)
    f_nn.close()
    f_nn_sc = open(ind+'_nn_scaler.pkl',"wb+")
    pickle.dump(scaler, f_nn_sc)
    f_nn_sc.close()
    return nn_clf, f1_clf

'''*************************************
3. Execute the program
#3a. filter the industry in scope
'''
# Only industries with at least `min_size` tickers are modeled.
groupby_fld = 'sicsector'
min_size = 30
df_tkr = pd.read_csv('industry_tickers_list.csv')
dict_ind_tkr = {}
f1_list = []

df_tkr_ind = pd.DataFrame()
df_tkr_ind['cnt'] = df_tkr.groupby(groupby_fld)['ticker'].count()
df_tkr_ind_select = df_tkr_ind[df_tkr_ind['cnt']>=min_size]
list_scope = list(df_tkr_ind_select.index)

#collect ticker in each industry
for index, row in df_tkr.iterrows():
    ind = row[groupby_fld]
    tkr = row['ticker']
    if ind in list_scope:
        if ind in dict_ind_tkr:
            dict_ind_tkr[ind].append(tkr)
        else:
            dict_ind_tkr[ind] = [tkr]

#loop through the dictionary - one industry at a time
for ind, list_tkr in dict_ind_tkr.items():
    df_X = pd.DataFrame({})
    df_Y = pd.DataFrame({})
    print(ind)
    #Go through the ticker list to Download data from source
    #loop through tickers from that industry
    for tkr in list_tkr:
        print(tkr)
        try:
            df_tmp,X_tmp,Y_tmp = download_tkr(tkr)
        except Exception:
            # skip tickers whose download/labeling fails
            continue
        if len(df_X)==0:
            #df_all = df_tmp
            df_X = X_tmp
            df_Y = Y_tmp
        else:
            #df_all = pd.concat([df_all,df_tmp])
            df_X = pd.concat([df_X,X_tmp])
            df_Y = pd.concat([df_Y,Y_tmp])
    '''
    *************************************
    3b. prepare features for clustering for the industry
    '''
    #convert to float and calc the difference across rows
    df_X = df_X.astype(float)
    df_Y = df_Y.astype(float)
    #remove zero records
    df_X = df_X.replace([np.inf ], 999999999)
    df_X = df_X.fillna(0)
    df_Y = df_Y.fillna(0)

    #neural network
    nn_clf,f1_score_temp = train_NN(df_X,df_Y,ind)
    f1_list.append(f1_score_temp)
    nn_clf.get_params()

    #decision tree
    try:
        tree_clf,f1_score_temp = train_tree(df_X,df_Y,ind)
    except Exception:
        continue
    f1_list.append(f1_score_temp)
    tree_clf.get_params()

    '''
    #3c. Visualize the result
    '''
    # NOTE(review): df_tmp here is whatever the LAST successful download left
    # behind -- presumably representative of the industry's columns; verify.
    fields_list = df_tmp.columns
    print('********************')
    print('f1 of the models')
    print(f1_list)
    print('********************')

    #for visualization of decision tree
    x_feature_name = fields_list[6:-8]
    y_target_name = fields_list[-1]
    d_tree_out_file = 'decision_tree_'+ind
    dot_data = tree.export_graphviz(tree_clf, out_file=None,
                                    feature_names=x_feature_name,
                                    class_names=y_target_name,
                                    filled=True, rounded=True,
                                    special_characters=True)
    graph = graphviz.Source(dot_data)
    graph.render(d_tree_out_file)
9,891
3,685
import random  # kept for backward compatibility; generation now uses `secrets`
import secrets

uppercase_letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
lowercase_letters = "abcdefghijklmnopqrstuvwxyz"
digits = "0123456789"
symbols = "!@#$%^&*()_+-=[]{}|;':,./<>?"

# Toggles for which character classes to include.
upper, lower, digit, symbol = True, True, True, True

# Renamed from `all`, which shadowed the builtin.
charset = ""
if upper:
    charset += uppercase_letters
if lower:
    charset += lowercase_letters
if digit:
    charset += digits
if symbol:
    charset += symbols


def generate_password(length, charset):
    """Return a random password of *length* characters drawn from *charset*.

    Uses secrets.choice (CSPRNG) instead of random.sample: `random` is not
    cryptographically secure, and sample() never repeats a character, which
    both caps length at len(charset) and shrinks the attacker's search space.
    """
    return "".join(secrets.choice(charset) for _ in range(length))


length = 20
amount = 10

for _ in range(amount):
    print(generate_password(length, charset))
530
228
import sqlite3
import pandas
import itertools
import networkx as nx

from gtfspy.gtfs import GTFS
from gtfspy.util import timeit
from scripts.all_to_all_settings import *


def attach_database(conn, other_db_path, name="other"):
    """ATTACH the sqlite database at *other_db_path* to *conn* under schema *name*."""
    cur = conn.cursor()
    cur.execute("ATTACH '%s' AS '%s'" % (str(other_db_path), name))
    cur.execute("PRAGMA database_list")
    print("other database attached:", cur.fetchall())
    return conn


"""
AllToAllDifferenceAnalyzer calculates the difference between various summary statistics of temporal distance and
number of boardings, stores the values in a database and handles calls to this database.
"""


def stops_to_exclude(return_sqlite_list=False):
    """Return stops whose leading zone digit is > 4 (outside the study area).

    :param return_sqlite_list: if True, return "(id1,id2,...)" suitable for
        splicing into an SQL IN (...) clause; otherwise return the DataFrame.
    """
    gtfs_lm = GTFS(LM_DICT["gtfs_dir"])
    areas_to_remove = gtfs_lm.execute_custom_query_pandas(
        "SELECT * FROM stops WHERE CASE WHEN substr(stop_id,1, 5) = '__b__' THEN CAST(substr(stop_id,6, 1) AS integer) ELSE CAST(substr(stop_id,1, 1) AS integer) END >4")
    if return_sqlite_list:
        return "(" + ",".join([str(x) for x in areas_to_remove["stop_I"].tolist()]) + ")"
    return areas_to_remove


class AllToAllDifferenceAnalyzer:
    def __init__(self, gtfs_path, before_db_path, after_db_path, output_db):
        self.gtfs = GTFS(gtfs_path)
        print(output_db)
        self._create_indecies(before_db_path)
        self._create_indecies(after_db_path)
        self.conn = sqlite3.connect(output_db)
        self.conn = attach_database(self.conn, before_db_path, name="before")
        self.conn = attach_database(self.conn, after_db_path, name="after")

    def _create_indecies(self, db_path):
        """Ensure from_stop_I/to_stop_I indexes exist on the measure tables of *db_path*."""
        conn = sqlite3.connect(db_path)
        cur = conn.cursor()
        for table in ["journey_duration", "n_boardings", "temporal_distance"]:
            query = """CREATE INDEX IF NOT EXISTS %s_from_stop_I_idx ON %s (from_stop_I);
            CREATE INDEX IF NOT EXISTS %s_to_stop_I_idx ON %s (to_stop_I);""" % (table, table, table, table)
            # BUG FIX: the statements were built but never executed before;
            # executescript() is needed because the string holds two statements.
            cur.executescript(query)
            conn.commit()
        conn.close()

    def diff_table(self, groupby="to_stop_I", measure="temporal_distance", ignore_stops=None):
        """
        Creates a table with the before-after difference of mean, min and max temporal distance or number of
        boardings on a stop to stop basis
        :param groupby: "to_stop_I" or "from_stop_I"
        :param measure: "temporal_distance", "n_boardings" or "journey_duration"
        :param ignore_stops: optional "(id1,id2,...)" string of stop_Is to exclude
        :return:
        """
        cur = self.conn.cursor()
        query = """DROP TABLE IF EXISTS diff_{groupby}_{measure}""".format(measure=measure, groupby=groupby)
        cur.execute(query)

        # Defaults are for n_boardings (change thresholds of 0.5/1/1.5 boardings).
        multiplier = 1
        first = 0.5
        second = 1
        third = 1.5
        threshold = 10800  # threshold for change in mean temporal distance
        # BUG FIX: was `if measure == "temporal_distance" or "journey_duration":`,
        # which is always true and applied the minute-based thresholds to
        # n_boardings as well.
        if measure in ("temporal_distance", "journey_duration"):
            multiplier = 60  # thresholds below are minutes; data is in seconds
            first = 5
            second = 10
            third = 20

        first_str = str(first).replace(".", "_")
        second_str = str(second).replace(".", "_")
        third_str = str(third).replace(".", "_")

        if ignore_stops:
            ignore_stops = " AND t1.to_stop_I NOT IN " + ignore_stops + " AND t1.from_stop_I NOT IN " + ignore_stops
        else:
            ignore_stops = ""

        query = """CREATE TABLE IF NOT EXISTS diff_{groupby}_{measure} (
                       {groupby} INT,
                       min_diff_mean REAL,
                       mean_diff_mean REAL,
                       max_diff_mean REAL,
                       incr_count_over_{0} INT,
                       incr_count_over_{1} INT,
                       incr_count_over_{2} INT,
                       decr_count_over_{0} INT,
                       decr_count_over_{1} INT,
                       decr_count_over_{2} INT
                   )""".format(first_str, second_str, third_str, measure=measure, groupby=groupby)
        cur.execute(query)

        query = """INSERT OR REPLACE INTO diff_{groupby}_{measure}
                   ({groupby}, min_diff_mean, mean_diff_mean, max_diff_mean,
                    incr_count_over_{first_str}, incr_count_over_{second_str}, incr_count_over_{third_str},
                    decr_count_over_{first_str}, decr_count_over_{second_str}, decr_count_over_{third_str})
                   SELECT {groupby},
                          min(diff_mean) AS min_diff_mean,
                          avg(diff_mean) AS mean_diff_mean,
                          max(diff_mean) AS max_diff_mean,
                          sum(CASE WHEN diff_mean >= {0}*{multiplier} THEN 1 ELSE 0 END) AS incr_count_over_{first_str},
                          sum(CASE WHEN diff_mean >= {1}*{multiplier} THEN 1 ELSE 0 END) AS incr_count_over_{second_str},
                          sum(CASE WHEN diff_mean >= {2}*{multiplier} THEN 1 ELSE 0 END) AS incr_count_over_{third_str},
                          sum(CASE WHEN diff_mean <= -{0}*{multiplier} THEN 1 ELSE 0 END) AS decr_count_over_{first_str},
                          sum(CASE WHEN diff_mean <= -{1}*{multiplier} THEN 1 ELSE 0 END) AS decr_count_over_{second_str},
                          sum(CASE WHEN diff_mean <= -{2}*{multiplier} THEN 1 ELSE 0 END) AS decr_count_over_{third_str}
                   FROM (SELECT t1.from_stop_I AS from_stop_I,
                                t1.to_stop_I AS to_stop_I,
                                t2.mean-t1.mean AS diff_mean
                         FROM before.{measure} AS t1, after.{measure} AS t2
                         WHERE t1.from_stop_I = t2.from_stop_I AND t1.to_stop_I = t2.to_stop_I {ignore_stops}
                         AND abs(t2.mean-t1.mean) < {threshold}) q1
                   GROUP BY {groupby}""".format(first, second, third,
                                                first_str=first_str, second_str=second_str, third_str=third_str,
                                                measure=measure, groupby=groupby, multiplier=multiplier,
                                                threshold=threshold, ignore_stops=ignore_stops)
        cur.execute(query)
        self.conn.commit()

    def get_mean_change_for_all_targets(self, groupby="to_stop_I", measure="temporal_distance", ignore_stops=None):
        """
        Returns pre generated differences table as pandas DataFrame
        :param groupby: "to_stop_I" or "from_stop_I" designating if calculating the measure to the target or
        from the target
        :param measure: "temporal_distance", "n_boardings",
        :return:
        """
        # NOTE(review): `ignore_stops` is accepted but currently has no effect
        # (filtering logic was dead code trapped inside the docstring); the
        # pre-generated diff table is returned as-is -- confirm intended use.
        query = """SELECT * FROM diff_{groupby}_{measure}""".format(measure=measure, groupby=groupby)
        print("running query")
        df = pandas.read_sql_query(query, self.conn)
        df = self.gtfs.add_coordinates_to_df(df, stop_id_column=groupby, lat_name="lat", lon_name="lon")
        if measure == "temporal_distance":
            # convert seconds -> minutes for presentation
            df["mean_diff_mean"] = df["mean_diff_mean"].apply(lambda x: x / 60)
        return df

    def extreme_change_od_pairs(self, threshold):
        """
        Returns O-D pairs where the absolute change is larger than the threshold.
        Returns increase in travel time with positive thresholds and decrease in travel time with
        negative thresholds
        :param threshold: int
        :return: Pandas DataFrame
        """
        if threshold < 0:
            string_to_add = " <= " + str(threshold)
        else:
            string_to_add = " >= " + str(threshold)
        query = """SELECT t1.from_stop_I AS from_stop_I, t1.to_stop_I AS to_stop_I, t2.mean-t1.mean AS diff_mean
                   FROM before.temporal_distance AS t1, after.temporal_distance AS t2
                   WHERE t1.from_stop_I = t2.from_stop_I AND t1.to_stop_I = t2.to_stop_I
                   AND t2.mean-t1.mean %s AND t2.mean-t1.mean < 10800""" % (string_to_add,)
        df = pandas.read_sql_query(query, self.conn)
        return df

    def get_global_mean_change(self, measure, threshold=10800, ignore_stops=False):
        """Return the global (all-pairs) mean of *measure* before and after, and their difference."""
        # BUG FIX: when ignore_stops was False the empty string produced
        # invalid SQL ("NOT IN  AND"); "()" is a valid empty set in SQLite,
        # so "NOT IN ()" excludes nothing.
        ignore_list = "()"
        if ignore_stops:
            ignore_list = stops_to_exclude(return_sqlite_list=True)
        query = """SELECT before_global_mean, after_global_mean,
                          after_global_mean-before_global_mean AS global_mean_difference
                   FROM (SELECT avg(mean) AS before_global_mean FROM before.{measure}
                         WHERE mean <= {threshold} AND mean >0
                         AND from_stop_I NOT IN {ignore_stops} AND to_stop_I NOT IN {ignore_stops}) t1,
                        (SELECT avg(mean) AS after_global_mean FROM after.{measure}
                         WHERE mean <= {threshold} AND mean >0
                         AND from_stop_I NOT IN {ignore_stops} AND to_stop_I NOT IN {ignore_stops}) t2
                """.format(measure=measure, threshold=threshold, ignore_stops=ignore_list)
        df = pandas.read_sql_query(query, self.conn)
        return df

    @timeit
    def get_rows_with_abs_change_greater_than_n(self, stops, measure, n, sign, unit="s"):
        """Return before/after/diff columns for O-D pairs whose change satisfies `diff {sign} {n}`,
        excluding the given *stops* as origins/destinations."""
        stops = ",".join([str(x) for x in stops])
        divisors = {"s": 1, "m": 60, "h": 3600}
        divisor = divisors[unit]
        query = """SELECT t1.{measure}/{divisor} AS before_{measure},
                          t2.{measure}/{divisor} AS after_{measure},
                          (t2.{measure}-t1.{measure})/{divisor} AS diff_{measure}
                   FROM before.temporal_distance AS t1, after.temporal_distance AS t2
                   WHERE t1.from_stop_I != t1.to_stop_I
                   AND t1.from_stop_I = t2.from_stop_I AND t1.to_stop_I = t2.to_stop_I
                   AND t1.from_stop_I NOT IN ({stops}) AND t2.to_stop_I NOT IN ({stops})
                   AND t2.{measure}-t1.{measure} {sign} {n}""".format(measure=measure, divisor=divisor,
                                                                      stops=stops, n=n, sign=sign)
        df = pandas.read_sql_query(query, self.conn)
        return df

    @timeit
    def get_rows_based_on_stop_list(self, from_stops, to_stops, measure, measure_mode, unit="s"):
        """
        :param from_stops: list
        :param to_stops: list
        :param measure: string (mean, min, max, median)
        :param unit: string
        :param measure_mode: string
        :return:
        """
        assert measure_mode in ["n_boardings", "temporal_distance"]
        from_stops = ",".join([str(x) for x in from_stops])
        to_stops = ",".join([str(x) for x in to_stops])
        divisors = {"s": 1, "m": 60, "h": 3600}
        divisor = divisors[unit]
        query = """SELECT t1.{measure}/{divisor} AS before_{measure},
                          t2.{measure}/{divisor} AS after_{measure},
                          (t2.{measure}-t1.{measure})/{divisor} AS diff_{measure}
                   FROM before.{mode} AS t1, after.{mode} AS t2
                   WHERE t1.from_stop_I != t1.to_stop_I
                   AND t1.from_stop_I = t2.from_stop_I AND t1.to_stop_I = t2.to_stop_I
                   AND t1.from_stop_I IN ({from_stops}) AND t2.to_stop_I IN ({to_stops})""".format(
            measure=measure, mode=measure_mode, divisor=divisor, from_stops=from_stops, to_stops=to_stops)
        df = pandas.read_sql_query(query, self.conn)
        return df

    def get_data_for_target(self, target, measure, direction="to", threshold=10800, unit="s", ignore_stops=False):
        """Return before/after/diff (absolute and relative) of *measure* for all
        O-D pairs to or from a single *target* stop."""
        divisors = {"s": 1, "m": 60, "h": 3600}
        divisor = divisors[unit]
        ignore_list = ""
        if ignore_stops:
            ignore_list = stops_to_exclude(return_sqlite_list=True)
            ignore_list = " AND t1.from_stop_I NOT IN {ignore_list} AND t1.to_stop_I NOT IN {ignore_list}".format(
                ignore_list=ignore_list)
        query = """SELECT t1.from_stop_I, t1.to_stop_I,
                          t1.mean/{divisor} AS before_mean,
                          t2.mean/{divisor} AS after_mean,
                          (t2.mean-t1.mean)/{divisor} AS diff_mean,
                          COALESCE((t2.mean/t1.mean)- 1, 0) AS diff_mean_relative
                   FROM before.{measure} t1, after.{measure} t2
                   WHERE t1.from_stop_I=t2.from_stop_I AND t1.to_stop_I=t2.to_stop_I
                   AND t1.mean <= {threshold} AND t2.mean <= {threshold}
                   AND t1.{direction}_stop_I={target} {ignore_list}""".format(measure=measure, target=target,
                                                                              direction=direction,
                                                                              threshold=threshold,
                                                                              divisor=divisor,
                                                                              ignore_list=ignore_list)
        df = pandas.read_sql_query(query, self.conn)
        return df

    def get_mean_change(self, measure, threshold=10800, descening_order=False, include_list=None):
        """Per-target mean of *measure* before, and its before->after change,
        ordered by change. NOTE: include_list is required (a list of stop_Is)."""
        if descening_order:
            order_by = "DESC"
        else:
            order_by = "ASC"
        include_list = "(" + ",".join([str(x) for x in include_list]) + ")"
        query = """SELECT t1.to_stop_I, t2.mean AS before, t2.mean-t1.mean AS diff_mean
                   FROM (SELECT to_stop_I, avg(mean) AS mean FROM before.{measure}
                         WHERE mean <= {threshold} AND to_stop_I IN {include_list} GROUP BY to_stop_I) t1,
                        (SELECT to_stop_I, avg(mean) AS mean FROM after.{measure}
                         WHERE mean <= {threshold} AND to_stop_I IN {include_list} GROUP BY to_stop_I) t2
                   WHERE t1.to_stop_I=t2.to_stop_I
                   ORDER BY diff_mean {order_by}
                """.format(measure=measure, threshold=threshold, order_by=order_by, include_list=include_list)
        df = pandas.read_sql_query(query, self.conn)
        return df

    def get_n_winning_targets_using_change_in_mean(self, n, measure, distance=500, threshold=10800, losers=False,
                                                   include_list=None):
        """Return up to *n* targets with the largest decrease (or increase, if
        losers=True) in mean *measure*, greedily skipping targets within
        *distance* meters of an already-selected one."""
        if losers:
            order_by = "DESC"
        else:
            order_by = "ASC"
        include_list = "(" + ",".join([str(x) for x in include_list]) + ")"
        query = """SELECT t1.to_stop_I, t2.mean-t1.mean AS diff_mean
                   FROM (SELECT to_stop_I, avg(mean) AS mean FROM before.{measure}
                         WHERE mean <= {threshold} AND to_stop_I IN {include_list} GROUP BY to_stop_I) t1,
                        (SELECT to_stop_I, avg(mean) AS mean FROM after.{measure}
                         WHERE mean <= {threshold} AND to_stop_I IN {include_list} GROUP BY to_stop_I) t2
                   WHERE t1.to_stop_I=t2.to_stop_I
                   ORDER BY diff_mean {order_by}
                """.format(measure=measure, threshold=threshold, order_by=order_by, include_list=include_list)
        df = pandas.read_sql_query(query, self.conn)
        # exclude nearby stops
        nearby_excluded_stops = []
        stops_remaining = []
        gtfs = GTFS(GTFS_PATH)
        for value in df.itertuples():
            if value.to_stop_I not in nearby_excluded_stops:
                exclude_df = gtfs.get_stops_within_distance(value.to_stop_I, distance)
                nearby_excluded_stops += list(exclude_df["stop_I"])
                stops_remaining.append(value.to_stop_I)
                if len(stops_remaining) == n:
                    break
        df = df.loc[df['to_stop_I'].isin(stops_remaining)]
        return df

    def n_inf_stops_per_stop(self, measure, indicator, threshold, group_by="to_stop_I", routing="before"):
        """Count, per stop, how many counterpart stops have {indicator} above *threshold*."""
        if group_by == "to_stop_I":
            stop_I = "from_stop_I"
        elif group_by == "from_stop_I":
            stop_I = "to_stop_I"
        else:
            raise AssertionError("Group_by should be to_stop_I or from_stop_I")
        query = """SELECT {group_by}, count(to_stop_I) AS N_stops
                   FROM {routing}.{measure}
                   WHERE {indicator} >{threshold}
                   GROUP by {group_by}
                   ORDER BY count(to_stop_I)""".format(measure=measure, threshold=threshold, indicator=indicator,
                                                       routing=routing, group_by=group_by, stop_I=stop_I)
        df = pandas.read_sql_query(query, self.conn)
        return df

    def find_stops_where_all_indicators_are_finite(self, measure="temporal_distance", indicator="max",
                                                   routing="after", threshold=10800):
        """Iteratively remove the stops with the most over-threshold connections
        until the remaining set has none; returns (kept stops, removed stops)."""
        stops_to_ignore = []
        ignore_statement = ""
        while True:
            query = """SELECT from_stop_I, count(to_stop_I) as invalid_connections
                       FROM {routing}.{measure}
                       WHERE {indicator} >= {threshold} {ignore_statement}
                       group by from_stop_I
                       order by invalid_connections""".format(measure=measure, indicator=indicator,
                                                              threshold=threshold, routing=routing,
                                                              ignore_statement=ignore_statement)
            df = pandas.read_sql_query(query, self.conn)
            print("query has run, with {n} stops remaining".format(n=len(df.index)))
            df['removal_column'] = df.index + df.invalid_connections
            n_stops_in_iteration = len(df.index)
            df_to_remove = df.loc[df['removal_column'] > n_stops_in_iteration]
            print("{n} stops removed".format(n=len(df_to_remove.index)))
            if len(df_to_remove.index) == 0:
                break
            stops_to_ignore += list(df_to_remove['from_stop_I'])
            stops_to_ignore_str = ",".join(str(stop) for stop in stops_to_ignore)
            ignore_statement = "AND from_stop_I NOT IN ({stops_comma}) " \
                               "AND to_stop_I NOT IN ({stops_comma})".format(stops_comma=stops_to_ignore_str)
        return list(df['from_stop_I']), stops_to_ignore

    def find_stops_where_all_indicators_are_finite_using_network(self, measure="temporal_distance", indicator="max",
                                                                 routing="after", threshold=10800):
        # Not implemented; sketch of a graph-based variant kept for reference.
        pass
        """
        nodes = [x[0] for x in nodes]
        edges = itertools.combinations(nodes, 2)
        print("combinations")
        G = nx.Graph()
        G.add_edges_from(edges)
        print("initial edges in place")
        for row in df.iterrows():
            G.remove_edge(row.from_stop_I, row.to_stop_I)
        print("removing stuff")
        """


if __name__ == "__main__":
    for time in TIMES:
        a2aa = AllToAllDifferenceAnalyzer(GTFS_PATH,
                                          get_a2aa_db_path(time, "old"),
                                          get_a2aa_db_path(time, "lm"),
                                          get_a2aa_db_path(time, "output"))
        ignore_list = stops_to_exclude(return_sqlite_list=True)
        a2aa.diff_table(groupby="to_stop_I", measure="n_boardings", ignore_stops=ignore_list)
        a2aa.diff_table(groupby="from_stop_I", measure="n_boardings", ignore_stops=ignore_list)
        a2aa.diff_table(groupby="to_stop_I", measure="temporal_distance", ignore_stops=ignore_list)
        a2aa.diff_table(groupby="from_stop_I", measure="temporal_distance", ignore_stops=ignore_list)
        #a2aa.diff_table(groupby="to_stop_I", measure="journey_duration", ignore_stops=ignore_list)
        #a2aa.diff_table(groupby="from_stop_I", measure="journey_duration", ignore_stops=ignore_list)
20,517
6,258
from flowsaber.api import *


def test_snakemake_workflow():
    """End-to-end variant-calling pipeline (bwa -> sort -> bcftools call -> stats)
    expressed as a flowsaber flow; mirrors the classic snakemake tutorial."""
    # EnvTask is the real dependent task when using conda/image option
    @shell
    def bwa(self, fa: File, fastq: File):  # input will be automatically converted if has type annotation
        """bwa mem -t {self.config.cpu} {fa} {fastq} | samtools view -Sb - > {fastq.stem}.bam"""
        return "*.bam"  # for ShellTask, str variable in the return will be treated as File and globed

    @shell
    def sort(bam: File):  # self is optional in case you don't want to access the current task
        """samtools sort -o {sorted_bam} {bam}"""
        sorted_bam = f"{bam.stem}.sorted.bam"
        return sorted_bam

    @shell(publish_dirs=["results/vcf"])
    def call(fa: File, bams: list):  # In case you need to write some python codes
        """samtools mpileup -g -f {fa} {bam_files} | bcftools call -mv - > all.vcf"""
        bam_files = ' '.join(str(bam) for bam in bams)
        return "all.vcf"

    @task
    def stats(vcf: File):
        # matplotlib backend must be set before pyplot is imported (headless run)
        import matplotlib
        matplotlib.use("Agg")
        import matplotlib.pyplot as plt
        from pysam import VariantFile
        quals = [record.qual for record in VariantFile(str(vcf))]
        plt.hist(quals)
        plt.savefig("report.svg")

    @flow
    def call_vcf_flow():
        """Call vcf from fastq file.

        Parameters
        ----------
        fa: : str
            The path of genome file
        fastq: List[str]
            list of fastq files
        """
        def _call(bams):  # task is normal function, use python as wish
            return call(fa, bams)

        context = flowsaber.context
        fa = Channel.value(context.fa)
        fastq = Channel.values(*context.fastq)

        bam1 = bwa(fa, fastq)  # automatically clone channel
        bam2 = bwa(fa, fastq)
        # pipe the two bam streams through sort, gather them, call variants, plot stats
        mix(bam1, bam2) | sort | collect | _call | stats

    prefix = 'tests/test_flow/snamke-demo.nosync/data'
    with flowsaber.context({
            "fa": f'{prefix}/genome.fa',
            "fastq": [f'{prefix}/samples/{sample}'
                      for sample in ['A.fastq', 'B.fastq', 'C.fastq']]
    }):
        # resolve dependency
        workflow = call_vcf_flow()
    run(workflow)


if __name__ == "__main__":
    test_snakemake_workflow()
    pass
2,258
757
def heat_energy(mass, initial_temp, final_temp):
    """Return the heat energy in joules needed to take *mass* kg of water
    from *initial_temp* to *final_temp* (Q = m * c * dT, c = 4184 J/(kg*K))."""
    SPECIFIC_HEAT_WATER = 4184  # J/(kg*K)
    return mass * (final_temp - initial_temp) * SPECIFIC_HEAT_WATER


if __name__ == "__main__":
    # float() instead of eval(): eval executes arbitrary code typed by the user.
    mass = float(input("Enter the amount of water in kilograms: "))
    initial_temp = float(input("Enter the initial temperature: "))
    final_temp = float(input("Enter the final temperature: "))
    energy = heat_energy(mass, initial_temp, final_temp)
    print("The energy needed is {}".format(energy))
282
85
from django.apps import AppConfig
from django.conf import settings


class PortalCoreConfig(AppConfig):
    """Django application configuration for the portal_core app."""

    # Dotted module path under which Django registers this app.
    name = 'portal_core'
129
36
import glob
import SimpleITK as sitk
import numpy as np


class CTScanMhd(object):
    """Wrapper around a CT volume stored as a .mhd/.raw pair.

    Loads the first '<base_dir>/*/<filename>.mhd' match via SimpleITK and
    exposes the voxel array plus world->voxel coordinate helpers.
    """

    def __init__(self, base_dir, filename):
        self.filename = filename
        self.coords = None
        self.base_dir = base_dir
        matches = glob.glob(self.base_dir + '/*/' + self.filename + '.mhd')
        self.ds = sitk.ReadImage(matches[0])
        self.image = sitk.GetArrayFromImage(self.ds)

    def set_coords(self, coords):
        # Stored reversed relative to the input ordering; presumably to match
        # the (z, y, x) axis order of the SimpleITK-derived array.
        self.coords = (coords[2], coords[1], coords[0])

    def get_resolution(self):
        """Voxel spacing as reported by the underlying image."""
        return self.ds.GetSpacing()

    def get_origin(self):
        """World-space origin as reported by the underlying image."""
        return self.ds.GetOrigin()

    def get_ds(self):
        """The raw SimpleITK image object."""
        return self.ds

    def get_voxel_coords(self):
        """Convert the stored world coordinates to voxel indices."""
        return tuple(
            np.absolute(world - org) / res
            for world, org, res in zip(self.coords, self.get_origin(), self.get_resolution())
        )

    def get_image(self):
        """The full voxel array."""
        return self.image

    def get_subimage(self, center, dims):
        """Extract a dims-sized cube of voxels centered on world coordinate *center*."""
        self.set_coords(center)
        x, y, z = self.get_voxel_coords()
        return self.image[int(z - dims[0] / 2):int(z + dims[0] / 2),
                          int(y - dims[1] / 2):int(y + dims[1] / 2),
                          int(x - dims[2] / 2):int(x + dims[2] / 2)]

    def get_normalized_image(self, minHU, maxHU):
        """Linearly rescale [minHU, maxHU] to [0, 1], clamping values outside the window."""
        return np.clip((self.image - minHU) / (maxHU - minHU), 0., 1.)
1,482
561
""" Section 1 Multithreading - Thread (2) - Daemon, Join Keyword - DaemonThread, Join """ """ DaemonThread(데몬스레드) (1). 백그라운드에서 실행 (2). 메인스레드 종료시 즉시 종료 (서브 스레드의 경우는 메인 스레드와 상관없이 자기 작업을 끝까지 수행함.) (3). 주로 백그라운드 무한 대기 이벤트 발생 실행하는 부분 담당 -> JVM(가비지 컬렉션), 자동 저장 (4). 일반 스레드는 작업 종료시까지 실행 """ import logging import threading # 스레드 실행 함수 def thread_func(name, d): logging.info("Sub-Thread %s: starting", name) for i in d: print(name, i) logging.info("Sub-Thread %s: finishing", name) # 메인 영역 if __name__ == "__main__": # Logging format 설정 format = "%(asctime)s: %(message)s" logging.basicConfig(format=format, level=logging.INFO, datefmt="%H:%M:%S") logging.info("Main-Thread: before creating thread") # 함수 인자 확인 # Daemon: Default False x = threading.Thread(target=thread_func, args=("First", range(200)), daemon=True) y = threading.Thread(target=thread_func, args=("Two", range(10)), daemon=False) logging.info("Main-Thread: before running thread") # 서브 스레드 시작 x.start() y.start() # DaemonThread 확인 print(x.isDaemon()) print(y.isDaemon()) # 주석 전후 결과 확인 # x.join() # 서브 스레드의 작업이 끝날 떄까지, 메인 스레드가 기다림. # y.join() logging.info("Main-Thread: wait for the thread to finish") logging.info("Main-Thread: all done")
1,324
769
#!/usr/bin/env python3
# PURPOSE: studying function side effects

import os

os.system('clear')

orgList = [5, 3, 2, 1, 4]


def sumList(myList):
    """Turn *myList* into its running (prefix) sums IN PLACE and return the total.

    The in-place mutation of the caller's list is deliberate: this script
    demonstrates function side effects.
    """
    for idx in range(1, len(myList)):
        myList[idx] = myList[idx] + myList[idx - 1]
    return myList[-1]


print(sumList(orgList))
print(orgList)  # shows the side effect: the original list was mutated
287
120
"""Worked examples of pandas Series basics: creation, alignment, NaN
handling, label vs positional indexing, and storing arbitrary objects."""
import pandas as pd
import numpy as np

# A Series with an explicit string index.
serie = pd.Series(['a', 'b', 'c', 'd', 'e'],
                  index=['a', 'b', 'c', 'd', 'e'],
                  name="Ejemplo Serie")
print(serie)

# Growth-curve readings indexed by time point.
ecoli_matraz = pd.Series([0.1, 0.15, 0.19, 0.5, 0.9, 1.4, 1.8, 2.1, 2.3],
                         index=['t1', 't2', 't3', 't4', 't5',
                                't6', 't7', 't8', 't9'],
                         name='Matraz')
print(ecoli_matraz)

# Example of an unordered integer index (kept as sample data).
ODs = pd.Series([0.2, 0.2, 0.4, 0.1, 0.2, 0.1, 0.2, 0.4, 0.1],
                index=[8, 4, 1, 2, 3, 0, 5, 7, 6],
                name='Ajustes')

# EXERCISE 1 ----------------------------------------------------------------
produccion = pd.Series([5, 11, 4, 7, 2],
                       index=['gen1', 'gen2', 'gen3', 'gen4', 'gen5'])
costos = pd.Series([5, 4.3, 7, 3.5],
                   index=['gen1', 'gen2', 'gen3', 'gen5'])
# Element-wise division aligns on the index; 'gen4' has no cost entry, so
# the result there is NaN. (``.T`` on a Series is a no-op, so it was removed.)
costo_unitario = costos / produccion
print(costo_unitario)
print(costo_unitario.min())
# ---------------------------------------------------------------------------

# count() ignores missing values (None becomes NaN).
nan_test = pd.Series([0.1, None, 2.1, 2.3], name='Matraz')
print(nan_test.count())

# loc (label-based) vs iloc (position-based) indexing.
series_test = pd.Series([5.1, 2.2, 1.1, 3.1, 4.2], index=[5, 2, 1, 3, 4])
print(series_test)
print(series_test.loc[1])
print(series_test.iloc[1])

# EXERCISE 2 ----------------------------------------------------------------
# Boolean masks select the cheapest and most expensive genes at once.
bool_min = costo_unitario == costo_unitario.min()
bool_max = costo_unitario == costo_unitario.max()
print(costo_unitario[bool_min | bool_max])

# Repeated index labels: loc returns all matching entries.
regulon = pd.Series(['aidB', 'alaS', 'accB', 'accC', 'bhsA'],
                    index=['AidB', 'AlaS', 'AccB', 'AccB', 'ComR'],
                    name='Genes regulados')
print(regulon.loc['AccB'])
print(regulon.loc['AidB'])


# Classes (like any Python object) can be stored inside a Series.
class Mamifero:
    """Toy class used to show that classes are first-class Series elements."""
    vertebrado = True

    def haz_ruido(self):
        print('aaaaaaaaaaaaaaaaaaaaaaaaaaa')


array_clase = pd.Series([np.sum, 'a', Mamifero], name='objetos')
# iloc[2] retrieves the Mamifero *class* object; it must be instantiated
# before calling the instance method. (The original called haz_ruido()
# directly on the class, which raised TypeError: missing ``self``.)
jerbo = array_clase.iloc[2]()
print(jerbo.haz_ruido())
2,029
886
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

"""
Script to prepare mit67 dataset for pytorch dataloader.
"""

from typing import List, Dict, Tuple, Union, Optional
import os
import pdb
import time
import argparse
import tempfile
import requests
from torchvision.datasets.utils import download_and_extract_archive, download_url
from torch.utils.model_zoo import tqdm
from PIL import Image
import shutil
from collections import defaultdict
import pathlib

from archai.common import utils

# Expected image counts for a correctly prepared dataset.
NUM_TRAIN_IMAGES = 12466
NUM_TEST_IMAGES = 3153


def check_mit67(dataroot: str) -> bool:
    """Return True if dataroot/mit67 already has the expected folder layout
    and the expected number of train and test images."""
    mit67 = os.path.join(dataroot, 'mit67')
    train = os.path.join(mit67, 'train')
    test = os.path.join(mit67, 'test')
    meta = os.path.join(mit67, 'meta')

    if not (os.path.isdir(mit67) and os.path.isdir(train)
            and os.path.isdir(test) and os.path.isdir(meta)):
        return False

    num_train_files = sum(len(files) for _, _, files in os.walk(train))
    if num_train_files != NUM_TRAIN_IMAGES:
        return False

    num_test_files = sum(len(files) for _, _, files in os.walk(test))
    if num_test_files != NUM_TEST_IMAGES:
        return False

    # all checks passed
    return True


def download(dataroot: str):
    """Download the MIT67 archive and extract it under *dataroot* (this
    creates dataroot/Images with one subfolder per class)."""
    DOWNLOAD_URL = 'http://groups.csail.mit.edu/vision/LabelMe/NewImages/indoorCVPR_09.tar'
    with tempfile.TemporaryDirectory() as tempdir:
        download_and_extract_archive(
            DOWNLOAD_URL, tempdir, extract_root=dataroot, remove_finished=True)


def _load_csv_data(filename: str, key_col: int) -> Dict[str, List[str]]:
    """Shared csv loader: column *key_col* holds the class name and the
    remaining columns of each row hold image file names.

    Uses ``extend`` (not assignment) so repeated rows for the same class
    accumulate instead of silently overwriting earlier entries.
    """
    data_dict: Dict[str, List[str]] = defaultdict(list)
    with open(filename, 'r') as f:
        lines = f.readlines()
    assert len(lines) > 0
    for line in lines[1:]:  # first line is the csv header
        words = line.rstrip().split(',')
        assert len(words) > key_col
        data_dict[words[key_col]].extend(words[key_col + 1:])
    return data_dict


def load_test_csv_data(filename: str) -> Dict[str, List[str]]:
    """Load a test-split csv (class name in column 0) into a dict mapping
    class names to lists of image names."""
    return _load_csv_data(filename, key_col=0)


def load_train_csv_data(filename: str) -> Dict[str, List[str]]:
    """Load a train-split csv (class name in column 1) into a dict mapping
    class names to lists of image names."""
    return _load_csv_data(filename, key_col=1)


def copy_data_helper(data: Dict[str, List[str]],
                     imagesroot: str,
                     foldername: str) -> None:
    """Copy each class's images from imagesroot/<class> to foldername/<class>.

    Empty image names are skipped; files already present are not re-copied.
    """
    for key, images in data.items():
        for im in images:
            if not im:
                continue
            source = os.path.join(imagesroot, key, im)
            target = os.path.join(foldername, key, im)
            if not os.path.isfile(target):
                utils.copy_file(source, target)


def prepare_data(mit67_root: str):
    """Split the downloaded Images folder into train/ and test/ according to
    the csv files in meta/."""
    test_file = os.path.join(mit67_root, 'meta', 'MIT67_test.csv')
    test_data = load_test_csv_data(test_file)

    # train data is split across 4 csv files; merge them by extending (not
    # replacing) the per-class image lists so a class that spans several
    # files keeps all of its images
    train_files = [os.path.join(mit67_root, 'meta', 'MIT67_train%d.csv' % i)
                   for i in range(1, 5)]
    train_data: Dict[str, List[str]] = defaultdict(list)
    for tf in train_files:
        for key, images in load_train_csv_data(tf).items():
            train_data[key].extend(images)

    # make classname directories for train and test; use the union of both
    # key sets rather than assuming train and test share the same classes
    for key in set(test_data) | set(train_data):
        os.makedirs(os.path.join(mit67_root, 'test', key), exist_ok=True)
        os.makedirs(os.path.join(mit67_root, 'train', key), exist_ok=True)

    # copy images to the right locations
    imagesroot = os.path.join(mit67_root, 'Images')

    testfoldername = os.path.join(mit67_root, 'test')
    copy_data_helper(test_data, imagesroot, testfoldername)

    trainfoldername = os.path.join(mit67_root, 'train')
    copy_data_helper(train_data, imagesroot, trainfoldername)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataroot', type=str,
                        default='C:\\Users\\dedey\\dataroot',
                        help='root directory where mit67 folder is intended to exist. If mit67 already exists in the format required this script will skip downloading')
    args = parser.parse_args()

    # check that dataset is in format required
    # else download and prepare dataset
    if not check_mit67(args.dataroot):
        # make mit67 directory
        mit67 = os.path.join(args.dataroot, 'mit67')
        train = os.path.join(mit67, 'train')
        test = os.path.join(mit67, 'test')
        meta = os.path.join(mit67, 'meta')

        os.makedirs(mit67, exist_ok=True)
        os.makedirs(train, exist_ok=True)
        os.makedirs(test, exist_ok=True)
        os.makedirs(meta, exist_ok=True)

        # this step will create folder mit67/Images
        # which has all the images for each class in its own subfolder
        download(mit67)

        # download the csv files for the train and test split
        # from 'NAS Evaluation is Frustrating' repo
        # note that download_url doesn't work in vscode debug mode
        test_file_url = 'https://raw.githubusercontent.com/antoyang/NAS-Benchmark/master/data/MIT67_test.csv'
        train_file_urls = ['https://raw.githubusercontent.com/antoyang/NAS-Benchmark/master/data/MIT67_train1.csv',
                           'https://raw.githubusercontent.com/antoyang/NAS-Benchmark/master/data/MIT67_train2.csv',
                           'https://raw.githubusercontent.com/antoyang/NAS-Benchmark/master/data/MIT67_train3.csv',
                           'https://raw.githubusercontent.com/antoyang/NAS-Benchmark/master/data/MIT67_train4.csv']

        download_url(test_file_url, meta, filename=None, md5=None)
        for tu in train_file_urls:
            download_url(tu, meta, filename=None, md5=None)

        prepare_data(mit67)
6,209
2,086
"""Convenience re-exports: the public API of the ``tail`` package."""

from tail.core import read_last_lines, follow_lines

__all__ = ["read_last_lines", "follow_lines"]
99
34
# Copyright 2018 IBM Corp. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Tests the class command """ from __future__ import absolute_import, print_function import sys import os from packaging.version import parse as parse_version import pytest from pywbem import __version__ as pywbem_version from .cli_test_extensions import CLITestsBase, PYWBEM_0, \ FAKEURL_STR from .common_options_help_lines import CMD_OPTION_NAMES_ONLY_HELP_LINE, \ CMD_OPTION_HELP_HELP_LINE, CMD_OPTION_SUMMARY_HELP_LINE, \ CMD_OPTION_NAMESPACE_HELP_LINE, CMD_OPTION_PROPERTYLIST_HELP_LINE, \ CMD_OPTION_INCLUDE_CLASSORIGIN_HELP_LINE, \ CMD_OPTION_LOCAL_ONLY_CLASS_HELP_LINE, CMD_OPTION_NO_QUALIFIERS_HELP_LINE, \ CMD_OPTION_MULTIPLE_NAMESPACE_HELP_LINE, \ CMD_OPTION_ASSOCIATION_FILTER_HELP_LINE, \ CMD_OPTION_INDICATION_FILTER_HELP_LINE, \ CMD_OPTION_EXPERIMENTAL_FILTER_HELP_LINE, \ CMD_OPTION_DEPRECATED_FILTER_HELP_LINE, \ CMD_OPTION_SINCE_FILTER_HELP_LINE, \ CMD_OPTION_SCHEMA_FILTER_HELP_LINE, \ CMD_OPTION_SUBCLASSOF_FILTER_HELP_LINE, \ CMD_OPTION_LEAFCLASSES_FILTER_HELP_LINE _PYWBEM_VERSION = parse_version(pywbem_version) # pywbem 1.0.0 or later PYWBEM_1_0_0 = _PYWBEM_VERSION.release >= (1, 0, 0) # Mock scripts with setup() function are supported MOCK_SETUP_SUPPORTED = sys.version_info >= (3, 5) TEST_DIR = os.path.dirname(__file__) # A mof file that defines basic qualifier decls, classes, and instances # but not tied to the DMTF classes. 
SIMPLE_MOCK_FILE = 'simple_mock_model.mof' INVOKE_METHOD_MOCK_FILE_0 = 'simple_mock_invokemethod_v0.py' INVOKE_METHOD_MOCK_FILE_1 = 'simple_mock_invokemethod_v1old.py' INVOKE_METHOD_MOCK_FILE = INVOKE_METHOD_MOCK_FILE_0 if PYWBEM_0 else \ INVOKE_METHOD_MOCK_FILE_1 SIMPLE_ASSOC_MOCK_FILE = 'simple_assoc_mock_model.mof' QUALIFIER_FILTER_MODEL = 'qualifier_filter_model.mof' TREE_TEST_MOCK_FILE = 'tree_test_mock_model.mof' SIMPLE_INTEROP_MOCK_FILE = 'simple_interop_mock_script.py' # # The following list defines the help for each command in terms of particular # parts of lines that are to be tested.//FakedUrl:5988 # For each test, try to include: # 1. The usage line and in particular the argument component # 2. The single # 2. The last line CMD_OPTION_HELP_HELP_LINE # CLASS_HELP_LINES = [ 'Usage: pywbemcli [GENERAL-OPTIONS] class COMMAND [ARGS] [COMMAND-OPTIONS]', 'Command group for CIM classes.', CMD_OPTION_HELP_HELP_LINE, 'associators List the classes associated with a class.', 'delete Delete a class.', 'enumerate List top classes or subclasses of a class in a namespace.', 'find List the classes with matching class names on the server.', 'get Get a class.', 'invokemethod Invoke a method on a class.', 'references List the classes referencing a class.', 'tree Show the subclass or superclass hierarchy for a class.', ] CLASS_ASSOCIATORS_HELP_LINES = [ 'Usage: pywbemcli [GENERAL-OPTIONS] class associators CLASSNAME ' '[COMMAND-OPTIONS]', 'List the classes associated with a class.', '--ac, --assoc-class CLASSNAME Filter the result set by association class', '--rc, --result-class CLASSNAME Filter the result set by result class', '-r, --role PROPERTYNAME Filter the result set by source end role', '--rr, --result-role PROPERTYNAME Filter the result set by far end role', CMD_OPTION_NO_QUALIFIERS_HELP_LINE, CMD_OPTION_INCLUDE_CLASSORIGIN_HELP_LINE, CMD_OPTION_PROPERTYLIST_HELP_LINE, CMD_OPTION_NAMES_ONLY_HELP_LINE, CMD_OPTION_NAMESPACE_HELP_LINE, CMD_OPTION_SUMMARY_HELP_LINE, 
CMD_OPTION_HELP_HELP_LINE, ] CLASS_DELETE_HELP_LINES = [ 'Usage: pywbemcli [GENERAL-OPTIONS] class delete CLASSNAME ' '[COMMAND-OPTIONS]', 'Delete a class.', '-f, --force Same as --include-instances.', '--include-instances Delete any instances of the class as well.', CMD_OPTION_NAMESPACE_HELP_LINE, CMD_OPTION_HELP_HELP_LINE, ] CLASS_ENUMERATE_HELP_LINES = [ 'Usage: pywbemcli [GENERAL-OPTIONS] class enumerate CLASSNAME ' '[COMMAND-OPTIONS]', 'List top classes or subclasses of a class in a namespace.', '--di, --deep-inheritance Include the complete subclass hierarchy', CMD_OPTION_LOCAL_ONLY_CLASS_HELP_LINE, CMD_OPTION_NO_QUALIFIERS_HELP_LINE, CMD_OPTION_INCLUDE_CLASSORIGIN_HELP_LINE, CMD_OPTION_NAMES_ONLY_HELP_LINE, CMD_OPTION_NAMESPACE_HELP_LINE, CMD_OPTION_SUMMARY_HELP_LINE, # NOTE: The FILTER options are a group. Define all of them. CMD_OPTION_ASSOCIATION_FILTER_HELP_LINE, CMD_OPTION_INDICATION_FILTER_HELP_LINE, CMD_OPTION_EXPERIMENTAL_FILTER_HELP_LINE, CMD_OPTION_DEPRECATED_FILTER_HELP_LINE, CMD_OPTION_SINCE_FILTER_HELP_LINE, CMD_OPTION_SCHEMA_FILTER_HELP_LINE, CMD_OPTION_SUBCLASSOF_FILTER_HELP_LINE, CMD_OPTION_LEAFCLASSES_FILTER_HELP_LINE, CMD_OPTION_HELP_HELP_LINE, ] CLASS_FIND_HELP_LINES = [ 'Usage: pywbemcli [GENERAL-OPTIONS] class find CLASSNAME-GLOB ' '[COMMAND-OPTIONS]', 'List the classes with matching class names on the server.', '-s, --sort Sort by namespace. 
Default is to sort by', CMD_OPTION_MULTIPLE_NAMESPACE_HELP_LINE, # FILTER OPTIONS CMD_OPTION_ASSOCIATION_FILTER_HELP_LINE, CMD_OPTION_INDICATION_FILTER_HELP_LINE, CMD_OPTION_EXPERIMENTAL_FILTER_HELP_LINE, CMD_OPTION_DEPRECATED_FILTER_HELP_LINE, CMD_OPTION_SINCE_FILTER_HELP_LINE, CMD_OPTION_SCHEMA_FILTER_HELP_LINE, CMD_OPTION_SUBCLASSOF_FILTER_HELP_LINE, CMD_OPTION_LEAFCLASSES_FILTER_HELP_LINE, CMD_OPTION_HELP_HELP_LINE, ] CLASS_GET_HELP_LINES = [ 'Usage: pywbemcli [GENERAL-OPTIONS] class get CLASSNAME [COMMAND-OPTIONS]', 'Get a class.', CMD_OPTION_LOCAL_ONLY_CLASS_HELP_LINE, CMD_OPTION_NO_QUALIFIERS_HELP_LINE, CMD_OPTION_INCLUDE_CLASSORIGIN_HELP_LINE, CMD_OPTION_PROPERTYLIST_HELP_LINE, CMD_OPTION_NAMESPACE_HELP_LINE, CMD_OPTION_HELP_HELP_LINE, ] CLASS_INVOKEMETHOD_HELP_LINES = [ 'Usage: pywbemcli [GENERAL-OPTIONS] class invokemethod CLASSNAME ' 'METHODNAME [COMMAND-OPTIONS]', 'Invoke a method on a class.', '-p, --parameter PARAMETERNAME=VALUE Specify a method input parameter', CMD_OPTION_NAMESPACE_HELP_LINE, CMD_OPTION_HELP_HELP_LINE, ] CLASS_REFERENCES_HELP_LINES = [ 'Usage: pywbemcli [GENERAL-OPTIONS] class references CLASSNAME ' '[COMMAND-OPTIONS]', 'List the classes referencing a class.', '--rc, --result-class CLASSNAME Filter the result set by result class', '-r, --role PROPERTYNAME Filter the result set by source end role', CMD_OPTION_NO_QUALIFIERS_HELP_LINE, CMD_OPTION_INCLUDE_CLASSORIGIN_HELP_LINE, CMD_OPTION_PROPERTYLIST_HELP_LINE, CMD_OPTION_NAMES_ONLY_HELP_LINE, CMD_OPTION_NAMESPACE_HELP_LINE, CMD_OPTION_SUMMARY_HELP_LINE, CMD_OPTION_HELP_HELP_LINE, ] CLASS_TREE_HELP_LINES = [ 'Usage: pywbemcli [GENERAL-OPTIONS] class tree CLASSNAME [COMMAND-OPTIONS]', 'Show the subclass or superclass hierarchy for a class.', '-s, --superclasses Show the superclass hierarchy.', ' -d, --detail Show details about the class: the Version', CMD_OPTION_NAMESPACE_HELP_LINE, CMD_OPTION_HELP_HELP_LINE, ] # pylint: disable=line-too-long CIMFOO_SUB_SUB = """ [Description ( 
"Subclass of CIM_Foo_sub" )] class CIM_Foo_sub_sub : CIM_Foo_sub { string cimfoo_sub_sub; string cimfoo_sub; [Key ( true ), Description ( "This is key property." )] string InstanceID; [Description ( "This is Uint32 property." )] uint32 IntegerProp; [Description ( "Embedded instance property" ), EmbeddedInstance ( "CIM_FooEmb3" )] string cimfoo_emb3; [Description ( "Sample method with input and output parameters" )] uint32 Method1( [IN ( false ), OUT ( true ), Description ( "Response param 2" )] string OutputParam2); [Description ( "Method with in and out parameters" )] uint32 Fuzzy( [IN ( true ), OUT ( true ), Description ( "Define data to be returned in output parameter" )] string TestInOutParameter, [IN ( true ), OUT ( true ), Description ( "Test of ref in/out parameter" )] CIM_FooRef1 REF TestRef, [IN ( false ), OUT ( true ), Description ( "Rtns method name if exists on input" )] string OutputParam, [IN ( true ), Description ( "Defines return value if provided." )] uint32 OutputRtnValue); [Description ( "Static method with in and out parameters" ), Static ( true )] uint32 FuzzyStatic( [IN ( true ), OUT ( true ), Description ( "Define data to be returned in output parameter" )] string TestInOutParameter, [IN ( true ), OUT ( true ), Description ( "Test of ref in/out parameter" )] CIM_Foo REF TestRef, [IN ( false ), OUT ( true ), Description ( "Rtns method name if exists on input" )] string OutputParam, [IN ( true ), Description ( "Defines return value if provided." 
)] uint32 OutputRtnValue, [IN ( true ), Description ( "Embedded instance parameter" ), EmbeddedInstance ( "CIM_FooEmb1" )] string cimfoo_emb1); [Description ( "Method with no parameters but embedded instance return" ), EmbeddedInstance ( "CIM_FooEmb2" )] string DeleteNothing(); }; """ # noqa: E501 # pylint: enable=line-too-long CIMFOO_SUB_SUB_NO_QUALS = """ class CIM_Foo_sub_sub : CIM_Foo_sub { string cimfoo_sub_sub; string cimfoo_sub; string InstanceID; uint32 IntegerProp; string cimfoo_emb3; uint32 Method1( string OutputParam2); uint32 Fuzzy( string TestInOutParameter, CIM_FooRef1 REF TestRef, string OutputParam, uint32 OutputRtnValue); uint32 FuzzyStatic( string TestInOutParameter, CIM_Foo REF TestRef, string OutputParam, uint32 OutputRtnValue, string cimfoo_emb1); string DeleteNothing(); }; """ # TODO: This never referenced REFERENCES_CLASS_RTN = [ FAKEURL_STR + '/root/cimv2:TST_Lineage', 'class TST_Lineage {', '', ' string InstanceID;', '', ' TST_Person REF parent;', '', ' TST_Person REF child;', '', '};', '', FAKEURL_STR + '/root/cimv2:TST_MemberOfFamilyCollection', 'class TST_MemberOfFamilyCollection {', '', ' TST_Person REF family;', '', ' TST_Person REF member;', '', '};', ''] # TODO: This never referenced REFERENCES_CLASS_RTN2 = [ FAKEURL_STR + '/root/cimv2:TST_MemberOfFamilyCollection', 'class TST_MemberOfFamilyCollection {', '', ' TST_Person REF family;', '', ' TST_Person REF member;', '', '};', '', ''] REFERENCES_CLASS_RTN_QUALS2 = [ FAKEURL_STR + '/root/cimv2:TST_MemberOfFamilyCollection', ' [Association ( true ),', ' Description ( " Family gathers person to family." )]', 'class TST_MemberOfFamilyCollection {', ' [key ( true )]', ' TST_Person REF family;', ' [key ( true )]', ' TST_Person REF member;', '};'] OK = True # mark tests OK when they execute correctly RUN = True # Mark OK = False and current test case being created RUN FAIL = False # Any test currently FAILING or not tested yet TEST_CASES = [ # List of testcases. 
# Each testcase is a list with the following items: # * desc: Description of testcase. # * inputs: String, or tuple/list of strings, or dict of 'env', 'args', # 'general', and 'stdin'. See the 'inputs' parameter of # CLITestsBase.command_test() in cli_test_extensions.py for detailed # documentation. # * exp_response: Dictionary of expected responses (stdout, stderr, rc) and # test definition (test: <testname>). See the 'exp_response' parameter # of CLITestsBase.command_test() in cli_test_extensions.py for # detailed documentation. # * mock: None, name of file (.mof or .py), or list thereof. # * condition: If True the test is executed, if 'pdb' the test breaks in the # the debugger, if 'verbose' print verbose messages, if False the test # is skipped. ['Verify class command --help response', ['--help'], {'stdout': CLASS_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command --help command order', ['--help'], {'stdout': r'Commands:' '.*\n enumerate' '.*\n get' '.*\n delete' '.*\n invokemethod' '.*\n references' '.*\n associators' '.*\n find' '.*\n tree', 'test': 'regex'}, None, OK], ['Verify class command -h response', ['-h'], {'stdout': CLASS_HELP_LINES, 'test': 'innows'}, None, OK], # # Enumerate command and its options # ['Verify class command enumerate --help response', ['enumerate', '--help'], {'stdout': CLASS_ENUMERATE_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command enumerate -h response', ['enumerate', '-h'], {'stdout': CLASS_ENUMERATE_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command enumerate CIM_Foo', ['enumerate', 'CIM_Foo'], {'stdout': ['[Description ( "Subclass of CIM_Foo" )]'], 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo --lo', ['enumerate', 'CIM_Foo', '--lo'], {'stdout': ' [Description ( "Subclass of CIM_Foo" )]', 'test': 'startswith'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo --lo', ['enumerate', 'CIM_Foo', '--local-only'], {'stdout': ' [Description ( 
"Subclass of CIM_Foo" )]', 'test': 'startswith'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo_sub', ['enumerate', 'CIM_Foo_sub'], {'stdout': CIMFOO_SUB_SUB, 'test': 'linesnows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo local-only', ['enumerate', 'CIM_Foo', '--local-only'], {'stdout': ' [Description ( "Subclass of CIM_Foo" )]', 'test': 'startswith'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo -no-qualifiers', ['enumerate', 'CIM_Foo_sub', '--no-qualifiers'], {'stdout': CIMFOO_SUB_SUB_NO_QUALS, 'test': 'linesnows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo --di', ['enumerate', 'CIM_Foo', '--di'], {'stdout': ' [Description ( "Subclass of CIM_Foo" )]', 'test': 'startswith'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo --deep-inheritance', ['enumerate', 'CIM_Foo', '--deep-inheritance'], {'stdout': ' [Description ( "Subclass of CIM_Foo" )]', 'test': 'startswith'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo --ico', ['enumerate', 'CIM_Foo', '--ico'], {'stdout': ' [Description ( "Subclass of CIM_Foo" )]', 'test': 'startswith'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo --include-classorigin', ['enumerate', 'CIM_Foo', '--include-classorigin'], {'stdout': ' [Description ( "Subclass of CIM_Foo" )]', 'test': 'startswith'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo --no names only', ['enumerate', 'CIM_Foo', '--no'], {'stdout': ['CIM_Foo', 'CIM_Foo_sub', 'CIM_Foo_sub2'], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo --names only', ['enumerate', 'CIM_Foo', '--names-only'], {'stdout': ['CIM_Foo', 'CIM_Foo_sub', 'CIM_Foo_sub2'], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate --no names only - table', {'args': ['enumerate', '--no'], 'general': ['--output-format', 'table']}, {'stdout': """Classnames: +--------------+ | Class Name | |--------------| | 
CIM_BaseEmb | | CIM_BaseRef | | CIM_Foo | | CIM_FooAssoc | +--------------+ """, 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo --no names only - table', {'args': ['enumerate', 'CIM_Foo', '--no'], 'general': ['--output-format', 'table']}, {'stdout': """Classnames: +--------------+ | Class Name | |--------------| | CIM_Foo_sub | | CIM_Foo_sub2 | +--------------+ """, 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo --names-only', ['enumerate', 'CIM_Foo', '--names-only'], {'stdout': ['CIM_Foo', 'CIM_Foo_sub', 'CIM_Foo_sub2'], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo summary table', ['enumerate', 'CIM_Foo', '-s'], {'stdout': ['2 CIMClass(s) returned'], 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo summary, table', ['enumerate', 'CIM_Foo', '--summary'], {'stdout': ['2 CIMClass(s) returned'], 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo summary table output', {'args': ['enumerate', 'CIM_Foo', '--summary'], 'general': ['--output-format', 'table']}, {'stdout': ["""Summary of CIMClass returned +---------+------------+ | Count | CIM Type | |---------+------------| | 2 | CIMClass | +---------+------------+ """], 'test': 'linesnows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo names and --di --no', ['enumerate', 'CIM_Foo', '--di', '--no'], {'stdout': ['CIM_Foo_sub', 'CIM_Foo_sub2', 'CIM_Foo_sub_sub'], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo names and --deep-inheritance ' '--names-only', ['enumerate', 'CIM_Foo', '--names-only', '--deep-inheritance'], {'stdout': ['CIM_Foo_sub', 'CIM_Foo_sub2', 'CIM_Foo_sub_sub'], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate CIM_Foo include qualifiers', ['enumerate', 'CIM_Foo'], {'stdout': ['Key ( true )', '[Description (', 'class CIM_Foo'], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify 
class command get with xml output format).', {'args': ['enumerate'], 'general': ['--output-format', 'repr']}, {'stdout': [r"CIMClass\(classname='CIM_Foo', superclass=None,", r"'InstanceID': CIMProperty\(name='InstanceID', value=None,"], 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], ['Verify class command get with repr output format).', {'args': ['enumerate'], 'general': ['--output-format', 'txt']}, {'stdout': ["CIMClass(classname='CIM_BaseEmb', ...)", "CIMClass(classname='CIM_BaseRef', ...)", "CIMClass(classname='CIM_Foo', ...)", "CIMClass(classname='CIM_FooAssoc', ...)"], 'test': 'lines'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate with repr output format).', {'args': ['enumerate'], 'general': ['--output-format', 'xml']}, {'stdout': ['<CLASS( | .+ )NAME="CIM_Foo">', '<PROPERTY( | .+ )NAME="InstanceID"', '<PROPERTY( | .+ )NAME="IntegerProp"', '<METHOD( | .+ )NAME="DeleteNothing"'], 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate --di --no --namespace', ['enumerate', '--di', '--no', '-n', 'interop'], {'stdout': ['CIM_Namespace', 'CIM_ObjectManager'], 'test': 'in'}, SIMPLE_INTEROP_MOCK_FILE, OK], # # Enumerate commands with the filter options # ['Verify class command enumerate with --association filter.', ['enumerate', '--association', '--names-only'], {'stdout': ['TST_Lineage', 'TST_MemberOfFamilyCollection'], 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command enumerate with --association filter --summary.', ['enumerate', '--association', '--summary'], {'stdout': ['2 CIMClass(s) returned'], 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command enumerate with --association filter.', ['enumerate', '--association', '--names-only'], {'stdout': ['TST_Lineage', 'TST_MemberOfFamilyCollection', 'TST_MemberOfFamilyCollectionDep', 'TST_MemberOfFamilyCollectionExp'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --association filter --summary.', ['enumerate', 
'--association', '--summary'], {'stdout': ['4 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --association filter and no ' 'qualifiers.', ['enumerate', '--association', '--nq'], {'stdout': ['class TST_Lineage {', 'string InstanceID;', 'TST_Person REF parent;', 'TST_Person REF child;', 'class TST_MemberOfFamilyCollection {', 'TST_Person REF family;', 'TST_Person REF member;', '};'], 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command enumerate with --no-association filter and ' 'no-qualifiers. Tests no qualifiers on parameters', ['enumerate', '--no-association', '--no-qualifiers'], {'stdout': ['class CIM_Foo {', 'string InstanceID;', 'uint32 IntegerProp;', 'string cimfoo_emb3;', 'uint32 Fuzzy(', 'string TestInOutParameter,', 'CIM_FooRef1 REF TestRef,', 'string OutputParam,', 'uint32 OutputRtnValue);', 'uint32 FuzzyStatic(', 'string TestInOutParameter,', 'CIM_Foo REF TestRef,', 'string OutputParam,', 'uint32 OutputRtnValue,', 'string cimfoo_emb1);', 'string DeleteNothing();', '};'], 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate with --no-association filter, simple mod.', ['enumerate', '--no-association', '--names-only'], {'stdout': ['TST_FamilyCollection', 'TST_Person'], 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command enumerate with --no-association, --summary.', ['enumerate', '--no-association', '--summary'], {'stdout': ['2 CIMClass(s) returned'], 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command enumerate with --no-association filter qual filt.', ['enumerate', '--no-association', '--names-only'], {'stdout': ['BLA_Person', 'EXP_TestExperimental1', 'EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4', 'TST_FamilyCollection', 'TST_Indication', 'TST_IndicationDeprecated', 'TST_IndicationExperimental', 'TST_Person'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate 
with --no-association, --summary.', ['enumerate', '--no-association', '--summary'], {'stdout': ['10 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --indication filter.', ['enumerate', '--indication', '--names-only'], {'stdout': ['TST_Indication', 'TST_IndicationDeprecated', 'TST_IndicationExperimental'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --indication filter --summary.', ['enumerate', '--indication', '--summary'], {'stdout': ['3 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --no-indication filter.', ['enumerate', '--no-indication', '--names-only'], {'stdout': ['BLA_Person', 'EXP_TestExperimental1', 'EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4', 'TST_FamilyCollection', 'TST_FamilyCollection', 'TST_Lineage', 'TST_MemberOfFamilyCollection', 'TST_MemberOfFamilyCollectionExp', 'TST_Person'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --no-indication filter, --summary.', ['enumerate', '--no-indication', '--summary'], {'stdout': ['11 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --experimentat filter.', ['enumerate', '--experimental', '--names-only'], {'stdout': ['EXP_TestExperimental1', ' EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4', 'TST_IndicationExperimental', 'TST_MemberOfFamilyCollectionExp'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --experimentat filter -- summary.', ['enumerate', '--experimental', '--summary'], {'stdout': ['6 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --no-experimental filter.', ['enumerate', '--no-experimental', '--names-only'], {'stdout': ['BLA_Person', 'TST_FamilyCollection', 'TST_Indication', 
'TST_IndicationDeprecated', 'TST_Lineage', 'TST_MemberOfFamilyCollection', 'TST_MemberOfFamilyCollectionDep', 'TST_Person'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --no-experimental, --summary.', ['enumerate', '--no-experimental', '--summary'], {'stdout': ['8 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --experimental, --association.', ['enumerate', '--experimental', '--association', '--names-only'], {'stdout': ['EXP_TestExperimental1', 'EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4', 'TST_IndicationExperimental', 'TST_MemberOfFamilyCollectionExp'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --experimental, --association, ' '--summary', ['enumerate', '--experimental', '--association', '--summary'], {'stdout': ['6 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --experimental , --no-association.', ['enumerate', '--experimental', '--no-association', '--names-only'], {'stdout': ['EXP_TestExperimental1', 'EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4', 'TST_IndicationExperimental'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --indication and --experimental.', ['enumerate', '--experimental', '--indication', '--names-only'], {'stdout': ['TST_IndicationExperimental'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --indication, --no-experimental.', ['enumerate', '--no-experimental', '--indication', '--names-only'], {'stdout': ['TST_Indication', 'TST_IndicationDeprecated'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --no-indication, --no-experimental, ' '--no-association', ['enumerate', '--no-experimental', '--no-indication', '--no-association', '--names-only'], {'stdout': 
['BLA_Person', 'TST_FamilyCollection', 'TST_Person'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --deprecated, --no-association.', ['enumerate', '--deprecated', '--no-association', '--names-only'], {'stdout': ['TST_IndicationDeprecated', 'TST_MemberOfFamilyCollectionDep'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --deprecated, --no-association, ' '--summary', ['enumerate', '--deprecated', '--no-association', '--summary'], {'stdout': ['2 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --no-deprecated, --association', ['enumerate', '--no-deprecated', '--association', '--names-only'], {'stdout': ['BLA_Person', 'EXP_TestExperimental1', 'EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4', 'TST_FamilyCollection', 'TST_Indication', 'TST_IndicationExperimental', 'TST_Lineage', 'TST_MemberOfFamilyCollection', 'TST_MemberOfFamilyCollectionExp', 'TST_Person'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --no-deprecated, --no-association' '--summary', ['enumerate', '--no-deprecated', '--association', '--summary'], {'stdout': ['12 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --experimental, --since 2.42.0.', ['enumerate', '--experimental', '--since', '2.42.0', '--names-only'], {'stdout': ['TST_IndicationExperimental'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --experimental and --since 2.42.0' '--summary', ['enumerate', '--experimental', '--since', '2.42.0', '--summary'], {'stdout': ['3 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --experimental and --since 2.45.0.', ['enumerate', '--experimental', '--since', '2.45.0', '--names-only'], {'stdout': [], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, 
OK], ['Verify class command enumerate with --experimental and --since 2.45.x.', ['enumerate', '--experimental', '--since', '2.45.x', '--names-only'], {'stderr': ['--since option value invalid. ', 'Must contain 3 integer elements', '2.45.x'], 'rc': 1, 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --indication and --since 2.45.', ['enumerate', '--experimental', '--since', '2.45', '--names-only'], {'stderr': ['Version value must contain 3 integer elements (int.int.int)', '2.45'], 'rc': 1, 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --schema "TST".', ['enumerate', '--schema', 'TST', '--names-only'], {'stdout': ['TST_FamilyCollection', 'TST_Indication', 'TST_IndicationDeprecated', 'TST_IndicationExperimental', 'TST_Lineage', 'TST_MemberOfFamilyCollection', 'TST_MemberOfFamilyCollectionDep', 'TST_MemberOfFamilyCollectionExp', 'TST_Person', ], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --schema "BLA".', ['enumerate', '--schema', 'BLA', '--names-only'], {'stdout': ['BLA_Person', ], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --schema "EXP".', ['enumerate', '--schema', 'EXP', '--names-only'], {'stdout': ['EXP_TestExperimental1', 'EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --schema "EXP" --summary', ['enumerate', '--schema', 'EXP', '--summary'], {'stdout': ['4 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --schema "EXP" and --experimental.', ['enumerate', '--schema', 'EXP', '--experimental', '--names-only'], {'stdout': ['EXP_TestExperimental1', 'EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --schema "EXP" and 
--experimental.', ['enumerate', '--schema', 'EXP', '--experimental', '--summary'], {'stdout': ['4 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --schema "EXP",--experimental, ' '--summary.', ['enumerate', '--schema', 'EXP', '--experimental', '--summary'], {'stdout': ['4 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --schema "EXP" , --no-experimental.', ['enumerate', '--schema', 'EXP', '--no-experimental', '--names-only'], {'stdout': [], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --schema "EXP" , --no-experimental ' '--summary', ['enumerate', '--schema', 'EXP', '--no-experimental', '--summary'], {'stdout': ['0 objects returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --schema "NOT_EXIST".', ['enumerate', '--schema', 'NOT_EXIST', '--names-only'], {'stdout': [], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --subclass-of TST_Person.', ['enumerate', '--subclass-of', 'TST_Person', '--di', '--names-only'], {'stdout': ['TST_PersonClsDep', 'TST_PersonDep', 'TST_PersonExp', 'TST_PersonExpProperty', 'TST_PersonPropDep', 'TST_PersonSub'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --subclass-of TST_Person --summary.', ['enumerate', '--subclass-of', 'TST_Person', '--di', '--summary'], {'stdout': ['6 CIMClass(s) returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --subclass-of TST_Person ' '-- association--summary .', ['enumerate', '--association', '--subclass-of', 'TST_Person', '--di', '--summary'], {'stdout': ['0 objects returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --subclass-of TST_PersonDep.', ['enumerate', '--subclass-of', 'TST_PersonDep', '--di', '--names-only'], {'stdout': [], 
'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --subclass-of TST_PersonDep ' '--summary.', ['enumerate', '--subclass-of', 'TST_PersonDep', '--di', '--summary'], {'stdout': ['0 objects returned'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --subclass-of NOT_EXIST excepts.', ['enumerate', '--subclass-of', 'NOT_EXIST', '--names-only'], {'stderr': ['Classname NOT_EXIST for "subclass-of" not found'], 'rc': 1, 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify instance command enumerate CIM_Foo_sub2, w --verbose rtns msg.', {'args': ['enumerate', 'CIM_Foo_sub2'], 'general': ['--verbose']}, {'stdout': 'No objects returned', 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], # # Enumerate errors # ['Verify class command enumerate nonexistent class name', ['enumerate', 'CIM_FClassDoesNotExist'], {'stderr': ['CIMError', 'CIM_ERR_INVALID_CLASS'], 'rc': 1, 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate table output fails).', {'args': ['enumerate'], 'general': ['--output-format', 'table']}, {'stderr': ['Output format "table"', 'not allowed', 'Only CIM formats:'], 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], # # Test class get # ['Verify class command get --help response', ['get', '--help'], {'stdout': CLASS_GET_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command get -h response', ['get', '-h'], {'stdout': CLASS_GET_HELP_LINES, 'test': 'innows'}, None, OK], # command get local-only option ['Verify class command get not local-only. 
Tests for property names', ['get', 'CIM_Foo_sub2'], {'stdout': ['string cimfoo_sub2;', 'InstanceID', 'IntegerProp', 'Fuzzy', 'Key ( true )', 'IN ( false )'], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command get local-only(--lo)).', ['get', 'CIM_Foo_sub2', '--lo'], {'stdout': ['class CIM_Foo_sub2 : CIM_Foo {', '', ' string cimfoo_sub2;', '', '};', ''], 'test': 'patterns'}, SIMPLE_MOCK_FILE, OK], ['Verify class command get local-only. Tests whole response', ['get', 'CIM_Foo_sub2', '--local-only'], {'stdout': ['class CIM_Foo_sub2 : CIM_Foo {', '', ' string cimfoo_sub2;', '', '};', ''], 'test': 'patterns'}, SIMPLE_MOCK_FILE, OK], # includequalifiers. Test the flag that excludes qualifiers ['Verify class command get without qualifiers. Tests whole response', ['get', 'CIM_Foo_sub2', '--nq'], {'stdout': ['class CIM_Foo_sub2 : CIM_Foo {', '', ' string cimfoo_sub2;', '', ' string InstanceID;', '', ' uint32 IntegerProp;', '', ' string cimfoo_emb3;', '', ' uint32 Fuzzy(', ' string TestInOutParameter,', ' CIM_FooRef1 REF TestRef,', ' string OutputParam,', ' uint32 OutputRtnValue);', '', ' uint32 FuzzyStatic(', ' string TestInOutParameter,', ' CIM_Foo REF TestRef,', ' string OutputParam,', ' uint32 OutputRtnValue,', ' string cimfoo_emb1);', '', ' string DeleteNothing();', '', '};', ''], 'test': 'lines'}, SIMPLE_MOCK_FILE, OK], ['Verify class command get without qualifiers. 
Tests whole response', ['get', 'CIM_Foo_sub2', '--no-qualifiers'], {'stdout': ['class CIM_Foo_sub2 : CIM_Foo {', '', ' string cimfoo_sub2;', '', ' string InstanceID;', '', ' uint32 IntegerProp;', '', ' string cimfoo_emb3;', '', ' uint32 Fuzzy(', ' string TestInOutParameter,', ' CIM_FooRef1 REF TestRef,', ' string OutputParam,', ' uint32 OutputRtnValue);', '', ' uint32 FuzzyStatic(', ' string TestInOutParameter,', ' CIM_Foo REF TestRef,', ' string OutputParam,', ' uint32 OutputRtnValue,', ' string cimfoo_emb1);', '', ' string DeleteNothing();', '', '};', ''], 'test': 'lines'}, SIMPLE_MOCK_FILE, OK], # pylint: disable=line-too-long ['Verify class command get with propertylist. Tests whole response', ['get', 'CIM_Foo_sub2', '--pl', 'InstanceID'], {'stdout': ['class CIM_Foo_sub2 : CIM_Foo {', '', ' [Key ( true ),', ' Description ( "This is key property." )]', ' string InstanceID;', '', ' [Description ( "Method with in and out parameters" )]', ' uint32 Fuzzy(', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Define data to be returned in output parameter" )]', # noqa: E501 ' string TestInOutParameter,', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Test of ref in/out parameter" )]', ' CIM_FooRef1 REF TestRef,', ' [IN ( false ),', ' OUT ( true ),', ' Description ( "Rtns method name if exists on input" )]', # noqa: E501 ' string OutputParam,', ' [IN ( true ),', ' Description ( "Defines return value if provided." 
)]', # noqa: E501 ' uint32 OutputRtnValue);', '', ' [Description ( "Static method with in and out parameters" ),', # noqa: E501 ' Static ( true )]', ' uint32 FuzzyStatic(', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Define data to be returned in output parameter" )]', # noqa: E501 ' string TestInOutParameter,', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Test of ref in/out parameter" )]', ' CIM_Foo REF TestRef,', ' [IN ( false ),', ' OUT ( true ),', ' Description ( "Rtns method name if exists on input" )]', # noqa: E501 ' string OutputParam,', ' [IN ( true ),', ' Description ( "Defines return value if provided." )]', # noqa: E501 ' uint32 OutputRtnValue,', ' [IN ( true ),', ' Description ( "Embedded instance parameter" ),', ' EmbeddedInstance ( "CIM_FooEmb1" )]', ' string cimfoo_emb1);', '', ' [Description ( "Method with no parameters but embedded instance return" ),', # noqa: E501 ' EmbeddedInstance ( "CIM_FooEmb2" )]', ' string DeleteNothing();', '', '};', ''], 'test': 'lines'}, SIMPLE_MOCK_FILE, OK], ['Verify class command get with empty propertylist. Tests whole ' 'response', ['get', 'CIM_Foo_sub2', '--pl', '""'], {'stdout': ['class CIM_Foo_sub2 : CIM_Foo {', '', ' [Description ( "Method with in and out parameters" )]', ' uint32 Fuzzy(', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Define data to be returned in output parameter" )]', # noqa: E501 ' string TestInOutParameter,', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Test of ref in/out parameter" )]', ' CIM_FooRef1 REF TestRef,', ' [IN ( false ),', ' OUT ( true ),', ' Description ( "Rtns method name if exists on input" )]', # noqa: E501 ' string OutputParam,', ' [IN ( true ),', ' Description ( "Defines return value if provided." 
)]', # noqa: E501 ' uint32 OutputRtnValue);', '', ' [Description ( "Static method with in and out parameters" ),', # noqa: E501 ' Static ( true )]', ' uint32 FuzzyStatic(', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Define data to be returned in output parameter" )]', # noqa: E501 ' string TestInOutParameter,', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Test of ref in/out parameter" )]', ' CIM_Foo REF TestRef,', ' [IN ( false ),', ' OUT ( true ),', ' Description ( "Rtns method name if exists on input" )]', # noqa: E501 ' string OutputParam,', ' [IN ( true ),', ' Description ( "Defines return value if provided." )]', # noqa: E501 ' uint32 OutputRtnValue,', ' [IN ( true ),', ' Description ( "Embedded instance parameter" ),', ' EmbeddedInstance ( "CIM_FooEmb1" )]', ' string cimfoo_emb1);', '', ' [Description ( "Method with no parameters but embedded instance return" ),', # noqa: E501 ' EmbeddedInstance ( "CIM_FooEmb2" )]', ' string DeleteNothing();', '', '};', ''], 'test': 'lines'}, SIMPLE_MOCK_FILE, OK], # pylint: enable=line-too-long ['Verify class command get with xml output format).', {'args': ['get', 'CIM_Foo'], 'general': ['--output-format', 'repr']}, {'stdout': [r"CIMClass\(classname='CIM_Foo', superclass=None,", r"'InstanceID': CIMProperty\(name='InstanceID', value=None,"], 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], ['Verify class command get with repr output format).', {'args': ['get', 'CIM_Foo'], 'general': ['--output-format', 'txt']}, {'stdout': ["CIMClass(classname='CIM_Foo', ...)"], 'test': 'lines'}, SIMPLE_MOCK_FILE, OK], ['Verify class command get with repr output format).', {'args': ['get', 'CIM_Foo'], 'general': ['--output-format', 'xml']}, {'stdout': ['<CLASS( | .+ )NAME="CIM_Foo">', '<PROPERTY( | .+ )NAME="InstanceID"', '<PROPERTY( | .+ )NAME="IntegerProp"', '<METHOD( | .+ )NAME="DeleteNothing"'], 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], # pylint: disable=line-too-long ['Verify class command get with propertylist and classorigin,', 
['get', 'CIM_Foo_sub2', '--pl', 'InstanceID', '--ico'], {'stdout': ['class CIM_Foo_sub2 : CIM_Foo {', ' [Key ( true ),', ' Description ( "This is key property." )]', ' string InstanceID;', ' [Description ( "Method with in and out parameters" )]', ' uint32 Fuzzy(', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Define data to be returned in ' 'output parameter" )]', ' string TestInOutParameter,', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Test of ref in/out parameter" )]', ' CIM_FooRef1 REF TestRef,', ' [IN ( false ),', ' OUT ( true ),', ' Description ( "Rtns method name if exists on ' 'input" )]', ' string OutputParam,', ' [IN ( true ),', ' Description ( "Defines return value if ' 'provided." )]', ' uint32 OutputRtnValue);', ' [Description ( "Static method with in and out ' 'parameters" ),', ' Static ( true )]', ' uint32 FuzzyStatic(', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Define data to be returned in ' 'output parameter" )]', ' string TestInOutParameter,', ' [IN ( true ),', ' OUT ( true ),', ' Description ( "Test of ref in/out parameter" )]', ' CIM_Foo REF TestRef,', ' [IN ( false ),', ' OUT ( true ),', ' Description ( "Rtns method name if exists on ' 'input" )]', ' string OutputParam,', ' [IN ( true ),', ' Description ( "Defines return value if ' 'provided." 
)]', ' uint32 OutputRtnValue,', ' [IN ( true ),', ' Description ( "Embedded instance parameter" ),', ' EmbeddedInstance ( "CIM_FooEmb1" )]', ' string cimfoo_emb1);', ' [Description ( "Method with no parameters but embedded instance return" ),', # noqa: E501 ' EmbeddedInstance ( "CIM_FooEmb2" )]', ' string DeleteNothing();', '};', ''], 'test': 'linesnows'}, SIMPLE_MOCK_FILE, OK], # pylint: enable=line-too-long ['Verify class command enumerate --di --no --namespace', ['get', 'CIM_Namespace', '-n', 'interop'], {'stdout': ['class CIM_Namespace', 'string ObjectManagerCreationClassName;'], 'test': 'innows'}, SIMPLE_INTEROP_MOCK_FILE, OK], # get command errors ['Verify class command get invalid classname', ['get', 'CIM_Argh'], {'stderr': ['CIMError', 'CIM_ERR_NOT_FOUND', '6'], 'rc': 1, 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], ['Verify class command get invalid namespace', ['get', 'CIM_Foo', '--namespace', 'Argh'], {'stderr': ['CIMError', 'CIM_ERR_INVALID_NAMESPACE', '3'], 'rc': 1, 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], ['Verify class command enumerate table output fails).', {'args': ['get', 'CIM_Foo'], 'general': ['--output-format', 'table']}, {'stderr': ['Output format "table" ', 'not allowed', 'Only CIM formats:'], 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], # # find command # ['Verify class command find --help response', ['find', '--help'], {'stdout': CLASS_FIND_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command find -h response', ['find', '-h'], {'stdout': CLASS_FIND_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command find simple name in all namespaces', ['find', 'CIM_*'], {'stdout': [" root/cimv2: CIM_Foo", " root/cimv2: CIM_Foo_sub", " root/cimv2: CIM_Foo_sub2", " root/cimv2: CIM_Foo_sub_sub"], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command find simple name in all namespaces wo case', ['find', 'cim_*'], {'stdout': [" root/cimv2: CIM_Foo", " root/cimv2: CIM_Foo_sub", " root/cimv2: CIM_Foo_sub2", " root/cimv2: 
CIM_Foo_sub_sub"], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command find simple name in all namespaces lead wc', ['find', '*sub_sub*'], {'stdout': [" root/cimv2: CIM_Foo_sub_sub"], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command find simple name in all namespaces wo case', ['find', '*sub_su?*'], {'stdout': [" root/cimv2: CIM_Foo_sub_sub"], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command find simple name in known namespace', ['find', 'CIM_*', '-n', 'root/cimv2'], {'stdout': [" root/cimv2: CIM_BaseEmb", " root/cimv2: CIM_BaseRef", " root/cimv2: CIM_Foo", " root/cimv2: CIM_FooAssoc", " root/cimv2: CIM_FooEmb1", " root/cimv2: CIM_FooEmb2", " root/cimv2: CIM_FooEmb3", " root/cimv2: CIM_FooRef1", " root/cimv2: CIM_FooRef2", " root/cimv2: CIM_Foo_sub", " root/cimv2: CIM_Foo_sub2", " root/cimv2: CIM_Foo_sub_sub"], 'test': 'lines'}, SIMPLE_MOCK_FILE, OK], ['Verify class command find simple name in interop namespace', ['find', 'CIM_*'], {'stdout': [" interop: CIM_Namespace", " interop: CIM_ObjectManager"], 'test': 'in'}, SIMPLE_INTEROP_MOCK_FILE, OK], ['Verify class command find name in known namespace -o grid', {'general': ['-o', 'grid'], 'args': ['find', 'CIM_*', '-n', 'root/cimv2']}, {'stdout': ['Find class CIM_*', '+-------------+-----------------+', '| Namespace | Classname |', '+=============+=================+', '| root/cimv2 | CIM_BaseEmb |', '+-------------+-----------------+', '| root/cimv2 | CIM_BaseRef |', '+-------------+-----------------+', '| root/cimv2 | CIM_Foo |', '+-------------+-----------------+', '| root/cimv2 | CIM_FooAssoc |', '+-------------+-----------------+', '| root/cimv2 | CIM_FooEmb1 |', '+-------------+-----------------+', '| root/cimv2 | CIM_FooEmb2 |', '+-------------+-----------------+', '| root/cimv2 | CIM_FooEmb3 |', '+-------------+-----------------+', '| root/cimv2 | CIM_FooRef1 |', '+-------------+-----------------+', '| root/cimv2 | CIM_FooRef2 |', '+-------------+-----------------+', '| 
root/cimv2 | CIM_Foo_sub |', '+-------------+-----------------+', '| root/cimv2 | CIM_Foo_sub2 |', '+-------------+-----------------+', '| root/cimv2 | CIM_Foo_sub_sub |', '+-------------+-----------------+'], 'test': 'lines'}, SIMPLE_MOCK_FILE, OK], ['Verify class command verify nothing found for BLAH_ regex', ['find', 'BLAH_*', '-n', 'root/cimv2'], {'stdout': "", 'test': 'lines'}, SIMPLE_MOCK_FILE, OK], ['Verify class command find simple name in known namespace with wildcard', ['find', '*sub2', '-n', 'root/cimv2'], {'stdout': " root/cimv2: CIM_Foo_sub2", 'test': 'lines'}, SIMPLE_MOCK_FILE, OK], ['Verify class command find with --association filter', ['find', '*TST_*', '-n', 'root/cimv2', '--association'], {'stdout': ['TST_Lineage', 'TST_MemberOfFamilyCollection', 'TST_MemberOfFamilyCollectionExp'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --indication filter', ['find', '*TST_*', '-n', 'root/cimv2', '--indication'], {'stdout': ['TST_Indication', 'root/cimv2:TST_IndicationExperimental'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --indication & -no-experimental filters', ['find', '*TST_*', '-n', 'root/cimv2', '--indication', '--no-experimental'], {'stdout': ['TST_Indication'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --association & --experimental filters', ['find', '*TST_*', '-n', 'root/cimv2', '--association', '--experimental'], {'stdout': ['TST_MemberOfFamilyCollectionExp'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --no-association & --no-experimental, ' 'filters', ['find', 'TST_*', '-n', 'root/cimv2', '--no-association', '--no-experimental', '--no-indication'], {'stdout': ['root/cimv2: TST_FamilyCollection', 'root/cimv2: TST_Person', 'root/cimv2: TST_PersonClsDep', 'root/cimv2: TST_PersonDep', 'root/cimv2: TST_PersonPropDep', 'root/cimv2: TST_PersonSub'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], 
['Verify class command find with --no-association & --deprecated, ', ['find', 'TST_*', '-n', 'root/cimv2', '--no-association', '--deprecated'], {'stdout': ['root/cimv2: TST_IndicationDeprecated', 'root/cimv2: TST_PersonClsDep', 'root/cimv2: TST_PersonDep', 'root/cimv2: TST_PersonExpProperty', 'root/cimv2: TST_PersonPropDep'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --experimental and --since 2.42.0.', ['find', "*", '--experimental', '--since', '2.42.0'], {'stdout': ['TST_IndicationExperimental'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --experimental and --since 2.45.0.', ['find', "*", '--experimental', '--since', '2.45.0'], {'stdout': [], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --experimental and --since 2.45.x.', ['find', "*", '--experimental', '--since', '2.45.x'], {'stderr': ['--since option value invalid. ', 'Must contain 3 integer elements', '2.45.x'], 'rc': 1, 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --schema "BLA".', ['find', '*', '--schema', 'BLA'], {'stdout': ['BLA_Person', ], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --schema "EXP".', ['find', '*', '--schema', 'EXP'], {'stdout': ['EXP_TestExperimental1', 'EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --schema "EXP". 
test not-innows', ['find', '*', '--schema', 'EXP'], {'stdout': ['BLA_Person', 'TST_FamilyCollection', 'TST_Indication', 'TST_IndicationDeprecated', 'TST_IndicationExperimental', 'TST_Lineage', 'TST_MemberOfFamilyCollection', 'TST_MemberOfFamilyCollectionDep', 'TST_MemberOfFamilyCollectionExp', 'TST_Person', 'TST_PersonClsDep', 'TST_PersonDep', 'TST_PersonExp', 'TST_PersonExpProperty', 'TST_PersonPropDep', 'TST_PersonSub'], 'test': 'not-innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --schema "EXP" and --experimental.', ['find', '*', '--schema', 'EXP', '--experimental'], {'stdout': ['EXP_TestExperimental1', 'EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --subclass-of.', ['find', '*', '--subclass-of', 'TST_Person'], {'stdout': ['root/cimv2: TST_PersonClsDep', 'root/cimv2: TST_PersonDep', 'root/cimv2: TST_PersonExp', 'root/cimv2: TST_PersonExpProperty', 'root/cimv2: TST_PersonPropDep', 'root/cimv2: TST_PersonSub'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command find with --subclass-of.', ['find', '*Sub', '--subclass-of', 'TST_Person'], {'stdout': ['root/cimv2: TST_PersonSub'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], # Tests with --leaf-classes ['Verify class command enumerate with --leaf-classes. 
test innows', ['enumerate', '--di', '--no', '--leaf-classes'], {'stdout': ['BLA_Person', 'EXP_TestExperimental1', 'EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4', 'TST_FamilyCollection', 'TST_Indication', 'TST_IndicationDeprecated', 'TST_IndicationExperimental', 'TST_Lineage', 'TST_MemberOfFamilyCollection', 'TST_MemberOfFamilyCollectionDep', 'TST_MemberOfFamilyCollectionExp', 'TST_PersonClsDep', 'TST_PersonDep', 'TST_PersonExp', 'TST_PersonExpProperty', 'TST_PersonPropDep', 'TST_PersonSub'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --leaf-classes. test not-innows', ['enumerate', '--di', '--no', '--leaf-classes'], {'stdout': ['TST_Person'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --leaf-classes & --subclass-of', ['enumerate', '--di', '--no', '--leaf-classes', '--subclass-of', 'TST_Person'], {'stdout': ['TST_PersonClsDep', 'TST_PersonDep', 'TST_PersonExp' 'TST_PersonExpProperty', 'TST_PersonPropDep', 'TST_PersonSub'], 'test': 'innows'}, QUALIFIER_FILTER_MODEL, OK], ['Verify class command enumerate with --leaf-classes & --subclass-of, ' 'not-innows', ['enumerate', '--di', '--no', '--leaf-classes', '--subclass-of', 'TST_Person'], {'stdout': ['BLA_Person', 'EXP_TestExperimental1', 'EXP_TestExperimental2', 'EXP_TestExperimental3', 'EXP_TestExperimental4', 'TST_FamilyCollection', 'TST_Indication', 'TST_IndicationDeprecated', 'TST_IndicationExperimental', 'TST_Lineage', 'TST_MemberOfFamilyCollection', 'TST_MemberOfFamilyCollectionDep', 'TST_MemberOfFamilyCollectionExp'], 'test': 'not-innows'}, QUALIFIER_FILTER_MODEL, OK], # # command "class delete" # ['Verify class command delete --help response', ['delete', '--help'], {'stdout': CLASS_DELETE_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command delete -h response', ['delete', '-h'], {'stdout': CLASS_DELETE_HELP_LINES, 'test': 'innows'}, None, OK], # Class delete successful ['Verify class command 
delete successful with no subclasses, ' '--force (deprecated)', {'args': ['delete', 'CIM_Foo_sub_sub', '--force'], 'general': ['--warn']}, {'stderr': ['DeprecationWarning: The --force / -f option has been ' 'deprecated'], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete successful with no subclasses, ' '-f (deprecated)', {'args': ['delete', 'CIM_Foo_sub_sub', '-f'], 'general': ['--warn']}, {'stderr': ['DeprecationWarning: The --force / -f option has been ' 'deprecated'], 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete successful with no subclasses, ' '--include-instances', ['delete', 'CIM_Foo_sub_sub', '--include-instances'], {'stdout': ['Deleted instance root/cimv2:CIM_Foo_sub_sub.', 'Deleted class CIM_Foo_sub_sub'], 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete successful with no subclasses, --namespace ' 'and --include-instances', ['delete', 'CIM_Foo_sub_sub', '--namespace', 'root/cimv2', '--include-instances'], {'stdout': ['Deleted instance root/cimv2:CIM_Foo_sub_sub.', 'Deleted class CIM_Foo_sub_sub'], 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete (interactive) successful with no subclasses, ' '--include-instances, --dry-run', {'stdin': ['class delete CIM_Foo_sub_sub --include-instances --dry-run', 'class get CIM_Foo_sub_sub', 'instance count CIM_Foo_sub_sub']}, {'stdout': ['Dry run: Deleted instance root/cimv2:CIM_Foo_sub_sub.' 
'InstanceID="CIM_Foo_sub_sub1"', 'Dry run: Deleted class CIM_Foo_sub_sub', 'class CIM_Foo_sub_sub : CIM_Foo_sub {', 'root/cimv2 CIM_Foo_sub_sub 3'], 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], # Class delete errors ['Verify class command delete no classname', ['delete'], {'stderr': ['Error: Missing argument .CLASSNAME.'], 'rc': 2, 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete nonexistent classname fails', ['delete', 'Argh'], {'stderr': ['CIMError', 'CIM_ERR_INVALID_CLASS', '5'], 'rc': 1, 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail instances exist', ['delete', 'CIM_Foo_sub_sub'], {'stderr': 'Cannot delete class CIM_Foo_sub_sub because it has ' '3 instances', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail subclasses exist', ['delete', 'CIM_Foo'], {'stderr': 'Cannot delete class CIM_Foo because it has 12 instances', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail subclasses exist, --include-instances', ['delete', 'CIM_Foo', '--include-instances'], {'stderr': 'Cannot delete class CIM_Foo because these classes depend on ' 'it: CIM_Foo_sub, CIM_Foo_sub2', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail referencing class CIM_FooRef1 exist', ['delete', 'CIM_FooRef1'], {'stderr': 'Cannot delete class CIM_FooRef1 because it has 1 instances', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail referencing class CIM_FooRef1 exist, ' '--include-instances', ['delete', 'CIM_FooRef1', '--include-instances'], {'stderr': 'Cannot delete class CIM_FooRef1 because these classes depend ' 'on it: CIM_Foo', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail referencing class CIM_FooRef2 exist', ['delete', 'CIM_FooRef2'], {'stderr': 'Cannot delete class CIM_FooRef2 because it has 1 instances', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify 
class command delete fail referencing class CIM_FooRef2 exist, ' '--include-instances', ['delete', 'CIM_FooRef2', '--include-instances'], {'stderr': 'Cannot delete class CIM_FooRef2 because these classes depend ' 'on it: CIM_Foo', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail referencing class CIM_FooAssoc exist', ['delete', 'CIM_FooAssoc'], {'stderr': 'Cannot delete class CIM_FooAssoc because it has 1 instances', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete succesd for referencing class CIM_FooAssoc, ' '--include-instances', ['delete', 'CIM_FooAssoc', '--include-instances'], {'stdout': '', 'test': 'in'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail embedding class CIM_FooEmb1 exist', ['delete', 'CIM_FooEmb1'], {'stderr': 'Cannot delete class CIM_FooEmb1 because these classes depend ' 'on it: CIM_Foo', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail embedding class CIM_FooEmb1 exist, ' '--include-instances', ['delete', 'CIM_FooEmb1', '--include-instances'], {'stderr': 'Cannot delete class CIM_FooEmb1 because these classes depend ' 'on it: CIM_Foo', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail embedding class CIM_FooEmb2 exist', ['delete', 'CIM_FooEmb2'], {'stderr': 'Cannot delete class CIM_FooEmb2 because these classes depend ' 'on it: CIM_Foo', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail embedding class CIM_FooEmb2 exist, ' '--include-instances', ['delete', 'CIM_FooEmb2', '--include-instances'], {'stderr': 'Cannot delete class CIM_FooEmb2 because these classes depend ' 'on it: CIM_Foo', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail embedding class CIM_FooEmb3 exist', ['delete', 'CIM_FooEmb3'], {'stderr': 'Cannot delete class CIM_FooEmb3 because these classes depend ' 'on it: CIM_Foo', 'rc': 1, 'test': 'innows'}, 
SIMPLE_MOCK_FILE, OK], ['Verify class command delete fail embedding class CIM_FooEmb3 exist, ' '--include-instances', ['delete', 'CIM_FooEmb3', '--include-instances'], {'stderr': 'Cannot delete class CIM_FooEmb3 because these classes depend ' 'on it: CIM_Foo', 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command delete fails if instance provider rejects delete', {'args': ['delete', 'CIM_Foo_sub_sub', '--include-instances']}, {'stderr': ['CIM_ERR_FAILED', 'Deletion of CIM_Foo_sub_sub instances is rejected'], 'rc': 1, 'test': 'innows'}, [SIMPLE_MOCK_FILE, 'reject_deleteinstance_provider.py'], MOCK_SETUP_SUPPORTED], ['Verify class command delete using --namespace interop fails because of ' 'instances', ['delete', 'CIM_ObjectManager', '-n', 'interop'], {'stderr': ['Cannot delete class', 'instances'], 'rc': 1, 'test': 'innows'}, SIMPLE_INTEROP_MOCK_FILE, OK], # # command "class tree" # ['Verify class command tree --help response', ['tree', '--help'], {'stdout': CLASS_TREE_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command tree -h response', ['tree', '-h'], {'stdout': CLASS_TREE_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command tree top down. Uses simple mock, no argument', ['tree'], {'stdout': """root +-- CIM_BaseEmb | +-- CIM_FooEmb1 | +-- CIM_FooEmb2 | +-- CIM_FooEmb3 +-- CIM_BaseRef | +-- CIM_FooRef1 | +-- CIM_FooRef2 +-- CIM_Foo | +-- CIM_Foo_sub | | +-- CIM_Foo_sub_sub | +-- CIM_Foo_sub2 +-- CIM_FooAssoc """, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command tree top down starting at defined class ', ['tree', 'CIM_Foo_sub'], {'stdout': """CIM_Foo_sub +-- CIM_Foo_sub_sub """, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command tree top down starting at leaf class', ['tree', 'CIM_Foo_sub'], {'stdout': """CIM_Foo_sub_sub """, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command tree bottom up. 
-s', ['tree', '-s', 'CIM_Foo_sub_sub'], {'stdout': """root +-- CIM_Foo +-- CIM_Foo_sub +-- CIM_Foo_sub_sub """, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command tree -s from top class', ['tree', '-s', 'CIM_Foo'], {'stdout': """root +-- CIM_Foo """, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command tree bottom up. --superclasses', ['tree', '--superclasses', 'CIM_Foo_sub_sub'], {'stdout': """root +-- CIM_Foo +-- CIM_Foo_sub +-- CIM_Foo_sub_sub """, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify class command tree with --detail', ['tree', '--detail'], {'stdout': """root +-- CIM_Foo (Version=2.30.0) | +-- CIM_Foo_sub (Version=2.31.0) | +-- CIM_Foo_sub_sub (Version=2.20.1) +-- CIM_Foo_no_version () +-- CIM_Indication (Abstract,Indication,Version=2.24.0) +-- CIM_Indication_no_version (Abstract,Indication) +-- TST_Lineage (Association,Version=2.20.1) +-- TST_Lineage_no_version (Association) """, 'test': 'innows'}, TREE_TEST_MOCK_FILE, OK], # class tree' error tests ['Verify class command tree with invalid CLASSNAME fails', ['tree', '-s', 'CIM_Foo_subx'], {'stderr': ['CIMError:'], 'rc': 1, 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], ['Verify class command tree with superclass option, CLASSNAME fails', ['tree', '-s'], {'stderr': ['Error: CLASSNAME argument required for --superclasses ' 'option'], 'rc': 1, 'test': 'regex'}, SIMPLE_MOCK_FILE, OK], # # associators command tests # # ['Verify class command associators --help response', ['associators', '--help'], {'stdout': CLASS_ASSOCIATORS_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command associators -h response', ['associators', '-h'], {'stdout': CLASS_ASSOCIATORS_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command associators simple request,', ['associators', 'TST_Person'], {'stdout': [FAKEURL_STR + '/root/cimv2:TST_Person', 'class TST_Person {', '', ' [Key ( true ),', ' Description ( "This is key prop" )]', ' string name;', '', ' string extraProperty = 
"defaultvalue";', '', ' [ValueMap { "1", "2" },', ' Values { "female", "male" }]', ' uint16 gender;', '', ' [ValueMap { "1", "2" },', ' Values { "books", "movies" }]', ' uint16 likes[];', '', '};', ''], 'test': 'lines'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command associators simple request names only,', ['associators', 'TST_Person', '--names-only'], {'stdout': [FAKEURL_STR + '/root/cimv2:TST_Person'], 'test': 'lines'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command associators simple request, one parameter', ['associators', 'TST_Person', '--ac', 'TST_MemberOfFamilyCollection'], {'stdout': [FAKEURL_STR + '/root/cimv2:TST_Person', 'class TST_Person {', '', ' [Key ( true ),', ' Description ( "This is key prop" )]', ' string name;', '', ' string extraProperty = "defaultvalue";', '', ' [ValueMap { "1", "2" },', ' Values { "female", "male" }]', ' uint16 gender;', '', ' [ValueMap { "1", "2" },', ' Values { "books", "movies" }]', ' uint16 likes[];', '', '};', ''], 'test': 'lines'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command associators request, all filters long', ['associators', 'TST_Person', '--assoc-class', 'TST_MemberOfFamilyCollection', '--role', 'member', '--result-role', 'family', '--result-class', 'TST_Person'], {'stdout': [FAKEURL_STR + '/root/cimv2:TST_Person', 'class TST_Person {', '', ' [Key ( true ),', ' Description ( "This is key prop" )]', ' string name;', '', ' string extraProperty = "defaultvalue";', '', ' [ValueMap { "1", "2" },', ' Values { "female", "male" }]', ' uint16 gender;', '', ' [ValueMap { "1", "2" },', ' Values { "books", "movies" }]', ' uint16 likes[];', '', '};', ''], 'test': 'lines'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command associators request, all filters short', ['associators', 'TST_Person', '--ac', 'TST_MemberOfFamilyCollection', '-r', 'member', '--rr', 'family', '--rc', 'TST_Person'], {'stdout': [FAKEURL_STR + '/root/cimv2:TST_Person', 'class TST_Person {', '', ' [Key ( true ),', ' Description ( "This is 
key prop" )]', ' string name;', '', ' string extraProperty = "defaultvalue";', '', ' [ValueMap { "1", "2" },', ' Values { "female", "male" }]', ' uint16 gender;', '', ' [ValueMap { "1", "2" },', ' Values { "books", "movies" }]', ' uint16 likes[];', '', '};', ''], 'test': 'lines'}, SIMPLE_ASSOC_MOCK_FILE, OK], # Behavior changed pywbem 0.15.0 to exception rtn ['Verify class command associators request, all filters short, -ac ' 'not valid class', ['associators', 'TST_Person', '--ac', 'TST_MemberOfFamilyCollectionx', '-r', 'member', '--rr', 'family', '--rc', 'TST_Person'], {'stderr': ['CIM_ERR_INVALID_PARAMETER'], 'rc': 1, 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], # Behavior changed pywbem 0.15.0 to exception rtn ['Verify class command associators request, all filters short, -r ' 'not valid role', ['associators', 'TST_Person', '--ac', 'TST_MemberOfFamilyCollection', '-r', 'memberx', '--rr', 'family', '--rc', 'TST_Person'], {'stdout': [], 'test': 'lines'}, SIMPLE_ASSOC_MOCK_FILE, OK], # Behavior changed pywbem 0.15.0 to exception rtn ['Verify class command associators request, all filters short, --rc ' 'does not valid class', ['associators', 'TST_Person', '--ac', 'TST_MemberOfFamilyCollection', '-r', 'member', '--rr', 'family', '--rc', 'TST_Personx'], {'stderr': ['CIM_ERR_INVALID_PARAMETER'], 'rc': 1, 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], # Behavior changed pywbem 0.15.0 to exception rtn ['Verify class command associators request, all filters long ' 'does not pass test', ['associators', 'TST_Person', '--assoc-class', 'TST_MemberOfFamilyCollection', '--role', 'member', '--result-role', 'family', '--result-class', 'TST_Personx'], {'stderr': ['CIM_ERR_INVALID_PARAMETER'], 'rc': 1, 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], # Associator errors ['Verify class command associators no CLASSNAME', ['associators'], {'stderr': ['Error: Missing argument .CLASSNAME.'], 'rc': 2, 'test': 'regex'}, None, OK], # Behavior changed pywbem 0.15.0 to exception rtn 
['Verify class command associators non-existent CLASSNAME rtns error', ['associators', 'CIM_Nonexistentclass'], {'stderr': ["CIM_ERR_INVALID_PARAMETER"], 'rc': 1, 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command associators non-existent namespace fails', ['associators', 'TST_Person', '--namespace', 'blah'], {'stderr': ['CIMError', 'CIM_ERR_INVALID_NAMESPACE'], 'rc': 1, 'test': 'regex'}, SIMPLE_ASSOC_MOCK_FILE, OK], # # references command tests # ['Verify class command references --help response', ['references', '--help'], {'stdout': CLASS_REFERENCES_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command references -h response', ['references', '-h'], {'stdout': CLASS_REFERENCES_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command references simple request', ['references', 'TST_Person'], {'stdout': ['class TST_Lineage {', 'Lineage defines the relationship', 'string InstanceID;', 'TST_Person REF parent;', 'TST_Person REF child;', '[Association ( true )', 'Description ( " Family gathers person to family." 
)', 'class TST_MemberOfFamilyCollection {', '[key ( true )]', 'TST_Person REF family;', 'TST_Person REF member;', ], 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command references simple request -o', ['references', 'TST_Person', '--no'], {'stdout': [FAKEURL_STR + '/root/cimv2:TST_Lineage', FAKEURL_STR + '/root/cimv2:TST_MemberOfFamilyCollection'], 'test': 'linesnows'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command references request, all filters long', ['references', 'TST_Person', '--role', 'member', '--result-class', 'TST_MemberOfFamilyCollection'], {'stdout': REFERENCES_CLASS_RTN_QUALS2, 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command references request, filters short', ['references', 'TST_Person', '-r', 'member', '--rc', 'TST_MemberOfFamilyCollection'], {'stdout': REFERENCES_CLASS_RTN_QUALS2, 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command refereces table output fails).', {'args': ['associators', 'TST_Person'], 'general': ['--output-format', 'table']}, {'stderr': ['Output format "table" ', 'not allowed', 'Only CIM formats:'], 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], # Reference errors ['Verify class command references no CLASSNAME', ['references'], {'stderr': ['Error: Missing argument .CLASSNAME.'], 'rc': 2, 'test': 'regex'}, None, OK], # Behavior changed pywbem 0.15.0, references bad param rtns except. 
['Verify class command references non-existent CLASSNAME rtns error', ['references', 'CIM_Nonexistentclass'], {'stderr': ["CIM_ERR_INVALID_PARAMETER"], 'rc': 1, 'test': 'innows'}, SIMPLE_ASSOC_MOCK_FILE, OK], ['Verify class command references non-existent namespace fails', ['references', 'TST_Person', '--namespace', 'blah'], {'stderr': ['CIMError', 'CIM_ERR_INVALID_NAMESPACE'], 'rc': 1, 'test': 'regex'}, SIMPLE_ASSOC_MOCK_FILE, OK], # # invokemethod command tests # ['Verify class command invokemethod --help response', ['invokemethod', '--help'], {'stdout': CLASS_INVOKEMETHOD_HELP_LINES, 'test': 'innows'}, None, OK], ['Verify class command invokemethod -h response', ['invokemethod', '-h'], {'stdout': CLASS_INVOKEMETHOD_HELP_LINES, 'test': 'innows'}, None, OK], # # class invokemethod command without parameters # ['Verify class command invokemethod CIM_Foo.FuzzyStatic() - no in parms', ['invokemethod', 'CIM_Foo', 'FuzzyStatic'], {'stdout': ["ReturnValue=0"], 'rc': 0, 'test': 'lines'}, [SIMPLE_MOCK_FILE, INVOKE_METHOD_MOCK_FILE], OK], ['Verify class command invokemethod CIM_Foo.FuzzyStatic() with --namespace', ['invokemethod', 'CIM_Foo', 'FuzzyStatic', '--namespace', 'root/cimv2'], {'stdout': ["ReturnValue=0"], 'rc': 0, 'test': 'lines'}, [SIMPLE_MOCK_FILE, INVOKE_METHOD_MOCK_FILE], OK], # Cannot do a test with interop as default because of issue #991 ['Verify class command invokemethod CIM_Foo.FuzzyStatic() with --namespace' ' interop not found to validate that --namspace used', ['invokemethod', 'CIM_Foo', 'FuzzyStatic', '--namespace', 'interop'], {'stderr': ["CIM_ERR_NOT_FOUND", "not found in namespace 'interop'"], 'rc': 1, 'test': 'innows'}, [SIMPLE_INTEROP_MOCK_FILE, INVOKE_METHOD_MOCK_FILE], OK], ['Verify class command invokemethod CIM_Foo.FuzzyStatic() - one in parm', ['invokemethod', 'CIM_Foo', 'FuzzyStatic', '-p', 'TestInOutParameter="blah"'], {'stdout': ['ReturnValue=0', 'TestInOutParameter=', 'blah'], 'rc': 0, 'test': 'innows'}, [SIMPLE_MOCK_FILE, 
INVOKE_METHOD_MOCK_FILE], OK], ['Verify class command invokemethod fails Invalid Class', ['invokemethod', 'CIM_Foox', 'FuzzyStatic'], {'stderr': ['CIMError', '6'], 'rc': 1, 'test': 'innows'}, [SIMPLE_MOCK_FILE, INVOKE_METHOD_MOCK_FILE], OK], ['Verify class command invokemethod fails Invalid Method', ['invokemethod', 'CIM_Foo', 'Fuzzyx'], {'stderr': ['Class CIM_Foo does not have a method Fuzzyx'], 'rc': 1, 'test': 'innows'}, [SIMPLE_MOCK_FILE, INVOKE_METHOD_MOCK_FILE], OK], ['Verify class command invokemethod fails non-static method, pywbem 1.0', ['invokemethod', 'CIM_Foo', 'Fuzzy'], {'stderr': ["Non-static method 'Fuzzy' in class 'CIM_Foo'"], 'rc': 1, 'test': 'innows'}, [SIMPLE_MOCK_FILE, INVOKE_METHOD_MOCK_FILE], PYWBEM_1_0_0], ['Verify class command invokemethod succeeds non-static method, pywbem 0.x', ['invokemethod', 'CIM_Foo', 'Fuzzy'], {'stdout': ['ReturnValue=0'], 'rc': 0, 'test': 'innows'}, [SIMPLE_MOCK_FILE, INVOKE_METHOD_MOCK_FILE], not PYWBEM_1_0_0], ['Verify class command invokemethod fails Method not registered', ['invokemethod', 'CIM_Foo', 'Fuzzy'], {'stderr': ['CIMError'], 'rc': 1, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify --timestats gets stats output. Cannot test with lines,execution ' 'time is variable.', {'args': ['get', 'CIM_Foo'], 'general': ['--timestats']}, {'stdout': ['Operation Count Errors', 'GetClass 1 0'], 'rc': 0, 'test': 'innows'}, SIMPLE_MOCK_FILE, OK], ['Verify single command with stdin works', {'stdin': ['class get -h']}, {'stdout': ['Usage: pywbemcli [GENERAL-OPTIONS] class get '], 'rc': 0, 'test': 'innows'}, None, OK], ['Verify multiple commands with stdin work', {'stdin': ['class get -h', 'class enumerate -h']}, {'stdout': ['Usage: pywbemcli [GENERAL-OPTIONS] class enumerate ', 'Usage: pywbemcli [GENERAL-OPTIONS] class get '], 'rc': 0, 'test': 'innows'}, None, OK], ] # TODO command class delete. 
Extend this test to use stdin (delete, test)
# namespace
# TODO: add test for errors: class invalid, namespace invalid
# other tests. Test local-only on top level


class TestSubcmdClass(CLITestsBase):  # pylint: disable=too-few-public-methods
    """
    Test all of the class command variations.

    Each entry in TEST_CASES is driven through CLITestsBase.command_test
    with the 'class' command group.
    """

    # Command group under test; prepended to every test-case invocation.
    command_group = 'class'

    @pytest.mark.parametrize(
        "desc, inputs, exp_response, mock, condition", TEST_CASES)
    def test_class(self, desc, inputs, exp_response, mock, condition):
        """
        Common test method for those commands and options in the class
        command that can be tested.  This includes:

        * Subcommands like help that do not require access to a server
        * Subcommands that can be tested with a single execution of a
          pywbemcli command.

        Parameters (one TEST_CASES row each):
          desc: human-readable test description.
          inputs: command arguments (list) or dict with 'args'/'general'/
              'stdin' keys.
          exp_response: dict of expected stdout/stderr, rc, and the
              comparison 'test' mode (e.g. 'innows', 'lines', 'regex').
          mock: mock environment script file(s), or None.
          condition: whether/how the case runs (evaluated by command_test).
        """
        self.command_test(desc, self.command_group, inputs, exp_response,
                          mock, condition)
89,305
29,703
""" _InsertRecoReleaseConfig_ Oracle implementation of InsertRecoReleaseConfig """ from WMCore.Database.DBFormatter import DBFormatter class InsertRecoReleaseConfig(DBFormatter): def execute(self, binds, conn = None, transaction = False): sql = """INSERT INTO reco_release_config (RUN_ID, PRIMDS_ID, FILESET, DELAY, DELAY_OFFSET) VALUES (:RUN, (SELECT id FROM primary_dataset WHERE name = :PRIMDS), :FILESET, 0, 0) """ self.dbi.processData(sql, binds, conn = conn, transaction = transaction) return
713
185
#!/usr/bin/env python3
"""Slice every WAV file in ./downloads into overlapping fixed-length clips.

For each input file, windows of --length seconds are written to ./output,
with consecutive windows starting --offset seconds apart (windows overlap
whenever offset < length).  Trailing windows near the end of a file may
be shorter than --length seconds.
"""
import wave
import os
import sys  # kept from the original module; not used directly here
from glob import glob
import argparse


def iter_overlapping_chunks(data, chunk_size, step_size):
    """Yield overlapping slices of *data* (a bytes-like object).

    Each slice starts step_size bytes after the previous one and is at
    most chunk_size bytes long; the trailing slices are shorter.  Yields
    nothing for empty input.
    """
    for start in range(0, len(data), step_size):
        yield data[start:start + chunk_size]


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--length", type=int, default=30,
                        help="window length in seconds")
    parser.add_argument("--offset", type=int, default=15,
                        help="seconds between consecutive window starts")
    args = parser.parse_args()

    output_dir = os.path.join(".", "output")
    os.makedirs(output_dir, exist_ok=True)

    downloads_dir = os.path.join(".", "downloads")
    target_files = glob(os.path.join(downloads_dir, "*.wav"))

    for base_filepath in target_files:
        base_filename = os.path.basename(base_filepath)
        print(f"Processing for {base_filename}...")

        # The 'with' block closes the reader; no explicit close() needed.
        with wave.open(base_filepath, "rb") as wave_read:
            params = wave_read.getparams()
            data_raw = wave_read.readframes(params.nframes)

        # One audio frame spans nchannels * sampwidth bytes.  Both the
        # window size and the step are whole numbers of frames, so every
        # slice below starts on a frame boundary.
        bytes_per_frame = params.nchannels * params.sampwidth
        chunk_bytes = args.length * params.framerate * bytes_per_frame
        step_bytes = args.offset * params.framerate * bytes_per_frame

        for file_count, picked_data in enumerate(
                iter_overlapping_chunks(data_raw, chunk_bytes, step_bytes),
                start=1):
            output_filename = os.path.join(
                output_dir, f"{base_filename}_{file_count:09}.wav")
            with wave.open(output_filename, "wb") as wave_write:
                wave_write.setparams((
                    params.nchannels,
                    params.sampwidth,
                    params.framerate,
                    # Fix: nframes is a frame count; the original passed
                    # the byte count, which only worked because
                    # writeframes() patches the header on close.
                    len(picked_data) // bytes_per_frame,
                    params.comptype,
                    params.compname,
                ))
                wave_write.writeframes(picked_data)
        # os.remove(base_filepath)

    print("Done.")


if __name__ == "__main__":
    main()
1,652
549
import sys
import qbittorrentapi
import json
from ui import UI

print("Connecting to qBittorrent WebUI...")

# Read stored connection settings; prompt interactively for any blank value.
with open('config.json') as json_file:
    config = json.load(json_file)

credentials = {}
for key, prompt in (('host', 'Enter host: '),
                    ('username', 'Enter username: '),
                    ('password', 'Enter password: ')):
    value = config[key]
    if not value:
        value = input(prompt)
    credentials[key] = value

torrent_client = qbittorrentapi.Client(host=credentials['host'],
                                       username=credentials['username'],
                                       password=credentials['password'])

# Authenticate up front so a bad host/credential fails fast.
try:
    torrent_client.auth_log_in()
except qbittorrentapi.LoginFailed as e:
    print("Failed to connect:", e)
    sys.exit(1)

# Hand the authenticated client to the interactive UI loop.
ui = UI(torrent_client)
ui.run()
726
253
from tkinter import *

janela = Tk()

# Shared state: the four typed numbers are accumulated here.
# NOTE(review): 'lista' is never cleared inside click_bt1, so repeated
# clicks keep appending values from earlier runs -- confirm this is
# the intended behavior.
lista = []
texto1 = StringVar()
texto2 = StringVar()
texto3 = StringVar()
texto4 = StringVar()


# --------------------- COMMAND PROCESSING ---------------------
def click_bt1():
    """Collect the four entry values as ints, write a report into the
    text box, then clear the entry fields.

    Raises an uncaught ValueError if any entry is not a valid integer.
    """
    lista.append(int(et1.get()))
    lista.append(int(et2.get()))
    lista.append(int(et3.get()))
    lista.append(int(et4.get()))
    # 0.0 as a Text index is unconventional ('1.0' is the canonical
    # start); Tk clamps it to the beginning, so this clears everything.
    txt1.delete(0.0,'end')
    # Each insert goes at the top, so the even values end up listed
    # after the header line inserted just below.
    for i in lista:
        if i % 2 == 0:
            txt1.insert(0.0, f'{i} ')
    txt1.insert(0.0, '- Os valores pares digitados foram: ')
    # Report the 1-based position of the first occurrence of 3, if any.
    if 3 in lista:
        txt1.insert(0.0, f'- O número 3 apareceu na {lista.index(3)+1} posição.\n')
    else:
        txt1.insert(0.0,'- O número 3 não apareceu na lista.\n')
    # Report how many times 9 was typed.
    txt1.insert(0.0, f'- Você digitou o número "9" {lista.count(9)} vezes.\n')
    # Reset the entry fields for the next round of input.
    texto1.set(str(''))
    texto2.set(str(''))
    texto3.set(str(''))
    texto4.set(str(''))
    print(lista)
# ------------------------------------------------------


# --------------- WIDGET PLACEMENT ---------------
# NOTE(review): 'stick=W' works only because Tcl accepts unique option
# abbreviations for '-sticky'; presumably 'sticky=W' was intended.
lb1 = Label(janela, text='Digite o primeiro número: ')
lb1.grid(row=0,column=0, stick=W)
lb2 = Label(janela, text='Digite o segundo número: ')
lb2.grid(row=1,column=0, stick=W)
lb3 = Label(janela, text='Digite o terceiro número: ')
lb3.grid(row=2,column=0, stick=W)
lb4 = Label(janela, text='Digite o quarto número: ')
lb4.grid(row=3,column=0, stick=W)

et1 = Entry(janela, textvariable=texto1, width=5)
et1.grid(row=0,column=1,sticky=E)
et2 = Entry(janela, textvariable=texto2, width=5)
et2.grid(row=1,column=1,sticky=E)
et3 = Entry(janela, textvariable=texto3, width=5)
et3.grid(row=2,column=1,sticky=E)
et4 = Entry(janela, textvariable=texto4, width=5)
et4.grid(row=3,column=1,sticky=E)

bt1 = Button(janela,text='PROCESSAR', font=('arialblack',11,'bold'),command=click_bt1)
bt1.grid(row=0,column=2,rowspan=4)

txt1 = Text(janela,width=40,height=10,bd=5)
txt1.grid(row=5,column=0,columnspan=3)
# ----------------------------------------------------------------------

# ------------------- WINDOW SIZING AND CENTERING --------
janela.title('Exercicio - Ex075')
janela_width = 330
janela_height = 260
scream_width = janela.winfo_screenwidth()
scream_height = janela.winfo_screenheight()
# Center the fixed-size window on the screen.
cord_x = int((scream_width/2) - (janela_width/2))
cord_y = int((scream_height/2) - (janela_height/2))
janela.geometry(f'{janela_width}x{janela_height}+{cord_x}+{cord_y}')
# ---------------------------------------------------------------------
janela.mainloop()
2,454
981
#!/usr/bin/env python3 # -*- coding:utf-8 -*- """ プレイヤーのうち、NPCのクラス """ """ - Playerクラスを継承している - 全体的に未実装 - 将来的にはここでAIを読み込ませる? """ __author__ = "aseruneko" __date__ = "28 May 2020" from main.Player import Player class Computer(Player): pass
247
148
# More details on this kata
# https://www.codewars.com/kata/52b7ed099cdc285c300001cd
def sum_of_intervals(intervals):
    """Return the total length covered by the union of *intervals*.

    Overlapping and touching intervals are counted once.  Each interval
    is a 2-sequence of numbers; the pair is normalized with min/max, so
    a reversed pair such as (5, 1) contributes the same length as
    (1, 5) -- generalizing the original's abs() handling of a single
    interval.  An empty input yields 0.
    """
    total = 0
    cur_start = cur_end = None
    # Single-pass sweep: after sorting, a gap (start > cur_end) closes
    # the current merged run; otherwise the run is extended.
    for start, end in sorted((min(a, b), max(a, b)) for a, b in intervals):
        if cur_start is None:
            cur_start, cur_end = start, end
        elif start <= cur_end:
            cur_end = max(cur_end, end)
        else:
            total += cur_end - cur_start
            cur_start, cur_end = start, end
    if cur_start is not None:
        total += cur_end - cur_start
    return total
650
296
""" This module contains white-box unit tests of CertDB package """ # pylint: disable=W0212, C0103, C0302 import sys import os import subprocess import time import shutil import string import random import unittest import unittest.mock from collections import OrderedDict import toml from cevast.utils import make_PEM_filename from cevast.certdb import ( CertDB, CertFileDB, CertFileDBReadOnly, CertNotAvailableError, CertInvalidError, CompositeCertDB, CompositeCertDBReadOnly, ) # Helper functions TEST_DATA_PATH = 'tests/data/' TEST_CERTS_1 = TEST_DATA_PATH + 'test_certs_1.csv' TEST_CERTS_2 = TEST_DATA_PATH + 'test_certs_2.csv' def insert_test_certs(database: CertDB, certs_file: str) -> list: """ Insert certificates from certs_file to database Return list of inserted certificates. """ certs = [] with open(certs_file) as r_file: for line in r_file: els = [e.strip() for e in line.split(',')] database.insert(els[0], els[1]) certs.append(els[0]) return certs def insert_random_certs(database: CertDB, certs_cnt: int) -> list: """ Insert number(certs_cnt) randomly generated certificates to database Return list of inserted certificates. """ def random_string(length: int) -> str: return ''.join(random.choice(string.ascii_letters) for i in range(length)) certs = [] for _ in range(certs_cnt): cert_id = random_string(16) database.insert(cert_id, random_string(8)) certs.append(cert_id) return certs def delete_test_certs(database: CertDB, certs_file: str) -> list: """ Delete certificates from certs_file from database Return list of deleted certificates. """ certs = [] with open(certs_file) as r_file: for line in r_file: els = [e.strip() for e in line.split(',')] database.delete(els[0]) certs.append(els[0]) return certs def commit_test_certs(database: CertDB, certs_file: str) -> list: """ Insert and commit certificates from certs_file to database Return list of committed certificates. 
""" certs = insert_test_certs(database, certs_file) database.commit() return certs class TestCertFileDBReadOnly(unittest.TestCase): """Unit test class of CertFileDBReadOnly class""" TEST_STORAGE = 'tests/test_storage' def tearDown(self): # Clear test storage shutil.rmtree(self.TEST_STORAGE, ignore_errors=True) def test_setup(self): """ Test implementation of CertFileDBReadOnly setup method """ # Check wrong paramaters self.assertRaises(ValueError, CertFileDBReadOnly.setup, self.TEST_STORAGE, 'ass') # Setup and check DB CertFileDBReadOnly.setup(self.TEST_STORAGE, 5, 'DES', 'Testing DB', 'unittest') assert os.path.exists(self.TEST_STORAGE) cfg = toml.load(os.path.join(self.TEST_STORAGE, CertFileDBReadOnly.CONF_FILENAME)) meta = toml.load(os.path.join(self.TEST_STORAGE, CertFileDBReadOnly.META_FILENAME)) self.assertEqual(cfg['PARAMETERS']['storage'], os.path.abspath(self.TEST_STORAGE)) self.assertEqual(cfg['PARAMETERS']['structure_level'], 5) self.assertEqual(cfg['PARAMETERS']['cert_format'], 'DES') self.assertEqual(cfg['PARAMETERS']['maintain_info'], True) self.assertEqual(meta['INFO']['description'], 'Testing DB') self.assertEqual(meta['INFO']['owner'], 'unittest') assert 'compression_method' in cfg['PARAMETERS'] # Try to setup different DB on the same storage self.assertRaises(ValueError, CertFileDB.setup, self.TEST_STORAGE, 1, 'PEM', 'Testing DB 2', 'unittest') # Try to open DB configured manually, and commit something new_loc = os.path.join(self.TEST_STORAGE, 'new') os.makedirs(new_loc) shutil.move( os.path.join(self.TEST_STORAGE, CertFileDBReadOnly.CONF_FILENAME), os.path.join(new_loc, CertFileDBReadOnly.CONF_FILENAME) ) assert os.path.exists(os.path.join(new_loc, CertFileDBReadOnly.CONF_FILENAME)) db = CertFileDB(new_loc) commit_test_certs(db, TEST_CERTS_1) def test_init(self): """ Test of CertFileDBReadOnly initialization """ self.assertRaises(ValueError, CertFileDBReadOnly, self.TEST_STORAGE) CertFileDBReadOnly.setup(self.TEST_STORAGE, structure_level=5) # 
Storage should be now properly initialized db = CertFileDBReadOnly(self.TEST_STORAGE) self.assertEqual(db._params['structure_level'], 5) self.assertEqual(db._params['storage'], os.path.abspath(self.TEST_STORAGE)) def test_get(self): """ Test implementation of CertDB method GET """ CertFileDBReadOnly.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE) db_ronly = CertFileDBReadOnly(self.TEST_STORAGE) fake_cert_id = 'fakecertid' # Insert and commit some certificates and try to retrieve them back commit_test_certs(db, TEST_CERTS_1) with open(TEST_CERTS_1) as r_file: for line in r_file: cert_id, cert = line.split(',') # Certificates should exists - transaction was committed self.assertEqual(db_ronly.get(cert_id), cert.strip()) # Only insert other certificates and try to retrieve them back inserted = insert_test_certs(db, TEST_CERTS_2) for cert_id in inserted: # Certificates should NOT exists - transaction was NOT committed self.assertRaises(CertNotAvailableError, db_ronly.get, cert_id) # Test fake certificate that doesn't exist self.assertRaises(CertNotAvailableError, db_ronly.get, fake_cert_id) def test_export(self): """ Test implementation of CertDB method EXPORT """ def test_permission(db, valid_cert_id): if not sys.platform.startswith('linux'): return # works only on Linux like systems fake_target_dir = 'tests/fake_export' os.mkdir(fake_target_dir) subprocess.call(['chmod', '-w', fake_target_dir]) self.assertRaises(PermissionError, db.export, valid_cert_id, fake_target_dir) subprocess.call(['chmod', '+w', fake_target_dir]) os.rmdir(fake_target_dir) CertFileDBReadOnly.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE) db_ronly = CertFileDBReadOnly(self.TEST_STORAGE) target_dir = self.TEST_STORAGE + '/export' os.mkdir(target_dir) fake_cert_id = 'fakecertid' # Insert and commit some certificates and export them commit_test_certs(db, TEST_CERTS_1) with open(TEST_CERTS_1) as r_file: for line in r_file: cert_id, 
cert = line.split(',') expected = os.path.join(target_dir, make_PEM_filename(cert_id)) self.assertEqual(db_ronly.export(cert_id, target_dir), expected) with open(expected) as target: self.assertEqual(target.read(), cert.strip()) # Check export without unnecessary copying - should copy anyway because persisted self.assertEqual(db_ronly.export(cert_id, target_dir, copy_if_exists=False), expected) # Tests writing permissions for exporting from zipfile test_permission(db_ronly, cert_id) # Only insert other certificates and try to retrieve them back inserted = insert_test_certs(db, TEST_CERTS_2) for cert_id in inserted: # Certificates should NOT exists - transaction was NOT committed self.assertRaises(CertNotAvailableError, db_ronly.export, cert_id, target_dir) self.assertRaises(CertNotAvailableError, db_ronly.export, cert_id, target_dir, False) # Test fake certificate that doesn't exist self.assertRaises(CertNotAvailableError, db_ronly.export, fake_cert_id, target_dir) def test_exists(self): """ Test implementation of CertDB method EXISTS """ CertFileDBReadOnly.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE) db_ronly = CertFileDBReadOnly(self.TEST_STORAGE) fake_cert = 'fakecertid' # Insert and commit some certificates and check if exists committed = commit_test_certs(db, TEST_CERTS_1) for cert in committed: assert db_ronly.exists(cert) assert db_ronly.exists_all(committed) # Only insert other certificates and check if exists inserted = insert_test_certs(db, TEST_CERTS_2) for cert in inserted: assert not db_ronly.exists(cert) assert not db_ronly.exists_all(inserted) # Test fake certificate that doesn't exist committed.append(fake_cert) assert not db_ronly.exists(fake_cert) assert not db_ronly.exists_all(committed) def test_cache(self): """ Test implementation of CertFileDB certificate existance cache """ CertFileDB.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE) db_ronly = 
CertFileDBReadOnly(self.TEST_STORAGE) # Insert and commit some certificates and check cache committed = commit_test_certs(db, TEST_CERTS_1) for cert in committed: assert cert not in db_ronly._cache db_ronly.exists(cert) assert cert in db_ronly._cache self.assertEqual(db_ronly._cache, set(committed)) # Insert and commit some certificates and check cache after exists_all call committed = commit_test_certs(db, TEST_CERTS_2) assert not set(committed).issubset(db_ronly._cache) db_ronly.exists_all(committed) assert set(committed).issubset(db_ronly._cache) # Check DELETE effect on cache db.exists_all(committed) self.assertEqual(set(committed), db._cache) db.delete(committed[0]) assert committed[0] not in db._cache self.assertNotEqual(set(committed), db._cache) db.rollback() # Check speed improvement using cache - on large number of certs inserted = insert_random_certs(db, 1000) db.commit() t0 = time.clock() for cert in inserted: db_ronly.exists(cert) t1 = time.clock() for cert in inserted: db_ronly.exists(cert) t2 = time.clock() self.assertGreater(t1 - t0, t2 - t1) class TestCertFileDB(unittest.TestCase): """Unit test class of CertFileDB class""" TEST_STORAGE = 'tests/test_storage' def tearDown(self): # Clear test storage shutil.rmtree(self.TEST_STORAGE, ignore_errors=True) if os.path.exists(self.TEST_STORAGE + '.zip'): os.remove(self.TEST_STORAGE + '.zip') def test_init(self): """ Test of CertFileDB initialization """ self.assertRaises(ValueError, CertFileDB, self.TEST_STORAGE) CertFileDB.setup(self.TEST_STORAGE, structure_level=5) # Storage should be now properly initialized db = CertFileDB(self.TEST_STORAGE) self.assertEqual(db._params['structure_level'], 5) self.assertEqual(db._params['storage'], os.path.abspath(self.TEST_STORAGE)) def test_get(self): """ Test implementation of CertDB method GET """ CertFileDB.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE) fake_cert_id = 'fakecertid' # Insert and commit some certificates and retrieve 
them back committed = commit_test_certs(db, TEST_CERTS_1) with open(TEST_CERTS_1) as r_file: for line in r_file: cert_id, cert = line.split(',') self.assertEqual(db.get(cert_id), cert.strip()) # Only insert other certificates and retrieve them back inserted = insert_test_certs(db, TEST_CERTS_2) with open(TEST_CERTS_2) as r_file: for line in r_file: cert_id, cert = line.split(',') self.assertEqual(db.get(cert_id), cert.strip()) # Rollback and try to retrieve them again db.rollback() for cert_id in inserted: self.assertRaises(CertNotAvailableError, db.get, cert_id) # Test DELETE method effect db.delete(committed[0]) self.assertRaises(CertNotAvailableError, db.get, committed[0]) # Test fake certificate that doesn't exist self.assertRaises(CertNotAvailableError, db.get, fake_cert_id) def test_export(self): """ Test implementation of CertDB method EXPORT """ def test_permission(db, valid_cert_id): if not sys.platform.startswith('linux'): return # works only on Linux like systems fake_target_dir = 'tests/fake_export' os.mkdir(fake_target_dir) subprocess.call(['chmod', '-w', fake_target_dir]) self.assertRaises(PermissionError, db.export, valid_cert_id, fake_target_dir) subprocess.call(['chmod', '+w', fake_target_dir]) os.rmdir(fake_target_dir) CertFileDB.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE) target_dir = self.TEST_STORAGE + '/export' os.mkdir(target_dir) fake_cert_id = 'fakecertid' # Insert and commit some certificates and export them committed = commit_test_certs(db, TEST_CERTS_1) with open(TEST_CERTS_1) as r_file: for line in r_file: cert_id, cert = line.split(',') expected = os.path.join(target_dir, make_PEM_filename(cert_id)) self.assertEqual(db.export(cert_id, target_dir), expected) with open(expected) as target: self.assertEqual(target.read(), cert.strip()) # Check export without unnecessary copying - should copy anyway because persisted self.assertEqual(db.export(cert_id, target_dir, copy_if_exists=False), expected) # 
Tests writing permissions for exporting from zipfile test_permission(db, cert_id) # Only insert other certificates and retrieve them back insert_test_certs(db, TEST_CERTS_2) with open(TEST_CERTS_2) as r_file: for line in r_file: cert_id, cert = line.split(',') expected = os.path.join(target_dir, make_PEM_filename(cert_id)) self.assertEqual(db.export(cert_id, target_dir), expected) with open(expected) as target: self.assertEqual(target.read(), cert.strip()) # Check export without unnecessary copying file = db.export(cert_id, target_dir, copy_if_exists=False) self.assertNotEqual(file, expected) with open(file) as target: self.assertEqual(target.read(), cert.strip()) # Tests writing permissions for exporting from transaction test_permission(db, cert_id) # Rollback and try to retrieve them again db.rollback() r_file.seek(0) for line in r_file: cert_id = line.split(',')[0] self.assertRaises(CertNotAvailableError, db.export, cert_id, target_dir) # Test DELETE method effect db.delete(committed[0]) self.assertRaises(CertNotAvailableError, db.get, committed[0]) # Test fake certificate that doesn't exist self.assertRaises(CertNotAvailableError, db.export, fake_cert_id, target_dir) def test_exists(self): """ Test implementation of CertDB method EXISTS """ CertFileDB.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE) fake_cert = 'fakecertid' # Insert and commit some certificates and check if exists committed = commit_test_certs(db, TEST_CERTS_1) for cert in committed: assert db.exists(cert) assert db.exists_all(committed) # Only insert other certificates and check if exists inserted = insert_test_certs(db, TEST_CERTS_2) for cert in inserted: assert db.exists(cert) assert db.exists_all(inserted) # Test DELETE method effect db.delete(committed[0]) assert not db.exists(committed[0]) # Test fake certificate that doesn't exist committed.append(fake_cert) assert not db.exists(fake_cert) assert not db.exists_all(committed) def test_insert(self): """ Test 
implementation of CertDB method INSERT """ CertFileDB.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE) # Insert some invalid certificates self.assertRaises(CertInvalidError, db.insert, None, None) self.assertRaises(CertInvalidError, db.insert, '', '') self.assertRaises(CertInvalidError, db.insert, '', 'valid') self.assertRaises(CertInvalidError, db.insert, 'valid', None) # Insert some valid certificates inserted = insert_test_certs(db, TEST_CERTS_1) blocks = {**db._to_insert} # transaction should contain certificates from open transcation and certs should exist self.assertTrue(db._to_insert) for cert in inserted: block_path = db._get_block_path(cert) assert os.path.exists(os.path.join(block_path, cert)) # Insert different certificates under the same IDs certs = {} with open(TEST_CERTS_1) as r_file: for line in r_file: els = [e.strip() for e in line.split(',')] db.insert(els[0], els[1] + '_open') certs[els[0]] = els[1] # IDs should be same and certificates should not be changed self.assertTrue(blocks == db._to_insert) for k, v in certs.items(): self.assertTrue(db.get(k) == v) # Commit transaction and commit different certificates under the same IDs db.commit() self.assertFalse(db._to_insert) certs = {} with open(TEST_CERTS_1) as r_file: for line in r_file: els = [el.strip() for el in line.split(',')] db.insert(els[0], els[1] + '_commit') certs[els[0]] = els[1] # IDs should be same and persisted certificates should not be changed self.assertTrue(blocks == db._to_insert) db.commit() self.assertFalse(db._to_insert) for k, v in certs.items(): self.assertTrue(db.get(k) == v) def test_delete(self): """ Test implementation of CertDB method DELETE """ CertFileDB.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE) # Delete some invalid certificates self.assertRaises(CertInvalidError, db.delete, None) self.assertRaises(CertInvalidError, db.delete, '') # Insert and delete the same certs before commit inserted = 
insert_test_certs(db, TEST_CERTS_1) deleted = delete_test_certs(db, TEST_CERTS_1) # transaction should be clear and files should not exist self.assertFalse(db._to_delete) self.assertFalse(db._to_insert) for cert in inserted: block_path = db._get_block_path(cert) assert not os.path.exists(os.path.join(block_path, cert)) # Delete and insert the same certs before commit deleted = delete_test_certs(db, TEST_CERTS_1) inserted = insert_test_certs(db, TEST_CERTS_1) # transaction should contain deleted and inserted certificates self.assertTrue(db._to_delete) self.assertTrue(db._to_insert) for certs in db._to_delete.values(): assert certs.issubset(set(deleted)) for certs in db._to_insert.values(): assert certs.issubset(set(inserted)) # and files should exist for cert in inserted: block_path = db._get_block_path(cert) assert os.path.exists(os.path.join(block_path, cert)) # now commit and check that files were persisted ins, dlt = db.commit() # the certs should be only inserted self.assertEqual(ins, len(inserted)) self.assertEqual(dlt, 0) self.assertFalse(db._to_delete) self.assertFalse(db._to_insert) # Delete inserted certs, commit and check that they were deleted assert db.exists_all(inserted) del_cert = inserted.pop() db.delete(del_cert) assert not db.exists(del_cert) db.commit() assert not db.exists(del_cert) for cert in inserted: db.delete(cert) ins, dlt = db.commit() self.assertEqual(ins, 0) self.assertEqual(dlt, len(inserted)) # storage should be empty self.assertFalse(os.listdir(db.storage).remove(db.CONF_FILENAME)) # Delete the same cert multiple times should not have effect self.assertFalse(db._to_delete) db.delete('validcert') blocks_to_delete = {**db._to_delete} self.assertTrue(db._to_delete) db.delete('validcert') self.assertTrue(db._to_delete) self.assertEqual(blocks_to_delete, db._to_delete) def test_rollback(self): """ Test implementation of CertDB method ROLLBACK """ CertFileDB.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE) # 
Test rollback without inserts db.rollback() self.assertFalse(db._to_insert) self.assertFalse(db._to_delete) # Insert some certificates, rollback and check that blocks are deleted inserted = insert_test_certs(db, TEST_CERTS_1) db.rollback() for cert in inserted: block_path = db._get_block_path(cert) assert not os.path.exists(os.path.join(block_path, cert)) # Transaction should be empty self.assertFalse(db._to_insert) # Commit some certs, insert other certs and rollback committed = commit_test_certs(db, TEST_CERTS_1) inserted = insert_test_certs(db, TEST_CERTS_2) db.rollback() # Transaction should be empty self.assertFalse(db._to_insert) # Commited certs should be compressed in zip files for cert in committed: assert not os.path.exists(db._get_block_path(cert) + cert) assert os.path.exists(db._get_block_archive(cert)) # Rollbacked certs files should not exists for cert in inserted: block_path = db._get_block_path(cert) assert not os.path.exists(os.path.join(block_path, cert)) # Check rollback of delete method deleted = delete_test_certs(db, TEST_CERTS_1) self.assertTrue(db._to_delete) for cert in deleted: assert not db.exists(cert) db.rollback() self.assertFalse(db._to_delete) # All deleted certs should still exist assert db.exists_all(deleted) def test_commit(self): """ Test implementation of CertDB method COMMIT """ CertFileDB.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE) # Test commit without inserts ins, dlt = db.commit() self.assertEqual(ins, 0) self.assertEqual(dlt, 0) self.assertFalse(db._to_insert) # Insert some certificates and check commit inserted = insert_test_certs(db, TEST_CERTS_1) # Certificates and blocks from open transaction should exist self.assertTrue(db._to_insert) for certs in db._to_insert.values(): assert certs.issubset(set(inserted)) for cert in inserted: block_path = db._get_block_path(cert) assert os.path.exists(os.path.join(block_path, cert)) # check correct number of committed certs ins, dlt = 
db.commit() self.assertEqual(ins, len(inserted)) self.assertEqual(dlt, 0) # transaction should be empty and certs should be compressed in zip files self.assertFalse(db._to_insert) for cert in inserted: assert not os.path.exists(db._get_block_path(cert) + cert) assert os.path.exists(db._get_block_archive(cert)) # Insert already persisted certs and some others and commit inserted_again = insert_test_certs(db, TEST_CERTS_1) inserted_new = insert_test_certs(db, TEST_CERTS_2) ins, dlt = db.commit() # only the other certs should be committed self.assertEqual(ins, len(inserted_new)) self.assertEqual(dlt, 0) # and the same ones should be deleted from transaction for cert in inserted_again: block_path = db._get_block_path(cert) assert not os.path.exists(os.path.join(block_path, cert)) # Delete and insert the same not yet persisted cert and commit valid_cert = ['valid_cert', 'validvalidvalidvalidvalid'] db.delete(valid_cert[0]) db.insert(*valid_cert) db.commit() # check that cert is persisted assert db.exists(valid_cert[0]) assert os.path.exists(db._get_block_archive(valid_cert[0])) assert not os.path.exists(db._get_block_path(valid_cert[0]) + valid_cert[0]) # Delete and insert the same already persisted cert and commit valid_cert = ['valid_cert', 'validvalidvalidvalidvalid_new'] db.delete(valid_cert[0]) db.insert(*valid_cert) db.commit() # check that the cert was replaced assert db.exists(valid_cert[0]) self.assertEqual(db.get(valid_cert[0]), valid_cert[1]) def test_parallel_transactions(self): """ Test of using multiple instances of CertDB with the same storage. 
""" def test_config_info_maintain(self): """ Test maintaining commit HISTORY and INFO upon commit """ CertFileDB.setup(self.TEST_STORAGE, maintain_info=True) db = CertFileDB(self.TEST_STORAGE) meta_path = os.path.join(db.storage, db.META_FILENAME) # Insert some certificates and check INFO after commit committed = commit_test_certs(db, TEST_CERTS_1) meta = toml.load(meta_path, OrderedDict) last_commit_nr = str(len(meta['HISTORY'])) self.assertEqual(last_commit_nr, '1') self.assertEqual(meta['INFO']['number_of_certificates'], len(committed)) self.assertEqual(meta['INFO']['last_commit'], meta['HISTORY'][last_commit_nr]['date']) self.assertEqual(meta['HISTORY'][last_commit_nr]['inserted'], len(committed)) self.assertEqual(meta['HISTORY'][last_commit_nr]['deleted'], 0) # Delete all the inserted certs and check INFO after commit deleted = delete_test_certs(db, TEST_CERTS_1) db.commit() meta = toml.load(meta_path, OrderedDict) last_commit_nr = str(len(meta['HISTORY'])) self.assertEqual(last_commit_nr, '2') self.assertEqual(meta['INFO']['number_of_certificates'], 0) self.assertEqual(meta['INFO']['last_commit'], meta['HISTORY'][last_commit_nr]['date']) self.assertEqual(meta['HISTORY'][last_commit_nr]['inserted'], 0) self.assertEqual(meta['HISTORY'][last_commit_nr]['deleted'], len(deleted)) # Insert and delete some certs and check INFO after commit committed = commit_test_certs(db, TEST_CERTS_1) inserted = insert_test_certs(db, TEST_CERTS_2) deleted = delete_test_certs(db, TEST_CERTS_1) db.commit() meta = toml.load(meta_path, OrderedDict) last_commit_nr = str(len(meta['HISTORY'])) self.assertEqual(last_commit_nr, '4') self.assertEqual(meta['INFO']['number_of_certificates'], len(inserted)) self.assertEqual(meta['INFO']['last_commit'], meta['HISTORY'][last_commit_nr]['date']) self.assertEqual(meta['HISTORY'][last_commit_nr]['inserted'], len(inserted)) self.assertEqual(meta['HISTORY'][last_commit_nr]['deleted'], len(deleted)) def test_zero_structure_level(self): """ Test 
CertFileDB with 0 structure_level """ CertFileDB.setup(self.TEST_STORAGE, structure_level=0) db = CertFileDB(self.TEST_STORAGE) storage_dir = os.path.join(self.TEST_STORAGE, os.path.basename(self.TEST_STORAGE)) # Commit some certificates and check zipfile committed = commit_test_certs(db, TEST_CERTS_1) assert db.exists_all(committed) assert os.path.exists(storage_dir + '.zip') # Insert some certificates and check files existance in root folder inserted = insert_test_certs(db, TEST_CERTS_2) for cert in inserted: assert os.path.exists(os.path.join(self.TEST_STORAGE, cert)) assert db.exists(cert) assert db.exists_all(inserted) # Rollback check file cleanup db.rollback() for cert in inserted: assert not os.path.exists(os.path.join(storage_dir, cert)) assert not db.exists(cert) # Delete inserted certificates and check file cleanup inserted = insert_test_certs(db, TEST_CERTS_2) delete_test_certs(db, TEST_CERTS_2) for cert in inserted: assert not os.path.exists(os.path.join(storage_dir, cert)) assert not db.exists(cert) self.assertFalse(db._to_insert) self.assertFalse(db._to_delete) # Retrieve and check persisted certs with open(TEST_CERTS_1) as r_file: for line in r_file: cert_id, cert = line.split(',') self.assertEqual(db.get(cert_id), cert.strip()) # Delete all remaining certificates and check zip cleanup deleted = delete_test_certs(db, TEST_CERTS_1) db.commit() for cert in deleted: assert not os.path.exists(os.path.join(storage_dir, cert)) assert not db.exists(cert) assert not os.path.exists(storage_dir + '.zip') def test_async_commit(self): """ Test implementation multiprocessing version of CertDB method COMMIT """ CertFileDB.setup(self.TEST_STORAGE, maintain_info=False) db = CertFileDB(self.TEST_STORAGE, 100) # Test commit without inserts ins, dlt = db.commit() self.assertEqual(ins, 0) self.assertEqual(dlt, 0) self.assertFalse(db._to_insert) # Insert some certificates and check commit inserted = insert_test_certs(db, TEST_CERTS_1) # Certificates and blocks from open 
transaction should exist for cert in inserted: block_path = db._get_block_path(cert) assert os.path.exists(os.path.join(block_path, cert)) # check correct number of committed certs ins, dlt = db.commit() self.assertEqual(ins, len(inserted)) self.assertEqual(dlt, 0) # transaction should be empty and certs should be compressed in zip files self.assertFalse(db._to_insert) for cert in inserted: assert not os.path.exists(db._get_block_path(cert) + cert) assert os.path.exists(db._get_block_archive(cert)) # Insert already persisted certs and some others and commit inserted_again = insert_test_certs(db, TEST_CERTS_1) inserted_new = insert_test_certs(db, TEST_CERTS_2) ins, dlt = db.commit() # only the other certs should be committed self.assertEqual(ins, len(inserted_new)) self.assertEqual(dlt, 0) # and the same ones should be deleted from transaction for cert in inserted_again: block_path = db._get_block_path(cert) assert not os.path.exists(os.path.join(block_path, cert)) # Delete and insert the same not yet persisted cert and commit valid_cert = ['valid_cert', 'validvalidvalidvalidvalid'] db.delete(valid_cert[0]) db.insert(*valid_cert) db.commit() # check that cert is persisted assert db.exists(valid_cert[0]) assert os.path.exists(db._get_block_archive(valid_cert[0])) assert not os.path.exists(db._get_block_path(valid_cert[0]) + valid_cert[0]) # Delete and insert the same already persisted cert and commit valid_cert = ['valid_cert', 'validvalidvalidvalidvalid_new'] db.delete(valid_cert[0]) db.insert(*valid_cert) db.commit() # check that the cert was replaced assert db.exists(valid_cert[0]) self.assertEqual(db.get(valid_cert[0]), valid_cert[1]) class TestCompositeCertDB(unittest.TestCase): """Unit test class of CompositeCertDB class""" TEST_STORAGE_1 = 'tests/test_storage1' TEST_STORAGE_2 = 'tests/test_storage2' TEST_STORAGE_3 = 'tests/test_storage3' def tearDown(self): # Clear test storage shutil.rmtree(self.TEST_STORAGE_1, ignore_errors=True) 
shutil.rmtree(self.TEST_STORAGE_2, ignore_errors=True) shutil.rmtree(self.TEST_STORAGE_3, ignore_errors=True) def setUp(self): CertFileDB.setup(self.TEST_STORAGE_1) CertFileDB.setup(self.TEST_STORAGE_2) CertFileDB.setup(self.TEST_STORAGE_3) def test_component_management(self): """ Test implementation of CompositeCertDB management methods and design """ valid_cert = 'validcertid' real_db = CertFileDBReadOnly(self.TEST_STORAGE_1) composite_db_read_only = CompositeCertDBReadOnly() composite_db = CompositeCertDB() # Mock method EXISTS real_db.exists = unittest.mock.MagicMock() real_db.exists.return_value = False # Check register/unregister method composite_db_read_only.register(real_db) assert not composite_db_read_only.exists(valid_cert) assert composite_db_read_only.is_registered(real_db) # component's EXISTS method should be executed real_db.exists.assert_called_once_with(valid_cert) composite_db_read_only.unregister(real_db) # component's EXISTS method should NOT be executed assert not composite_db_read_only.exists(valid_cert) self.assertEqual(real_db.exists.call_count, 1) assert not composite_db_read_only.is_registered(real_db) # Check registering the same object twice composite_db_read_only.register(real_db) composite_db_read_only.register(real_db) assert not composite_db_read_only.exists(valid_cert) self.assertEqual(real_db.exists.call_count, 2) assert composite_db_read_only.is_registered(real_db) # Check unregistering unknown object composite_db.unregister(real_db) assert not composite_db.is_registered(real_db) assert not composite_db.exists(valid_cert) # Check registering composite DB into another composite DB self.assertEqual(real_db.exists.call_count, 2) composite_db.register(real_db) composite_db.register(composite_db_read_only) assert not composite_db.exists(valid_cert) self.assertEqual(real_db.exists.call_count, 4) assert composite_db.is_registered(real_db) assert composite_db.is_registered(composite_db_read_only) assert 
composite_db_read_only.is_registered(real_db) def test_combine_read_only(self): """ Test implementation of CompositeCertDB management with mixed component types """ valid_cert = ('validcertid', 'adadadadadadadadada') real_db = CertFileDB(self.TEST_STORAGE_1) real_db_read_only = CertFileDBReadOnly(self.TEST_STORAGE_2) composite_db = CompositeCertDB() # Mock method EXISTS and INSERT real_db.insert = unittest.mock.MagicMock() real_db_read_only.insert = unittest.mock.MagicMock() real_db.exists = unittest.mock.MagicMock() real_db.exists.return_value = False real_db_read_only.exists = unittest.mock.MagicMock() real_db_read_only.exists.return_value = False # Register both DBs to composite DB and call EXISTS composite_db.register(real_db) composite_db.register(real_db_read_only) assert not composite_db.exists(valid_cert[0]) # both component's EXISTS method should be executed real_db.exists.assert_called_once_with(valid_cert[0]) real_db_read_only.exists.assert_called_once_with(valid_cert[0]) # Call INSERT and check that only CertFileDB was executed composite_db.insert(*valid_cert) real_db.insert.assert_called_once_with(*valid_cert) assert not real_db_read_only.insert.called def test_get(self): """ Test implementation of CompositeCertDB method GET """ real_db = CertFileDB(self.TEST_STORAGE_1) real_db2 = CertFileDB(self.TEST_STORAGE_2) real_db_read_only = CertFileDBReadOnly(self.TEST_STORAGE_1) composite_db = CompositeCertDB() composite_db.register(real_db) composite_db.register(real_db2) composite_db.register(real_db_read_only) fake_cert_id = 'fakecertid' # Insert and commit some certificates and retrieve them back committed = commit_test_certs(composite_db, TEST_CERTS_1) with open(TEST_CERTS_1) as r_file: for line in r_file: cert_id, cert = line.split(',') self.assertEqual(composite_db.get(cert_id), cert.strip()) # ReadOnly DB should also have it self.assertEqual(real_db_read_only.get(cert_id), cert.strip()) # Only insert other certificates and retrieve them back inserted = 
insert_test_certs(composite_db, TEST_CERTS_2) with open(TEST_CERTS_2) as r_file: for line in r_file: cert_id, cert = line.split(',') self.assertEqual(composite_db.get(cert_id), cert.strip()) # ReadOnly DB should not have it self.assertRaises(CertNotAvailableError, real_db_read_only.get, cert_id) # Rollback and try to retrieve them again composite_db.rollback() for cert_id in inserted: self.assertRaises(CertNotAvailableError, composite_db.get, cert_id) # Test DELETE method effect real_db.delete(committed[0]) # compositeDB should still have it in real_db2 assert composite_db.get(committed[0]) composite_db.delete(committed[0]) # compositeDB should still have it in real_db_read_only before commit assert composite_db.get(committed[0]) composite_db.commit() # compositeDB should no longer have the cert self.assertRaises(CertNotAvailableError, composite_db.get, committed[0]) # Test fake certificate that doesn't exist self.assertRaises(CertNotAvailableError, composite_db.get, fake_cert_id) def test_export(self): """ Test implementation of CompositeCertDB method EXPORT """ real_db = CertFileDB(self.TEST_STORAGE_1) real_db2 = CertFileDB(self.TEST_STORAGE_2) real_db_read_only = CertFileDBReadOnly(self.TEST_STORAGE_1) composite_db = CompositeCertDB() composite_db.register(real_db) composite_db.register(real_db2) composite_db.register(real_db_read_only) fake_cert_id = 'fakecertid' target_dir = self.TEST_STORAGE_1 + '/export' os.mkdir(target_dir) # Insert and commit some certificates and export them committed = commit_test_certs(composite_db, TEST_CERTS_1) with open(TEST_CERTS_1) as r_file: for line in r_file: cert_id, cert = line.split(',') expected = os.path.join(target_dir, make_PEM_filename(cert_id)) self.assertEqual(composite_db.export(cert_id, target_dir), expected) with open(expected) as target: self.assertEqual(target.read(), cert.strip()) # Check export without unnecessary copying - should copy anyway because persisted self.assertEqual(composite_db.export(cert_id, 
target_dir, copy_if_exists=False), expected) # ReadOnly DB should also have it self.assertEqual(real_db_read_only.export(cert_id, target_dir), expected) # Only insert other certificates and retrieve them back insert_test_certs(composite_db, TEST_CERTS_2) with open(TEST_CERTS_2) as r_file: for line in r_file: cert_id, cert = line.split(',') expected = os.path.join(target_dir, make_PEM_filename(cert_id)) self.assertEqual(composite_db.export(cert_id, target_dir), expected) with open(expected) as target: self.assertEqual(target.read(), cert.strip()) # Check export without unnecessary copying file = composite_db.export(cert_id, target_dir, copy_if_exists=False) self.assertNotEqual(file, expected) with open(file) as target: self.assertEqual(target.read(), cert.strip()) # ReadOnly DB should not have it self.assertRaises(CertNotAvailableError, real_db_read_only.export, cert_id, target_dir) # Rollback and try to retrieve them again composite_db.rollback() r_file.seek(0) for line in r_file: cert_id = line.split(',')[0] self.assertRaises(CertNotAvailableError, composite_db.export, cert_id, target_dir) # Test DELETE method effect real_db.delete(committed[0]) # compositeDB should still have it in real_db2 assert composite_db.export(committed[0], target_dir) composite_db.delete(committed[0]) # compositeDB should still have it in real_db_read_only before commit assert composite_db.export(committed[0], target_dir) composite_db.commit() # compositeDB should no longer have the cert self.assertRaises(CertNotAvailableError, composite_db.export, committed[0], target_dir) # Test fake certificate that doesn't exist self.assertRaises(CertNotAvailableError, composite_db.export, fake_cert_id, target_dir) def test_exists(self): """ Test implementation of CompositeCertDB method EXISTS """ real_db = CertFileDB(self.TEST_STORAGE_1) real_db2 = CertFileDB(self.TEST_STORAGE_2) real_db_read_only = CertFileDBReadOnly(self.TEST_STORAGE_1) composite_db = CompositeCertDB() 
composite_db.register(real_db) composite_db.register(real_db2) composite_db.register(real_db_read_only) fake_cert = 'fakecertid' # Insert and commit some certificates and check if exists committed = commit_test_certs(composite_db, TEST_CERTS_1) for cert in committed: assert composite_db.exists(cert) # ReadOnly DB should also have it assert real_db_read_only.exists(cert) assert composite_db.exists_all(committed) # Only insert other certificates and check if exists inserted = insert_test_certs(composite_db, TEST_CERTS_2) for cert in inserted: assert composite_db.exists(cert) # ReadOnly DB should NOT have it assert not real_db_read_only.exists(cert) assert composite_db.exists_all(inserted) # Test DELETE method effect real_db.delete(committed[0]) # compositeDB should still have it in real_db2 assert composite_db.exists(committed[0]) composite_db.delete(committed[0]) # compositeDB should still have it in real_db_read_only before commit assert composite_db.exists(committed[0]) composite_db.commit() # compositeDB should no longer have the cert but cache in real_db_read_only have assert not real_db.exists(committed[0]) assert not real_db2.exists(committed[0]) # get method upon failure should clear the cache if seems invalidated self.assertRaises(CertNotAvailableError, real_db_read_only.get, committed[0]) assert not real_db_read_only.exists(committed[0]) # Have 1 cert in one DB and other cert in other DB and check EXISTS method real_db.delete(committed[2]) assert not real_db.exists(committed[2]) real_db2.delete(committed[3]) assert not real_db2.exists(committed[3]) # composite_db should return True assert composite_db.exists(committed[2]) assert composite_db.exists(committed[3]) assert composite_db.exists_all([committed[2], committed[3]]) # Test fake certificate that doesn't exist committed.append(fake_cert) assert not composite_db.exists(fake_cert) assert not composite_db.exists_all(committed) def test_insert(self): """ Test implementation of CompositeCertDB method INSERT 
""" real_db = CertFileDB(self.TEST_STORAGE_1) real_db2 = CertFileDB(self.TEST_STORAGE_2) composite_db = CompositeCertDB() composite_db.register(real_db) composite_db.register(real_db2) # Insert some invalid certificates self.assertRaises(CertInvalidError, composite_db.insert, None, None) self.assertRaises(CertInvalidError, composite_db.insert, '', '') self.assertRaises(CertInvalidError, composite_db.insert, '', 'valid') self.assertRaises(CertInvalidError, composite_db.insert, 'valid', None) # Insert some valid certificates inserted = insert_test_certs(composite_db, TEST_CERTS_1) blocks = {**real_db._to_insert} blocks2 = {**real_db2._to_insert} # transaction should contain certificates from open transcation and certs should exist self.assertTrue(real_db._to_insert) self.assertTrue(real_db2._to_insert) for cert in inserted: block_path = real_db._get_block_path(cert) block_path2 = real_db2._get_block_path(cert) assert os.path.exists(os.path.join(block_path, cert)) assert os.path.exists(os.path.join(block_path2, cert)) # Insert different certificates under the same IDs certs = {} with open(TEST_CERTS_1) as r_file: for line in r_file: els = [e.strip() for e in line.split(',')] composite_db.insert(els[0], els[1] + '_open') certs[els[0]] = els[1] # IDs should be same and certificates should not be changed self.assertTrue(blocks == real_db._to_insert) self.assertTrue(blocks2 == real_db2._to_insert) for k, v in certs.items(): self.assertTrue(real_db.get(k) == v) self.assertTrue(real_db2.get(k) == v) # Commit transaction and commit different certificates under the same IDs composite_db.commit() self.assertFalse(real_db._to_insert) self.assertFalse(real_db2._to_insert) certs = {} with open(TEST_CERTS_1) as r_file: for line in r_file: els = [el.strip() for el in line.split(',')] composite_db.insert(els[0], els[1] + '_commit') certs[els[0]] = els[1] # IDs should be same and persisted certificates should not be changed self.assertTrue(blocks == real_db._to_insert) 
self.assertTrue(blocks2 == real_db2._to_insert) composite_db.commit() self.assertFalse(real_db._to_insert) self.assertFalse(real_db2._to_insert) for k, v in certs.items(): self.assertTrue(real_db.get(k) == v) self.assertTrue(real_db2.get(k) == v) def test_delete(self): """ Test implementation of CompositeCertDB method DELETE """ real_db = CertFileDB(self.TEST_STORAGE_1) real_db2 = CertFileDB(self.TEST_STORAGE_2) composite_db = CompositeCertDB() composite_db.register(real_db) composite_db.register(real_db2) # Delete some invalid certificates self.assertRaises(CertInvalidError, composite_db.delete, None) self.assertRaises(CertInvalidError, composite_db.delete, '') # Insert and delete the same certs before commit inserted = insert_test_certs(composite_db, TEST_CERTS_1) deleted = delete_test_certs(composite_db, TEST_CERTS_1) # transaction should be clear and files should not exist self.assertFalse(real_db._to_delete) self.assertFalse(real_db2._to_delete) self.assertFalse(real_db._to_insert) self.assertFalse(real_db2._to_insert) for cert in inserted: block_path = real_db._get_block_path(cert) block_path2 = real_db2._get_block_path(cert) assert not os.path.exists(os.path.join(block_path, cert)) assert not os.path.exists(os.path.join(block_path2, cert)) # Delete and insert the same certs before commit deleted = delete_test_certs(composite_db, TEST_CERTS_1) inserted = insert_test_certs(composite_db, TEST_CERTS_1) # transaction should contain deleted and inserted certificates self.assertTrue(real_db._to_delete) self.assertTrue(real_db2._to_delete) self.assertTrue(real_db._to_insert) self.assertTrue(real_db2._to_insert) for certs in real_db._to_delete.values(): assert certs.issubset(set(deleted)) for certs in real_db2._to_delete.values(): assert certs.issubset(set(deleted)) for certs in real_db._to_insert.values(): assert certs.issubset(set(inserted)) for certs in real_db2._to_insert.values(): assert certs.issubset(set(inserted)) # and files should exist for cert in 
inserted: block_path = real_db._get_block_path(cert) block_path2 = real_db2._get_block_path(cert) assert os.path.exists(os.path.join(block_path, cert)) assert os.path.exists(os.path.join(block_path2, cert)) # now commit and check that files were persisted ins, dlt = composite_db.commit() # the certs should be only inserted self.assertEqual(ins, len(inserted)) self.assertEqual(dlt, 0) self.assertFalse(real_db._to_delete) self.assertFalse(real_db2._to_delete) self.assertFalse(real_db._to_insert) self.assertFalse(real_db2._to_insert) # Delete inserted certs, commit and check that they were deleted assert composite_db.exists_all(inserted) del_cert = inserted.pop() composite_db.delete(del_cert) assert not real_db.exists(del_cert) assert not real_db2.exists(del_cert) composite_db.commit() assert not real_db.exists(del_cert) assert not real_db2.exists(del_cert) for cert in inserted: composite_db.delete(cert) ins, dlt = composite_db.commit() self.assertEqual(ins, 0) self.assertEqual(dlt, len(inserted)) # storage should be empty self.assertFalse(os.listdir(real_db.storage).remove(real_db.CONF_FILENAME)) self.assertFalse(os.listdir(real_db2.storage).remove(real_db2.CONF_FILENAME)) # Delete the same cert multiple times should not have effect self.assertFalse(real_db._to_delete) self.assertFalse(real_db2._to_delete) composite_db.delete('validcert') blocks_to_delete = {**real_db._to_delete} blocks_to_delete2 = {**real_db2._to_delete} self.assertTrue(real_db._to_delete) self.assertTrue(real_db2._to_delete) composite_db.delete('validcert') self.assertTrue(real_db._to_delete) self.assertTrue(real_db2._to_delete) self.assertEqual(blocks_to_delete, real_db._to_delete) self.assertEqual(blocks_to_delete2, real_db2._to_delete) def test_commit(self): """ Test implementation of CompositeCertDB method COMMIT """ real_db = CertFileDB(self.TEST_STORAGE_1) real_db2 = CertFileDB(self.TEST_STORAGE_2) composite_db = CompositeCertDB() composite_db.register(real_db) 
composite_db.register(real_db2) # Test commit without inserts ins, dlt = composite_db.commit() self.assertEqual(ins, 0) self.assertEqual(dlt, 0) # Insert some certificates and check correct number of committed certs inserted = insert_test_certs(composite_db, TEST_CERTS_1) ins, dlt = composite_db.commit() self.assertEqual(ins, len(inserted)) self.assertEqual(dlt, 0) # transaction should be empty and certs should be compressed in zip files self.assertFalse(real_db._to_insert) self.assertFalse(real_db2._to_insert) for cert in inserted: block_path = real_db._get_block_path(cert) block_path2 = real_db2._get_block_path(cert) archive_path = real_db._get_block_archive(cert) archive_path2 = real_db2._get_block_archive(cert) assert not os.path.exists(os.path.join(block_path, cert)) assert not os.path.exists(os.path.join(block_path2, cert)) assert os.path.exists(archive_path) assert os.path.exists(archive_path2) # Insert already persisted certs and some others and commit inserted_again = insert_test_certs(composite_db, TEST_CERTS_1) inserted_new = insert_test_certs(composite_db, TEST_CERTS_2) ins, dlt = composite_db.commit() # only the other certs should be committed self.assertEqual(ins, len(inserted_new)) self.assertEqual(dlt, 0) # and the same ones should NOT for cert in inserted_again: block_path = real_db._get_block_path(cert) block_path2 = real_db2._get_block_path(cert) assert not os.path.exists(os.path.join(block_path, cert)) assert not os.path.exists(os.path.join(block_path2, cert)) # Delete and insert the same not yet persisted cert and commit valid_cert = ['valid_cert', 'validvalidvalidvalidvalid'] composite_db.delete(valid_cert[0]) composite_db.insert(*valid_cert) composite_db.commit() # check that cert is persisted block_path = real_db._get_block_path(valid_cert[0]) block_path2 = real_db2._get_block_path(valid_cert[0]) archive_path = real_db._get_block_archive(valid_cert[0]) archive_path2 = real_db2._get_block_archive(valid_cert[0]) assert 
composite_db.exists(valid_cert[0]) assert not os.path.exists(os.path.join(block_path, valid_cert[0])) assert not os.path.exists(os.path.join(block_path2, valid_cert[0])) assert os.path.exists(archive_path) assert os.path.exists(archive_path2) # Delete and insert the same already persisted cert and commit valid_cert = ['valid_cert', 'validvalidvalidvalidvalid_new'] composite_db.delete(valid_cert[0]) composite_db.insert(*valid_cert) composite_db.commit() # check that the cert was replaced assert composite_db.exists(valid_cert[0]) self.assertEqual(real_db.get(valid_cert[0]), valid_cert[1]) self.assertEqual(real_db2.get(valid_cert[0]), valid_cert[1]) def test_rollback(self): """Test implementation of CompositeCertDB method ROLLBACK""" real_db = CertFileDB(self.TEST_STORAGE_1) real_db2 = CertFileDB(self.TEST_STORAGE_2) composite_db = CompositeCertDB() composite_db.register(real_db) composite_db.register(real_db2) # Test rollback without inserts composite_db.rollback() # Insert some certificates, rollback and check that blocks are deleted inserted = insert_test_certs(composite_db, TEST_CERTS_1) composite_db.rollback() self.assertFalse(real_db._to_insert) self.assertFalse(real_db2._to_insert) for cert in inserted: block_path = real_db._get_block_path(cert) block_path2 = real_db2._get_block_path(cert) assert not os.path.exists(os.path.join(block_path, cert)) assert not os.path.exists(os.path.join(block_path2, cert)) # Commit some certs, insert other certs and rollback committed = commit_test_certs(composite_db, TEST_CERTS_1) inserted = insert_test_certs(composite_db, TEST_CERTS_2) composite_db.rollback() # Transaction should be empty self.assertFalse(real_db._to_insert) self.assertFalse(real_db2._to_insert) # Commited certs should be compressed in zip files for cert in committed: block_path = real_db._get_block_path(cert) block_path2 = real_db2._get_block_path(cert) archive_path = real_db._get_block_archive(cert) archive_path2 = real_db2._get_block_archive(cert) assert not 
os.path.exists(os.path.join(block_path, cert)) assert not os.path.exists(os.path.join(block_path2, cert)) assert os.path.exists(archive_path) assert os.path.exists(archive_path2) # Rollbacked certs files should not exists for cert in inserted: block_path = real_db._get_block_path(cert) block_path2 = real_db2._get_block_path(cert) archive_path = real_db._get_block_archive(cert) archive_path2 = real_db2._get_block_archive(cert) assert not os.path.exists(os.path.join(block_path, cert)) assert not os.path.exists(os.path.join(block_path2, cert)) assert not os.path.exists(archive_path) assert not os.path.exists(archive_path2) # Check rollback of delete method deleted = delete_test_certs(composite_db, TEST_CERTS_1) self.assertTrue(real_db._to_delete) self.assertTrue(real_db2._to_delete) for cert in deleted: assert not composite_db.exists(cert) assert not real_db.exists(cert) assert not real_db2.exists(cert) composite_db.rollback() self.assertFalse(real_db._to_delete) self.assertFalse(real_db2._to_delete) # All deleted certs should still exist assert composite_db.exists_all(deleted) assert real_db.exists_all(deleted) assert real_db2.exists_all(deleted) if __name__ == '__main__': unittest.main()
59,170
17,934
"""Simple genetic-algorithm primitives: individuals, populations and
fitness/selection helpers built on NumPy."""
import uuid

import numpy as np


class BColors:
    """ANSI escape codes for coloured terminal output."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'


class Indv:
    """A single individual: a weight vector drawn uniformly from [-4.0, 4.0)."""

    def __init__(self, indv_size):
        self.indv_size = indv_size
        self.weights = np.random.uniform(-4.0, 4.0, indv_size)
        self.uuid = uuid.uuid4()

    def __str__(self):
        return 'Indv weights: {self.weights} uuid: {self.uuid}'.format(self=self)

    def __repr__(self):
        return self.weights.__str__()

    def __len__(self):
        return self.indv_size


class Population:
    """A fixed-size collection of randomly initialised individuals."""

    def __init__(self, pop_size, indv_size):
        self.pop_size = pop_size
        self.indv_size = indv_size
        self.uuid = uuid.uuid4()
        self.population = [Indv(indv_size) for _ in range(pop_size)]

    def __str__(self):
        return 'Population of {self.pop_size} indviduals with uuid: {self.uuid}\n{self.population}'.format(self=self)

    def __repr__(self):
        return self.population.__str__()

    def __len__(self):
        return len(self.population)


def fitness_offsprings(mutated_offsprings, func_weights):
    """Return the fitness value of each offspring individual.

    :param mutated_offsprings: iterable of Indv objects.
    :param func_weights: target weight vector for the fitness function.
    """
    fitness_array = [fitness_max_function(indv.weights, func_weights)
                     for indv in mutated_offsprings]
    print("Actual offsprings fitness values:")
    print(fitness_array)
    return fitness_array


def fitness_population(population, func_weights):
    """Return the fitness value of every individual in the population."""
    fitness_array = [fitness_max_function(indv.weights, func_weights)
                     for indv in population.population]
    print("Actual fitness values:")
    print(fitness_array)
    return fitness_array


def fitness_max_function(indv_weights, func_weights):
    """Fitness is the dot product of an individual's weights with the target weights."""
    return np.dot(indv_weights, func_weights)


def select_parents(population, fitness_values):
    """Return the fittest 50% of *population*, best first.

    Fixes vs. the previous implementation:
    - selection now works on a local copy of the scores, so the caller's
      ``fitness_values`` list is no longer mutated as a hidden side effect;
    - ``np.argmax`` replaces the fragile ``np.where(list == np.max(list))``
      lookup and the ``-99999999999`` sentinel, which broke whenever a real
      fitness value was lower than the sentinel.

    :param population: Population whose individuals are candidates.
    :param fitness_values: sequence of fitness scores, aligned with
        ``population.population``.
    :return: list of the top-half individuals in descending fitness order.
    """
    num_parents = len(population) // 2  # we want the best 50% as parents
    scores = np.asarray(fitness_values, dtype=float).copy()
    parents = []
    for _ in range(num_parents):
        best_idx = int(np.argmax(scores))  # first occurrence on ties, like np.where did
        parents.append(population.population[best_idx])
        scores[best_idx] = -np.inf  # exclude this individual from later rounds
    print("Selected parents:")
    print(parents)
    return parents
2,325
960
# Generated by Django 2.2.1 on 2020-01-14 19:19
from django.db import migrations, models


class Migration(migrations.Migration):
    """Drop ``expires_at`` from ``users`` and add the ``ytd_distance`` counter."""

    dependencies = [
        ('main', '0022_auto_20200111_2058'),
    ]

    operations = [
        migrations.RemoveField(model_name='users', name='expires_at'),
        migrations.AddField(
            model_name='users',
            name='ytd_distance',
            field=models.IntegerField(default=0, verbose_name='distance'),
        ),
    ]
516
175
import sc2
from sc2.constants import *

# our own classes
from unit_counters import UnitCounter
from warpprism import WarpPrism as wpControl
from immortal import Immortal as imControl
from stalker import Stalker as skControl
from zealot import Zealot as zlControl
from sentry import Sentry as snControl
from adept import Adept as adControl
from colossus import Colossus as coControl
from voidray import VoidRay as vrControl
from tempest import Tempest as tpControl
from phoenix import Phoenix as pxControl
from probe import Probe as pbControl
from shade import Shade as sdControl
from hightemplar import HighTemplar as htControl
from observer import Observer as obControl
from disruptor import Disruptor as dsControl
from disruptor_phased import DisruptorPhased as dpControl
from carrier import Carrier as crControl
from mothership import Mothership as msControl
from archon import Archon as arControl
from cannon import Cannon as cnControl


class UnitList():
    """Keeps one controller object per friendly unit tag and routes decisions to it."""

    # Maps a unit's reported name to the controller class that drives it.
    # Names missing from this table are simply not tracked.
    CONTROL_CLASSES = {
        'WarpPrism': wpControl,
        'Immortal': imControl,
        'Stalker': skControl,
        'Zealot': zlControl,
        'Sentry': snControl,
        'Adept': adControl,
        'Colossus': coControl,
        'VoidRay': vrControl,
        'Phoenix': pxControl,
        'Probe': pbControl,
        'Tempest': tpControl,
        'AdeptPhaseShift': sdControl,
        'HighTemplar': htControl,
        'Observer': obControl,
        'Disruptor': dsControl,
        'DisruptorPhased': dpControl,
        'Carrier': crControl,
        'Mothership': msControl,
        'Archon': arControl,
        'PhotonCannon': cnControl,
    }

    def __init__(self):
        self.unit_objects = {}
        self.unitCounter = UnitCounter()

    def make_decisions(self, game):
        """Refresh every tracked unit and let each controller act for this frame."""
        self.game = game
        self.update_units()
        for unit in self.game.units():
            tracked = self.unit_objects.get(unit.tag)
            if tracked:
                tracked.make_decision(self.game, unit)

    def update_units(self):
        # Point each controller at the latest snapshot of its unit.
        for unit in self.game.units():
            tracked = self.unit_objects.get(unit.tag)
            if tracked:
                tracked.unit = unit

    def getObjectByTag(self, unit_tag):
        """Return the controller for a tag, or None when untracked."""
        return self.unit_objects.get(unit_tag)

    def remove_object(self, unit_tag):
        """Drop a dead unit's controller, releasing any bookkeeping it holds."""
        tracked = self.unit_objects.get(unit_tag)
        if tracked:
            # Probes must be detached from the gathering bookkeeping first.
            if tracked.unit.name == 'Probe':
                tracked.removeGatherer()
            if tracked.unit.name == 'DisruptorPhased':
                tracked.clearMines()
                tracked.clearLurkers()
            del self.unit_objects[unit_tag]

    def load_object(self, unit):
        """Create and register a controller for a newly seen unit."""
        # Already tracked: nothing to do.
        if self.getObjectByTag(unit.tag):
            return
        control_class = self.CONTROL_CLASSES.get(unit.name)
        if control_class:
            self.unit_objects[unit.tag] = control_class(unit)

    def unitPosition(self, ownerUnit):
        tracked = self.unit_objects.get(ownerUnit.tag)
        return tracked.saved_position if tracked else None

    def phaseTargets(self):
        """Current targets of every phased disruptor shot in flight."""
        return [obj.currentTarget for obj in self.unit_objects.values()
                if obj.unit.name == 'DisruptorPhased']

    def adeptChaseTarget(self, ownerUnit):
        tracked = self.unit_objects.get(ownerUnit.tag)
        return tracked.chasePosition if tracked else None

    def unitDamaged(self, ownerUnit):
        tracked = self.unit_objects.get(ownerUnit.tag)
        return tracked.wasDamaged if tracked else False

    def unitHomeTarget(self, ownerUnit):
        tracked = self.unit_objects.get(ownerUnit.tag)
        return tracked.homeTarget if tracked else None

    def unitTarget(self, ownerUnit):
        tracked = self.unit_objects.get(ownerUnit.tag)
        return tracked.last_target if tracked else None

    def disruptorBallCancel(self, owner_tag) -> bool:
        """True when any of this owner's phased shots has requested a cancel."""
        return any(
            obj.unit.type_id == DISRUPTORPHASED
            and obj.requestCancel
            and obj.ownerTag == owner_tag
            for obj in self.unit_objects.values()
        )

    def adeptOrder(self, ownerUnit):
        tracked = self.unit_objects.get(ownerUnit.tag)
        return tracked.shadeOrder if tracked else None

    def assignScout(self):
        """Promote one free-gathering probe to scout duty."""
        # Late defensive game: don't replace lost scouts.
        if self.game.defend_only and self.game.time > 360:
            return
        for obj in self.unit_objects.values():
            if obj.unit.name == 'Probe' and not obj.collect_only and not obj.scout:
                obj.becomeScout()
                obj.removeGatherer()
                return

    def unitCount(self, unit_name):
        """Number of tracked units with the given name."""
        return sum(1 for obj in self.unit_objects.values()
                   if obj.unit.name == unit_name)

    def shieldSafe(self, inc_unit):
        """False while a sentry shield is already active within 2.5 of this unit."""
        for obj in self.unit_objects.values():
            if (obj.unit.name == 'Sentry' and obj.shieldActive
                    and obj.unit.distance_to(inc_unit.unit) < 2.5):
                return False
        return True

    def freeNexusBuilders(self):
        """Release every probe currently reserved for nexus construction."""
        for obj in self.unit_objects.values():
            if obj.unit.name == 'Probe' and obj.nexus_builder:
                obj.nexus_builder = False
                obj.nexus_position = None

    @property
    def nexusBuilderAssigned(self) -> bool:
        return any(obj.unit.name == 'Probe' and obj.nexus_builder
                   for obj in self.unit_objects.values())

    @property
    def hallucinationScore(self) -> int:
        """Combined power score of all tracked hallucinations."""
        score = 0
        for obj in self.unit_objects.values():
            if obj.isHallucination:
                score += self.unitCounter.getUnitPower(obj.unit.name)
        return score

    def phoenixScouting(self):
        """True while a hallucinated phoenix is out scouting."""
        return any(obj.unit.name == 'Phoenix' and obj.isHallucination
                   for obj in self.unit_objects.values())

    def getGravitonTarget(self, inc_unit):
        """Return the unit lifted by the nearest beaming phoenix (within 10), else None."""
        beamers = [obj for obj in self.unit_objects.values()
                   if obj.unit.name == 'Phoenix' and obj.isBeaming]
        target = None
        mindist = 1000
        for phoenix in beamers:
            # NOTE(review): the comparison reads phoenix.position while the stored
            # distance reads phoenix.unit.position — kept exactly as before.
            if inc_unit.unit.position.to2.distance_to(phoenix.position.to2) < mindist:
                target = phoenix.beam_unit
                mindist = inc_unit.unit.position.to2.distance_to(phoenix.unit.position.to2)
        if mindist < 10:
            return target
        return None

    def getWorkers(self):
        """Items view of (tag, controller) pairs for every probe."""
        return {tag: obj for tag, obj in self.unit_objects.items()
                if obj.unit.name == 'Probe'}.items()

    def friendlyEngagedFighters(self, closestEnemy, friendRange=10):
        """Tally friendly combat stats near an enemy.

        Returns [groundDPS, airDPS, airHealth, groundHealth, totalDPS].
        """
        nearby = [obj for obj in self.unit_objects.values()
                  if obj.unit.position.to2.distance_to(closestEnemy.position.to2) < friendRange]
        return self._tallyFighters(nearby)

    def friendlyFighters(self, inc_unit, friendRange=10):
        """Tally friendly combat stats near one of our own units.

        Returns [groundDPS, airDPS, airHealth, groundHealth, totalDPS].
        """
        nearby = [obj for obj in self.unit_objects.values()
                  if obj.unit.position.to2.distance_to(inc_unit.position.to2) < friendRange]
        return self._tallyFighters(nearby)

    def _tallyFighters(self, fighters):
        # Shared accumulator for the two friendly-fighter queries above.
        ground_dps = 0
        air_dps = 0
        air_health = 0
        ground_health = 0
        total_dps = 0
        for obj in fighters:
            if obj.unit.is_flying:
                air_health += obj.unit.health + obj.unit.shield
            else:
                ground_health += obj.unit.health + obj.unit.shield
            ground_dps += obj.unit.ground_dps
            air_dps += obj.unit.air_dps
            total_dps += max(obj.unit.ground_dps, obj.unit.air_dps)
        return [ground_dps, air_dps, air_health, ground_health, total_dps]

    # properties.
    @property
    def amount(self) -> int:
        return len(self.unit_objects)
11,197
4,584
from .Versioned import Versioned


class Addon(Versioned):
    """Implements the addon support to the versioned."""

    def __init__(self, *args, **kwargs):
        """Create an addon object."""
        super(Addon, self).__init__(*args, **kwargs)

        # addons start out enabled unless explicitly switched off later
        self.setOption('enabled', True)

    def bverEnabledName(self, software):
        """Return the enabled environment variable name for the addon versioned."""
        software_token = software.name().upper()
        addon_token = self.name().upper()
        return 'BVER_{}_{}_ENABLED'.format(software_token, addon_token)
619
171
# Generated by Django 3.2.8 on 2021-11-15 20:32
from django.db import migrations


class Migration(migrations.Migration):
    """Rename the ``Ponto`` model to ``Agendamentos``."""

    dependencies = [
        ('usuarios', '0005_rename_ponto_id_funcionario_ponto'),
        ('agendamentos', '0001_initial'),
    ]

    operations = [
        migrations.RenameModel(old_name='Ponto', new_name='Agendamentos'),
    ]
393
144
from datetime import date

from django.contrib import admin, messages
from django.core.management import call_command
from django.http import HttpResponseRedirect
from django.urls import path
from django.utils.html import format_html

from .models import (
    FilingList,
    Filing,
    Holding,
    Cik,
    Cusip,
    CikObservation,
    CusipObservation,
)
from .tasks import process_filing, process_filing_list


class CikAdmin(admin.ModelAdmin):
    """Admin for CIK (filer identifier) records."""

    class Meta:
        model = Cik

    search_fields = ("cik_number",)
    list_display = ("cik_number", "filer_name")


class CusipAdmin(admin.ModelAdmin):
    """Admin for CUSIP (security identifier) records."""

    class Meta:
        model = Cusip

    search_fields = ("cusip_number", "company_name", "symbol")
    list_display = ("cusip_number", "company_name", "symbol")


class CikObservationAdmin(admin.ModelAdmin):
    """Read-only admin for CIK observations tied to a filing list."""

    class Meta:
        model = CikObservation

    search_fields = ("cik__cik_number", "name")
    readonly_fields = ("cik", "filing_list", "name")
    list_display = ("id", "cik", "name", "filing_list")


class CusipObservationAdmin(admin.ModelAdmin):
    """Read-only admin for CUSIP observations tied to a filing."""

    class Meta:
        model = CusipObservation

    search_fields = ("cusip__cusip_number", "name")
    readonly_fields = ("cusip", "name", "filing")
    list_display = ("id", "cusip", "name", "filing")


class FilingListAdmin(admin.ModelAdmin):
    """Admin for quarterly filing lists, with processing/generation hooks."""

    class Meta:
        model = FilingList

    list_display = ("id", "datafile", "quarter", "filing_count")
    readonly_fields = ("quarter",)
    change_form_template = "admin/filing/filinglist/change_form.html"

    def save_model(self, request, obj, form, change):
        # Derive the quarter start date (first month of the quarter) when unset.
        if not obj.quarter:
            obj.quarter = date(
                int(obj.filing_year), ((int(obj.filing_quarter) - 1) * 3) + 1, 1
            )
        super(FilingListAdmin, self).save_model(request, obj, form, change)

    def process_filings(self, request, queryset):
        # Admin bulk action: process each selected filing list.
        for filing_list in queryset:
            filing_list.process_filing_list()
        messages.add_message(request, messages.INFO, 'Processing filing')

    process_filings.short_description = "Process Filings"
    actions = [process_filings]

    def get_urls(self):
        # Expose an extra "generate/" endpoint alongside the stock admin URLs.
        extra_urls = [path("generate/", self.generate_filing_lists)]
        return super().get_urls() + extra_urls

    def generate_filing_lists(self, request):
        call_command("generate_filing_lists")
        self.message_user(
            request, "Filing lists have been generated (1993 - 2020)."
        )
        return HttpResponseRedirect("../")

    def response_change(self, request, obj):
        # Handle the custom "process" button on the change form.
        if "_process_filing_list" in request.POST:
            process_filing_list.apply_async(args=(obj.id,))
            self.message_user(request, "Filing list is being processed.")
            return HttpResponseRedirect(".")
        return super().response_change(request, obj)


class FilingAdmin(admin.ModelAdmin):
    """Admin for individual 13F filings."""

    class Meta:
        model = Filing

    # select_related keeps the changelist to a constant number of queries.
    list_select_related = ("filing_list", "cik")
    readonly_fields = ("filing_list",)
    list_display = (
        "id",
        "cik",
        "form_type",
        "date_filed",
        "filing_list_link",
        "datafile",
        "holding_count",
    )
    search_fields = ("form_type",)
    change_form_template = "admin/filing/filing/change_form.html"

    def holding_count(self, obj=None):
        # Link the count through to the filtered holdings changelist.
        return format_html(
            f"<a href='/admin/filing/holding/?filing__id={obj.id}'>{obj.holding_count()}</a>"  # noqa
        )

    holding_count.admin_order_field = "holdingcount"

    def filing_list_link(self, obj=None):
        return format_html(
            f'<a target="_blank" href="/admin/filing/filinglist/{obj.filing_list.id}/change/">{str(obj.filing_list)}</a>'  # noqa
        )

    def response_change(self, request, obj):
        # Handle the custom "process" button on the change form.
        if "_process_filing" in request.POST:
            process_filing.apply_async(args=(obj.id,))
            self.message_user(request, "Filing is being processed.")
            return HttpResponseRedirect(".")
        return super().response_change(request, obj)


class HoldingAdmin(admin.ModelAdmin):
    """Admin for individual holdings reported inside a filing."""

    class Meta:
        model = Holding

    list_select_related = (
        "filing",
        "cusip",
        "filing__cik",
        "filing__filing_list",
    )
    readonly_fields = ("filing",)
    list_display = (
        "id",
        "cik",
        "filing_link",
        "filing",
        "date_filed",
        "nameOfIssuer",
        "titleOfClass",
        "cusip",
        "value",
        "sshPrnamt",
        "sshPrnamtType",
        "investmentDiscretion",
        "putCall",
        "otherManager",
        "sole",
        "shared",
        "nonee",
    )
    search_fields = (
        "nameOfIssuer",
        "cusip__cusip_number",
        "cusip__company_name",
    )

    def cik(self, obj=None):
        return format_html(
            f'<a target="_blank" href="/cik/{obj.filing.cik}">{obj.filing.cik}</a>'  # noqa
        )

    def date_filed(self, obj=None):
        return obj.filing.date_filed

    # Allow ordering this computed column by the related model's field.
    date_filed.admin_order_field = "filing__date_filed"

    def filing_link(self, obj=None):
        return format_html(
            f'<a target="_blank" href="/admin/filing/filing/{obj.filing.id}/change/">Link</a>'  # noqa
        )


admin.site.register(Holding, HoldingAdmin)
admin.site.register(FilingList, FilingListAdmin)
admin.site.register(Filing, FilingAdmin)
admin.site.register(Cik, CikAdmin)
admin.site.register(CikObservation, CikObservationAdmin)
admin.site.register(Cusip, CusipAdmin)
admin.site.register(CusipObservation, CusipObservationAdmin)
5,988
2,006
"""Advent of Code 2019 Day 6 - Universal Orbit Map.""" with open('inputs/day_06.txt', 'r') as f: orbits = f.read().split() objects_dict = {} for orbit in orbits: orbited, orbiter = orbit.split(')') objects_dict[orbiter] = orbited num_orbits = 0 for orbiter in objects_dict: next_orbit = objects_dict.get(orbiter, None) while next_orbit: next_orbit = objects_dict.get(next_orbit, None) num_orbits += 1 # Answer One print("Number of direct and indirect orbits:", num_orbits) you_path = {} on_you_path = set() transfers = 0 next_orbit = objects_dict.get("YOU", None) while next_orbit: transfers += 1 you_path[next_orbit] = transfers on_you_path.add(next_orbit) next_orbit = objects_dict.get(next_orbit, None) transfers = 0 next_orbit = objects_dict.get("SAN", None) while next_orbit and next_orbit not in on_you_path: transfers += 1 next_orbit = objects_dict.get(next_orbit, None) # Answer Two print("Transfers between you and Santa:", transfers + you_path[next_orbit] - 1)
1,039
394
from hookery import Handler, Hook


def test_handler_from_func():
    """A Handler wrapped around a plain function keeps its identity and calls through."""
    def f(a, b):
        return a - b

    handler = Handler(f, Hook('hook1'))

    # The wrapper is callable and reports the wrapped function's names.
    assert callable(handler)
    assert handler.__name__ == 'f'
    assert handler.name == 'f'
    assert handler.hook_name == 'hook1'
    assert not handler.is_generator

    # Extra keyword arguments (c) are accepted; only a and b reach f.
    assert handler(c=8, b=10, a=25) == 15
    assert handler(b=5, a=10) == 5


def test_handler_of_handler_uses_original_func_as_original_func():
    """Wrapping a Handler in another Handler still points at the raw function."""
    def f(a, b):
        return a + b

    inner = Handler(f, Hook('hook'))
    assert inner._original_func is f

    outer = Handler(inner, Hook('hook'))
    assert outer._original_func is f
    # Re-wrapping must not disturb the inner handler either.
    assert inner._original_func is f
663
248
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/2/25 14:14
# @Author : ZJJ
# @Email : 597105373@qq.com
from .views import UserViewset, ChangeUserPasswdView, GroupsViewset, UserGroupViewset, GroupMemberViewset, \
    PermissionViewset, GroupPermViewset, PersonalInfoViewset
from rest_framework.routers import DefaultRouter

permcontrol_router = DefaultRouter()

# (url prefix, viewset) pairs; every route's base_name matches its prefix,
# so the registrations can be driven from one table.
_PERMCONTROL_ROUTES = (
    ("personinfo", PersonalInfoViewset),
    ("users", UserViewset),
    ("chuserpasswd", ChangeUserPasswdView),
    ("groups", GroupsViewset),
    ("usergroup", UserGroupViewset),
    ("groupmember", GroupMemberViewset),
    ("permission", PermissionViewset),
    ("groupperm", GroupPermViewset),
)

for _prefix, _viewset in _PERMCONTROL_ROUTES:
    permcontrol_router.register(_prefix, _viewset, base_name=_prefix)
1,057
346
# TODO: Add an appropriate license to your skill before publishing.  See
# the LICENSE file for more information.

# Below is the list of outside modules you'll be using in your skill.
# They might be built-in to Python, from mycroft-core or from external
# libraries.  If you use an external library, be sure to include it
# in the requirements.txt file so the library is installed properly
# when the skill gets installed later by a user.

from adapt.intent import IntentBuilder
from mycroft.skills.core import MycroftSkill, intent_handler
from mycroft.util.log import LOG
from mycroft.skills.context import adds_context, removes_context


class EvolucareSkill(MycroftSkill):
    """Voice skill guiding a user through a blood-pressure ("tension")
    measurement conversation.

    The dialog is chained with Adapt conversational contexts:
    - 'TensionContext' is set once the user mentions tension, enabling the
      accept/decline follow-up intents;
    - 'TensionProtocol' is set once the measurement protocol has been
      explained, enabling the "ready" intent that triggers the measurement.
    """

    def __init__(self):
        super(EvolucareSkill, self).__init__(name="EvolucareSkill")
        # Last measured blood pressure; updated by TensionCalulate().
        self.last_tension = 0

    @intent_handler(IntentBuilder("TensionMesure")
                    .require("mesure")
                    .require("tension"))
    @adds_context('TensionProtocol')
    def handle_tension_question_mesure(self, message):
        """User explicitly asked to measure tension: explain the protocol
        and enter the 'TensionProtocol' context directly."""
        self.speak_dialog('tension.mesure.protocol')

    @intent_handler(IntentBuilder("TensionQuestion")
                    .require("tension"))
    @adds_context('TensionContext')
    def handle_tension_question(self, message):
        """User mentioned tension: ask whether they want a measurement and
        wait for a yes/no answer within 'TensionContext'."""
        self.speak_dialog('tension.question', expect_response=True)

    @intent_handler(IntentBuilder("TensionQuestionDecline")
                    .require("negation")
                    .require("TensionContext")
                    .build())
    @removes_context('TensionContext')
    def handle_tension_question_decline(self, message):
        """User declined the measurement: acknowledge and drop the context."""
        self.speak_dialog('tension.question.decline')

    @intent_handler(IntentBuilder("TensionProtocolIntent")
                    .require("acceptation")
                    .require("TensionContext")
                    .build())
    @adds_context('TensionProtocol')
    @removes_context('TensionContext')
    def handle_tension_question_accept(self, message):
        """User accepted: explain the protocol and switch from
        'TensionContext' to 'TensionProtocol'."""
        self.speak_dialog('tension.mesure.protocol')

    @intent_handler(IntentBuilder("TensionCalculIntent")
                    .require("pret")
                    .require("TensionProtocol")
                    .optionally("negation")
                    .build())
    def handle_tension_calcul_intent(self, message):
        """User said whether they are ready ('pret'): run the measurement,
        or — if the utterance was negated — ask them to say when ready."""
        neg = message.data.get("negation")
        if not neg:
            self.TensionCalulate()
        else:
            self.speak_dialog("tension.protocol.wait")

    # NOTE(review): name keeps the original spelling ("Calulate") and casing
    # because it is invoked by name above; renaming would be a code change.
    @removes_context('TensionProtocol')
    def TensionCalulate(self):
        """Perform the measurement, store the result on self.last_tension,
        speak it back, and clear the 'TensionProtocol' context."""
        self.speak_dialog('tension.calcul')
        # TO DO : calculate and return tension (currently hard-coded to 0)
        self.last_tension = 0
        self.speak_dialog("tension.response",
                          data={"tension": self.last_tension}
                          )

    #@intent_handler(IntentBuilder(""))
    #def handle_default_intent(self, message):
        #self.speak_dialog("response", data={"response": message.data["utterance"]})

    #@intent_handler(IntentBuilder("").require("Count").require("Dir"))
    #def handle_count_intent(self, message):
        #if message.data["Dir"] == "up":
            #self.count += 1
        #else:  # assume "down"
            #self.count -= 1
        #self.speak_dialog("count.is.now", data={"count": self.count})

    # The "stop" method defines what Mycroft does when told to stop during
    # the skill's execution. In this case, since the skill's functionality
    # is extremely simple, there is no need to override it.  If you DO
    # need to implement stop, you should return True to indicate you handled
    # it.
    #
    # def stop(self):
    #     return False


def create_skill():
    """Mycroft skill-loader entry point."""
    return EvolucareSkill()
3,742
1,071
"""Set up the package interface.""" from .add_update import add, update from .show import show from .delete import delete
123
33
import webbrowser as web
from urllib.parse import quote_plus

from bs4 import BeautifulSoup

STARTING_URL = 'https://www.google.com/search?q='


def _build_search_url(phrase):
    """Return the Google search URL for *phrase*.

    Whitespace runs are collapsed to single '+' separators (matching the
    previous behavior), and quote_plus escapes URL-special characters
    ('&', '?', '+', non-ASCII, ...) that a plain join would pass through
    unescaped and silently corrupt the query.
    """
    return STARTING_URL + quote_plus(' '.join(phrase.split()))


def get_first_website(phrase):
    """Open a new browser tab showing the Google results for *phrase*."""
    web.open_new_tab(_build_search_url(phrase))
300
112
"""Test suite for the Talks Feedback microservice."""
54
16
from django.db import models
from django.contrib.auth.models import User
import json
# NOTE(review): the User import appears unused in this module — confirm it
# is not relied on elsewhere (e.g. migrations) before removing.


class Attribute(models.Model):
    """A named, typed attribute (column definition) shared across datasets."""

    # Machine name; unique across all attributes.
    name = models.CharField(max_length=200, unique=True)
    # Human-readable label for display.
    label = models.CharField(max_length=200)
    description = models.TextField()
    # Free-form type tag; presumably interpreted by consumers — TODO confirm.
    attribute_type = models.CharField(max_length=30)


class Record(models.Model):
    """A single data record; the payload is stored as opaque text."""

    data = models.TextField()


class Dataset(models.Model):
    """A titled collection of attributes and records, optionally derived
    from a parent dataset and soft-deletable via the ``deleted`` flag."""

    def toJSON(self):
        """Serialize this instance to a sorted, indented JSON string.

        NOTE(review): serializes via each object's ``__dict__``, which for a
        Django model instance includes internal attributes (e.g. ``_state``);
        verify the output is what callers expect.
        """
        return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)

    title = models.CharField(max_length=200)
    # NOTE(review): max_length on a TextField is not enforced at the
    # database level by Django (form-level only).
    description = models.TextField(max_length=200)
    # Soft-delete flag; rows are flagged rather than removed.
    deleted = models.BooleanField(default=False)

    # foreign keys
    # Optional dataset this one was derived from (self-referential).
    parent = models.ForeignKey('Dataset', on_delete=models.DO_NOTHING, null=True)
    attributes = models.ManyToManyField(Attribute)
    records = models.ManyToManyField(Record)
867
268
from .bigGay import bigGay


def setup(bot):
    """Extension entry point: attach the bigGay cog to *bot*."""
    bot.add_cog(bigGay(bot))
86
40
from ai_constants import *
import ai_generator


def get_tab_string(tabment):
    """Return the indentation for *tabment* levels (one tab per level)."""
    return '\t' * tabment


def express_node(cur_node, tabment=0):
    """Recursively render a parse-tree node as AI-script text.

    Branch order matters: FLOW values are matched before the generic
    FACT*/ACTION* node types.
    """
    children = cur_node.children

    if cur_node.type == 'DEFRULE':
        # A rule is its conditions, an arrow, then its actions.
        return ("(defrule\n"
                + express_node(children[0], tabment + 1)
                + "=>\n"
                + express_node(children[1], tabment + 1)
                + ")")

    if cur_node.type in ('CONDITIONS', 'ACTIONS'):
        # Concatenate each child's rendering at the current indent.
        return ''.join(str(express_node(child, tabment)) for child in children)

    if cur_node.value in FLOW:
        # Flow constructs open a parenthesized block and indent children.
        parts = [get_tab_string(tabment) + "(" + str(cur_node.value) + "\n"]
        for child in children:
            parts.append(express_node(child, tabment + 1))
        parts.append(get_tab_string(tabment) + ")\n")
        return ''.join(parts)

    if cur_node.type in ('FACT*', 'ACTION*'):
        # Facts/actions render inline: "(value arg arg ...)".
        parts = [get_tab_string(tabment) + "(" + str(cur_node.value)]
        for child in children:
            parts.append(" " + str(express_node(child, tabment)))
        parts.append(")\n")
        return ''.join(parts)

    # Leaf node: its value is the rendered text.
    return cur_node.value


def express_script(script_tree):
    """Render every rule in *script_tree*, separating rules by blank lines."""
    return ''.join(express_node(rule) + "\n\n" for rule in script_tree)


def write_script(script_tree, file_path):
    """Render *script_tree* and write it to *file_path*."""
    with open(file_path, 'w') as f:
        f.write(express_script(script_tree))
        f.flush()


if __name__ == '__main__':
    # generate and express a rule
    demo_rule = ai_generator.generate_rule()
    print(express_node(demo_rule))

    # generate and write a script
    demo_script = ai_generator.generate_script()
    write_script(demo_script, "random_script.per")
2,039
656
import numpy as np

np.set_printoptions(linewidth=400, threshold=100000)


def product(gen):
    """Return the product of (g + 1) over the items of *gen* (1 if empty)."""
    ans = 1
    for g in gen:
        ans *= g + 1
    return ans


def count_divs_pow(p):
    """Count entries divisible by 7 in the first 7**p rows of Pascal's triangle.

    Uses the triangle's mod-7 self-similarity: the first 7**p rows are 28
    scaled copies of the 7**(p-1)-row block plus 21 inverted triangular
    gaps whose 7**(p-1) * (7**(p-1) - 1) / 2 entries are all divisible.
    """
    if p == 0 or p == 1:
        # Rows 0..6: every C(n, k) with n < 7 is coprime to 7.
        return 0
    full_size = 7**(p-1) * (7**(p-1) - 1) // 2
    fulls = 21 * full_size
    smalls = 28 * count_divs_pow(p-1)
    return fulls + smalls


def base7(n):
    """Return the base-7 digits of *n*, least-significant first ([] for 0)."""
    digits = []
    while n > 0:
        digits.append(n % 7)
        n //= 7
    return digits


def num_not_divisible(i):
    """Count entries of row *i* of Pascal's triangle not divisible by 7.

    By Lucas' theorem this equals the product of (d + 1) over the base-7
    digits d of i.
    """
    return product(base7(i))


def pascal(n):
    """Print an n x n table of binomial coefficients reduced mod 7."""
    table = np.zeros((n, n))
    for x, y in np.ndindex(n, n):
        if x == 0 or y == 0:
            table[x, y] = 1
        else:
            table[x, y] = (table[x-1, y] + table[x, y-1]) % 7
    print(table)


def pascal_zeroes(n):
    """Tabulate, per row of the first *n* rows, the count of entries
    divisible by 7 and the change from the previous row's count."""
    # Imported lazily so the module can be imported without tabulate installed.
    from tabulate import tabulate
    row = [1]
    zeroes = [[0, 0, 0]]
    for i in range(1, n):
        row = [1] + [(a + b) % 7 for a, b in zip(row, row[1:])] + [1]
        count = len([r for r in row if r == 0])
        zeroes.append([i, count, count - zeroes[i-1][1]])
    return tabulate(zeroes, ['Row index', 'Count Zeros'])


def c(n):
    """Sum num_not_divisible over rows 0..n-1 (Project Euler 148 answer for n)."""
    ans = 0
    for i in range(n):
        ans += num_not_divisible(i)
        if i % 1000000 == 0:
            # Progress report for very large n.
            print(i, ans)
    return ans


if __name__ == '__main__':
    # Guarded so that importing this module no longer launches the
    # billion-row computation as a side effect.
    print(c(1000000000))
1,316
573
from django.conf import settings

if 'modeltranslation' in settings.INSTALLED_APPS:
    # translator / NotRegistered only exist when the app is installed.
    from modeltranslation.translator import translator, NotRegistered


def get_translated_fields(model):
    """Return the names of *model*'s translated fields, or [] when the
    model is not registered with django-modeltranslation.

    NOTE(review): if 'modeltranslation' is absent from INSTALLED_APPS the
    conditional import above is skipped and calling this raises NameError —
    presumably the function is never called in that configuration; confirm.
    """
    try:
        mto = translator.get_options_for_model(model)
    except NotRegistered:
        translated_fields = []
    else:
        translated_fields = mto.fields.keys()
    return translated_fields
440
127
# # PySNMP MIB module OSPF-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/OSPF-MIB # Produced by pysmi-0.3.4 at Wed May 1 11:18:45 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ValueRangeConstraint") InterfaceIndexOrZero, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndexOrZero") ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup") IpAddress, TimeTicks, MibIdentifier, iso, ObjectIdentity, Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, mib_2, Counter64, ModuleIdentity, Gauge32, Integer32, NotificationType, Unsigned32, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "TimeTicks", "MibIdentifier", "iso", "ObjectIdentity", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "mib-2", "Counter64", "ModuleIdentity", "Gauge32", "Integer32", "NotificationType", "Unsigned32", "Bits") TruthValue, RowStatus, TimeStamp, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "RowStatus", "TimeStamp", "TextualConvention", "DisplayString") ospf = ModuleIdentity((1, 3, 6, 1, 2, 1, 14)) ospf.setRevisions(('2006-11-10 00:00', '1995-01-20 12:25',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: ospf.setRevisionsDescriptions(('Updated for latest changes to OSPF Version 2: - updated the General Group 
with the new ospfRFC1583Compatibility, ospfReferenceBandwidth and ospfDiscontinuityTime objects - added graceful-restart-related objects - added stub-router-related objects - updated the Area Table with NSSA-related objects - added ospfAreaAggregateExtRouteTag object - added Opaque LSA-related objects - updates to the Compliances and Security sections - added area LSA counter table - added section describing translation of notification parameters between SNMP versions - added ospfComplianceObsolete to contain obsolete object groups - deprecated ospfExtLsdbTable See Appendix B of RFC 4750 for more details. This version published as part of RFC 4750', 'The initial SMIv2 revision of this MIB module, published in RFC 1850.',)) if mibBuilder.loadTexts: ospf.setLastUpdated('200611100000Z') if mibBuilder.loadTexts: ospf.setOrganization('IETF OSPF Working Group') if mibBuilder.loadTexts: ospf.setContactInfo('WG E-Mail: ospf@ietf.org WG Chairs: acee@cisco.com rohit@gmail.com Editors: Dan Joyal Nortel 600 Technology Park Drive Billerica, MA 01821 djoyal@nortel.com Piotr Galecki Airvana 19 Alpha Road Chelmsford, MA 01824 pgalecki@airvana.com Spencer Giacalone CSFB Eleven Madison Ave New York, NY 10010-3629 spencer.giacalone@gmail.com') if mibBuilder.loadTexts: ospf.setDescription('The MIB module to describe the OSPF Version 2 Protocol. Note that some objects in this MIB module may pose a significant security risk. Refer to the Security Considerations section in RFC 4750 for more information. Copyright (C) The IETF Trust (2006). This version of this MIB module is part of RFC 4750; see the RFC itself for full legal notices.') class AreaID(TextualConvention, IpAddress): description = 'An OSPF Area Identifier. Note that the Area ID, in OSPF, has the same format as an IP address, but has the function of defining a summarization point for link state advertisements.' status = 'current' class RouterID(TextualConvention, IpAddress): description = 'A OSPF Router Identifier. 
Note that the Router ID, in OSPF, has the same format as an IP address, but identifies the router independent of its IP address.' status = 'current' class Metric(TextualConvention, Integer32): description = 'The OSPF internal metric. Note that the OSPF metric is defined as an unsigned value in the range.' status = 'current' displayHint = 'd-0' subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 65535) class BigMetric(TextualConvention, Integer32): description = 'The OSPF external metric.' status = 'current' displayHint = 'd-0' subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 16777215) class Status(TextualConvention, Integer32): description = "An indication of the operability of an OSPF function or feature. For example, the status of an interface: 'enabled' indicates that it is willing to communicate with other OSPF routers, and 'disabled' indicates that it is not." status = 'current' subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2)) namedValues = NamedValues(("enabled", 1), ("disabled", 2)) class PositiveInteger(TextualConvention, Integer32): description = 'A positive integer. Values in excess are precluded as unnecessary and prone to interoperability issues.' status = 'current' displayHint = 'd-0' subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 2147483647) class HelloRange(TextualConvention, Integer32): description = 'The range of intervals in seconds on which Hello messages are exchanged.' status = 'current' displayHint = 'd-0' subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 65535) class UpToMaxAge(TextualConvention, Integer32): description = 'The values in seconds that one might find or configure for variables bounded by the maximum age of an LSA.' 
status = 'current' displayHint = 'd-0' subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 3600) class DesignatedRouterPriority(TextualConvention, Integer32): description = 'The range of values defined for the priority of a system for becoming the designated router.' status = 'current' displayHint = 'd-0' subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 255) class TOSType(TextualConvention, Integer32): description = 'Type of Service (TOS) is defined as a mapping to the IP Type of Service Flags as defined in the IP Forwarding Table MIB +-----+-----+-----+-----+-----+-----+-----+-----+ | | | | | PRECEDENCE | TYPE OF SERVICE | 0 | | | | | +-----+-----+-----+-----+-----+-----+-----+-----+ IP TOS IP TOS Field Policy Field Policy Contents Code Contents Code 0 0 0 0 ==> 0 0 0 0 1 ==> 2 0 0 1 0 ==> 4 0 0 1 1 ==> 6 0 1 0 0 ==> 8 0 1 0 1 ==> 10 0 1 1 0 ==> 12 0 1 1 1 ==> 14 1 0 0 0 ==> 16 1 0 0 1 ==> 18 1 0 1 0 ==> 20 1 0 1 1 ==> 22 1 1 0 0 ==> 24 1 1 0 1 ==> 26 1 1 1 0 ==> 28 1 1 1 1 ==> 30 The remaining values are left for future definition.' status = 'current' displayHint = 'd-0' subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 30) class OspfAuthenticationType(TextualConvention, Integer32): description = 'The authentication type.' status = 'current' subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2)) namedValues = NamedValues(("none", 0), ("simplePassword", 1), ("md5", 2)) ospfGeneralGroup = MibIdentifier((1, 3, 6, 1, 2, 1, 14, 1)) ospfRouterId = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 1), RouterID()).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfRouterId.setReference('OSPF Version 2, C.1 Global parameters') if mibBuilder.loadTexts: ospfRouterId.setStatus('current') if mibBuilder.loadTexts: ospfRouterId.setDescription("A 32-bit integer uniquely identifying the router in the Autonomous System. 
By convention, to ensure uniqueness, this should default to the value of one of the router's IP interface addresses. This object is persistent and when written the entity SHOULD save the change to non-volatile storage.") ospfAdminStat = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 2), Status()).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfAdminStat.setStatus('current') if mibBuilder.loadTexts: ospfAdminStat.setDescription("The administrative status of OSPF in the router. The value 'enabled' denotes that the OSPF Process is active on at least one interface; 'disabled' disables it on all interfaces. This object is persistent and when written the entity SHOULD save the change to non-volatile storage.") ospfVersionNumber = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2))).clone(namedValues=NamedValues(("version2", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVersionNumber.setReference('OSPF Version 2, Title') if mibBuilder.loadTexts: ospfVersionNumber.setStatus('current') if mibBuilder.loadTexts: ospfVersionNumber.setDescription('The current version number of the OSPF protocol is 2.') ospfAreaBdrRtrStatus = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 4), TruthValue()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAreaBdrRtrStatus.setReference('OSPF Version 2, Section 3 Splitting the AS into Areas') if mibBuilder.loadTexts: ospfAreaBdrRtrStatus.setStatus('current') if mibBuilder.loadTexts: ospfAreaBdrRtrStatus.setDescription('A flag to note whether this router is an Area Border Router.') ospfASBdrRtrStatus = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 5), TruthValue()).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfASBdrRtrStatus.setReference('OSPF Version 2, Section 3.3 Classification of routers') if mibBuilder.loadTexts: ospfASBdrRtrStatus.setStatus('current') if mibBuilder.loadTexts: ospfASBdrRtrStatus.setDescription('A flag to note whether this router is configured as an Autonomous 
System Border Router. This object is persistent and when written the entity SHOULD save the change to non-volatile storage.') ospfExternLsaCount = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 6), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfExternLsaCount.setReference('OSPF Version 2, Appendix A.4.5 AS external link advertisements') if mibBuilder.loadTexts: ospfExternLsaCount.setStatus('current') if mibBuilder.loadTexts: ospfExternLsaCount.setDescription('The number of external (LS type-5) link state advertisements in the link state database.') ospfExternLsaCksumSum = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 7), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfExternLsaCksumSum.setStatus('current') if mibBuilder.loadTexts: ospfExternLsaCksumSum.setDescription("The 32-bit sum of the LS checksums of the external link state advertisements contained in the link state database. This sum can be used to determine if there has been a change in a router's link state database and to compare the link state database of two routers. The value should be treated as unsigned when comparing two sums of checksums.") ospfTOSSupport = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 8), TruthValue()).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfTOSSupport.setReference('OSPF Version 2, Appendix F.1.2 Optional TOS support') if mibBuilder.loadTexts: ospfTOSSupport.setStatus('current') if mibBuilder.loadTexts: ospfTOSSupport.setDescription("The router's support for type-of-service routing. This object is persistent and when written the entity SHOULD save the change to non-volatile storage.") ospfOriginateNewLsas = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 9), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfOriginateNewLsas.setStatus('current') if mibBuilder.loadTexts: ospfOriginateNewLsas.setDescription('The number of new link state advertisements that have been originated. This number is incremented each time the router originates a new LSA. 
Discontinuities in the value of this counter can occur at re-initialization of the management system, and at other times as indicated by the value of ospfDiscontinuityTime.') ospfRxNewLsas = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 10), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfRxNewLsas.setStatus('current') if mibBuilder.loadTexts: ospfRxNewLsas.setDescription('The number of link state advertisements received that are determined to be new instantiations. This number does not include newer instantiations of self-originated link state advertisements. Discontinuities in the value of this counter can occur at re-initialization of the management system, and at other times as indicated by the value of ospfDiscontinuityTime.') ospfExtLsdbLimit = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 2147483647)).clone(-1)).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfExtLsdbLimit.setStatus('current') if mibBuilder.loadTexts: ospfExtLsdbLimit.setDescription("The maximum number of non-default AS-external LSAs entries that can be stored in the link state database. If the value is -1, then there is no limit. When the number of non-default AS-external LSAs in a router's link state database reaches ospfExtLsdbLimit, the router enters overflow state. The router never holds more than ospfExtLsdbLimit non-default AS-external LSAs in its database. OspfExtLsdbLimit MUST be set identically in all routers attached to the OSPF backbone and/or any regular OSPF area (i.e., OSPF stub areas and NSSAs are excluded). 
This object is persistent and when written the entity SHOULD save the change to non-volatile storage.") ospfMulticastExtensions = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 12), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfMulticastExtensions.setStatus('current') if mibBuilder.loadTexts: ospfMulticastExtensions.setDescription("A bit mask indicating whether the router is forwarding IP multicast (Class D) datagrams based on the algorithms defined in the multicast extensions to OSPF. Bit 0, if set, indicates that the router can forward IP multicast datagrams in the router's directly attached areas (called intra-area multicast routing). Bit 1, if set, indicates that the router can forward IP multicast datagrams between OSPF areas (called inter-area multicast routing). Bit 2, if set, indicates that the router can forward IP multicast datagrams between Autonomous Systems (called inter-AS multicast routing). Only certain combinations of bit settings are allowed, namely: 0 (no multicast forwarding is enabled), 1 (intra-area multicasting only), 3 (intra-area and inter-area multicasting), 5 (intra-area and inter-AS multicasting), and 7 (multicasting everywhere). By default, no multicast forwarding is enabled. This object is persistent and when written the entity SHOULD save the change to non-volatile storage.") ospfExitOverflowInterval = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 13), PositiveInteger()).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfExitOverflowInterval.setStatus('current') if mibBuilder.loadTexts: ospfExitOverflowInterval.setDescription('The number of seconds that, after entering OverflowState, a router will attempt to leave OverflowState. This allows the router to again originate non-default AS-external LSAs. When set to 0, the router will not leave overflow state until restarted. 
This object is persistent and when written the entity SHOULD save the change to non-volatile storage.') ospfDemandExtensions = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 14), TruthValue()).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfDemandExtensions.setReference('Extending OSPF to Support Demand Circuits') if mibBuilder.loadTexts: ospfDemandExtensions.setStatus('current') if mibBuilder.loadTexts: ospfDemandExtensions.setDescription("The router's support for demand routing. This object is persistent and when written the entity SHOULD save the change to non-volatile storage.") ospfRFC1583Compatibility = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 15), TruthValue()).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfRFC1583Compatibility.setReference('OSPF Version 2, Section 16.4.1 External path preferences') if mibBuilder.loadTexts: ospfRFC1583Compatibility.setStatus('current') if mibBuilder.loadTexts: ospfRFC1583Compatibility.setDescription('Indicates metrics used to choose among multiple AS-external LSAs. When RFC1583Compatibility is set to enabled, only cost will be used when choosing among multiple AS-external LSAs advertising the same destination. When RFC1583Compatibility is set to disabled, preference will be driven first by type of path using cost only to break ties. 
This object is persistent and when written the entity SHOULD save the change to non-volatile storage.') ospfOpaqueLsaSupport = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 16), TruthValue()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfOpaqueLsaSupport.setReference('The OSPF Opaque LSA Option') if mibBuilder.loadTexts: ospfOpaqueLsaSupport.setStatus('current') if mibBuilder.loadTexts: ospfOpaqueLsaSupport.setDescription("The router's support for Opaque LSA types.") ospfReferenceBandwidth = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 17), Unsigned32()).setUnits('kilobits per second').setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfReferenceBandwidth.setStatus('current') if mibBuilder.loadTexts: ospfReferenceBandwidth.setDescription('Reference bandwidth in kilobits/second for calculating default interface metrics. The default value is 100,000 KBPS (100 MBPS). This object is persistent and when written the entity SHOULD save the change to non-volatile storage.') ospfRestartSupport = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("plannedOnly", 2), ("plannedAndUnplanned", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfRestartSupport.setStatus('current') if mibBuilder.loadTexts: ospfRestartSupport.setDescription("The router's support for OSPF graceful restart. Options include: no restart support, only planned restarts, or both planned and unplanned restarts. This object is persistent and when written the entity SHOULD save the change to non-volatile storage.") ospfRestartInterval = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1800))).setUnits('seconds').setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfRestartInterval.setStatus('current') if mibBuilder.loadTexts: ospfRestartInterval.setDescription('Configured OSPF graceful restart timeout interval. 
This object is persistent and when written the entity SHOULD save the change to non-volatile storage.') ospfRestartStrictLsaChecking = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 20), TruthValue()).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfRestartStrictLsaChecking.setStatus('current') if mibBuilder.loadTexts: ospfRestartStrictLsaChecking.setDescription('Indicates if strict LSA checking is enabled for graceful restart. This object is persistent and when written the entity SHOULD save the change to non-volatile storage.') ospfRestartStatus = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("notRestarting", 1), ("plannedRestart", 2), ("unplannedRestart", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfRestartStatus.setStatus('current') if mibBuilder.loadTexts: ospfRestartStatus.setDescription('Current status of OSPF graceful restart.') ospfRestartAge = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 22), Unsigned32()).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ospfRestartAge.setStatus('current') if mibBuilder.loadTexts: ospfRestartAge.setDescription('Remaining time in current OSPF graceful restart interval.') ospfRestartExitReason = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("none", 1), ("inProgress", 2), ("completed", 3), ("timedOut", 4), ("topologyChanged", 5)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfRestartExitReason.setStatus('current') if mibBuilder.loadTexts: ospfRestartExitReason.setDescription("Describes the outcome of the last attempt at a graceful restart. If the value is 'none', no restart has yet been attempted. 
If the value is 'inProgress', a restart attempt is currently underway.") ospfAsLsaCount = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 24), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAsLsaCount.setStatus('current') if mibBuilder.loadTexts: ospfAsLsaCount.setDescription('The number of AS-scope link state advertisements in the AS-scope link state database.') ospfAsLsaCksumSum = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 25), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAsLsaCksumSum.setStatus('current') if mibBuilder.loadTexts: ospfAsLsaCksumSum.setDescription("The 32-bit unsigned sum of the LS checksums of the AS link state advertisements contained in the AS-scope link state database. This sum can be used to determine if there has been a change in a router's AS-scope link state database, and to compare the AS-scope link state database of two routers.") ospfStubRouterSupport = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 26), TruthValue()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfStubRouterSupport.setReference('OSPF Stub Router Advertisement') if mibBuilder.loadTexts: ospfStubRouterSupport.setStatus('current') if mibBuilder.loadTexts: ospfStubRouterSupport.setDescription("The router's support for stub router functionality.") ospfStubRouterAdvertisement = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("doNotAdvertise", 1), ("advertise", 2)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: ospfStubRouterAdvertisement.setStatus('current') if mibBuilder.loadTexts: ospfStubRouterAdvertisement.setDescription('This object controls the advertisement of stub router LSAs by the router. The value doNotAdvertise will result in the advertisement of a standard router LSA and is the default value. 
This object is persistent and when written the entity SHOULD save the change to non-volatile storage.') ospfDiscontinuityTime = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 28), TimeStamp()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfDiscontinuityTime.setStatus('current') if mibBuilder.loadTexts: ospfDiscontinuityTime.setDescription("The value of sysUpTime on the most recent occasion at which any one of this MIB's counters suffered a discontinuity. If no such discontinuities have occurred since the last re-initialization of the local management subsystem, then this object contains a zero value.") ospfAreaTable = MibTable((1, 3, 6, 1, 2, 1, 14, 2), ) if mibBuilder.loadTexts: ospfAreaTable.setReference('OSPF Version 2, Section 6 The Area Data Structure') if mibBuilder.loadTexts: ospfAreaTable.setStatus('current') if mibBuilder.loadTexts: ospfAreaTable.setDescription("Information describing the configured parameters and cumulative statistics of the router's attached areas. The interfaces and virtual links are configured as part of these areas. Area 0.0.0.0, by definition, is the backbone area.") ospfAreaEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 2, 1), ).setIndexNames((0, "OSPF-MIB", "ospfAreaId")) if mibBuilder.loadTexts: ospfAreaEntry.setStatus('current') if mibBuilder.loadTexts: ospfAreaEntry.setDescription("Information describing the configured parameters and cumulative statistics of one of the router's attached areas. The interfaces and virtual links are configured as part of these areas. Area 0.0.0.0, by definition, is the backbone area. 
Information in this table is persistent and when this object is written the entity SHOULD save the change to non-volatile storage.") ospfAreaId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 1), AreaID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAreaId.setReference('OSPF Version 2, Appendix C.2 Area parameters') if mibBuilder.loadTexts: ospfAreaId.setStatus('current') if mibBuilder.loadTexts: ospfAreaId.setDescription('A 32-bit integer uniquely identifying an area. Area ID 0.0.0.0 is used for the OSPF backbone.') ospfAuthType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 2), OspfAuthenticationType().clone('none')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfAuthType.setReference('OSPF Version 2, Appendix D Authentication') if mibBuilder.loadTexts: ospfAuthType.setStatus('obsolete') if mibBuilder.loadTexts: ospfAuthType.setDescription('The authentication type specified for an area.') ospfImportAsExtern = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("importExternal", 1), ("importNoExternal", 2), ("importNssa", 3))).clone('importExternal')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfImportAsExtern.setReference('OSPF Version 2, Appendix C.2 Area parameters') if mibBuilder.loadTexts: ospfImportAsExtern.setStatus('current') if mibBuilder.loadTexts: ospfImportAsExtern.setDescription('Indicates if an area is a stub area, NSSA, or standard area. Type-5 AS-external LSAs and type-11 Opaque LSAs are not imported into stub areas or NSSAs. NSSAs import AS-external data as type-7 LSAs') ospfSpfRuns = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 4), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfSpfRuns.setStatus('current') if mibBuilder.loadTexts: ospfSpfRuns.setDescription("The number of times that the intra-area route table has been calculated using this area's link state database. 
This is typically done using Dijkstra's algorithm. Discontinuities in the value of this counter can occur at re-initialization of the management system, and at other times as indicated by the value of ospfDiscontinuityTime.") ospfAreaBdrRtrCount = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 5), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAreaBdrRtrCount.setStatus('current') if mibBuilder.loadTexts: ospfAreaBdrRtrCount.setDescription('The total number of Area Border Routers reachable within this area. This is initially zero and is calculated in each Shortest Path First (SPF) pass.') ospfAsBdrRtrCount = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 6), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAsBdrRtrCount.setStatus('current') if mibBuilder.loadTexts: ospfAsBdrRtrCount.setDescription('The total number of Autonomous System Border Routers reachable within this area. This is initially zero and is calculated in each SPF pass.') ospfAreaLsaCount = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 7), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAreaLsaCount.setStatus('current') if mibBuilder.loadTexts: ospfAreaLsaCount.setDescription("The total number of link state advertisements in this area's link state database, excluding AS-external LSAs.") ospfAreaLsaCksumSum = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 8), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAreaLsaCksumSum.setStatus('current') if mibBuilder.loadTexts: ospfAreaLsaCksumSum.setDescription("The 32-bit sum of the link state advertisements' LS checksums contained in this area's link state database. This sum excludes external (LS type-5) link state advertisements. The sum can be used to determine if there has been a change in a router's link state database, and to compare the link state database of two routers. 
The value should be treated as unsigned when comparing two sums of checksums.") ospfAreaSummary = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("noAreaSummary", 1), ("sendAreaSummary", 2))).clone('noAreaSummary')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfAreaSummary.setStatus('current') if mibBuilder.loadTexts: ospfAreaSummary.setDescription('The variable ospfAreaSummary controls the import of summary LSAs into stub and NSSA areas. It has no effect on other areas. If it is noAreaSummary, the router will not originate summary LSAs into the stub or NSSA area. It will rely entirely on its default route. If it is sendAreaSummary, the router will both summarize and propagate summary LSAs.') ospfAreaStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 10), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfAreaStatus.setStatus('current') if mibBuilder.loadTexts: ospfAreaStatus.setDescription('This object permits management of the table by facilitating actions such as row creation, construction, and destruction. 
The value of this object has no effect on whether other objects in this conceptual row can be modified.') ospfAreaNssaTranslatorRole = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("always", 1), ("candidate", 2))).clone('candidate')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfAreaNssaTranslatorRole.setStatus('current') if mibBuilder.loadTexts: ospfAreaNssaTranslatorRole.setDescription("Indicates an NSSA border router's ability to perform NSSA translation of type-7 LSAs into type-5 LSAs.") ospfAreaNssaTranslatorState = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enabled", 1), ("elected", 2), ("disabled", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAreaNssaTranslatorState.setStatus('current') if mibBuilder.loadTexts: ospfAreaNssaTranslatorState.setDescription("Indicates if and how an NSSA border router is performing NSSA translation of type-7 LSAs into type-5 LSAs. When this object is set to enabled, the NSSA Border router's OspfAreaNssaExtTranslatorRole has been set to always. When this object is set to elected, a candidate NSSA Border router is Translating type-7 LSAs into type-5. 
When this object is set to disabled, a candidate NSSA border router is NOT translating type-7 LSAs into type-5.") ospfAreaNssaTranslatorStabilityInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 13), PositiveInteger().clone(40)).setUnits('seconds').setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfAreaNssaTranslatorStabilityInterval.setStatus('current') if mibBuilder.loadTexts: ospfAreaNssaTranslatorStabilityInterval.setDescription('The number of seconds after an elected translator determines its services are no longer required, that it should continue to perform its translation duties.') ospfAreaNssaTranslatorEvents = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 14), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAreaNssaTranslatorEvents.setStatus('current') if mibBuilder.loadTexts: ospfAreaNssaTranslatorEvents.setDescription('Indicates the number of translator state changes that have occurred since the last boot-up. Discontinuities in the value of this counter can occur at re-initialization of the management system, and at other times as indicated by the value of ospfDiscontinuityTime.') ospfStubAreaTable = MibTable((1, 3, 6, 1, 2, 1, 14, 3), ) if mibBuilder.loadTexts: ospfStubAreaTable.setReference('OSPF Version 2, Appendix C.2, Area Parameters') if mibBuilder.loadTexts: ospfStubAreaTable.setStatus('current') if mibBuilder.loadTexts: ospfStubAreaTable.setDescription('The set of metrics that will be advertised by a default Area Border Router into a stub area.') ospfStubAreaEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 3, 1), ).setIndexNames((0, "OSPF-MIB", "ospfStubAreaId"), (0, "OSPF-MIB", "ospfStubTOS")) if mibBuilder.loadTexts: ospfStubAreaEntry.setReference('OSPF Version 2, Appendix C.2, Area Parameters') if mibBuilder.loadTexts: ospfStubAreaEntry.setStatus('current') if mibBuilder.loadTexts: ospfStubAreaEntry.setDescription('The metric for a given Type of Service that will be advertised by a default Area Border Router into a stub 
area. Information in this table is persistent and when this object is written the entity SHOULD save the change to non-volatile storage.') ospfStubAreaId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 3, 1, 1), AreaID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfStubAreaId.setStatus('current') if mibBuilder.loadTexts: ospfStubAreaId.setDescription('The 32-bit identifier for the stub area. On creation, this can be derived from the instance.') ospfStubTOS = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 3, 1, 2), TOSType()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfStubTOS.setStatus('current') if mibBuilder.loadTexts: ospfStubTOS.setDescription('The Type of Service associated with the metric. On creation, this can be derived from the instance.') ospfStubMetric = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 3, 1, 3), BigMetric()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfStubMetric.setStatus('current') if mibBuilder.loadTexts: ospfStubMetric.setDescription('The metric value applied at the indicated Type of Service. By default, this equals the least metric at the Type of Service among the interfaces to other areas.') ospfStubStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 3, 1, 4), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfStubStatus.setStatus('current') if mibBuilder.loadTexts: ospfStubStatus.setDescription('This object permits management of the table by facilitating actions such as row creation, construction, and destruction. 
The value of this object has no effect on whether other objects in this conceptual row can be modified.') ospfStubMetricType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ospfMetric", 1), ("comparableCost", 2), ("nonComparable", 3))).clone('ospfMetric')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfStubMetricType.setStatus('current') if mibBuilder.loadTexts: ospfStubMetricType.setDescription('This variable displays the type of metric advertised as a default route.') ospfLsdbTable = MibTable((1, 3, 6, 1, 2, 1, 14, 4), ) if mibBuilder.loadTexts: ospfLsdbTable.setReference('OSPF Version 2, Section 12 Link State Advertisements') if mibBuilder.loadTexts: ospfLsdbTable.setStatus('current') if mibBuilder.loadTexts: ospfLsdbTable.setDescription("The OSPF Process's link state database (LSDB). The LSDB contains the link state advertisements from throughout the areas that the device is attached to.") ospfLsdbEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 4, 1), ).setIndexNames((0, "OSPF-MIB", "ospfLsdbAreaId"), (0, "OSPF-MIB", "ospfLsdbType"), (0, "OSPF-MIB", "ospfLsdbLsid"), (0, "OSPF-MIB", "ospfLsdbRouterId")) if mibBuilder.loadTexts: ospfLsdbEntry.setStatus('current') if mibBuilder.loadTexts: ospfLsdbEntry.setDescription('A single link state advertisement.') ospfLsdbAreaId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 1), AreaID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfLsdbAreaId.setReference('OSPF Version 2, Appendix C.2 Area parameters') if mibBuilder.loadTexts: ospfLsdbAreaId.setStatus('current') if mibBuilder.loadTexts: ospfLsdbAreaId.setDescription('The 32-bit identifier of the area from which the LSA was received.') ospfLsdbType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 10))).clone(namedValues=NamedValues(("routerLink", 1), 
("networkLink", 2), ("summaryLink", 3), ("asSummaryLink", 4), ("asExternalLink", 5), ("multicastLink", 6), ("nssaExternalLink", 7), ("areaOpaqueLink", 10)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfLsdbType.setReference('OSPF Version 2, Appendix A.4.1 The Link State Advertisement header') if mibBuilder.loadTexts: ospfLsdbType.setStatus('current') if mibBuilder.loadTexts: ospfLsdbType.setDescription('The type of the link state advertisement. Each link state type has a separate advertisement format. Note: External link state advertisements are permitted for backward compatibility, but should be displayed in the ospfAsLsdbTable rather than here.') ospfLsdbLsid = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 3), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfLsdbLsid.setReference('OSPF Version 2, Section 12.1.4 Link State ID') if mibBuilder.loadTexts: ospfLsdbLsid.setStatus('current') if mibBuilder.loadTexts: ospfLsdbLsid.setDescription('The Link State ID is an LS Type Specific field containing either a Router ID or an IP address; it identifies the piece of the routing domain that is being described by the advertisement.') ospfLsdbRouterId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 4), RouterID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfLsdbRouterId.setReference('OSPF Version 2, Appendix C.1 Global parameters') if mibBuilder.loadTexts: ospfLsdbRouterId.setStatus('current') if mibBuilder.loadTexts: ospfLsdbRouterId.setDescription('The 32-bit number that uniquely identifies the originating router in the Autonomous System.') ospfLsdbSequence = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 5), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfLsdbSequence.setReference('OSPF Version 2, Section 12.1.6 LS sequence number') if mibBuilder.loadTexts: ospfLsdbSequence.setStatus('current') if mibBuilder.loadTexts: ospfLsdbSequence.setDescription("The sequence number field is a signed 32-bit integer. 
It starts with the value '80000001'h, or -'7FFFFFFF'h, and increments until '7FFFFFFF'h. Thus, a typical sequence number will be very negative. It is used to detect old and duplicate Link State Advertisements. The space of sequence numbers is linearly ordered. The larger the sequence number, the more recent the advertisement.") ospfLsdbAge = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 6), Integer32()).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ospfLsdbAge.setReference('OSPF Version 2, Section 12.1.1 LS age') if mibBuilder.loadTexts: ospfLsdbAge.setStatus('current') if mibBuilder.loadTexts: ospfLsdbAge.setDescription('This field is the age of the link state advertisement in seconds.') ospfLsdbChecksum = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 7), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfLsdbChecksum.setReference('OSPF Version 2, Section 12.1.7 LS checksum') if mibBuilder.loadTexts: ospfLsdbChecksum.setStatus('current') if mibBuilder.loadTexts: ospfLsdbChecksum.setDescription("This field is the checksum of the complete contents of the advertisement, excepting the age field. The age field is excepted so that an advertisement's age can be incremented without updating the checksum. The checksum used is the same that is used for ISO connectionless datagrams; it is commonly referred to as the Fletcher checksum.") ospfLsdbAdvertisement = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfLsdbAdvertisement.setReference('OSPF Version 2, Section 12 Link State Advertisements') if mibBuilder.loadTexts: ospfLsdbAdvertisement.setStatus('current') if mibBuilder.loadTexts: ospfLsdbAdvertisement.setDescription('The entire link state advertisement, including its header. 
Note that for variable length LSAs, SNMP agents may not be able to return the largest string size.') ospfAreaRangeTable = MibTable((1, 3, 6, 1, 2, 1, 14, 5), ) if mibBuilder.loadTexts: ospfAreaRangeTable.setReference('OSPF Version 2, Appendix C.2 Area parameters') if mibBuilder.loadTexts: ospfAreaRangeTable.setStatus('obsolete') if mibBuilder.loadTexts: ospfAreaRangeTable.setDescription('The Address Range Table acts as an adjunct to the Area Table. It describes those Address Range Summaries that are configured to be propagated from an Area to reduce the amount of information about it that is known beyond its borders. It contains a set of IP address ranges specified by an IP address/IP network mask pair. For example, class B address range of X.X.X.X with a network mask of 255.255.0.0 includes all IP addresses from X.X.0.0 to X.X.255.255. Note that this table is obsoleted and is replaced by the Area Aggregate Table.') ospfAreaRangeEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 5, 1), ).setIndexNames((0, "OSPF-MIB", "ospfAreaRangeAreaId"), (0, "OSPF-MIB", "ospfAreaRangeNet")) if mibBuilder.loadTexts: ospfAreaRangeEntry.setReference('OSPF Version 2, Appendix C.2 Area parameters') if mibBuilder.loadTexts: ospfAreaRangeEntry.setStatus('obsolete') if mibBuilder.loadTexts: ospfAreaRangeEntry.setDescription('A single area address range. 
Information in this table is persistent and when this object is written the entity SHOULD save the change to non-volatile storage.') ospfAreaRangeAreaId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 5, 1, 1), AreaID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAreaRangeAreaId.setReference('OSPF Version 2, Appendix C.2 Area parameters') if mibBuilder.loadTexts: ospfAreaRangeAreaId.setStatus('obsolete') if mibBuilder.loadTexts: ospfAreaRangeAreaId.setDescription('The area that the address range is to be found within.') ospfAreaRangeNet = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 5, 1, 2), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAreaRangeNet.setReference('OSPF Version 2, Appendix C.2 Area parameters') if mibBuilder.loadTexts: ospfAreaRangeNet.setStatus('obsolete') if mibBuilder.loadTexts: ospfAreaRangeNet.setDescription('The IP address of the net or subnet indicated by the range.') ospfAreaRangeMask = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 5, 1, 3), IpAddress()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfAreaRangeMask.setReference('OSPF Version 2, Appendix C.2 Area parameters') if mibBuilder.loadTexts: ospfAreaRangeMask.setStatus('obsolete') if mibBuilder.loadTexts: ospfAreaRangeMask.setDescription('The subnet mask that pertains to the net or subnet.') ospfAreaRangeStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 5, 1, 4), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfAreaRangeStatus.setStatus('obsolete') if mibBuilder.loadTexts: ospfAreaRangeStatus.setDescription('This object permits management of the table by facilitating actions such as row creation, construction, and destruction. 
The value of this object has no effect on whether other objects in this conceptual row can be modified.') ospfAreaRangeEffect = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 5, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("advertiseMatching", 1), ("doNotAdvertiseMatching", 2))).clone('advertiseMatching')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfAreaRangeEffect.setStatus('obsolete') if mibBuilder.loadTexts: ospfAreaRangeEffect.setDescription("Subnets subsumed by ranges either trigger the advertisement of the indicated summary (advertiseMatching) or result in the subnet's not being advertised at all outside the area.") ospfHostTable = MibTable((1, 3, 6, 1, 2, 1, 14, 6), ) if mibBuilder.loadTexts: ospfHostTable.setReference('OSPF Version 2, Appendix C.7 Host route parameters') if mibBuilder.loadTexts: ospfHostTable.setStatus('current') if mibBuilder.loadTexts: ospfHostTable.setDescription('The Host/Metric Table indicates what hosts are directly attached to the router, what metrics and types of service should be advertised for them, and what areas they are found within.') ospfHostEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 6, 1), ).setIndexNames((0, "OSPF-MIB", "ospfHostIpAddress"), (0, "OSPF-MIB", "ospfHostTOS")) if mibBuilder.loadTexts: ospfHostEntry.setStatus('current') if mibBuilder.loadTexts: ospfHostEntry.setDescription('A metric to be advertised, for a given type of service, when a given host is reachable. 
Information in this table is persistent and when this object is written the entity SHOULD save the change to non-volatile storage.') ospfHostIpAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 6, 1, 1), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfHostIpAddress.setReference('OSPF Version 2, Appendix C.7 Host route parameters') if mibBuilder.loadTexts: ospfHostIpAddress.setStatus('current') if mibBuilder.loadTexts: ospfHostIpAddress.setDescription('The IP address of the host.') ospfHostTOS = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 6, 1, 2), TOSType()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfHostTOS.setReference('OSPF Version 2, Appendix C.7 Host route parameters') if mibBuilder.loadTexts: ospfHostTOS.setStatus('current') if mibBuilder.loadTexts: ospfHostTOS.setDescription('The Type of Service of the route being configured.') ospfHostMetric = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 6, 1, 3), Metric()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfHostMetric.setReference('OSPF Version 2, Appendix C.7 Host route parameters') if mibBuilder.loadTexts: ospfHostMetric.setStatus('current') if mibBuilder.loadTexts: ospfHostMetric.setDescription('The metric to be advertised.') ospfHostStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 6, 1, 4), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfHostStatus.setStatus('current') if mibBuilder.loadTexts: ospfHostStatus.setDescription('This object permits management of the table by facilitating actions such as row creation, construction, and destruction. 
The value of this object has no effect on whether other objects in this conceptual row can be modified.') ospfHostAreaID = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 6, 1, 5), AreaID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfHostAreaID.setReference('OSPF Version 2, Appendix C.7 Host parameters') if mibBuilder.loadTexts: ospfHostAreaID.setStatus('deprecated') if mibBuilder.loadTexts: ospfHostAreaID.setDescription('The OSPF area to which the host belongs. Deprecated by ospfHostCfgAreaID.') ospfHostCfgAreaID = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 6, 1, 6), AreaID()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfHostCfgAreaID.setReference('OSPF Version 2, Appendix C.7 Host parameters') if mibBuilder.loadTexts: ospfHostCfgAreaID.setStatus('current') if mibBuilder.loadTexts: ospfHostCfgAreaID.setDescription('To configure the OSPF area to which the host belongs.') ospfIfTable = MibTable((1, 3, 6, 1, 2, 1, 14, 7), ) if mibBuilder.loadTexts: ospfIfTable.setReference('OSPF Version 2, Appendix C.3 Router interface parameters') if mibBuilder.loadTexts: ospfIfTable.setStatus('current') if mibBuilder.loadTexts: ospfIfTable.setDescription('The OSPF Interface Table describes the interfaces from the viewpoint of OSPF. It augments the ipAddrTable with OSPF specific information.') ospfIfEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 7, 1), ).setIndexNames((0, "OSPF-MIB", "ospfIfIpAddress"), (0, "OSPF-MIB", "ospfAddressLessIf")) if mibBuilder.loadTexts: ospfIfEntry.setStatus('current') if mibBuilder.loadTexts: ospfIfEntry.setDescription('The OSPF interface entry describes one interface from the viewpoint of OSPF. 
Information in this table is persistent and when this object is written the entity SHOULD save the change to non-volatile storage.') ospfIfIpAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 1), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfIpAddress.setStatus('current') if mibBuilder.loadTexts: ospfIfIpAddress.setDescription('The IP address of this OSPF interface.') ospfAddressLessIf = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 2), InterfaceIndexOrZero()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfAddressLessIf.setStatus('current') if mibBuilder.loadTexts: ospfAddressLessIf.setDescription('For the purpose of easing the instancing of addressed and addressless interfaces; this variable takes the value 0 on interfaces with IP addresses and the corresponding value of ifIndex for interfaces having no IP address.') ospfIfAreaId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 3), AreaID().clone(hexValue="00000000")).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfAreaId.setStatus('current') if mibBuilder.loadTexts: ospfIfAreaId.setDescription('A 32-bit integer uniquely identifying the area to which the interface connects. Area ID 0.0.0.0 is used for the OSPF backbone.') ospfIfType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 5))).clone(namedValues=NamedValues(("broadcast", 1), ("nbma", 2), ("pointToPoint", 3), ("pointToMultipoint", 5)))).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfType.setStatus('current') if mibBuilder.loadTexts: ospfIfType.setDescription("The OSPF interface type. By way of a default, this field may be intuited from the corresponding value of ifType. 
Broadcast LANs, such as Ethernet and IEEE 802.5, take the value 'broadcast', X.25 and similar technologies take the value 'nbma', and links that are definitively point to point take the value 'pointToPoint'.") ospfIfAdminStat = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 5), Status().clone('enabled')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfAdminStat.setStatus('current') if mibBuilder.loadTexts: ospfIfAdminStat.setDescription("The OSPF interface's administrative status. The value formed on the interface, and the interface will be advertised as an internal route to some area. The value 'disabled' denotes that the interface is external to OSPF.") ospfIfRtrPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 6), DesignatedRouterPriority().clone(1)).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfRtrPriority.setStatus('current') if mibBuilder.loadTexts: ospfIfRtrPriority.setDescription('The priority of this interface. Used in multi-access networks, this field is used in the designated router election algorithm. The value 0 signifies that the router is not eligible to become the designated router on this particular network. In the event of a tie in this value, routers will use their Router ID as a tie breaker.') ospfIfTransitDelay = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 7), UpToMaxAge().clone(1)).setUnits('seconds').setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfTransitDelay.setStatus('current') if mibBuilder.loadTexts: ospfIfTransitDelay.setDescription('The estimated number of seconds it takes to transmit a link state update packet over this interface. 
Note that the minimal value SHOULD be 1 second.') ospfIfRetransInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 8), UpToMaxAge().clone(5)).setUnits('seconds').setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfRetransInterval.setStatus('current') if mibBuilder.loadTexts: ospfIfRetransInterval.setDescription('The number of seconds between link state advertisement retransmissions, for adjacencies belonging to this interface. This value is also used when retransmitting database description and Link State request packets. Note that minimal value SHOULD be 1 second.') ospfIfHelloInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 9), HelloRange().clone(10)).setUnits('seconds').setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfHelloInterval.setStatus('current') if mibBuilder.loadTexts: ospfIfHelloInterval.setDescription('The length of time, in seconds, between the Hello packets that the router sends on the interface. This value must be the same for all routers attached to a common network.') ospfIfRtrDeadInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 10), PositiveInteger().clone(40)).setUnits('seconds').setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfRtrDeadInterval.setStatus('current') if mibBuilder.loadTexts: ospfIfRtrDeadInterval.setDescription("The number of seconds that a router's Hello packets have not been seen before its neighbors declare the router down. This should be some multiple of the Hello interval. 
This value must be the same for all routers attached to a common network.") ospfIfPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 11), PositiveInteger().clone(120)).setUnits('seconds').setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfPollInterval.setStatus('current') if mibBuilder.loadTexts: ospfIfPollInterval.setDescription('The larger time interval, in seconds, between the Hello packets sent to an inactive non-broadcast multi-access neighbor.') ospfIfState = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("down", 1), ("loopback", 2), ("waiting", 3), ("pointToPoint", 4), ("designatedRouter", 5), ("backupDesignatedRouter", 6), ("otherDesignatedRouter", 7))).clone('down')).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfState.setStatus('current') if mibBuilder.loadTexts: ospfIfState.setDescription('The OSPF Interface State.') ospfIfDesignatedRouter = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 13), IpAddress().clone(hexValue="00000000")).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfDesignatedRouter.setStatus('current') if mibBuilder.loadTexts: ospfIfDesignatedRouter.setDescription('The IP address of the designated router.') ospfIfBackupDesignatedRouter = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 14), IpAddress().clone(hexValue="00000000")).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfBackupDesignatedRouter.setStatus('current') if mibBuilder.loadTexts: ospfIfBackupDesignatedRouter.setDescription('The IP address of the backup designated router.') ospfIfEvents = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 15), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfEvents.setStatus('current') if mibBuilder.loadTexts: ospfIfEvents.setDescription('The number of times this OSPF interface has changed its state or an error has occurred. 
Discontinuities in the value of this counter can occur at re-initialization of the management system, and at other times as indicated by the value of ospfDiscontinuityTime.') ospfIfAuthKey = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 16), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 256)).clone(hexValue="0000000000000000")).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfAuthKey.setReference('OSPF Version 2, Section 9 The Interface Data Structure') if mibBuilder.loadTexts: ospfIfAuthKey.setStatus('current') if mibBuilder.loadTexts: ospfIfAuthKey.setDescription('The cleartext password used as an OSPF authentication key when simplePassword security is enabled. This object does not access any OSPF cryptogaphic (e.g., MD5) authentication key under any circumstance. If the key length is shorter than 8 octets, the agent will left adjust and zero fill to 8 octets. Unauthenticated interfaces need no authentication key, and simple password authentication cannot use a key of more than 8 octets. Note that the use of simplePassword authentication is NOT recommended when there is concern regarding attack upon the OSPF system. SimplePassword authentication is only sufficient to protect against accidental misconfigurations because it re-uses cleartext passwords [RFC1704]. When read, ospfIfAuthKey always returns an octet string of length zero.') ospfIfStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 17), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfStatus.setStatus('current') if mibBuilder.loadTexts: ospfIfStatus.setDescription('This object permits management of the table by facilitating actions such as row creation, construction, and destruction. 
The value of this object has no effect on whether other objects in this conceptual row can be modified.') ospfIfMulticastForwarding = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("blocked", 1), ("multicast", 2), ("unicast", 3))).clone('blocked')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfMulticastForwarding.setStatus('current') if mibBuilder.loadTexts: ospfIfMulticastForwarding.setDescription('The way multicasts should be forwarded on this interface: not forwarded, forwarded as data link multicasts, or forwarded as data link unicasts. Data link multicasting is not meaningful on point-to-point and NBMA interfaces, and setting ospfMulticastForwarding to 0 effectively disables all multicast forwarding.') ospfIfDemand = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 19), TruthValue().clone('false')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfDemand.setStatus('current') if mibBuilder.loadTexts: ospfIfDemand.setDescription('Indicates whether Demand OSPF procedures (hello suppression to FULL neighbors and setting the DoNotAge flag on propagated LSAs) should be performed on this interface.') ospfIfAuthType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 20), OspfAuthenticationType().clone('none')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfAuthType.setReference('OSPF Version 2, Appendix D Authentication') if mibBuilder.loadTexts: ospfIfAuthType.setStatus('current') if mibBuilder.loadTexts: ospfIfAuthType.setDescription('The authentication type specified for an interface. 
Note that this object can be used to engage in significant attacks against an OSPF router.') ospfIfLsaCount = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 21), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfLsaCount.setStatus('current') if mibBuilder.loadTexts: ospfIfLsaCount.setDescription("The total number of link-local link state advertisements in this interface's link-local link state database.") ospfIfLsaCksumSum = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 22), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfLsaCksumSum.setStatus('current') if mibBuilder.loadTexts: ospfIfLsaCksumSum.setDescription("The 32-bit unsigned sum of the Link State Advertisements' LS checksums contained in this interface's link-local link state database. The sum can be used to determine if there has been a change in the interface's link state database and to compare the interface link state database of routers attached to the same subnet.") ospfIfDesignatedRouterId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 23), RouterID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfDesignatedRouterId.setStatus('current') if mibBuilder.loadTexts: ospfIfDesignatedRouterId.setDescription('The Router ID of the designated router.') ospfIfBackupDesignatedRouterId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 24), RouterID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfBackupDesignatedRouterId.setStatus('current') if mibBuilder.loadTexts: ospfIfBackupDesignatedRouterId.setDescription('The Router ID of the backup designated router.') ospfIfMetricTable = MibTable((1, 3, 6, 1, 2, 1, 14, 8), ) if mibBuilder.loadTexts: ospfIfMetricTable.setReference('OSPF Version 2, Appendix C.3 Router interface parameters') if mibBuilder.loadTexts: ospfIfMetricTable.setStatus('current') if mibBuilder.loadTexts: ospfIfMetricTable.setDescription('The Metric Table describes the metrics to be advertised for a specified interface at the various types of service. 
As such, this table is an adjunct of the OSPF Interface Table. Types of service, as defined by RFC 791, have the ability to request low delay, high bandwidth, or reliable linkage. For the purposes of this specification, the measure of bandwidth: Metric = referenceBandwidth / ifSpeed is the default value. The default reference bandwidth is 10^8. For multiple link interfaces, note that ifSpeed is the sum of the individual link speeds. This yields a number having the following typical values: Network Type/bit rate Metric >= 100 MBPS 1 Ethernet/802.3 10 E1 48 T1 (ESF) 65 64 KBPS 1562 56 KBPS 1785 19.2 KBPS 5208 9.6 KBPS 10416 Routes that are not specified use the default (TOS 0) metric. Note that the default reference bandwidth can be configured using the general group object ospfReferenceBandwidth.') ospfIfMetricEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 8, 1), ).setIndexNames((0, "OSPF-MIB", "ospfIfMetricIpAddress"), (0, "OSPF-MIB", "ospfIfMetricAddressLessIf"), (0, "OSPF-MIB", "ospfIfMetricTOS")) if mibBuilder.loadTexts: ospfIfMetricEntry.setReference('OSPF Version 2, Appendix C.3 Router interface parameters') if mibBuilder.loadTexts: ospfIfMetricEntry.setStatus('current') if mibBuilder.loadTexts: ospfIfMetricEntry.setDescription('A particular TOS metric for a non-virtual interface identified by the interface index. Information in this table is persistent and when this object is written the entity SHOULD save the change to non-volatile storage.') ospfIfMetricIpAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 8, 1, 1), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfMetricIpAddress.setStatus('current') if mibBuilder.loadTexts: ospfIfMetricIpAddress.setDescription('The IP address of this OSPF interface. 
On row creation, this can be derived from the instance.') ospfIfMetricAddressLessIf = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 8, 1, 2), InterfaceIndexOrZero()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfMetricAddressLessIf.setStatus('current') if mibBuilder.loadTexts: ospfIfMetricAddressLessIf.setDescription('For the purpose of easing the instancing of addressed and addressless interfaces; this variable takes the value 0 on interfaces with IP addresses and the value of ifIndex for interfaces having no IP address. On row creation, this can be derived from the instance.') ospfIfMetricTOS = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 8, 1, 3), TOSType()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfIfMetricTOS.setStatus('current') if mibBuilder.loadTexts: ospfIfMetricTOS.setDescription('The Type of Service metric being referenced. On row creation, this can be derived from the instance.') ospfIfMetricValue = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 8, 1, 4), Metric()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfMetricValue.setStatus('current') if mibBuilder.loadTexts: ospfIfMetricValue.setDescription('The metric of using this Type of Service on this interface. The default value of the TOS 0 metric is 10^8 / ifSpeed.') ospfIfMetricStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 8, 1, 5), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfIfMetricStatus.setStatus('current') if mibBuilder.loadTexts: ospfIfMetricStatus.setDescription('This object permits management of the table by facilitating actions such as row creation, construction, and destruction. 
The value of this object has no effect on whether other objects in this conceptual row can be modified.') ospfVirtIfTable = MibTable((1, 3, 6, 1, 2, 1, 14, 9), ) if mibBuilder.loadTexts: ospfVirtIfTable.setReference('OSPF Version 2, Appendix C.4 Virtual link parameters') if mibBuilder.loadTexts: ospfVirtIfTable.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfTable.setDescription("Information about this router's virtual interfaces that the OSPF Process is configured to carry on.") ospfVirtIfEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 9, 1), ).setIndexNames((0, "OSPF-MIB", "ospfVirtIfAreaId"), (0, "OSPF-MIB", "ospfVirtIfNeighbor")) if mibBuilder.loadTexts: ospfVirtIfEntry.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfEntry.setDescription('Information about a single virtual interface. Information in this table is persistent and when this object is written the entity SHOULD save the change to non-volatile storage.') ospfVirtIfAreaId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 1), AreaID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtIfAreaId.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfAreaId.setDescription('The transit area that the virtual link traverses. By definition, this is not 0.0.0.0.') ospfVirtIfNeighbor = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 2), RouterID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtIfNeighbor.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfNeighbor.setDescription('The Router ID of the virtual neighbor.') ospfVirtIfTransitDelay = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 3), UpToMaxAge().clone(1)).setUnits('seconds').setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfVirtIfTransitDelay.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfTransitDelay.setDescription('The estimated number of seconds it takes to transmit a Link State update packet over this interface. 
Note that the minimal value SHOULD be 1 second.') ospfVirtIfRetransInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 4), UpToMaxAge().clone(5)).setUnits('seconds').setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfVirtIfRetransInterval.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfRetransInterval.setDescription('The number of seconds between link state avertisement retransmissions, for adjacencies belonging to this interface. This value is also used when retransmitting database description and Link State request packets. This value should be well over the expected round-trip time. Note that the minimal value SHOULD be 1 second.') ospfVirtIfHelloInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 5), HelloRange().clone(10)).setUnits('seconds').setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfVirtIfHelloInterval.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfHelloInterval.setDescription('The length of time, in seconds, between the Hello packets that the router sends on the interface. This value must be the same for the virtual neighbor.') ospfVirtIfRtrDeadInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 6), PositiveInteger().clone(60)).setUnits('seconds').setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfVirtIfRtrDeadInterval.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfRtrDeadInterval.setDescription("The number of seconds that a router's Hello packets have not been seen before its neighbors declare the router down. This should be some multiple of the Hello interval. 
This value must be the same for the virtual neighbor.") ospfVirtIfState = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 4))).clone(namedValues=NamedValues(("down", 1), ("pointToPoint", 4))).clone('down')).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtIfState.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfState.setDescription('OSPF virtual interface states.') ospfVirtIfEvents = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 8), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtIfEvents.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfEvents.setDescription('The number of state changes or error events on this virtual link. Discontinuities in the value of this counter can occur at re-initialization of the management system, and at other times as indicated by the value of ospfDiscontinuityTime.') ospfVirtIfAuthKey = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 256)).clone(hexValue="0000000000000000")).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfVirtIfAuthKey.setReference('OSPF Version 2, Section 9 The Interface Data Structure') if mibBuilder.loadTexts: ospfVirtIfAuthKey.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfAuthKey.setDescription('The cleartext password used as an OSPF authentication key when simplePassword security is enabled. This object does not access any OSPF cryptogaphic (e.g., MD5) authentication key under any circumstance. If the key length is shorter than 8 octets, the agent will left adjust and zero fill to 8 octets. Unauthenticated interfaces need no authentication key, and simple password authentication cannot use a key of more than 8 octets. Note that the use of simplePassword authentication is NOT recommended when there is concern regarding attack upon the OSPF system. 
SimplePassword authentication is only sufficient to protect against accidental misconfigurations because it re-uses cleartext passwords. [RFC1704] When read, ospfIfAuthKey always returns an octet string of length zero.') ospfVirtIfStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 10), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfVirtIfStatus.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfStatus.setDescription('This object permits management of the table by facilitating actions such as row creation, construction, and destruction. The value of this object has no effect on whether other objects in this conceptual row can be modified.') ospfVirtIfAuthType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 11), OspfAuthenticationType().clone('none')).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfVirtIfAuthType.setReference('OSPF Version 2, Appendix E Authentication') if mibBuilder.loadTexts: ospfVirtIfAuthType.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfAuthType.setDescription('The authentication type specified for a virtual interface. Note that this object can be used to engage in significant attacks against an OSPF router.') ospfVirtIfLsaCount = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 12), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtIfLsaCount.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfLsaCount.setDescription("The total number of link-local link state advertisements in this virtual interface's link-local link state database.") ospfVirtIfLsaCksumSum = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 13), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtIfLsaCksumSum.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfLsaCksumSum.setDescription("The 32-bit unsigned sum of the link state advertisements' LS checksums contained in this virtual interface's link-local link state database. 
The sum can be used to determine if there has been a change in the virtual interface's link state database, and to compare the virtual interface link state database of the virtual neighbors.") ospfNbrTable = MibTable((1, 3, 6, 1, 2, 1, 14, 10), ) if mibBuilder.loadTexts: ospfNbrTable.setReference('OSPF Version 2, Section 10 The Neighbor Data Structure') if mibBuilder.loadTexts: ospfNbrTable.setStatus('current') if mibBuilder.loadTexts: ospfNbrTable.setDescription('A table describing all non-virtual neighbors in the locality of the OSPF router.') ospfNbrEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 10, 1), ).setIndexNames((0, "OSPF-MIB", "ospfNbrIpAddr"), (0, "OSPF-MIB", "ospfNbrAddressLessIndex")) if mibBuilder.loadTexts: ospfNbrEntry.setReference('OSPF Version 2, Section 10 The Neighbor Data Structure') if mibBuilder.loadTexts: ospfNbrEntry.setStatus('current') if mibBuilder.loadTexts: ospfNbrEntry.setDescription('The information regarding a single neighbor. Information in this table is persistent and when this object is written the entity SHOULD save the change to non-volatile storage.') ospfNbrIpAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 1), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbrIpAddr.setStatus('current') if mibBuilder.loadTexts: ospfNbrIpAddr.setDescription("The IP address this neighbor is using in its IP source address. Note that, on addressless links, this will not be 0.0.0.0 but the address of another of the neighbor's interfaces.") ospfNbrAddressLessIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 2), InterfaceIndexOrZero()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbrAddressLessIndex.setStatus('current') if mibBuilder.loadTexts: ospfNbrAddressLessIndex.setDescription('On an interface having an IP address, zero. On addressless interfaces, the corresponding value of ifIndex in the Internet Standard MIB. 
On row creation, this can be derived from the instance.') ospfNbrRtrId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 3), RouterID().clone(hexValue="00000000")).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbrRtrId.setStatus('current') if mibBuilder.loadTexts: ospfNbrRtrId.setDescription('A 32-bit integer (represented as a type IpAddress) uniquely identifying the neighboring router in the Autonomous System.') ospfNbrOptions = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbrOptions.setReference('OSPF Version 2, Section 12.1.2 Options') if mibBuilder.loadTexts: ospfNbrOptions.setStatus('current') if mibBuilder.loadTexts: ospfNbrOptions.setDescription("A bit mask corresponding to the neighbor's options field. Bit 0, if set, indicates that the system will operate on Type of Service metrics other than TOS 0. If zero, the neighbor will ignore all metrics except the TOS 0 metric. Bit 1, if set, indicates that the associated area accepts and operates on external information; if zero, it is a stub area. Bit 2, if set, indicates that the system is capable of routing IP multicast datagrams, that is that it implements the multicast extensions to OSPF. Bit 3, if set, indicates that the associated area is an NSSA. These areas are capable of carrying type-7 external advertisements, which are translated into type-5 external advertisements at NSSA borders.") ospfNbrPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 5), DesignatedRouterPriority().clone(1)).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfNbrPriority.setStatus('current') if mibBuilder.loadTexts: ospfNbrPriority.setDescription('The priority of this neighbor in the designated router election algorithm. 
The value 0 signifies that the neighbor is not eligible to become the designated router on this particular network.') ospfNbrState = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("down", 1), ("attempt", 2), ("init", 3), ("twoWay", 4), ("exchangeStart", 5), ("exchange", 6), ("loading", 7), ("full", 8))).clone('down')).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbrState.setReference('OSPF Version 2, Section 10.1 Neighbor States') if mibBuilder.loadTexts: ospfNbrState.setStatus('current') if mibBuilder.loadTexts: ospfNbrState.setDescription('The state of the relationship with this neighbor.') ospfNbrEvents = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 7), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbrEvents.setStatus('current') if mibBuilder.loadTexts: ospfNbrEvents.setDescription('The number of times this neighbor relationship has changed state or an error has occurred. Discontinuities in the value of this counter can occur at re-initialization of the management system, and at other times as indicated by the value of ospfDiscontinuityTime.') ospfNbrLsRetransQLen = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 8), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbrLsRetransQLen.setStatus('current') if mibBuilder.loadTexts: ospfNbrLsRetransQLen.setDescription('The current length of the retransmission queue.') ospfNbmaNbrStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 9), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: ospfNbmaNbrStatus.setStatus('current') if mibBuilder.loadTexts: ospfNbmaNbrStatus.setDescription('This object permits management of the table by facilitating actions such as row creation, construction, and destruction. 
The value of this object has no effect on whether other objects in this conceptual row can be modified.') ospfNbmaNbrPermanence = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("dynamic", 1), ("permanent", 2))).clone('permanent')).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbmaNbrPermanence.setStatus('current') if mibBuilder.loadTexts: ospfNbmaNbrPermanence.setDescription("This variable displays the status of the entry; 'dynamic' and 'permanent' refer to how the neighbor became known.") ospfNbrHelloSuppressed = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 11), TruthValue()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbrHelloSuppressed.setStatus('current') if mibBuilder.loadTexts: ospfNbrHelloSuppressed.setDescription('Indicates whether Hellos are being suppressed to the neighbor.') ospfNbrRestartHelperStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("notHelping", 1), ("helping", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbrRestartHelperStatus.setStatus('current') if mibBuilder.loadTexts: ospfNbrRestartHelperStatus.setDescription('Indicates whether the router is acting as a graceful restart helper for the neighbor.') ospfNbrRestartHelperAge = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 13), Unsigned32()).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbrRestartHelperAge.setStatus('current') if mibBuilder.loadTexts: ospfNbrRestartHelperAge.setDescription('Remaining time in current OSPF graceful restart interval, if the router is acting as a restart helper for the neighbor.') ospfNbrRestartHelperExitReason = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 
5))).clone(namedValues=NamedValues(("none", 1), ("inProgress", 2), ("completed", 3), ("timedOut", 4), ("topologyChanged", 5)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfNbrRestartHelperExitReason.setStatus('current') if mibBuilder.loadTexts: ospfNbrRestartHelperExitReason.setDescription('Describes the outcome of the last attempt at acting as a graceful restart helper for the neighbor.') ospfVirtNbrTable = MibTable((1, 3, 6, 1, 2, 1, 14, 11), ) if mibBuilder.loadTexts: ospfVirtNbrTable.setReference('OSPF Version 2, Section 15 Virtual Links') if mibBuilder.loadTexts: ospfVirtNbrTable.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrTable.setDescription('This table describes all virtual neighbors. Since virtual links are configured in the Virtual Interface Table, this table is read-only.') ospfVirtNbrEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 11, 1), ).setIndexNames((0, "OSPF-MIB", "ospfVirtNbrArea"), (0, "OSPF-MIB", "ospfVirtNbrRtrId")) if mibBuilder.loadTexts: ospfVirtNbrEntry.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrEntry.setDescription('Virtual neighbor information.') ospfVirtNbrArea = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 1), AreaID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtNbrArea.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrArea.setDescription('The Transit Area Identifier.') ospfVirtNbrRtrId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 2), RouterID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtNbrRtrId.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrRtrId.setDescription('A 32-bit integer uniquely identifying the neighboring router in the Autonomous System.') ospfVirtNbrIpAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 3), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtNbrIpAddr.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrIpAddr.setDescription('The IP address this virtual neighbor is using.') ospfVirtNbrOptions = 
MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtNbrOptions.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrOptions.setDescription("A bit mask corresponding to the neighbor's options field. Bit 1, if set, indicates that the system will operate on Type of Service metrics other than TOS 0. If zero, the neighbor will ignore all metrics except the TOS 0 metric. Bit 2, if set, indicates that the system is network multicast capable, i.e., that it implements OSPF multicast routing.") ospfVirtNbrState = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("down", 1), ("attempt", 2), ("init", 3), ("twoWay", 4), ("exchangeStart", 5), ("exchange", 6), ("loading", 7), ("full", 8)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtNbrState.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrState.setDescription('The state of the virtual neighbor relationship.') ospfVirtNbrEvents = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 6), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtNbrEvents.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrEvents.setDescription('The number of times this virtual link has changed its state or an error has occurred. 
Discontinuities in the value of this counter can occur at re-initialization of the management system, and at other times as indicated by the value of ospfDiscontinuityTime.') ospfVirtNbrLsRetransQLen = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 7), Gauge32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtNbrLsRetransQLen.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrLsRetransQLen.setDescription('The current length of the retransmission queue.') ospfVirtNbrHelloSuppressed = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 8), TruthValue()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtNbrHelloSuppressed.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrHelloSuppressed.setDescription('Indicates whether Hellos are being suppressed to the neighbor.') ospfVirtNbrRestartHelperStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("notHelping", 1), ("helping", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtNbrRestartHelperStatus.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrRestartHelperStatus.setDescription('Indicates whether the router is acting as a graceful restart helper for the neighbor.') ospfVirtNbrRestartHelperAge = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 10), Unsigned32()).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtNbrRestartHelperAge.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrRestartHelperAge.setDescription('Remaining time in current OSPF graceful restart interval, if the router is acting as a restart helper for the neighbor.') ospfVirtNbrRestartHelperExitReason = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("none", 1), ("inProgress", 2), ("completed", 3), ("timedOut", 4), ("topologyChanged", 
5)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfVirtNbrRestartHelperExitReason.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrRestartHelperExitReason.setDescription('Describes the outcome of the last attempt at acting as a graceful restart helper for the neighbor.') ospfExtLsdbTable = MibTable((1, 3, 6, 1, 2, 1, 14, 12), ) if mibBuilder.loadTexts: ospfExtLsdbTable.setReference('OSPF Version 2, Section 12 Link State Advertisements') if mibBuilder.loadTexts: ospfExtLsdbTable.setStatus('deprecated') if mibBuilder.loadTexts: ospfExtLsdbTable.setDescription("The OSPF Process's external LSA link state database. This table is identical to the OSPF LSDB Table in format, but contains only external link state advertisements. The purpose is to allow external LSAs to be displayed once for the router rather than once in each non-stub area. Note that external LSAs are also in the AS-scope link state database.") ospfExtLsdbEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 12, 1), ).setIndexNames((0, "OSPF-MIB", "ospfExtLsdbType"), (0, "OSPF-MIB", "ospfExtLsdbLsid"), (0, "OSPF-MIB", "ospfExtLsdbRouterId")) if mibBuilder.loadTexts: ospfExtLsdbEntry.setStatus('deprecated') if mibBuilder.loadTexts: ospfExtLsdbEntry.setDescription('A single link state advertisement.') ospfExtLsdbType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 12, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(5))).clone(namedValues=NamedValues(("asExternalLink", 5)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfExtLsdbType.setReference('OSPF Version 2, Appendix A.4.1 The Link State Advertisement header') if mibBuilder.loadTexts: ospfExtLsdbType.setStatus('deprecated') if mibBuilder.loadTexts: ospfExtLsdbType.setDescription('The type of the link state advertisement. 
Each link state type has a separate advertisement format.') ospfExtLsdbLsid = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 12, 1, 2), IpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfExtLsdbLsid.setReference('OSPF Version 2, Section 12.1.4 Link State ID') if mibBuilder.loadTexts: ospfExtLsdbLsid.setStatus('deprecated') if mibBuilder.loadTexts: ospfExtLsdbLsid.setDescription('The Link State ID is an LS Type Specific field containing either a Router ID or an IP address; it identifies the piece of the routing domain that is being described by the advertisement.') ospfExtLsdbRouterId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 12, 1, 3), RouterID()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfExtLsdbRouterId.setReference('OSPF Version 2, Appendix C.1 Global parameters') if mibBuilder.loadTexts: ospfExtLsdbRouterId.setStatus('deprecated') if mibBuilder.loadTexts: ospfExtLsdbRouterId.setDescription('The 32-bit number that uniquely identifies the originating router in the Autonomous System.') ospfExtLsdbSequence = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 12, 1, 4), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfExtLsdbSequence.setReference('OSPF Version 2, Section 12.1.6 LS sequence number') if mibBuilder.loadTexts: ospfExtLsdbSequence.setStatus('deprecated') if mibBuilder.loadTexts: ospfExtLsdbSequence.setDescription("The sequence number field is a signed 32-bit integer. It starts with the value '80000001'h, or -'7FFFFFFF'h, and increments until '7FFFFFFF'h. Thus, a typical sequence number will be very negative. It is used to detect old and duplicate link state advertisements. The space of sequence numbers is linearly ordered. 
The larger the sequence number, the more recent the advertisement.") ospfExtLsdbAge = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 12, 1, 5), Integer32()).setUnits('seconds').setMaxAccess("readonly") if mibBuilder.loadTexts: ospfExtLsdbAge.setReference('OSPF Version 2, Section 12.1.1 LS age') if mibBuilder.loadTexts: ospfExtLsdbAge.setStatus('deprecated') if mibBuilder.loadTexts: ospfExtLsdbAge.setDescription('This field is the age of the link state advertisement in seconds.') ospfExtLsdbChecksum = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 12, 1, 6), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: ospfExtLsdbChecksum.setReference('OSPF Version 2, Section 12.1.7 LS checksum') if mibBuilder.loadTexts: ospfExtLsdbChecksum.setStatus('deprecated') if mibBuilder.loadTexts: ospfExtLsdbChecksum.setDescription("This field is the checksum of the complete contents of the advertisement, excepting the age field. The age field is excepted so that an advertisement's age can be incremented without updating the checksum. 
The checksum used is the same that is used for ISO connectionless datagrams; it is commonly referred to as the Fletcher checksum.")
# NOTE(review): This module appears to be pysmi/pysnmp-generated OSPF-MIB code
# (OIDs under 1.3.6.1.2.1.14) -- confirm before hand-editing; regenerate from
# the MIB source instead where possible.
# --- ospfExtLsdbTable: remaining column (deprecated AS-external LSDB) ---
ospfExtLsdbAdvertisement = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 12, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(36, 36)).setFixedLength(36)).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfExtLsdbAdvertisement.setReference('OSPF Version 2, Section 12 Link State Advertisements')
if mibBuilder.loadTexts: ospfExtLsdbAdvertisement.setStatus('deprecated')
if mibBuilder.loadTexts: ospfExtLsdbAdvertisement.setDescription('The entire link state advertisement, including its header.')
# --- ospfRouteGroup: route-type identifier OIDs (1.3.6.1.2.1.14.13.*) ---
ospfRouteGroup = MibIdentifier((1, 3, 6, 1, 2, 1, 14, 13))
ospfIntraArea = MibIdentifier((1, 3, 6, 1, 2, 1, 14, 13, 1))
ospfInterArea = MibIdentifier((1, 3, 6, 1, 2, 1, 14, 13, 2))
ospfExternalType1 = MibIdentifier((1, 3, 6, 1, 2, 1, 14, 13, 3))
ospfExternalType2 = MibIdentifier((1, 3, 6, 1, 2, 1, 14, 13, 4))
# --- ospfAreaAggregateTable (1.3.6.1.2.1.14.14): address aggregates per area ---
ospfAreaAggregateTable = MibTable((1, 3, 6, 1, 2, 1, 14, 14), )
if mibBuilder.loadTexts: ospfAreaAggregateTable.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfAreaAggregateTable.setStatus('current')
if mibBuilder.loadTexts: ospfAreaAggregateTable.setDescription("The Area Aggregate Table acts as an adjunct to the Area Table. It describes those address aggregates that are configured to be propagated from an area. Its purpose is to reduce the amount of information that is known beyond an Area's borders. It contains a set of IP address ranges specified by an IP address/IP network mask pair. For example, a class B address range of X.X.X.X with a network mask of 255.255.0.0 includes all IP addresses from X.X.0.0 to X.X.255.255. Note that if ranges are configured such that one range subsumes another range (e.g., 10.0.0.0 mask 255.0.0.0 and 10.1.0.0 mask 255.255.0.0), the most specific match is the preferred one.")
# Row indexed by (AreaID, LsdbType, Net, Mask).
ospfAreaAggregateEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 14, 1), ).setIndexNames((0, "OSPF-MIB", "ospfAreaAggregateAreaID"), (0, "OSPF-MIB", "ospfAreaAggregateLsdbType"), (0, "OSPF-MIB", "ospfAreaAggregateNet"), (0, "OSPF-MIB", "ospfAreaAggregateMask"))
if mibBuilder.loadTexts: ospfAreaAggregateEntry.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfAreaAggregateEntry.setStatus('current')
if mibBuilder.loadTexts: ospfAreaAggregateEntry.setDescription('A single area aggregate entry. Information in this table is persistent and when this object is written the entity SHOULD save the change to non-volatile storage.')
ospfAreaAggregateAreaID = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 14, 1, 1), AreaID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAreaAggregateAreaID.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfAreaAggregateAreaID.setStatus('current')
if mibBuilder.loadTexts: ospfAreaAggregateAreaID.setDescription('The area within which the address aggregate is to be found.')
ospfAreaAggregateLsdbType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 14, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3, 7))).clone(namedValues=NamedValues(("summaryLink", 3), ("nssaExternalLink", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAreaAggregateLsdbType.setReference('OSPF Version 2, Appendix A.4.1 The Link State Advertisement header')
if mibBuilder.loadTexts: ospfAreaAggregateLsdbType.setStatus('current')
if mibBuilder.loadTexts: ospfAreaAggregateLsdbType.setDescription('The type of the address aggregate. This field specifies the Lsdb type that this address aggregate applies to.')
ospfAreaAggregateNet = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 14, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAreaAggregateNet.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfAreaAggregateNet.setStatus('current')
if mibBuilder.loadTexts: ospfAreaAggregateNet.setDescription('The IP address of the net or subnet indicated by the range.')
ospfAreaAggregateMask = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 14, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAreaAggregateMask.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfAreaAggregateMask.setStatus('current')
if mibBuilder.loadTexts: ospfAreaAggregateMask.setDescription('The subnet mask that pertains to the net or subnet.')
ospfAreaAggregateStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 14, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ospfAreaAggregateStatus.setStatus('current')
if mibBuilder.loadTexts: ospfAreaAggregateStatus.setDescription('This object permits management of the table by facilitating actions such as row creation, construction, and destruction. The value of this object has no effect on whether other objects in this conceptual row can be modified.')
# Default value 'advertiseMatching' is set by the second .clone() call.
ospfAreaAggregateEffect = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 14, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("advertiseMatching", 1), ("doNotAdvertiseMatching", 2))).clone('advertiseMatching')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ospfAreaAggregateEffect.setStatus('current')
if mibBuilder.loadTexts: ospfAreaAggregateEffect.setDescription("Subnets subsumed by ranges either trigger the advertisement of the indicated aggregate (advertiseMatching) or result in the subnet's not being advertised at all outside the area.")
ospfAreaAggregateExtRouteTag = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 14, 1, 7), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: ospfAreaAggregateExtRouteTag.setStatus('current')
if mibBuilder.loadTexts: ospfAreaAggregateExtRouteTag.setDescription('External route tag to be included in NSSA (type-7) LSAs.')
# --- ospfLocalLsdbTable (1.3.6.1.2.1.14.17): link-local (type-9) LSAs, non-virtual links ---
ospfLocalLsdbTable = MibTable((1, 3, 6, 1, 2, 1, 14, 17), )
if mibBuilder.loadTexts: ospfLocalLsdbTable.setReference('OSPF Version 2, Section 12 Link State Advertisements and The OSPF Opaque LSA Option')
if mibBuilder.loadTexts: ospfLocalLsdbTable.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbTable.setDescription("The OSPF Process's link-local link state database for non-virtual links. This table is identical to the OSPF LSDB Table in format, but contains only link-local Link State Advertisements for non-virtual links. The purpose is to allow link-local LSAs to be displayed for each non-virtual interface. This table is implemented to support type-9 LSAs that are defined in 'The OSPF Opaque LSA Option'.")
# Row indexed by (IpAddress, AddressLessIf, Type, Lsid, RouterId).
ospfLocalLsdbEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 17, 1), ).setIndexNames((0, "OSPF-MIB", "ospfLocalLsdbIpAddress"), (0, "OSPF-MIB", "ospfLocalLsdbAddressLessIf"), (0, "OSPF-MIB", "ospfLocalLsdbType"), (0, "OSPF-MIB", "ospfLocalLsdbLsid"), (0, "OSPF-MIB", "ospfLocalLsdbRouterId"))
if mibBuilder.loadTexts: ospfLocalLsdbEntry.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbEntry.setDescription('A single link state advertisement.')
ospfLocalLsdbIpAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 17, 1, 1), IpAddress())
if mibBuilder.loadTexts: ospfLocalLsdbIpAddress.setReference('OSPF Version 2, Appendix C.3 Interface parameters')
if mibBuilder.loadTexts: ospfLocalLsdbIpAddress.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbIpAddress.setDescription('The IP address of the interface from which the LSA was received if the interface is numbered.')
ospfLocalLsdbAddressLessIf = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 17, 1, 2), InterfaceIndexOrZero())
if mibBuilder.loadTexts: ospfLocalLsdbAddressLessIf.setReference('OSPF Version 2, Appendix C.3 Interface parameters')
if mibBuilder.loadTexts: ospfLocalLsdbAddressLessIf.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbAddressLessIf.setDescription('The interface index of the interface from which the LSA was received if the interface is unnumbered.')
ospfLocalLsdbType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 17, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(9))).clone(namedValues=NamedValues(("localOpaqueLink", 9))))
if mibBuilder.loadTexts: ospfLocalLsdbType.setReference('OSPF Version 2, Appendix A.4.1 The Link State Advertisement header')
if mibBuilder.loadTexts: ospfLocalLsdbType.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbType.setDescription('The type of the link state advertisement. Each link state type has a separate advertisement format.')
ospfLocalLsdbLsid = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 17, 1, 4), IpAddress())
if mibBuilder.loadTexts: ospfLocalLsdbLsid.setReference('OSPF Version 2, Section 12.1.4 Link State ID')
if mibBuilder.loadTexts: ospfLocalLsdbLsid.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbLsid.setDescription('The Link State ID is an LS Type Specific field containing a 32-bit identifier in IP address format; it identifies the piece of the routing domain that is being described by the advertisement.')
ospfLocalLsdbRouterId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 17, 1, 5), RouterID())
if mibBuilder.loadTexts: ospfLocalLsdbRouterId.setReference('OSPF Version 2, Appendix C.1 Global parameters')
if mibBuilder.loadTexts: ospfLocalLsdbRouterId.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbRouterId.setDescription('The 32-bit number that uniquely identifies the originating router in the Autonomous System.')
ospfLocalLsdbSequence = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 17, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLocalLsdbSequence.setReference('OSPF Version 2, Section 12.1.6 LS sequence number')
if mibBuilder.loadTexts: ospfLocalLsdbSequence.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbSequence.setDescription("The sequence number field is a signed 32-bit integer. It starts with the value '80000001'h, or -'7FFFFFFF'h, and increments until '7FFFFFFF'h. Thus, a typical sequence number will be very negative. It is used to detect old and duplicate link state advertisements. The space of sequence numbers is linearly ordered. The larger the sequence number, the more recent the advertisement.")
ospfLocalLsdbAge = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 17, 1, 7), Integer32()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLocalLsdbAge.setReference('OSPF Version 2, Section 12.1.1 LS age')
if mibBuilder.loadTexts: ospfLocalLsdbAge.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbAge.setDescription('This field is the age of the link state advertisement in seconds.')
ospfLocalLsdbChecksum = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 17, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLocalLsdbChecksum.setReference('OSPF Version 2, Section 12.1.7 LS checksum')
if mibBuilder.loadTexts: ospfLocalLsdbChecksum.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbChecksum.setDescription("This field is the checksum of the complete contents of the advertisement, excepting the age field. The age field is excepted so that an advertisement's age can be incremented without updating the checksum. The checksum used is the same that is used for ISO connectionless datagrams; it is commonly referred to as the Fletcher checksum.")
ospfLocalLsdbAdvertisement = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 17, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLocalLsdbAdvertisement.setReference('OSPF Version 2, Section 12 Link State Advertisements')
if mibBuilder.loadTexts: ospfLocalLsdbAdvertisement.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbAdvertisement.setDescription('The entire link state advertisement, including its header. Note that for variable length LSAs, SNMP agents may not be able to return the largest string size.')
# --- ospfVirtLocalLsdbTable (1.3.6.1.2.1.14.18): link-local (type-9) LSAs, virtual links ---
ospfVirtLocalLsdbTable = MibTable((1, 3, 6, 1, 2, 1, 14, 18), )
if mibBuilder.loadTexts: ospfVirtLocalLsdbTable.setReference('OSPF Version 2, Section 12 Link State Advertisements and The OSPF Opaque LSA Option')
if mibBuilder.loadTexts: ospfVirtLocalLsdbTable.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbTable.setDescription("The OSPF Process's link-local link state database for virtual links. This table is identical to the OSPF LSDB Table in format, but contains only link-local Link State Advertisements for virtual links. The purpose is to allow link-local LSAs to be displayed for each virtual interface. This table is implemented to support type-9 LSAs that are defined in 'The OSPF Opaque LSA Option'.")
# Row indexed by (TransitArea, Neighbor, Type, Lsid, RouterId).
ospfVirtLocalLsdbEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 18, 1), ).setIndexNames((0, "OSPF-MIB", "ospfVirtLocalLsdbTransitArea"), (0, "OSPF-MIB", "ospfVirtLocalLsdbNeighbor"), (0, "OSPF-MIB", "ospfVirtLocalLsdbType"), (0, "OSPF-MIB", "ospfVirtLocalLsdbLsid"), (0, "OSPF-MIB", "ospfVirtLocalLsdbRouterId"))
if mibBuilder.loadTexts: ospfVirtLocalLsdbEntry.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbEntry.setDescription('A single link state advertisement.')
ospfVirtLocalLsdbTransitArea = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 18, 1, 1), AreaID())
if mibBuilder.loadTexts: ospfVirtLocalLsdbTransitArea.setReference('OSPF Version 2, Appendix C.3 Interface parameters')
if mibBuilder.loadTexts: ospfVirtLocalLsdbTransitArea.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbTransitArea.setDescription('The transit area that the virtual link traverses. By definition, this is not 0.0.0.0.')
ospfVirtLocalLsdbNeighbor = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 18, 1, 2), RouterID())
if mibBuilder.loadTexts: ospfVirtLocalLsdbNeighbor.setReference('OSPF Version 2, Appendix C.3 Interface parameters')
if mibBuilder.loadTexts: ospfVirtLocalLsdbNeighbor.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbNeighbor.setDescription('The Router ID of the virtual neighbor.')
ospfVirtLocalLsdbType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 18, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(9))).clone(namedValues=NamedValues(("localOpaqueLink", 9))))
if mibBuilder.loadTexts: ospfVirtLocalLsdbType.setReference('OSPF Version 2, Appendix A.4.1 The Link State Advertisement header')
if mibBuilder.loadTexts: ospfVirtLocalLsdbType.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbType.setDescription('The type of the link state advertisement. Each link state type has a separate advertisement format.')
ospfVirtLocalLsdbLsid = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 18, 1, 4), IpAddress())
if mibBuilder.loadTexts: ospfVirtLocalLsdbLsid.setReference('OSPF Version 2, Section 12.1.4 Link State ID')
if mibBuilder.loadTexts: ospfVirtLocalLsdbLsid.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbLsid.setDescription('The Link State ID is an LS Type Specific field containing a 32-bit identifier in IP address format; it identifies the piece of the routing domain that is being described by the advertisement.')
ospfVirtLocalLsdbRouterId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 18, 1, 5), RouterID())
if mibBuilder.loadTexts: ospfVirtLocalLsdbRouterId.setReference('OSPF Version 2, Appendix C.1 Global parameters')
if mibBuilder.loadTexts: ospfVirtLocalLsdbRouterId.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbRouterId.setDescription('The 32-bit number that uniquely identifies the originating router in the Autonomous System.')
ospfVirtLocalLsdbSequence = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 18, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtLocalLsdbSequence.setReference('OSPF Version 2, Section 12.1.6 LS sequence number')
if mibBuilder.loadTexts: ospfVirtLocalLsdbSequence.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbSequence.setDescription("The sequence number field is a signed 32-bit integer. It starts with the value '80000001'h, or -'7FFFFFFF'h, and increments until '7FFFFFFF'h. Thus, a typical sequence number will be very negative. It is used to detect old and duplicate link state advertisements. The space of sequence numbers is linearly ordered. The larger the sequence number, the more recent the advertisement.")
ospfVirtLocalLsdbAge = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 18, 1, 7), Integer32()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtLocalLsdbAge.setReference('OSPF Version 2, Section 12.1.1 LS age')
if mibBuilder.loadTexts: ospfVirtLocalLsdbAge.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbAge.setDescription('This field is the age of the link state advertisement in seconds.')
ospfVirtLocalLsdbChecksum = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 18, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtLocalLsdbChecksum.setReference('OSPF Version 2, Section 12.1.7 LS checksum')
if mibBuilder.loadTexts: ospfVirtLocalLsdbChecksum.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbChecksum.setDescription("This field is the checksum of the complete contents of the advertisement, excepting the age field. The age field is excepted so that an advertisement's age can be incremented without updating the checksum. The checksum used is the same that is used for ISO connectionless datagrams; it is commonly referred to as the Fletcher checksum.")
ospfVirtLocalLsdbAdvertisement = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 18, 1, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtLocalLsdbAdvertisement.setReference('OSPF Version 2, Section 12 Link State Advertisements. Note that for variable length LSAs, SNMP agents may not be able to return the largest string size.')
if mibBuilder.loadTexts: ospfVirtLocalLsdbAdvertisement.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbAdvertisement.setDescription('The entire link state advertisement, including its header.')
# --- ospfAsLsdbTable (1.3.6.1.2.1.14.19): AS-scope LSAs (types 5 and 11) ---
ospfAsLsdbTable = MibTable((1, 3, 6, 1, 2, 1, 14, 19), )
if mibBuilder.loadTexts: ospfAsLsdbTable.setReference('OSPF Version 2, Section 12 Link State Advertisements')
if mibBuilder.loadTexts: ospfAsLsdbTable.setStatus('current')
if mibBuilder.loadTexts: ospfAsLsdbTable.setDescription("The OSPF Process's AS-scope LSA link state database. The database contains the AS-scope Link State Advertisements from throughout the areas that the device is attached to. This table is identical to the OSPF LSDB Table in format, but contains only AS-scope Link State Advertisements. The purpose is to allow AS-scope LSAs to be displayed once for the router rather than once in each non-stub area.")
# Row indexed by (Type, Lsid, RouterId).
ospfAsLsdbEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 19, 1), ).setIndexNames((0, "OSPF-MIB", "ospfAsLsdbType"), (0, "OSPF-MIB", "ospfAsLsdbLsid"), (0, "OSPF-MIB", "ospfAsLsdbRouterId"))
if mibBuilder.loadTexts: ospfAsLsdbEntry.setStatus('current')
if mibBuilder.loadTexts: ospfAsLsdbEntry.setDescription('A single link state advertisement.')
ospfAsLsdbType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 19, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(5, 11))).clone(namedValues=NamedValues(("asExternalLink", 5), ("asOpaqueLink", 11))))
if mibBuilder.loadTexts: ospfAsLsdbType.setReference('OSPF Version 2, Appendix A.4.1 The Link State Advertisement header')
if mibBuilder.loadTexts: ospfAsLsdbType.setStatus('current')
if mibBuilder.loadTexts: ospfAsLsdbType.setDescription('The type of the link state advertisement. Each link state type has a separate advertisement format.')
ospfAsLsdbLsid = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 19, 1, 2), IpAddress())
if mibBuilder.loadTexts: ospfAsLsdbLsid.setReference('OSPF Version 2, Section 12.1.4 Link State ID')
if mibBuilder.loadTexts: ospfAsLsdbLsid.setStatus('current')
if mibBuilder.loadTexts: ospfAsLsdbLsid.setDescription('The Link State ID is an LS Type Specific field containing either a Router ID or an IP address; it identifies the piece of the routing domain that is being described by the advertisement.')
ospfAsLsdbRouterId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 19, 1, 3), RouterID())
if mibBuilder.loadTexts: ospfAsLsdbRouterId.setReference('OSPF Version 2, Appendix C.1 Global parameters')
if mibBuilder.loadTexts: ospfAsLsdbRouterId.setStatus('current')
if mibBuilder.loadTexts: ospfAsLsdbRouterId.setDescription('The 32-bit number that uniquely identifies the originating router in the Autonomous System.')
ospfAsLsdbSequence = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 19, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAsLsdbSequence.setReference('OSPF Version 2, Section 12.1.6 LS sequence number')
if mibBuilder.loadTexts: ospfAsLsdbSequence.setStatus('current')
if mibBuilder.loadTexts: ospfAsLsdbSequence.setDescription("The sequence number field is a signed 32-bit integer. It starts with the value '80000001'h, or -'7FFFFFFF'h, and increments until '7FFFFFFF'h. Thus, a typical sequence number will be very negative. It is used to detect old and duplicate link state advertisements. The space of sequence numbers is linearly ordered. The larger the sequence number, the more recent the advertisement.")
ospfAsLsdbAge = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 19, 1, 5), Integer32()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAsLsdbAge.setReference('OSPF Version 2, Section 12.1.1 LS age')
if mibBuilder.loadTexts: ospfAsLsdbAge.setStatus('current')
if mibBuilder.loadTexts: ospfAsLsdbAge.setDescription('This field is the age of the link state advertisement in seconds.')
ospfAsLsdbChecksum = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 19, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAsLsdbChecksum.setReference('OSPF Version 2, Section 12.1.7 LS checksum')
if mibBuilder.loadTexts: ospfAsLsdbChecksum.setStatus('current')
if mibBuilder.loadTexts: ospfAsLsdbChecksum.setDescription("This field is the checksum of the complete contents of the advertisement, excepting the age field. The age field is excepted so that an advertisement's age can be incremented without updating the checksum. The checksum used is the same that is used for ISO connectionless datagrams; it is commonly referred to as the Fletcher checksum.")
ospfAsLsdbAdvertisement = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 19, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAsLsdbAdvertisement.setReference('OSPF Version 2, Section 12 Link State Advertisements. Note that for variable length LSAs, SNMP agents may not be able to return the largest string size.')
if mibBuilder.loadTexts: ospfAsLsdbAdvertisement.setStatus('current')
if mibBuilder.loadTexts: ospfAsLsdbAdvertisement.setDescription('The entire link state advertisement, including its header.')
# --- ospfAreaLsaCountTable (1.3.6.1.2.1.14.20): per-area, per-LSA-type counters ---
ospfAreaLsaCountTable = MibTable((1, 3, 6, 1, 2, 1, 14, 20), )
if mibBuilder.loadTexts: ospfAreaLsaCountTable.setStatus('current')
if mibBuilder.loadTexts: ospfAreaLsaCountTable.setDescription('This table maintains per-area, per-LSA-type counters')
ospfAreaLsaCountEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 20, 1), ).setIndexNames((0, "OSPF-MIB", "ospfAreaLsaCountAreaId"), (0, "OSPF-MIB", "ospfAreaLsaCountLsaType"))
if mibBuilder.loadTexts: ospfAreaLsaCountEntry.setStatus('current')
if mibBuilder.loadTexts: ospfAreaLsaCountEntry.setDescription('An entry with a number of link advertisements of a given type for a given area.')
ospfAreaLsaCountAreaId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 20, 1, 1), AreaID())
if mibBuilder.loadTexts: ospfAreaLsaCountAreaId.setStatus('current')
if mibBuilder.loadTexts: ospfAreaLsaCountAreaId.setDescription('This entry Area ID.')
ospfAreaLsaCountLsaType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 6, 7, 10))).clone(namedValues=NamedValues(("routerLink", 1), ("networkLink", 2), ("summaryLink", 3), ("asSummaryLink", 4), ("multicastLink", 6), ("nssaExternalLink", 7), ("areaOpaqueLink", 10))))
if mibBuilder.loadTexts: ospfAreaLsaCountLsaType.setStatus('current')
if mibBuilder.loadTexts: ospfAreaLsaCountLsaType.setDescription('This entry LSA type.')
ospfAreaLsaCountNumber = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 20, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAreaLsaCountNumber.setStatus('current')
if mibBuilder.loadTexts: ospfAreaLsaCountNumber.setDescription('Number of LSAs of a given type for a given area.')
# --- Conformance information (1.3.6.1.2.1.14.15) ---
ospfConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 14, 15))
ospfGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 14, 15, 1))
ospfCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 14, 15, 2))
# NOTE: the getattr(mibBuilder, 'version', ...) guards call setStatus() only on
# pysnmp builders newer than 4.4.0; older builders skip the status annotation.
ospfCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 14, 15, 2, 1)).setObjects(("OSPF-MIB", "ospfBasicGroup"), ("OSPF-MIB", "ospfAreaGroup"), ("OSPF-MIB", "ospfStubAreaGroup"), ("OSPF-MIB", "ospfIfGroup"), ("OSPF-MIB", "ospfIfMetricGroup"), ("OSPF-MIB", "ospfVirtIfGroup"), ("OSPF-MIB", "ospfNbrGroup"), ("OSPF-MIB", "ospfVirtNbrGroup"), ("OSPF-MIB", "ospfAreaAggregateGroup"), ("OSPF-MIB", "ospfHostGroup"), ("OSPF-MIB", "ospfLsdbGroup"), ("OSPF-MIB", "ospfExtLsdbGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfCompliance = ospfCompliance.setStatus('deprecated')
if mibBuilder.loadTexts: ospfCompliance.setDescription('The compliance statement for OSPF systems conforming to RFC 1850.')
ospfCompliance2 = ModuleCompliance((1, 3, 6, 1, 2, 1, 14, 15, 2, 2)).setObjects(("OSPF-MIB", "ospfBasicGroup2"), ("OSPF-MIB", "ospfAreaGroup2"), ("OSPF-MIB", "ospfStubAreaGroup"), ("OSPF-MIB", "ospfIfGroup2"), ("OSPF-MIB", "ospfIfMetricGroup"), ("OSPF-MIB", "ospfVirtIfGroup2"), ("OSPF-MIB", "ospfNbrGroup2"), ("OSPF-MIB", "ospfVirtNbrGroup2"), ("OSPF-MIB", "ospfAreaAggregateGroup2"), ("OSPF-MIB", "ospfHostGroup2"), ("OSPF-MIB", "ospfLsdbGroup"), ("OSPF-MIB", "ospfAsLsdbGroup"), ("OSPF-MIB", "ospfLocalLsdbGroup"), ("OSPF-MIB", "ospfVirtLocalLsdbGroup"), ("OSPF-MIB", "ospfAreaLsaCountGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfCompliance2 = ospfCompliance2.setStatus('current')
if mibBuilder.loadTexts: ospfCompliance2.setDescription('The compliance statement.')
ospfComplianceObsolete = ModuleCompliance((1, 3, 6, 1, 2, 1, 14, 15, 2, 3)).setObjects(("OSPF-MIB", "ospfAreaRangeGroup"), ("OSPF-MIB", "ospfObsoleteGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfComplianceObsolete = ospfComplianceObsolete.setStatus('obsolete')
if mibBuilder.loadTexts: ospfComplianceObsolete.setDescription('Contains obsolete object groups.')
# --- Object groups ---
ospfBasicGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 1)).setObjects(("OSPF-MIB", "ospfRouterId"), ("OSPF-MIB", "ospfAdminStat"), ("OSPF-MIB", "ospfVersionNumber"), ("OSPF-MIB", "ospfAreaBdrRtrStatus"), ("OSPF-MIB", "ospfASBdrRtrStatus"), ("OSPF-MIB", "ospfExternLsaCount"), ("OSPF-MIB", "ospfExternLsaCksumSum"), ("OSPF-MIB", "ospfTOSSupport"), ("OSPF-MIB", "ospfOriginateNewLsas"), ("OSPF-MIB", "ospfRxNewLsas"), ("OSPF-MIB", "ospfExtLsdbLimit"), ("OSPF-MIB", "ospfMulticastExtensions"), ("OSPF-MIB", "ospfExitOverflowInterval"), ("OSPF-MIB", "ospfDemandExtensions"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfBasicGroup = ospfBasicGroup.setStatus('deprecated')
if mibBuilder.loadTexts: ospfBasicGroup.setDescription('These objects are used to monitor/manage global OSPF parameters. This object group conforms to RFC 1850.')
ospfAreaGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 2)).setObjects(("OSPF-MIB", "ospfAreaId"), ("OSPF-MIB", "ospfImportAsExtern"), ("OSPF-MIB", "ospfSpfRuns"), ("OSPF-MIB", "ospfAreaBdrRtrCount"), ("OSPF-MIB", "ospfAsBdrRtrCount"), ("OSPF-MIB", "ospfAreaLsaCount"), ("OSPF-MIB", "ospfAreaLsaCksumSum"), ("OSPF-MIB", "ospfAreaSummary"), ("OSPF-MIB", "ospfAreaStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfAreaGroup = ospfAreaGroup.setStatus('deprecated')
if mibBuilder.loadTexts: ospfAreaGroup.setDescription('These objects are used for OSPF systems supporting areas per RFC 1850.')
ospfStubAreaGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 3)).setObjects(("OSPF-MIB", "ospfStubAreaId"), ("OSPF-MIB", "ospfStubTOS"), ("OSPF-MIB", "ospfStubMetric"), ("OSPF-MIB", "ospfStubStatus"), ("OSPF-MIB", "ospfStubMetricType"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfStubAreaGroup = ospfStubAreaGroup.setStatus('current')
if mibBuilder.loadTexts: ospfStubAreaGroup.setDescription('These objects are used for OSPF systems supporting stub areas.')
ospfLsdbGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 4)).setObjects(("OSPF-MIB", "ospfLsdbAreaId"), ("OSPF-MIB", "ospfLsdbType"), ("OSPF-MIB", "ospfLsdbLsid"), ("OSPF-MIB", "ospfLsdbRouterId"), ("OSPF-MIB", "ospfLsdbSequence"), ("OSPF-MIB", "ospfLsdbAge"), ("OSPF-MIB", "ospfLsdbChecksum"), ("OSPF-MIB", "ospfLsdbAdvertisement"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfLsdbGroup = ospfLsdbGroup.setStatus('current')
if mibBuilder.loadTexts: ospfLsdbGroup.setDescription('These objects are used for OSPF systems that display their link state database.')
ospfAreaRangeGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 5)).setObjects(("OSPF-MIB", "ospfAreaRangeAreaId"), ("OSPF-MIB", "ospfAreaRangeNet"), ("OSPF-MIB", "ospfAreaRangeMask"), ("OSPF-MIB", "ospfAreaRangeStatus"), ("OSPF-MIB", "ospfAreaRangeEffect"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfAreaRangeGroup = ospfAreaRangeGroup.setStatus('obsolete')
if mibBuilder.loadTexts: ospfAreaRangeGroup.setDescription('These objects are used for non-CIDR OSPF systems that support multiple areas. This object group is obsolete.')
ospfHostGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 6)).setObjects(("OSPF-MIB", "ospfHostIpAddress"), ("OSPF-MIB", "ospfHostTOS"), ("OSPF-MIB", "ospfHostMetric"), ("OSPF-MIB", "ospfHostStatus"), ("OSPF-MIB", "ospfHostAreaID"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfHostGroup = ospfHostGroup.setStatus('deprecated')
if mibBuilder.loadTexts: ospfHostGroup.setDescription('These objects are used for OSPF systems that support attached hosts.')
ospfIfGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 7)).setObjects(("OSPF-MIB", "ospfIfIpAddress"), ("OSPF-MIB", "ospfAddressLessIf"), ("OSPF-MIB", "ospfIfAreaId"), ("OSPF-MIB", "ospfIfType"), ("OSPF-MIB", "ospfIfAdminStat"), ("OSPF-MIB", "ospfIfRtrPriority"), ("OSPF-MIB", "ospfIfTransitDelay"), ("OSPF-MIB", "ospfIfRetransInterval"), ("OSPF-MIB", "ospfIfHelloInterval"), ("OSPF-MIB", "ospfIfRtrDeadInterval"), ("OSPF-MIB", "ospfIfPollInterval"), ("OSPF-MIB", "ospfIfState"), ("OSPF-MIB", "ospfIfDesignatedRouter"), ("OSPF-MIB", "ospfIfBackupDesignatedRouter"), ("OSPF-MIB", "ospfIfEvents"), ("OSPF-MIB", "ospfIfAuthType"), ("OSPF-MIB", "ospfIfAuthKey"), ("OSPF-MIB", "ospfIfStatus"), ("OSPF-MIB", "ospfIfMulticastForwarding"), ("OSPF-MIB", "ospfIfDemand"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfIfGroup = ospfIfGroup.setStatus('deprecated')
if mibBuilder.loadTexts: ospfIfGroup.setDescription('These objects are used to monitor/manage OSPF interfaces. This object group conforms to RFC 1850.')
ospfIfMetricGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 8)).setObjects(("OSPF-MIB", "ospfIfMetricIpAddress"), ("OSPF-MIB", "ospfIfMetricAddressLessIf"), ("OSPF-MIB", "ospfIfMetricTOS"), ("OSPF-MIB", "ospfIfMetricValue"), ("OSPF-MIB", "ospfIfMetricStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfIfMetricGroup = ospfIfMetricGroup.setStatus('current')
if mibBuilder.loadTexts: ospfIfMetricGroup.setDescription('These objects are used for OSPF systems for supporting interface metrics.')
ospfVirtIfGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 9)).setObjects(("OSPF-MIB", "ospfVirtIfAreaId"), ("OSPF-MIB", "ospfVirtIfNeighbor"), ("OSPF-MIB", "ospfVirtIfTransitDelay"), ("OSPF-MIB", "ospfVirtIfRetransInterval"), ("OSPF-MIB", "ospfVirtIfHelloInterval"), ("OSPF-MIB", "ospfVirtIfRtrDeadInterval"), ("OSPF-MIB", "ospfVirtIfState"), ("OSPF-MIB", "ospfVirtIfEvents"), ("OSPF-MIB", "ospfVirtIfAuthType"), ("OSPF-MIB", "ospfVirtIfAuthKey"), ("OSPF-MIB", "ospfVirtIfStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfVirtIfGroup = ospfVirtIfGroup.setStatus('deprecated')
if mibBuilder.loadTexts: ospfVirtIfGroup.setDescription('These objects are used for OSPF systems for supporting virtual interfaces. This object group conforms to RFC 1850.')
ospfNbrGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 10)).setObjects(("OSPF-MIB", "ospfNbrIpAddr"), ("OSPF-MIB", "ospfNbrAddressLessIndex"), ("OSPF-MIB", "ospfNbrRtrId"), ("OSPF-MIB", "ospfNbrOptions"), ("OSPF-MIB", "ospfNbrPriority"), ("OSPF-MIB", "ospfNbrState"), ("OSPF-MIB", "ospfNbrEvents"), ("OSPF-MIB", "ospfNbrLsRetransQLen"), ("OSPF-MIB", "ospfNbmaNbrStatus"), ("OSPF-MIB", "ospfNbmaNbrPermanence"), ("OSPF-MIB", "ospfNbrHelloSuppressed"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfNbrGroup = ospfNbrGroup.setStatus('deprecated')
if mibBuilder.loadTexts: ospfNbrGroup.setDescription('These objects are used to monitor/manage OSPF neighbors. This object group conforms to RFC 1850.')
ospfVirtNbrGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 11)).setObjects(("OSPF-MIB", "ospfVirtNbrArea"), ("OSPF-MIB", "ospfVirtNbrRtrId"), ("OSPF-MIB", "ospfVirtNbrIpAddr"), ("OSPF-MIB", "ospfVirtNbrOptions"), ("OSPF-MIB", "ospfVirtNbrState"), ("OSPF-MIB", "ospfVirtNbrEvents"), ("OSPF-MIB", "ospfVirtNbrLsRetransQLen"), ("OSPF-MIB", "ospfVirtNbrHelloSuppressed"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfVirtNbrGroup = ospfVirtNbrGroup.setStatus('deprecated')
if mibBuilder.loadTexts: ospfVirtNbrGroup.setDescription('These objects are used to monitor/manage OSPF virtual neighbors. This object group conforms to RFC 1850.')
ospfExtLsdbGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 12)).setObjects(("OSPF-MIB", "ospfExtLsdbType"), ("OSPF-MIB", "ospfExtLsdbLsid"), ("OSPF-MIB", "ospfExtLsdbRouterId"), ("OSPF-MIB", "ospfExtLsdbSequence"), ("OSPF-MIB", "ospfExtLsdbAge"), ("OSPF-MIB", "ospfExtLsdbChecksum"), ("OSPF-MIB", "ospfExtLsdbAdvertisement"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfExtLsdbGroup = ospfExtLsdbGroup.setStatus('deprecated')
if mibBuilder.loadTexts: ospfExtLsdbGroup.setDescription('These objects are used for OSPF systems that display their link state database. This object group conforms to RFC 1850. This object group is replaced by the ospfAsLsdbGroup in order to support any AS-scope LSA type in a single table.')
ospfAreaAggregateGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 13)).setObjects(("OSPF-MIB", "ospfAreaAggregateAreaID"), ("OSPF-MIB", "ospfAreaAggregateLsdbType"), ("OSPF-MIB", "ospfAreaAggregateNet"), ("OSPF-MIB", "ospfAreaAggregateMask"), ("OSPF-MIB", "ospfAreaAggregateStatus"), ("OSPF-MIB", "ospfAreaAggregateEffect"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfAreaAggregateGroup = ospfAreaAggregateGroup.setStatus('deprecated')
if mibBuilder.loadTexts: ospfAreaAggregateGroup.setDescription('These objects are used for OSPF systems to support network prefix aggregation across areas.')
ospfLocalLsdbGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 14)).setObjects(("OSPF-MIB", "ospfLocalLsdbSequence"), ("OSPF-MIB", "ospfLocalLsdbAge"), ("OSPF-MIB", "ospfLocalLsdbChecksum"), ("OSPF-MIB", "ospfLocalLsdbAdvertisement"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfLocalLsdbGroup = ospfLocalLsdbGroup.setStatus('current')
if mibBuilder.loadTexts: ospfLocalLsdbGroup.setDescription('These objects are used for OSPF systems that display their link-local link state databases for non-virtual links.')
ospfVirtLocalLsdbGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 15)).setObjects(("OSPF-MIB", "ospfVirtLocalLsdbSequence"), ("OSPF-MIB", "ospfVirtLocalLsdbAge"), ("OSPF-MIB", "ospfVirtLocalLsdbChecksum"), ("OSPF-MIB", "ospfVirtLocalLsdbAdvertisement"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfVirtLocalLsdbGroup = ospfVirtLocalLsdbGroup.setStatus('current')
if mibBuilder.loadTexts: ospfVirtLocalLsdbGroup.setDescription('These objects are used for OSPF systems that display their link-local link state databases for virtual links.')
ospfAsLsdbGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 16)).setObjects(("OSPF-MIB", "ospfAsLsdbSequence"), ("OSPF-MIB", "ospfAsLsdbAge"), ("OSPF-MIB", "ospfAsLsdbChecksum"), ("OSPF-MIB", "ospfAsLsdbAdvertisement"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfAsLsdbGroup = ospfAsLsdbGroup.setStatus('current')
if mibBuilder.loadTexts: ospfAsLsdbGroup.setDescription('These objects are used for OSPF systems that display their AS-scope link state database.')
ospfBasicGroup2 = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 17)).setObjects(("OSPF-MIB", "ospfRouterId"), ("OSPF-MIB", "ospfAdminStat"), ("OSPF-MIB", "ospfVersionNumber"), ("OSPF-MIB", "ospfAreaBdrRtrStatus"), ("OSPF-MIB", "ospfASBdrRtrStatus"), ("OSPF-MIB", "ospfExternLsaCount"), ("OSPF-MIB", "ospfExternLsaCksumSum"), ("OSPF-MIB", "ospfTOSSupport"), ("OSPF-MIB", "ospfOriginateNewLsas"), ("OSPF-MIB", "ospfRxNewLsas"), ("OSPF-MIB", "ospfExtLsdbLimit"), ("OSPF-MIB", "ospfMulticastExtensions"), ("OSPF-MIB", "ospfExitOverflowInterval"), ("OSPF-MIB", "ospfDemandExtensions"), ("OSPF-MIB", "ospfRFC1583Compatibility"), ("OSPF-MIB", "ospfOpaqueLsaSupport"), ("OSPF-MIB", "ospfReferenceBandwidth"), ("OSPF-MIB", "ospfRestartSupport"), ("OSPF-MIB", "ospfRestartInterval"), ("OSPF-MIB", "ospfRestartStrictLsaChecking"), ("OSPF-MIB", "ospfRestartStatus"), ("OSPF-MIB", "ospfRestartAge"), ("OSPF-MIB", "ospfRestartExitReason"), ("OSPF-MIB", "ospfAsLsaCount"), ("OSPF-MIB", "ospfAsLsaCksumSum"), ("OSPF-MIB", "ospfStubRouterSupport"), ("OSPF-MIB", "ospfStubRouterAdvertisement"), ("OSPF-MIB", "ospfDiscontinuityTime"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfBasicGroup2 = ospfBasicGroup2.setStatus('current')
if mibBuilder.loadTexts: ospfBasicGroup2.setDescription('These objects are used to monitor/manage OSPF global parameters.')
ospfAreaGroup2 = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 18)).setObjects(("OSPF-MIB", "ospfAreaId"), ("OSPF-MIB", "ospfImportAsExtern"), ("OSPF-MIB", "ospfSpfRuns"), ("OSPF-MIB", "ospfAreaBdrRtrCount"), ("OSPF-MIB", "ospfAsBdrRtrCount"), ("OSPF-MIB", "ospfAreaLsaCount"), ("OSPF-MIB", "ospfAreaLsaCksumSum"), ("OSPF-MIB", "ospfAreaSummary"), ("OSPF-MIB", "ospfAreaStatus"), ("OSPF-MIB", "ospfAreaNssaTranslatorRole"), ("OSPF-MIB", "ospfAreaNssaTranslatorState"), ("OSPF-MIB", "ospfAreaNssaTranslatorStabilityInterval"), ("OSPF-MIB", "ospfAreaNssaTranslatorEvents"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfAreaGroup2 = ospfAreaGroup2.setStatus('current')
if mibBuilder.loadTexts: ospfAreaGroup2.setDescription('These objects are used by OSPF systems to support areas.')
ospfIfGroup2 = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 19)).setObjects(("OSPF-MIB", "ospfIfIpAddress"), ("OSPF-MIB", "ospfAddressLessIf"), ("OSPF-MIB", "ospfIfAreaId"), ("OSPF-MIB", "ospfIfType"), ("OSPF-MIB", "ospfIfAdminStat"), ("OSPF-MIB", "ospfIfRtrPriority"), ("OSPF-MIB", "ospfIfTransitDelay"), ("OSPF-MIB", "ospfIfRetransInterval"), ("OSPF-MIB", "ospfIfHelloInterval"), ("OSPF-MIB", "ospfIfRtrDeadInterval"), ("OSPF-MIB", "ospfIfPollInterval"), ("OSPF-MIB", "ospfIfState"), ("OSPF-MIB", "ospfIfDesignatedRouter"), ("OSPF-MIB", "ospfIfBackupDesignatedRouter"), ("OSPF-MIB", "ospfIfEvents"), ("OSPF-MIB", "ospfIfAuthType"), ("OSPF-MIB", "ospfIfAuthKey"), ("OSPF-MIB", "ospfIfStatus"), ("OSPF-MIB", "ospfIfMulticastForwarding"), ("OSPF-MIB", "ospfIfDemand"), ("OSPF-MIB", "ospfIfLsaCount"), ("OSPF-MIB", "ospfIfLsaCksumSum"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfIfGroup2 = ospfIfGroup2.setStatus('current')
if mibBuilder.loadTexts: ospfIfGroup2.setDescription('These objects are used to monitor/manage OSPF interfaces.')
ospfVirtIfGroup2 = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 20)).setObjects(("OSPF-MIB", "ospfVirtIfAreaId"), ("OSPF-MIB", "ospfVirtIfNeighbor"), ("OSPF-MIB", "ospfVirtIfTransitDelay"), ("OSPF-MIB", "ospfVirtIfRetransInterval"), ("OSPF-MIB", "ospfVirtIfHelloInterval"), ("OSPF-MIB", "ospfVirtIfRtrDeadInterval"), ("OSPF-MIB", "ospfVirtIfState"), ("OSPF-MIB", "ospfVirtIfEvents"), ("OSPF-MIB", "ospfVirtIfAuthType"), ("OSPF-MIB",
"ospfVirtIfAuthKey"), ("OSPF-MIB", "ospfVirtIfStatus"), ("OSPF-MIB", "ospfVirtIfLsaCount"), ("OSPF-MIB", "ospfVirtIfLsaCksumSum"), ("OSPF-MIB", "ospfIfDesignatedRouterId"), ("OSPF-MIB", "ospfIfBackupDesignatedRouterId")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfVirtIfGroup2 = ospfVirtIfGroup2.setStatus('current') if mibBuilder.loadTexts: ospfVirtIfGroup2.setDescription('These objects are used to monitor/manage OSPF virtual interfaces.') ospfNbrGroup2 = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 21)).setObjects(("OSPF-MIB", "ospfNbrIpAddr"), ("OSPF-MIB", "ospfNbrAddressLessIndex"), ("OSPF-MIB", "ospfNbrRtrId"), ("OSPF-MIB", "ospfNbrOptions"), ("OSPF-MIB", "ospfNbrPriority"), ("OSPF-MIB", "ospfNbrState"), ("OSPF-MIB", "ospfNbrEvents"), ("OSPF-MIB", "ospfNbrLsRetransQLen"), ("OSPF-MIB", "ospfNbmaNbrStatus"), ("OSPF-MIB", "ospfNbmaNbrPermanence"), ("OSPF-MIB", "ospfNbrHelloSuppressed"), ("OSPF-MIB", "ospfNbrRestartHelperStatus"), ("OSPF-MIB", "ospfNbrRestartHelperAge"), ("OSPF-MIB", "ospfNbrRestartHelperExitReason")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfNbrGroup2 = ospfNbrGroup2.setStatus('current') if mibBuilder.loadTexts: ospfNbrGroup2.setDescription('These objects are used to monitor/manage OSPF neighbors.') ospfVirtNbrGroup2 = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 22)).setObjects(("OSPF-MIB", "ospfVirtNbrArea"), ("OSPF-MIB", "ospfVirtNbrRtrId"), ("OSPF-MIB", "ospfVirtNbrIpAddr"), ("OSPF-MIB", "ospfVirtNbrOptions"), ("OSPF-MIB", "ospfVirtNbrState"), ("OSPF-MIB", "ospfVirtNbrEvents"), ("OSPF-MIB", "ospfVirtNbrLsRetransQLen"), ("OSPF-MIB", "ospfVirtNbrHelloSuppressed"), ("OSPF-MIB", "ospfVirtNbrRestartHelperStatus"), ("OSPF-MIB", "ospfVirtNbrRestartHelperAge"), ("OSPF-MIB", "ospfVirtNbrRestartHelperExitReason")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfVirtNbrGroup2 = ospfVirtNbrGroup2.setStatus('current') if mibBuilder.loadTexts: ospfVirtNbrGroup2.setDescription('These objects are used to 
monitor/manage OSPF virtual neighbors.') ospfAreaAggregateGroup2 = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 23)).setObjects(("OSPF-MIB", "ospfAreaAggregateAreaID"), ("OSPF-MIB", "ospfAreaAggregateLsdbType"), ("OSPF-MIB", "ospfAreaAggregateNet"), ("OSPF-MIB", "ospfAreaAggregateMask"), ("OSPF-MIB", "ospfAreaAggregateStatus"), ("OSPF-MIB", "ospfAreaAggregateEffect"), ("OSPF-MIB", "ospfAreaAggregateExtRouteTag")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfAreaAggregateGroup2 = ospfAreaAggregateGroup2.setStatus('current') if mibBuilder.loadTexts: ospfAreaAggregateGroup2.setDescription('These objects are used for OSPF systems to support network prefix aggregation across areas.') ospfAreaLsaCountGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 24)).setObjects(("OSPF-MIB", "ospfAreaLsaCountNumber")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfAreaLsaCountGroup = ospfAreaLsaCountGroup.setStatus('current') if mibBuilder.loadTexts: ospfAreaLsaCountGroup.setDescription('These objects are used for OSPF systems that display per-area, per-LSA-type counters.') ospfHostGroup2 = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 25)).setObjects(("OSPF-MIB", "ospfHostIpAddress"), ("OSPF-MIB", "ospfHostTOS"), ("OSPF-MIB", "ospfHostMetric"), ("OSPF-MIB", "ospfHostStatus"), ("OSPF-MIB", "ospfHostCfgAreaID")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfHostGroup2 = ospfHostGroup2.setStatus('current') if mibBuilder.loadTexts: ospfHostGroup2.setDescription('These objects are used for OSPF systems that support attached hosts.') ospfObsoleteGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 14, 15, 1, 26)).setObjects(("OSPF-MIB", "ospfAuthType")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): ospfObsoleteGroup = ospfObsoleteGroup.setStatus('obsolete') if mibBuilder.loadTexts: ospfObsoleteGroup.setDescription('These objects are obsolete and are no longer required for OSPF systems. 
They are placed into this group for SMI conformance.') mibBuilder.exportSymbols("OSPF-MIB", ospfVirtLocalLsdbTable=ospfVirtLocalLsdbTable, ospfAsLsdbAdvertisement=ospfAsLsdbAdvertisement, ospfIfRtrPriority=ospfIfRtrPriority, ospfNbrPriority=ospfNbrPriority, ospfStubAreaEntry=ospfStubAreaEntry, ospfLsdbType=ospfLsdbType, ospfAreaSummary=ospfAreaSummary, ospfObsoleteGroup=ospfObsoleteGroup, ospfIfGroup=ospfIfGroup, ospfAreaBdrRtrStatus=ospfAreaBdrRtrStatus, ospfCompliances=ospfCompliances, ospfAsLsdbAge=ospfAsLsdbAge, ospfOriginateNewLsas=ospfOriginateNewLsas, ospfLsdbLsid=ospfLsdbLsid, ospfVirtLocalLsdbType=ospfVirtLocalLsdbType, ospfRxNewLsas=ospfRxNewLsas, ospfAreaRangeMask=ospfAreaRangeMask, ospfIfEvents=ospfIfEvents, ospfVirtIfTable=ospfVirtIfTable, ospfVirtLocalLsdbAge=ospfVirtLocalLsdbAge, ospfVirtLocalLsdbGroup=ospfVirtLocalLsdbGroup, ospfNbrGroup=ospfNbrGroup, ospfExtLsdbLsid=ospfExtLsdbLsid, ospfVirtIfAuthType=ospfVirtIfAuthType, ospfVirtLocalLsdbAdvertisement=ospfVirtLocalLsdbAdvertisement, ospfVirtNbrArea=ospfVirtNbrArea, ospfVirtIfGroup2=ospfVirtIfGroup2, ospfLocalLsdbAddressLessIf=ospfLocalLsdbAddressLessIf, ospfConformance=ospfConformance, ospfVirtNbrState=ospfVirtNbrState, ospfNbrOptions=ospfNbrOptions, ospfAreaNssaTranslatorRole=ospfAreaNssaTranslatorRole, ospfAreaNssaTranslatorState=ospfAreaNssaTranslatorState, ospfHostTable=ospfHostTable, ospfGeneralGroup=ospfGeneralGroup, ospfVirtNbrTable=ospfVirtNbrTable, UpToMaxAge=UpToMaxAge, ospfIfHelloInterval=ospfIfHelloInterval, ospfVirtIfLsaCount=ospfVirtIfLsaCount, ospfStubMetricType=ospfStubMetricType, ospfAreaLsaCount=ospfAreaLsaCount, ospfIntraArea=ospfIntraArea, ospfAreaAggregateEffect=ospfAreaAggregateEffect, ospfVirtLocalLsdbChecksum=ospfVirtLocalLsdbChecksum, ospfAsLsdbGroup=ospfAsLsdbGroup, ospfStubTOS=ospfStubTOS, ospfComplianceObsolete=ospfComplianceObsolete, ospfAreaRangeTable=ospfAreaRangeTable, ospfAreaAggregateGroup=ospfAreaAggregateGroup, 
ospfAreaNssaTranslatorStabilityInterval=ospfAreaNssaTranslatorStabilityInterval, ospfExtLsdbType=ospfExtLsdbType, ospfIfBackupDesignatedRouter=ospfIfBackupDesignatedRouter, ospfAreaStatus=ospfAreaStatus, ospfVirtLocalLsdbEntry=ospfVirtLocalLsdbEntry, ospfExtLsdbRouterId=ospfExtLsdbRouterId, ospfNbrRestartHelperAge=ospfNbrRestartHelperAge, ospfLocalLsdbEntry=ospfLocalLsdbEntry, ospfLocalLsdbSequence=ospfLocalLsdbSequence, ospfExternLsaCount=ospfExternLsaCount, ospfAsLsdbEntry=ospfAsLsdbEntry, ospfExternalType1=ospfExternalType1, ospfAreaAggregateAreaID=ospfAreaAggregateAreaID, ospfGroups=ospfGroups, ospfAreaAggregateMask=ospfAreaAggregateMask, ospfIfPollInterval=ospfIfPollInterval, ospfVirtIfAuthKey=ospfVirtIfAuthKey, ospfExtLsdbChecksum=ospfExtLsdbChecksum, ospfExtLsdbLimit=ospfExtLsdbLimit, ospfVirtIfState=ospfVirtIfState, ospfIfMetricGroup=ospfIfMetricGroup, ospfNbrTable=ospfNbrTable, ospfAreaRangeNet=ospfAreaRangeNet, ospfVirtNbrLsRetransQLen=ospfVirtNbrLsRetransQLen, ospfAreaAggregateEntry=ospfAreaAggregateEntry, ospfRestartStatus=ospfRestartStatus, ospfVirtNbrGroup2=ospfVirtNbrGroup2, ospfASBdrRtrStatus=ospfASBdrRtrStatus, ospfHostGroup=ospfHostGroup, ospfNbrIpAddr=ospfNbrIpAddr, ospfVirtNbrHelloSuppressed=ospfVirtNbrHelloSuppressed, Status=Status, ospfInterArea=ospfInterArea, ospfRFC1583Compatibility=ospfRFC1583Compatibility, ospfIfMetricValue=ospfIfMetricValue, ospfAsLsdbSequence=ospfAsLsdbSequence, ospfIfLsaCksumSum=ospfIfLsaCksumSum, BigMetric=BigMetric, ospfHostAreaID=ospfHostAreaID, ospfIfMulticastForwarding=ospfIfMulticastForwarding, ospfLsdbChecksum=ospfLsdbChecksum, ospfIfMetricIpAddress=ospfIfMetricIpAddress, ospfVirtIfEvents=ospfVirtIfEvents, ospfAreaAggregateGroup2=ospfAreaAggregateGroup2, ospfAreaId=ospfAreaId, ospfAsLsdbTable=ospfAsLsdbTable, ospfHostGroup2=ospfHostGroup2, ospfNbmaNbrStatus=ospfNbmaNbrStatus, ospfLsdbAdvertisement=ospfLsdbAdvertisement, ospfMulticastExtensions=ospfMulticastExtensions, 
ospfLocalLsdbAdvertisement=ospfLocalLsdbAdvertisement, ospfVirtNbrOptions=ospfVirtNbrOptions, ospfAreaRangeEffect=ospfAreaRangeEffect, ospfVirtIfEntry=ospfVirtIfEntry, ospfReferenceBandwidth=ospfReferenceBandwidth, ospfAreaGroup2=ospfAreaGroup2, ospfExternalType2=ospfExternalType2, ospfBasicGroup2=ospfBasicGroup2, Metric=Metric, ospfHostTOS=ospfHostTOS, ospfLocalLsdbLsid=ospfLocalLsdbLsid, ospfAsLsdbLsid=ospfAsLsdbLsid, ospfAreaRangeGroup=ospfAreaRangeGroup, ospfStubStatus=ospfStubStatus, ospfVirtLocalLsdbNeighbor=ospfVirtLocalLsdbNeighbor, ospfHostStatus=ospfHostStatus, ospfAreaAggregateStatus=ospfAreaAggregateStatus, ospfAsLsaCksumSum=ospfAsLsaCksumSum, ospfVirtIfRtrDeadInterval=ospfVirtIfRtrDeadInterval, ospfExtLsdbAge=ospfExtLsdbAge, ospfExtLsdbEntry=ospfExtLsdbEntry, ospfRestartInterval=ospfRestartInterval, ospfExtLsdbGroup=ospfExtLsdbGroup, ospfAreaRangeEntry=ospfAreaRangeEntry, ospfLsdbAge=ospfLsdbAge, TOSType=TOSType, ospfIfAdminStat=ospfIfAdminStat, ospfLsdbSequence=ospfLsdbSequence, ospfLocalLsdbTable=ospfLocalLsdbTable, ospfVirtNbrRestartHelperAge=ospfVirtNbrRestartHelperAge, ospfAreaGroup=ospfAreaGroup, ospfRouterId=ospfRouterId, OspfAuthenticationType=OspfAuthenticationType, ospfVirtIfStatus=ospfVirtIfStatus, ospfIfEntry=ospfIfEntry, HelloRange=HelloRange, ospfNbrState=ospfNbrState, ospfDemandExtensions=ospfDemandExtensions, ospfNbrAddressLessIndex=ospfNbrAddressLessIndex, ospfHostCfgAreaID=ospfHostCfgAreaID, ospfLsdbGroup=ospfLsdbGroup, ospfBasicGroup=ospfBasicGroup, ospfRouteGroup=ospfRouteGroup, ospfStubRouterSupport=ospfStubRouterSupport, ospfVirtIfLsaCksumSum=ospfVirtIfLsaCksumSum, ospfExternLsaCksumSum=ospfExternLsaCksumSum, ospfNbrRtrId=ospfNbrRtrId, ospfNbrRestartHelperExitReason=ospfNbrRestartHelperExitReason, ospfAreaLsaCountEntry=ospfAreaLsaCountEntry, ospfIfDesignatedRouterId=ospfIfDesignatedRouterId, ospfAreaLsaCountNumber=ospfAreaLsaCountNumber, ospfLocalLsdbAge=ospfLocalLsdbAge, ospfNbrRestartHelperStatus=ospfNbrRestartHelperStatus, 
ospfNbrHelloSuppressed=ospfNbrHelloSuppressed, ospfVirtIfRetransInterval=ospfVirtIfRetransInterval, ospfStubAreaTable=ospfStubAreaTable, ospfVirtIfHelloInterval=ospfVirtIfHelloInterval, ospfVirtNbrRestartHelperExitReason=ospfVirtNbrRestartHelperExitReason, RouterID=RouterID, ospfRestartExitReason=ospfRestartExitReason, ospfIfLsaCount=ospfIfLsaCount, ospfAreaAggregateTable=ospfAreaAggregateTable, ospfNbmaNbrPermanence=ospfNbmaNbrPermanence, ospfTOSSupport=ospfTOSSupport, ospfAreaLsaCountAreaId=ospfAreaLsaCountAreaId, ospfIfIpAddress=ospfIfIpAddress, ospfStubMetric=ospfStubMetric, ospfIfType=ospfIfType, ospfAdminStat=ospfAdminStat, ospfImportAsExtern=ospfImportAsExtern, ospfVirtIfAreaId=ospfVirtIfAreaId, ospfAreaLsaCksumSum=ospfAreaLsaCksumSum, ospfVirtIfNeighbor=ospfVirtIfNeighbor, ospfExtLsdbAdvertisement=ospfExtLsdbAdvertisement, ospfVersionNumber=ospfVersionNumber, ospfOpaqueLsaSupport=ospfOpaqueLsaSupport, PYSNMP_MODULE_ID=ospf, ospfLsdbTable=ospfLsdbTable, ospfAddressLessIf=ospfAddressLessIf, ospfExtLsdbTable=ospfExtLsdbTable, ospfIfAuthKey=ospfIfAuthKey, ospfLsdbRouterId=ospfLsdbRouterId, ospfLocalLsdbIpAddress=ospfLocalLsdbIpAddress, ospfIfAreaId=ospfIfAreaId, ospfAsLsdbChecksum=ospfAsLsdbChecksum, ospfHostMetric=ospfHostMetric, ospfAreaEntry=ospfAreaEntry, ospfIfMetricTOS=ospfIfMetricTOS, ospfNbrLsRetransQLen=ospfNbrLsRetransQLen, ospfVirtNbrRestartHelperStatus=ospfVirtNbrRestartHelperStatus, ospfLocalLsdbGroup=ospfLocalLsdbGroup, ospfVirtNbrEntry=ospfVirtNbrEntry, ospfExitOverflowInterval=ospfExitOverflowInterval, ospfStubRouterAdvertisement=ospfStubRouterAdvertisement, ospfLsdbEntry=ospfLsdbEntry, ospfIfMetricEntry=ospfIfMetricEntry, ospfVirtNbrIpAddr=ospfVirtNbrIpAddr, ospfVirtLocalLsdbRouterId=ospfVirtLocalLsdbRouterId, ospfSpfRuns=ospfSpfRuns, AreaID=AreaID, ospf=ospf, ospfVirtNbrGroup=ospfVirtNbrGroup, ospfVirtLocalLsdbSequence=ospfVirtLocalLsdbSequence, ospfIfRtrDeadInterval=ospfIfRtrDeadInterval, ospfAreaAggregateNet=ospfAreaAggregateNet, 
ospfLocalLsdbChecksum=ospfLocalLsdbChecksum, ospfIfTransitDelay=ospfIfTransitDelay, ospfIfTable=ospfIfTable, ospfIfRetransInterval=ospfIfRetransInterval, ospfIfAuthType=ospfIfAuthType, ospfLocalLsdbRouterId=ospfLocalLsdbRouterId, ospfAreaTable=ospfAreaTable, ospfAreaBdrRtrCount=ospfAreaBdrRtrCount, ospfIfDemand=ospfIfDemand, ospfAsLsaCount=ospfAsLsaCount, ospfDiscontinuityTime=ospfDiscontinuityTime, ospfAsBdrRtrCount=ospfAsBdrRtrCount, ospfLsdbAreaId=ospfLsdbAreaId, ospfAreaRangeAreaId=ospfAreaRangeAreaId, ospfAreaRangeStatus=ospfAreaRangeStatus, ospfLocalLsdbType=ospfLocalLsdbType, ospfAsLsdbRouterId=ospfAsLsdbRouterId, ospfCompliance2=ospfCompliance2, ospfIfDesignatedRouter=ospfIfDesignatedRouter, ospfRestartStrictLsaChecking=ospfRestartStrictLsaChecking, ospfVirtIfGroup=ospfVirtIfGroup, ospfIfGroup2=ospfIfGroup2, ospfAreaAggregateExtRouteTag=ospfAreaAggregateExtRouteTag, ospfVirtLocalLsdbTransitArea=ospfVirtLocalLsdbTransitArea, ospfAreaLsaCountTable=ospfAreaLsaCountTable, ospfHostIpAddress=ospfHostIpAddress, ospfStubAreaId=ospfStubAreaId, ospfVirtNbrRtrId=ospfVirtNbrRtrId, ospfIfBackupDesignatedRouterId=ospfIfBackupDesignatedRouterId, PositiveInteger=PositiveInteger, ospfAreaLsaCountLsaType=ospfAreaLsaCountLsaType, ospfAreaAggregateLsdbType=ospfAreaAggregateLsdbType, ospfAreaNssaTranslatorEvents=ospfAreaNssaTranslatorEvents, ospfAuthType=ospfAuthType, DesignatedRouterPriority=DesignatedRouterPriority, ospfStubAreaGroup=ospfStubAreaGroup, ospfCompliance=ospfCompliance, ospfVirtIfTransitDelay=ospfVirtIfTransitDelay, ospfHostEntry=ospfHostEntry, ospfNbrEvents=ospfNbrEvents, ospfAreaLsaCountGroup=ospfAreaLsaCountGroup, ospfNbrGroup2=ospfNbrGroup2, ospfIfState=ospfIfState, ospfVirtNbrEvents=ospfVirtNbrEvents, ospfNbrEntry=ospfNbrEntry, ospfAsLsdbType=ospfAsLsdbType, ospfIfMetricTable=ospfIfMetricTable, ospfIfStatus=ospfIfStatus, ospfIfMetricAddressLessIf=ospfIfMetricAddressLessIf, ospfVirtLocalLsdbLsid=ospfVirtLocalLsdbLsid) mibBuilder.exportSymbols("OSPF-MIB", 
ospfIfMetricStatus=ospfIfMetricStatus, ospfExtLsdbSequence=ospfExtLsdbSequence, ospfRestartSupport=ospfRestartSupport, ospfRestartAge=ospfRestartAge)
141,668
52,110
from csv import reader
import yaml
import json


def splitrow(row, DELIMETER):
    """Split *row* on DELIMETER; an empty string yields an empty list.

    (Parameter name keeps the historical spelling used throughout this script.)
    """
    x = row.split(DELIMETER)
    return ([] if row == '' else x)


def get_data_from_csv(settings, DELIMETER='|'):
    """Read semgrep-style rules from settings['CSV_FILENAME'].

    Expected CSV columns per row: id, pattern-either, pattern-inside,
    pattern-not-inside, languages, message, severity.  Multi-valued cells
    are joined with DELIMETER.

    :param settings: Dict holding at least 'CSV_FILENAME'.
    :param DELIMETER: Separator used inside multi-valued CSV cells.
    :return: ``{"rules": [...]}`` ready to be dumped as YAML.
    """
    rules = []
    with open(settings['CSV_FILENAME'], 'r') as csv_file:
        csv_reader = reader(csv_file)
        for row in csv_reader:
            bug_id = row[0]
            pattern_either = splitrow(row[1], DELIMETER)
            pattern_inside = splitrow(row[2], DELIMETER)
            pattern_not_inside = splitrow(row[3], DELIMETER)
            languages = splitrow(row[4], DELIMETER)
            message = row[5]
            severity = row[6]
            # BUG FIX: the original built each entry with a dict
            # comprehension like {"pattern": patt for patt in ...}, which
            # rewrites the single "pattern" key on every iteration and so
            # silently keeps only the LAST pattern of the cell.  Semgrep
            # expects a LIST of {"pattern": ...} mappings, one per pattern.
            patterns = {
                "pattern-either": [{"pattern": patt} for patt in pattern_either],
                "pattern-not-inside": [{"pattern": patt} for patt in pattern_not_inside],
                "pattern-inside": [{"pattern": patt} for patt in pattern_inside],
            }
            # Drop keys whose pattern list came out empty.
            patterns = {k: v for k, v in patterns.items() if v}
            single_rule_obj = {
                "id": bug_id,
                "patterns": patterns,
                "message": message,
                "languages": languages,
                "severity": severity
            }
            rules.append(single_rule_obj)
    return {"rules": rules}


def convert_json_to_yaml(yml_dict, settings):
    """Dump *yml_dict* as YAML into settings['OUTPUT_FILENAME']."""
    with open(settings['OUTPUT_FILENAME'], 'w') as ymlfile:
        yaml.dump(yml_dict, ymlfile, allow_unicode=True)


def go(config_filename='yml-generator-config.json'):
    """Load the JSON config, convert the CSV rules and write the YAML file."""
    with open(config_filename, 'r') as json_file:
        settings = json.load(json_file)
    yml_dict = get_data_from_csv(settings, settings['DELIMETER'])
    convert_json_to_yaml(yml_dict, settings)


if __name__ == '__main__':
    # Guarded so importing this module no longer triggers the conversion
    # (the original called go() unconditionally at import time).
    go()
1,695
548
import numpy as np
import pandas as pd
from types import FunctionType
import warnings

from .transform import BaseTransformer


def drop_na(x, y, according='both'):
    """
    Drop the values in both x and y if the element in `according` is missing

    ex. drop_na([1, 2, np.nan], [1, 2, 3], 'x') => [1, 2], [1, 2]

    :param x: Numerical array-like.
    :param y: Numerical array-like of the same length as x.
    :param according: One of ['x', 'y', 'both'] -- which side(s) to test for NaN.
    :return: Tuple (x, y) of numpy arrays with the missing positions removed.
    :raises ValueError: If `according` is not one of the supported options.
    """
    if according == 'x':
        valid_index = ~np.isnan(x)
    elif according == 'y':
        valid_index = ~np.isnan(y)
    elif according == 'both':
        valid_index = (~np.isnan(x)) & (~np.isnan(y))
    else:
        raise ValueError('According should be one of {}'.format(['x', 'y', 'both']))
    return np.array(x)[valid_index], np.array(y)[valid_index]


def check_binary_label(y):
    """
    Make sure the label contains only 0 and 1.

    Note: a degenerate label holding a single class also fails, since the
    value set must equal exactly {0, 1}.
    """
    if set(y) != set([0, 1]):
        raise ValueError('The label must be binary 0 or 1.')


def check_numerical(x):
    """Raise ValueError unless x (or its first element, for lists) is numeric."""
    if isinstance(x, list):
        # is_numeric_dtype would inspect the list object itself, so check a
        # representative element instead.
        x = x[0]
    if not pd.api.types.is_numeric_dtype(x):
        raise ValueError('The input must be a numerical array.')


def as_positive_rate(x, y, bins, interval_value='mean'):
    """
    Group numerical variable x into several bins and calculate the positive
    rate within each bin.

    :param x: Numerical feature values.
    :param y: Binary labels (0/1) aligned with x.
    :param bins: Integer or a sequence of values as cutoff points.
    :param interval_value: One of ['left', 'right', 'mean'], how the interval
        is converted to a scalar.
    :return: Tuple (bin_values, positive_rates) as numpy arrays.
    """
    if isinstance(x, list):
        x = np.array(x)
    check_numerical(x)
    check_binary_label(y)
    # BUG FIX: the original compared `len(set(x)) <= bins` unconditionally,
    # which raises TypeError on Python 3 when `bins` is a sequence of cutoff
    # points (a documented input).  The low-cardinality shortcut only makes
    # sense for an integer bin count.
    if isinstance(bins, int) and len(set(x)) <= bins:
        pos_pct = pd.Series(y).groupby(x).mean()
    else:
        intervals = pd.cut(x, bins)
        if interval_value == 'left':
            intervals = [i.left for i in intervals]
        elif interval_value == 'right':
            intervals = [i.right for i in intervals]
        elif interval_value == 'mean':
            intervals = [(i.left + i.right) / 2.0 for i in intervals]
        else:
            raise ValueError('Only {} is supported.'.format(['left', 'right', 'mean']))
        pos_pct = pd.Series(y).groupby(intervals).mean()
    return pos_pct.index.values, pos_pct.values


# Clipping bound keeping odds/log-odds finite at p == 0 or p == 1.
# (Name keeps the historical spelling for backward compatibility.)
EPILSON = 1e-15


def _odds(p):
    """Return p / (1 - p), with p clipped away from 0 and 1."""
    p = np.clip(p, EPILSON, 1 - EPILSON)
    return p / (1 - p)


def _logodds(p):
    """Return log(p / (1 - p)) (the logit), with p clipped away from 0 and 1."""
    return np.log(_odds(p))


# Named transformations accepted by `preprocess(transform_y=...)`.
_TRANSFORMS = {
    'odds': _odds,
    'logodds': _logodds
}


def preprocess(x, y, binary_label=True, bins=50, transform_y=None,
               interval_value='mean', ignore_na=True):
    """
    Preprocess the input before finding the best transformations.

    :param binary_label: Whether the label is binary (0, 1), in other words,
        whether the problem is classification or regression.
    :param transform_y: Transformation applied to y; either a string within
        ['odds', 'logodds'] or any callable.
    :param bins: Integer or a sequence of values as cutoff points.
    :param interval_value: One of ['left', 'right', 'mean'], how the interval
        is converted to a scalar.
    :param ignore_na: Whether to drop positions where x or y is NaN.
    :return: Tuple (x, y) ready for the transformation search.
    """
    if binary_label:
        x, y = as_positive_rate(x, y, bins, interval_value)
    if transform_y is not None:
        # make sure y is an array
        y = np.array(y)
        if isinstance(transform_y, str):
            if transform_y not in _TRANSFORMS:
                raise ValueError('Only {} is supported.'.format(sorted(_TRANSFORMS)))
            y = _TRANSFORMS[transform_y](y)
        elif callable(transform_y):
            # GENERALIZED: the original accepted only plain functions
            # (types.FunctionType), rejecting equally valid callables such
            # as functools.partial objects, builtins and bound methods.
            # Every plain function is callable, so old callers still work.
            y = transform_y(y)
        else:
            raise ValueError('Only string and callable is supported for `transform_y`.')
    if ignore_na:
        x, y = drop_na(x, y, according='both')
    return x, y


def _check_complexity():
    """Warn when two BaseTransformer subclasses declare the same complexity."""
    cpl = {}
    for cls in BaseTransformer.__subclasses__():
        complexity = cls.complexity
        if complexity in cpl:
            warnings.warn('{} and {} has the same complexity {}.'.
                          format(cls.__name__, cpl[complexity].__name__, complexity))
        cpl[complexity] = cls
4,018
1,292
__author__ = 'yinjun'

"""
Definition of ListNode
class ListNode(object):
    def __init__(self, val, next=None):
        self.val = val
        self.next = next
"""

class Solution:
    """
    @param head: A ListNode
    @return: A ListNode
    """
    def deleteDuplicates(self, head):
        # Remove from the sorted list every value that occurs more than once.
        # Values seen as duplicated are remembered so later stragglers with
        # the same value are also dropped.
        seen_dups = {}
        dummy = ListNode(0)
        dummy.next = head
        node = dummy
        while node is not None and node.next is not None and node.next.next is not None:
            if node.next.val == node.next.next.val:
                # Two equal neighbours: record the value, unlink the first.
                seen_dups[node.next.val] = True
                node.next = node.next.next
            elif node.next.val in seen_dups:
                # Last remaining copy of a known duplicate: unlink it too.
                node.next = node.next.next
            else:
                node = node.next
        # The loop cannot inspect the final node; cut it if it is a duplicate.
        if node is not None and node.next is not None and node.next.val in seen_dups:
            node.next = None
        return dummy.next
821
263
class Object:
    """A geometric figure identified by *type* ('square' or 'triangle')."""

    def __init__(self, type):
        # NOTE: parameter name shadows the builtin ``type``; kept unchanged
        # for interface compatibility with existing callers.
        self.type = type

    def square(self, a, b):
        """Return the area of the figure given side lengths *a* and *b*.

        'square' -> a * b; 'triangle' -> a * b / 2; any other type -> None.
        """
        if self.type == 'square':
            return a * b
        if self.type == 'triangle':
            return (a * b) / 2
        return None  # explicit: unknown figure kinds have no defined area


if __name__ == '__main__':
    # BUG FIX: the original read from stdin at module level, so merely
    # importing this file would block on input(); the script part is now
    # guarded.  Also renamed the local `object`, which shadowed the builtin.
    vid = input()
    figure = Object(vid)
    a = int(input())
    b = int(input())
    print(f'{figure.square(a, b)}')
325
116
from .main import Main
from .arguments import arg
from textwrap import dedent

# Shared CLI entry point; ``command`` registers functions as subcommands on it.
main = Main()
command = main.command


@command('install-bash-completions',
         arg('--dest', help="destination file. Typically ~/.bashrc or ~/.profile", default="~/.bashrc"),
         arg('script_name'),
         )
def install_bash_completions(dest, script_name):
    """Install bash completion for *script_name* into *dest* and print how
    to activate it in the current shell."""
    main.install_bash_completion(dest=dest, script_name=script_name)
    print(dedent("""
    To activate bash completions of script_name run:
    . %s
    """ % dest))


@command('uninstall-bash-completions',
         arg('--dest', help="destination file. Typically ~/.bashrc or ~/.profile", default="~/.bashrc"),
         arg('script_name'),
         )
def uninstall_bash_completions(dest, script_name):
    """Remove the bash completion for *script_name* from *dest*."""
    main.uninstall_bash_completion(dest=dest, script_name=script_name)


# Parse argv and dispatch to the selected subcommand when this module runs.
main()
809
262
# *********************************************************
# Relative Stage Movement
#
# Stephanie Fung 2014
# *********************************************************

# Import modules.
# ---------------------------------------------------------
import visa
import string
import struct
import sys
import serial


# =========================================================
# Initialize Prior:
# =========================================================
def PriorInit():
    """Set the Prior stage resolution and echo the controller's reply.

    Uses the module-level ``Prior`` serial connection.
    """
    # set units to 1 micron
    # need to verify this
    command_prior = "RES,s,1.0\r\n"
    Prior.write(command_prior)
    line = Prior.readline()
    print(line)


# =========================================================
# Prior Move Relative function:
# =========================================================
def moveRel(axis, distance):
    """Issue a relative move (GR command) of *distance* along 'x'/'X' or
    'y'/'Y'.  Any other axis value is silently ignored."""
    if axis == 'x' or axis == 'X':
        command_prior = "GR," + str(distance) + ",0,0\r\n"
        Prior.write(command_prior)
    if axis == 'y' or axis == 'Y':
        command_prior = "GR,0," + str(distance) + ",0\r\n"
        Prior.write(command_prior)


# =========================================================
# Main program:
# =========================================================
## Prior Stage
Prior = serial.Serial()
Prior.port = "COM1"
Prior.timeout = 0.1
# NOTE: prints are written in single-argument call form so the script works
# under both Python 2 (where the original used print statements) and Python 3.
print(Prior)

Prior.open()
# BUG FIX: the original evaluated ``Prior.isOpen`` without calling it, which
# is a no-op attribute access; the check now actually executes.
Prior.isOpen()

PriorInit()

## Move position
moveRel('x', -1000)
moveRel('y', 0)

# Close the serial connection to the Prior stage
Prior.close()
# BUG FIX: same missing-call defect as above.
Prior.isOpen()

print("End of program.")
1,500
420
# Copyright (c) 2013 Per Unneberg
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Luigi/ratatosk task wrappers around the Picard command-line jar tools."""
import os
import luigi
import logging
import time
import glob
import ratatosk.lib.files.external
from ratatosk.utils import rreplace
from ratatosk.job import InputJobTask, JobWrapperTask, JobTask, DefaultShellJobRunner
import ratatosk.shell as shell

# TODO: make these configurable
JAVA="java"
JAVA_OPTS="-Xmx2g"
PICARD_HOME=os.getenv("PICARD_HOME")

logger = logging.getLogger('luigi-interface')

class PicardJobRunner(DefaultShellJobRunner):
    """Job runner that executes a Picard jar via ``java -jar`` in a shell."""

    # Directory holding the Picard jars, read from $PICARD_HOME at import time.
    path = PICARD_HOME

    def run_job(self, job):
        """Build and run the java command line for *job*.

        Raises Exception when the jar cannot be found or the command exits
        non-zero; on success, temporary outputs are moved into place.
        """
        if not job.jar() or not os.path.exists(os.path.join(self.path,job.jar())):
            logger.error("Can't find jar: {0}, full path {1}".format(job.jar(), os.path.abspath(job.jar())))
            raise Exception("job jar does not exist")
        arglist = [JAVA] + job.java_opt() + ['-jar', os.path.join(self.path, job.jar())]
        if job.main():
            arglist.append(job.main())
        if job.opts():
            arglist += job.opts()
        # _fix_paths maps final targets to temporary paths so a failed run
        # does not leave a half-written output that luigi treats as done.
        (tmp_files, job_args) = DefaultShellJobRunner._fix_paths(job)
        arglist += job_args
        cmd = ' '.join(arglist)
        # NOTE(review): the "= " -> "=" replacement glues Picard's KEY= tokens
        # to their values before shelling out — presumably args() emits them
        # as separate list items; confirm against args() implementations below.
        logger.info("\nJob runner '{0}';\n\trunning command '{1}'".format(self.__class__, cmd.replace("= ", "=")))
        (stdout, stderr, returncode) = shell.exec_cmd(cmd.replace("= ", "="), shell=True)
        if returncode == 0:
            logger.info("Shell job completed")
            for a, b in tmp_files:
                logger.info("renaming {0} to {1}".format(a.path, b.path))
                a.move(os.path.join(os.curdir, b.path))
        else:
            raise Exception("Job '{}' failed: \n{}".format(cmd.replace("= ", "="), "\n".join([stderr])))

class InputBamFile(JobTask):
    """Entry-point task wiring an already-existing BAM file into the pipeline."""
    _config_section = "picard"
    _config_subsection = "InputBamFile"
    # Parent defaults to the external-file task; overridable via configuration.
    parent_task = luigi.Parameter(default="ratatosk.lib.files.external.BamFile")

    def requires(self):
        # Instantiate the (possibly config-overridden) parent task class.
        cls = self.set_parent_task()
        return cls(target=self.target)

    def output(self):
        return luigi.LocalTarget(self.target)

    def run(self):
        # Nothing to do: the input file already exists on disk.
        pass

class PicardJobTask(JobTask):
    """Base class for all Picard tasks: shared java/jar plumbing and defaults."""
    _config_section = "picard"
    java_options = luigi.Parameter(default=("-Xmx2g",), is_list=True)
    executable = luigi.Parameter(default=None)
    parent_task = luigi.Parameter(default="ratatosk.lib.tools.picard.InputBamFile")
    target_suffix = luigi.Parameter(default=".bam")
    source_suffix = luigi.Parameter(default=".bam")

    def jar(self):
        """Path to the jar for this Picard job"""
        return self.executable

    def java_opt(self):
        # Return a copy so callers can extend without mutating the parameter.
        return list(self.java_options)

    def exe(self):
        return self.jar()

    def job_runner(self):
        return PicardJobRunner()

    def requires(self):
        cls = self.set_parent_task()
        source = self._make_source_file_name()
        return cls(target=source)

class SortSam(PicardJobTask):
    """Coordinate-sort a BAM/SAM file with Picard SortSam."""
    _config_subsection = "SortSam"
    executable = "SortSam.jar"
    options = luigi.Parameter(default=("SO=coordinate MAX_RECORDS_IN_RAM=750000",), is_list=True)
    label = luigi.Parameter(default=".sort")

    def args(self):
        return ["INPUT=", self.input(), "OUTPUT=", self.output()]

class MergeSamFiles(PicardJobTask):
    """Merge several BAM/SAM files into one coordinate-sorted output."""
    _config_subsection = "MergeSamFiles"
    executable = "MergeSamFiles.jar"
    label = luigi.Parameter(default=".merge")
    read1_suffix = luigi.Parameter(default="_R1_001")
    target_generator_function = luigi.Parameter(default=None)
    # FIXME: TMP_DIR should not be hard-coded
    options = luigi.Parameter(default=("SO=coordinate TMP_DIR=./tmp",), is_list=True)

    def args(self):
        # One "INPUT=", <file> pair per upstream input, flattened into one list.
        return ["OUTPUT=", self.output()] + [item for sublist in [["INPUT=", x] for x in self.input()] for item in sublist]

    def requires(self):
        cls = self.set_parent_task()
        tgt_fun = self.set_target_generator_function()
        if tgt_fun:
            sources = tgt_fun(self)
            return [cls(target=src) for src in sources]
        else:
            # No generator configured: nothing to merge.
            return []

class AlignmentMetrics(PicardJobTask):
    """Collect alignment summary metrics for a BAM file."""
    _config_subsection = "AlignmentMetrics"
    executable = "CollectAlignmentSummaryMetrics.jar"
    target_suffix = luigi.Parameter(default=".align_metrics")

    def args(self):
        return ["INPUT=", self.input(), "OUTPUT=", self.output()]

class InsertMetrics(PicardJobTask):
    """Collect insert size metrics; produces a table and a histogram file."""
    _config_subsection = "InsertMetrics"
    executable = "CollectInsertSizeMetrics.jar"
    target_suffix = luigi.Parameter(default=(".insert_metrics", ".insert_hist"), is_list=True)

    def output(self):
        # Two outputs: the metrics table and the histogram, the latter named
        # by swapping the first suffix for the second.
        return [luigi.LocalTarget(self.target), luigi.LocalTarget(rreplace(self.target, self.target_suffix[0], self.target_suffix[1], 1))]

    def args(self):
        return ["INPUT=", self.input(), "OUTPUT=", self.output()[0], "HISTOGRAM_FILE=", self.output()[1]]

class DuplicationMetrics(PicardJobTask):
    """Mark duplicates and emit duplication metrics."""
    _config_subsection = "DuplicationMetrics"
    executable = "MarkDuplicates.jar"
    label = luigi.Parameter(default=".dup")
    target_suffix = luigi.Parameter(default=(".bam", ".dup_metrics"), is_list=True)

    def args(self):
        # Metrics file name derives from the output by replacing
        # "<label><.bam>" with the metrics suffix.
        return ["INPUT=", self.input(), "OUTPUT=", self.output(), "METRICS_FILE=", rreplace(self.output().fn, "{}{}".format(self.label, self.target_suffix[0]), self.target_suffix[1], 1)]

class HsMetrics(PicardJobTask):
    """Hybrid-selection metrics; requires bait and target interval files."""
    _config_subsection = "HsMetrics"
    executable = "CalculateHsMetrics.jar"
    bait_regions = luigi.Parameter(default=None)
    target_regions = luigi.Parameter(default=None)
    target_suffix = luigi.Parameter(default=".hs_metrics")

    def args(self):
        if not self.bait_regions or not self.target_regions:
            raise Exception("need bait and target regions to run CalculateHsMetrics")
        return ["INPUT=", self.input(), "OUTPUT=", self.output(), "BAIT_INTERVALS=", os.path.expanduser(self.bait_regions), "TARGET_INTERVALS=", os.path.expanduser(self.target_regions)]

class HsMetricsNonDup(HsMetrics):
    """Run on non-deduplicated data"""
    _config_subsection = "HsMetricsNonDup"
    # Parent is the merge step, i.e. before duplicate marking.
    parent_task = luigi.Parameter(default="ratatosk.lib.tools.picard.MergeSamFiles")

class PicardMetrics(JobWrapperTask):
    """Wrapper bundling the standard per-sample Picard metrics tasks."""
    def requires(self):
        return [InsertMetrics(target=self.target + str(InsertMetrics.target_suffix.default[0])),
                #DuplicationMetrics(target=self.target + str(DuplicationMetrics.label.default) + str(DuplicationMetrics.target_suffix.default[0])),
                HsMetrics(target=self.target + str(HsMetrics.target_suffix.default)),
                AlignmentMetrics(target=self.target + str(AlignmentMetrics.target_suffix.default))]

class PicardMetricsNonDup(JobWrapperTask):
    """Runs hs metrics on both duplicated and de-duplicated data"""
    def requires(self):
        return [InsertMetrics(target=self.target + str(InsertMetrics.target_suffix.default[0])),
                HsMetrics(target=self.target + str(HsMetrics.target_suffix.default)),
                # Strip the .dup label from the target to point the
                # non-dedup metrics at the pre-deduplication file.
                HsMetricsNonDup(target=rreplace(self.target, str(DuplicationMetrics.label.default), "", 1) + str(HsMetrics.target_suffix.default)),
                AlignmentMetrics(target=self.target + str(AlignmentMetrics.target_suffix.default))]
7,794
2,543
"""Pandas I/O walkthrough: reading and writing CSV data (lecture 25)."""
import sys
import pandas as pd

# Load a csv file straight into a DataFrame.
frame = pd.read_csv('lec25.csv')

# read_table does the same job once a comma delimiter is supplied.
frame = pd.read_table('lec25.csv', sep=',')

# Suppress header inference so the first row is treated as data.
frame = pd.read_csv('lec25.csv', header=None)

# A read can also be limited to the first few rows.
pd.read_csv('lec25.csv', header=None, nrows=2)

# Write the DataFrame back out as csv; the file lands in the working
# directory (usually where the notebooks are saved).
frame.to_csv('mytextdata_out.csv')

# Stream to stdout with a custom delimiter instead of saving to disk.
frame.to_csv(sys.stdout, sep='_')

# Another delimiter, just to make the separator's role obvious.
frame.to_csv(sys.stdout, sep='?')

# Only a chosen subset of columns need be written.
frame.to_csv(sys.stdout, columns=[0, 1, 2])

# Python's built-in csv module offers lower-level readers/writers:
# https://docs.python.org/2/library/csv.html
1,092
367
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author: x.huang
# @date:17-8-17
"""Keyed MD5 digest helpers.

The former pycryptodome dependency (``Crypto.Hash.MD5``) is replaced by the
standard-library ``hashlib``; the resulting digest is byte-identical (MD5 of
the project key concatenated with the value).
"""
import hashlib

# Project-wide digest key.
# TODO: move the encryption key into project configuration.
_KEY = 'python-framework'


def encrypto(val):
    """Return the hex MD5 digest of the project key followed by *val*.

    :param val: text to digest; encoded as UTF-8 before hashing
               (required under Python 3).
    :return: 32-character lowercase hex string.

    NOTE(review): MD5 is cryptographically broken — acceptable as a
    checksum, not for passwords or signatures.
    """
    m = hashlib.md5(_KEY.encode('utf-8'))
    m.update(val.encode('utf-8'))
    return m.hexdigest()


def decrypto(val):
    """Placeholder: MD5 is one-way, so there is nothing to decrypt.

    Kept for interface compatibility; always returns None.
    """
    pass


if __name__ == '__main__':
    print(encrypto('arhieason'))
385
169
from django.conf.urls import patterns

from comments.views import CommentDebateList

# This url file is included from items.urls with the prefix /comments/
urlpatterns = patterns('',
    # Add a comment to a topic
    (r'^(?P<topic_id>\d+)/add/$', 'comments.views.add'),
    # Edit a comment
    (r'^(?P<topic_id>\d+)/edit/$', 'comments.views.edit'),
    # View a single comment on a page by itself
    (r'^(?P<comment_id>\d+)/?$', 'comments.views.comment_detail'),
    # Delete or restore a comment.
    # BUG FIX: the previous pattern r'[delete|undelete]/$' was a character
    # class matching any SINGLE character from "delet|un", not the words
    # "delete"/"undelete", and it lacked the ^ anchor. A non-capturing group
    # keeps the view's call signature unchanged.
    (r'^(?:delete|undelete)/$', 'comments.views.delete'),
    # View all arguments associated with a comment
    (r'^(?P<comment_id>\d+)/arguments/?(?P<page>\d+)?/?$',
     CommentDebateList.as_view(paginate_by=10,
                               template_name='comments/comment_args.html',
                               context_object_name='args_list')),
    # Flag a comment as spam
    (r'^flag/$', 'comments.views.flag'),
    # Follow or unfollow a topic or comment for
    # updates when new replies are made
    (r'^follow/$', 'comments.views.toggle_follow'),
)
1,428
366
"""Entry point for the "Stoinks Alpha" desktop app: builds the main window
with three dock widgets (Python console input, log output, finance GUI),
wires them to the engine, applies a dark Fusion theme, and runs the Qt loop.
"""
import sys
from threading import Thread
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from pyqtconsole.console import PythonConsole
from view.console import Console
from view.gui_dock import *
from util.logger import *
from model.engine import *

# Wipe previous log files before anything starts writing.
logger.ClearLogs()

# Create the application and top-level window.
app = QApplication([])
app.setApplicationName("Stoinks Alpha")
window = QMainWindow()
console = PythonConsole()
logConsole = Console()

# Wrap the log console in a scroll area so it cannot force the dock to
# resize; both scroll bars stay visible.
logConsoleFrame = QScrollArea()
logConsoleFrame.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
logConsoleFrame.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
logConsoleFrame.setWidgetResizable(True)
logConsoleFrame.setWidget(logConsole)

# Placeholder main view (from view.gui_dock) — temporary for now.
gui = finance_tab_container()

# Dock widgets: interactive console on the left, log output on the right,
# the finance GUI across the top.
consoleContainer = QDockWidget("Input")
consoleContainer.setAllowedAreas(Qt.LeftDockWidgetArea)
consoleContainer.setWidget(console)
logConsoleContainer = QDockWidget("Output")
logConsoleContainer.setAllowedAreas(Qt.RightDockWidgetArea)
logConsoleContainer.setWidget(logConsoleFrame)
guiContainer = QDockWidget("GUI View")
guiContainer.setAllowedAreas(Qt.TopDockWidgetArea)
guiContainer.setWidget(gui)
window.addDockWidget(Qt.LeftDockWidgetArea, consoleContainer)
window.addDockWidget(Qt.RightDockWidgetArea, logConsoleContainer)
window.addDockWidget(Qt.TopDockWidgetArea, guiContainer)

# console.show() is not needed: adding a widget to a shown dock makes it
# visible (original author's note — confirm against Qt docs).
console.eval_in_thread()  # start the interactive console's worker thread

# Connect both consoles to the engine (presumably a module-level singleton
# from model.engine — verify, it arrives via a star import).
engine.connectConsole(console)
engine.connectDebugConsole(logConsole)

# Force the style to be the same on all OSs:
app.setStyle("Fusion")

# Now use a palette to switch to dark colors:
palette = QPalette()
palette.setColor(QPalette.Window, QColor(53, 53, 53))
palette.setColor(QPalette.WindowText, Qt.white)
app.setPalette(palette)

window.setMinimumSize(820, 800)
window.show()
app.exec_()  # blocks until the last window closes
# sys.exit(app.exec_())
engine.stop()  # shut the engine down after the Qt event loop exits
2,022
669
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from behavioral_patterns.command.command import Command


# Composite command that remembers every command added to it.
class HistoryCommand(Command):
    """Holds past commands and can replay, trim, or clear the history."""

    def __init__(self):
        # Commands recorded so far, oldest first.
        self.__past_commands = []

    def execute(self):
        """Replay every recorded command in the order it was added."""
        for recorded in self.__past_commands:
            recorded.execute()

    def add(self, cmd):
        """Append *cmd* to the history as the most recent command."""
        self.__past_commands.append(cmd)

    def undo(self):
        """Drop the most recently recorded command, if the history is non-empty."""
        if self.__past_commands:
            self.__past_commands.pop()

    def clear(self):
        """Forget all recorded commands."""
        self.__past_commands.clear()
845
317
"""Benchmark CLAC vs SAC vs MIRL agents on a PyBullet control task.

Each worker trains three agents, periodically re-evaluates them under no /
mild / extreme domain randomization, and pickles per-episode reward frames.
"""
import logging, os, time, multiprocessing, sys, signal

# Silence TF/absl chatter; must happen before tensorflow is imported.
logging.disable(logging.WARNING)
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"

import tensorflow as tf
import gym
import pybullet, pybullet_envs, pybullet_data
import numpy as np
import pandas as pd

from stable_baselines.sac.policies import MlpPolicy
from stable_baselines.clac.policies import MlpPolicy as CLAC_MlpPolicy
from stable_baselines.common.vec_env import DummyVecEnv
from stable_baselines import SAC, CLAC

# ENVIRONMENT_NAMES Walker2DBulletEnv-v0, Robots/AntBulletEnv-v0 , HopperBulletEnv-v0 , HumanoidBulletEnv-v0, HalfCheetahBulletEnv-v0
FOLDER = "Results/InvertedDoublePendulumBulletEnv"
NUM_RESAMPLES = 50
NUM_TRAINING_STEPS = 100000
NUM_TESTING_STEPS = 50000
ENVIRONMENT_NAME = "InvertedDoublePendulumBulletEnv-v0"

# Create the result/model directories up front.
# BUG FIX: the old exists-check + os.mkdir crashed when the parent FOLDER
# directories were missing; makedirs(exist_ok=True) creates the whole path.
for _subdir in ('/Extreme/results', '/Generalization/results',
                '/Training/results', '/Training/models'):
    os.makedirs(FOLDER + _subdir, exist_ok=True)

# One (mutual-information, entropy) coefficient pair per experiment.
CLAC_COEFS = [2.0]
SAC_COEFS = [2.0]


def eval_model(model, env, model_name, coef, testing_timesteps, training_timestep, agent_step, resample_step, randomization):
    """Roll out *model* in *env* and collect one row per finished episode.

    :param model: trained agent exposing ``predict(obs, states)``.
    :param env: vectorized env (single sub-env is assumed: index [0] is used).
    :param model_name: label stored in the "Model" column.
    :param coef: coefficient value stored in the "Coef" column.
    :param testing_timesteps: number of environment steps to run.
    :param training_timestep: cumulative training steps, stored per row.
    :param agent_step: worker id, stored per row.
    :param resample_step: outer resample index, stored per row.
    :param randomization: 0 = none; >0 is forwarded to the env's
        "randomize" method before rollout and after every episode.
    :return: pandas DataFrame of per-episode rewards.
    """
    obs = env.reset()
    states = None
    reward_sum = 0
    Data = pd.DataFrame()

    if randomization > 0:
        env.env_method("randomize", randomization)

    for test_time in range(testing_timesteps):
        action, states = model.predict(obs, states)
        obs, rewards, dones, infos = env.step(action)
        reward_sum += rewards[0]

        if dones[0]:
            d = {"Model": model_name, "Reward": reward_sum, "Timestep": training_timestep,
                 "Coef": coef, "Randomization": randomization, "AgentID": agent_step,
                 "Resample": resample_step}
            Data = Data.append(d, ignore_index=True)
            reward_sum = 0
            if randomization > 0:
                env.env_method("randomize", randomization)

    # (Removed unused locals from the original: allPlayedCards was never
    # touched, and Avg = np.mean(all_rewards) was computed and discarded,
    # emitting a RuntimeWarning whenever no episode finished.)
    return Data


def test_agent(agent_step):
    """Train and periodically evaluate CLAC, SAC and MIRL agents.

    :param agent_step: integer worker/agent id used in file names and rows.
    Side effects: writes model checkpoints and result pickles under FOLDER.
    """
    now = time.time()

    for coef_index in range(len(CLAC_COEFS)):
        mut_coef = CLAC_COEFS[coef_index]
        ent_coef = SAC_COEFS[coef_index]
        training_timestep = 0

        # NOTE(review): each lambda closes over the env variable that is
        # immediately rebound to the DummyVecEnv; this only works because
        # DummyVecEnv calls the factory during construction — confirm.
        clac_env = gym.make(ENVIRONMENT_NAME)
        clac_env = DummyVecEnv([lambda: clac_env])
        clac_model = CLAC(CLAC_MlpPolicy, clac_env, mut_inf_coef=mut_coef, verbose=1)

        sac_env = gym.make(ENVIRONMENT_NAME)
        sac_env = DummyVecEnv([lambda: sac_env])
        sac_model = SAC(MlpPolicy, sac_env, ent_coef=ent_coef, verbose=1)

        # MIRL = CLAC with a decaying coefficient schedule.
        mirl_env = gym.make(ENVIRONMENT_NAME)
        mirl_env = DummyVecEnv([lambda: mirl_env])
        mirl_model = CLAC(CLAC_MlpPolicy, mirl_env, mut_inf_coef=mut_coef, coef_schedule=3.3e-3, verbose=1)

        for resample_step in range(0, NUM_RESAMPLES):
            # Only worker 1 prints the progress line to keep logs readable.
            if agent_step == 1:
                print(mut_coef, " ", ent_coef, " ", NUM_TRAINING_STEPS, " ", ENVIRONMENT_NAME, " ", FOLDER, " ", resample_step)

            (clac_model, learning_results) = clac_model.learn(total_timesteps=NUM_TRAINING_STEPS, log_interval=1000)
            (sac_model, learning_results) = sac_model.learn(total_timesteps=NUM_TRAINING_STEPS, log_interval=1000)
            (mirl_model, learning_results) = mirl_model.learn(total_timesteps=NUM_TRAINING_STEPS, log_interval=1000)

            # Save models.
            # BUG FIX: all three checkpoints were previously written under
            # the "CLAC_" prefix, so the SAC and MIRL saves overwrote the
            # CLAC checkpoint whenever the coefficients coincided.
            clac_model.save(FOLDER + "/Training/models/CLAC_" + str(mut_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step))
            sac_model.save(FOLDER + "/Training/models/SAC_" + str(ent_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step))
            mirl_model.save(FOLDER + "/Training/models/MIRL_" + str(mut_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step))

            training_timestep += NUM_TRAINING_STEPS

            # Test Normal (no randomization).
            # (The explicit AgentID assignments below are redundant —
            # eval_model already stores AgentID per row — but harmless.)
            eval_results = eval_model(clac_model, clac_env, "CLAC", mut_coef, NUM_TESTING_STEPS, training_timestep, agent_step, resample_step, 0)
            eval_results.to_pickle(FOLDER + "/Training/results/CLAC_" + str(mut_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step) + ".pkl")

            eval_results = eval_model(sac_model, sac_env, "SAC", ent_coef, NUM_TESTING_STEPS, training_timestep, agent_step, resample_step, 0)
            eval_results['AgentID'] = agent_step
            eval_results.to_pickle(FOLDER + "/Training/results/SAC_" + str(ent_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step) + ".pkl")

            eval_results = eval_model(mirl_model, mirl_env, "MIRL", mut_coef, NUM_TESTING_STEPS, training_timestep, agent_step, resample_step, 0)
            eval_results['AgentID'] = agent_step
            eval_results.to_pickle(FOLDER + "/Training/results/MIRL_" + str(mut_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step) + ".pkl")

            # Test generalization (mild randomization).
            eval_results = eval_model(clac_model, clac_env, "CLAC", mut_coef, NUM_TESTING_STEPS, training_timestep, agent_step, resample_step, 1)
            eval_results['AgentID'] = agent_step
            eval_results.to_pickle(FOLDER + "/Generalization/results/CLAC_" + str(mut_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step) + ".pkl")

            eval_results = eval_model(sac_model, sac_env, "SAC", ent_coef, NUM_TESTING_STEPS, training_timestep, agent_step, resample_step, 1)
            eval_results['AgentID'] = agent_step
            eval_results.to_pickle(FOLDER + "/Generalization/results/SAC_" + str(ent_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step) + ".pkl")

            eval_results = eval_model(mirl_model, mirl_env, "MIRL", mut_coef, NUM_TESTING_STEPS, training_timestep, agent_step, resample_step, 1)
            eval_results['AgentID'] = agent_step
            eval_results.to_pickle(FOLDER + "/Generalization/results/MIRL_" + str(mut_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step) + ".pkl")

            # Test generalization Extreme (strong randomization).
            eval_results = eval_model(clac_model, clac_env, "CLAC", mut_coef, NUM_TESTING_STEPS, training_timestep, agent_step, resample_step, 2)
            eval_results['AgentID'] = agent_step
            eval_results.to_pickle(FOLDER + "/Extreme/results/CLAC_" + str(mut_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step) + ".pkl")

            eval_results = eval_model(sac_model, sac_env, "SAC", ent_coef, NUM_TESTING_STEPS, training_timestep, agent_step, resample_step, 2)
            eval_results['AgentID'] = agent_step
            eval_results.to_pickle(FOLDER + "/Extreme/results/SAC_" + str(ent_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step) + ".pkl")

            eval_results = eval_model(mirl_model, mirl_env, "MIRL", mut_coef, NUM_TESTING_STEPS, training_timestep, agent_step, resample_step, 2)
            eval_results['AgentID'] = agent_step
            eval_results.to_pickle(FOLDER + "/Extreme/results/MIRL_" + str(mut_coef).replace(".", "p") + "_" + str(agent_step) + "_" + str(resample_step) + ".pkl")

            # Reset any accumulated env-side randomization state.
            clac_env.env_method("reset_features")
            sac_env.env_method("reset_features")
            mirl_env.env_method("reset_features")

        # Free model/env memory before the next coefficient pair.
        del sac_model
        del sac_env
        del clac_model
        del clac_env
        del mirl_model
        del mirl_env

    later = time.time()
    difference = int(later - now)
    print("Tested Agent Time: ", difference)


def main():
    """Fan test_agent out over one process per agent id.

    SIGINT is temporarily ignored while the pool forks so workers inherit
    the ignore handler; the parent then restores its own handler to be able
    to terminate the pool on Ctrl-C.
    """
    Agents = [1, 2]
    print("Initializng workers: ", Agents)

    original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
    pool = multiprocessing.Pool(processes=len(Agents))
    signal.signal(signal.SIGINT, original_sigint_handler)

    try:
        print("Starting jobs")
        res = pool.map_async(test_agent, Agents)
        print("Waiting for results")
        #res.get(1000000) # Without the timeout this blocking call ignores all signals.
    except KeyboardInterrupt:
        print("Caught Keyboard Interrupt, terminating workers")
        pool.terminate()
        pool.join()
    else:
        print("Normal termination")
        pool.close()
        pool.join()


if __name__ == "__main__":
    main()
8,710
3,190
""" Copyright 2016 Andrea McIntosh Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from django.conf.urls import url, include from . import views app_name = 'todo' urlpatterns = [ url(r'^$', views.index_view, name='index'), url(r'^(?P<pk>[0-9]+)/$', views.list_details, name='detail'), url(r'^(?P<pk>[0-9]+)/newitem/$', views.new_item, name='new_item'), url(r'^newlist/$', views.new_list, name='new_list'), url(r'^register/$', views.register, name='register'), url(r'^accounts/login/$', 'django.contrib.auth.views.login', name='login'), url(r'^accounts/logout/$', views.user_logout, name='logout'), url(r'^accounts/viewlists/$', views.view_lists, name='viewlists'), url(r'^accounts/', include('django.contrib.auth.urls')), ]
1,274
427
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Graphic User Interface.""" import sys import os import glob import ConfigParser from PyQt4.QtCore import * from PyQt4.QtGui import * from version import __version__ from algorithm.imtool import label_edge_detection as vol_label_edge_detection from algorithm.imtool import inverse_transformation from algorithm.meshtool import label_edge_detection as surf_label_edge_detection from core.labelconfig import LabelConfig from utils import get_icon_dir from widgets.listwidget import LayerView from widgets.gridwidget import GridView from widgets.orthwidget import OrthView from widgets.datamodel import VolumeListModel from widgets.drawsettings import PainterStatus, ViewSettings, MoveSettings from widgets.binarizationdialog import VolBinarizationDialog, SurfBinarizationDialog from widgets.intersectdialog import VolIntersectDialog, SurfIntersectDialog from widgets.localmaxdialog import LocalMaxDialog from widgets.no_gui_tools import gen_label_color from widgets.smoothingdialog import SmoothingDialog from widgets.growdialog import GrowDialog, VolumeRGDialog from widgets.watersheddialog import WatershedDialog from widgets.slicdialog import SLICDialog from widgets.clusterdialog import SurfClusterDialog, VolClusterDialog from widgets.regularroidialog import RegularROIDialog from widgets.regularroifromcsvfiledialog import RegularROIFromCSVFileDialog from widgets.roi2gwmidialog import Roi2gwmiDialog from widgets.roimergedialog import ROIMergeDialog from widgets.opendialog import OpenDialog from widgets.labelmanagedialog import LabelManageDialog from widgets.labelconfigcenter import LabelConfigCenter from widgets.roidialog import VolROIDialog, SurfROIDialog from widgets.atlasdialog import AtlasDialog from widgets.binaryerosiondialog import VolBinErosionDialog, SurfBinErosionDialog from widgets.binarydilationdialog import VolBinDilationDialog, SurfBinDilationDialog from 
widgets.greydilationdialog import GreydilationDialog from widgets.greyerosiondialog import GreyerosionDialog from widgets.meants import MeanTSDialog from widgets.voxelstatsdialog import VoxelStatsDialog from widgets.registervolume import RegisterVolumeDialog from widgets.treemodel import TreeModel from widgets.surfacetreewidget import SurfaceTreeView from widgets.surfaceview import SurfaceView from widgets.scribingdialog import ScribingDialog from widgets.surfaceRGdialog import SurfaceRGDialog from widgets.prob_map_dialog import SurfProbMapDialog from widgets.concatenate_dialog import SurfConcatenateDialog class BpMainWindow(QMainWindow): """Class BpMainWindow provides UI interface of FreeROI. Example: -------- >>> from PyQt4.QtGui import QApplication >>> import main >>> app = QApplication([]) >>> win = main.BpMainWindow() ...... >>> win.show() >>> app.exec_() """ def __init__(self, parent=None): """Initialize an instance of BpMainWindow.""" # Inherited from QMainWindow if sys.platform == 'darwin': # Workaround for Qt issue on OSX that causes QMainWindow to # hide when adding QToolBar, see # https://bugreports.qt-project.org/browse/QTBUG-4300 super(BpMainWindow, self).__init__(parent, Qt.MacWindowToolBarButtonHint) else: super(BpMainWindow, self).__init__(parent) # temporary variable self._save_dir = None self._temp_dir = None self.is_save_configure = False # pre-define model variables, one for volume dataset, another # for suface dataset self.volume_model = None self.surface_model = None self.tabWidget = None self.volume_actions_status = {} self.surface_actions_status = {} self.volume_view = None self.surface_view = None self.list_view = None self.surface_tree_view = None self.painter_status = PainterStatus(ViewSettings()) def config_extra_settings(self, data_dir): """Set data directory and update some configurations.""" # load data directory configuration self.label_path = data_dir self.label_config_dir = os.path.join(self.label_path, 'labelconfig') 
self.label_config_suffix = 'lbl' # set icon configuration self._icon_dir = get_icon_dir() self.setWindowTitle('FreeROI') self.setWindowIcon(QIcon(os.path.join(self._icon_dir, 'logo.png'))) self._init_configuration() self.center() self._create_actions() self._create_menus() def center(self): """Display main window in the center of screen.""" qr = self.frameGeometry() cp = QDesktopWidget().availableGeometry().center() qr.moveCenter(cp) self.move(qr.topLeft()) def _init_configuration(self): """Load configuration for GUI.""" config_file = os.path.expanduser('~/.froi.conf') if os.path.exists(config_file): config = ConfigParser.RawConfigParser() config.read(config_file) self.window_width = config.getint('width', 'int') self.window_height = config.getint('height', 'int') self.orth_scale_factor = config.getint('orth_scale', 'int') self.grid_scale_factor = config.getint('grid_scale', 'int') self.window_xpos = config.getint('xpos', 'int') self.window_ypos = config.getint('ypos', 'int') self.resize(self.window_width, self.window_height) self.move(self.window_xpos, self.window_ypos) self.default_orth_scale_factor = float(self.orth_scale_factor) / 100 self.default_grid_scale_factor = float(self.grid_scale_factor) / 100 else: # self.setWindowState(Qt.WindowMaximized) screen_geo = QDesktopWidget().screenGeometry() self.setMinimumSize(screen_geo.width()*2/3, screen_geo.height()*8/9) self.default_orth_scale_factor = 1.0 self.default_grid_scale_factor = 2.0 def _init_tab_widget(self): # set tab widget self.tabWidget = QTabWidget() self.tabWidget.setTabShape(QTabWidget.Rounded) self.tabWidget.setSizePolicy(QSizePolicy.Maximum, QSizePolicy.Expanding) self.tabWidget.setMaximumWidth(280) self.tabWidget.currentChanged.connect(self._tabwidget_index_changed) # set central widget central_widget = QWidget() layout = QHBoxLayout() central_widget.setLayout(layout) central_widget.layout().addWidget(self.tabWidget) self.setCentralWidget(central_widget) # add tool bar self._add_toolbar() # 
self.setUnifiedTitleAndToolBarOnMac(True) # change actions status self._actions['add_image'].setEnabled(True) self._actions['new_image'].setEnabled(True) self._actions['save_image'].setEnabled(True) self._actions['close'].setEnabled(True) def _init_vol_actions(self): self._actions['duplicate_image'].setEnabled(True) # self._actions['ld_lbl'].setEnabled(True) # self._actions['ld_glbl'].setEnabled(True) self._actions['orth_view'].setEnabled(True) self._actions['cross_hover_view'].setEnabled(True) self._actions['original_view'].setEnabled(True) self._actions['remove_image'].setEnabled(False) self._actions['undo'].setEnabled(False) self._actions['redo'].setEnabled(False) self._vol_func_module_set_enabled(True) self._actions['binarization'].setEnabled(True) self._actions['binaryerosion'].setEnabled(True) self._actions['binarydilation'].setEnabled(True) self._actions['edge_dete'].setEnabled(True) self._actions['inverse'].setEnabled(True) self._actions['label_management'].setEnabled(True) self._actions['cluster'].setEnabled(True) self._actions['intersect'].setEnabled(True) if not self.volume_model.is_mni_space(): self._actions['atlas'].setEnabled(False) def _init_surf_actions(self): self._actions['duplicate_image'].setEnabled(True) self._actions['undo'].setEnabled(False) self._actions['redo'].setEnabled(False) self._spinbox.setEnabled(False) self._surf_func_module_set_enabled(True) self._actions['binarization'].setEnabled(True) self._actions['binaryerosion'].setEnabled(True) self._actions['binarydilation'].setEnabled(True) self._actions['edge_dete'].setEnabled(True) self._actions['inverse'].setEnabled(True) self._actions['label_management'].setEnabled(True) self._actions['cluster'].setEnabled(True) self._actions['intersect'].setEnabled(True) def _save_configuration(self): """Save GUI configuration to a file.""" config_file = os.path.expanduser('~/.freeroi.conf') config = ConfigParser.RawConfigParser() config.add_section('width') config.add_section('height') 
config.add_section('orth_scale') config.add_section('grid_scale') config.add_section('xpos') config.add_section('ypos') config.set('width', 'int', self.width()) config.set('height', 'int', self.height()) config.set('xpos', 'int', self.x()) config.set('ypos', 'int', self.y()) if hasattr(self, 'volume_model') and isinstance(self.volume_model, VolumeListModel): config.set('orth_scale', 'int', int(self.volume_model.get_scale_factor('orth')*100)) config.set('grid_scale', 'int', int(self.volume_model.get_scale_factor('grid')*100)) else: config.set('orth_scale', 'int', int(self.default_orth_scale_factor * 100)) config.set('grid_scale', 'int', int(self.default_grid_scale_factor * 100)) with open(config_file, 'wb') as conf: config.write(conf) def closeEvent(self, e): if self.is_save_configure: self._save_configuration() e.accept() def _create_actions(self): """Create actions.""" # create a dictionary to store actions info self._actions = {} # Open template action self._actions['add_template'] = QAction(QIcon(os.path.join( self._icon_dir, 'open.png')), self.tr("&Open standard template"), self) self._actions['add_template'].setShortcut(self.tr("Ctrl+O")) self._actions['add_template'].triggered.connect(self._add_template) self._actions['add_template'].setEnabled(True) # Add a new volume image action self._actions['add_volume_image'] = QAction(QIcon(os.path.join( self._icon_dir, 'add.png')), self.tr("&Add volume file ... "), self) self._actions['add_volume_image'].triggered.connect(self._add_volume_image) self._actions['add_volume_image'].setEnabled(True) # Add a new surface image action self._actions['add_surface_image'] = QAction(QIcon(os.path.join( self._icon_dir, 'add.png')), self.tr("&Add surface file ... 
"), self) self._actions['add_surface_image'].triggered.connect(self._add_surface_image) self._actions['add_surface_image'].setEnabled(True) # Add a new image action self._actions['add_image'] = QAction(QIcon(os.path.join(self._icon_dir, 'add.png')), self.tr("&Add image ... "), self) self._actions['add_image'].setShortcut(self.tr("Ctrl+A")) self._actions['add_image'].triggered.connect(self._add_image) self._actions['add_image'].setEnabled(False) # Remove an image self._actions['remove_image'] = QAction(QIcon(os.path.join( self._icon_dir, 'remove.png')), self.tr("&Remove image"), self) self._actions['remove_image'].setShortcut(self.tr("Ctrl+R")) self._actions['remove_image'].triggered.connect(self._remove_image) self._actions['remove_image'].setEnabled(False) # New image self._actions['new_image'] = QAction(QIcon(os.path.join( self._icon_dir, 'create.png')), self.tr("&New image"), self) self._actions['new_image'].setShortcut(self.tr("Ctrl+N")) self._actions['new_image'].triggered.connect(self._new_image) self._actions['new_image'].setEnabled(False) # Duplicate image self._actions['duplicate_image'] = QAction(self.tr("Duplicate"), self) self._actions['duplicate_image'].triggered.connect( self._duplicate_image) self._actions['duplicate_image'].setEnabled(False) # Save image self._actions['save_image'] = QAction(QIcon(os.path.join( self._icon_dir, 'save.png')), self.tr("&Save image as..."), self) self._actions['save_image'].setShortcut(self.tr("Ctrl+S")) self._actions['save_image'].triggered.connect(self._save_image) self._actions['save_image'].setEnabled(False) ## Load Label Config #self._actions['ld_lbl'] = QAction('Load Label', self) #self._actions['ld_lbl'].triggered.connect(self._ld_lbl) #self._actions['ld_lbl'].setEnabled(False) ## Load Global Label Config #self._actions['ld_glbl'] = QAction('Load Global Label', self) #self._actions['ld_glbl'].triggered.connect(self._ld_glbl) #self._actions['ld_glbl'].setEnabled(False) # Close display self._actions['close'] = 
QAction(self.tr("Close tab"), self) self._actions['close'].setShortcut(self.tr("Ctrl+W")) self._actions['close'].triggered.connect(self._close_display) self._actions['close'].setEnabled(False) # Quit action self._actions['quit'] = QAction(QIcon(os.path.join( self._icon_dir, 'quit.png')), self.tr("&Quit"), self) self._actions['quit'].setShortcut(self.tr("Ctrl+Q")) self._actions['quit'].triggered.connect(self.close) # Grid view action self._actions['grid_view'] = QAction(QIcon(os.path.join( self._icon_dir, 'gridview.png')), self.tr("Lightbox"), self) self._actions['grid_view'].triggered.connect(self._grid_view) self._actions['grid_view'].setEnabled(False) # Orth view action self._actions['orth_view'] = QAction(QIcon(os.path.join( self._icon_dir, 'orthview.png')), self.tr("Orthographic"), self) self._actions['orth_view'].triggered.connect(self._orth_view) self._actions['orth_view'].setEnabled(False) # return original size self._actions['original_view'] = QAction(QIcon(os.path.join( self._icon_dir, 'original_size.png')), self.tr("Reset view"), self) self._actions['original_view'].triggered.connect(self._reset_view) self._actions['original_view'].setEnabled(False) # whether display the cross hover self._actions['cross_hover_view'] = QAction(QIcon(os.path.join( self._icon_dir, 'cross_hover_enable.png')), self.tr("Disable cross hover"), self) self._actions['cross_hover_view'].triggered.connect(self._display_cross_hover) self._actions['cross_hover_view'].setEnabled(False) # Binarization view action self._actions['binarization'] = QAction(QIcon(os.path.join( self._icon_dir, 'binarization.png')), self.tr("Binarization"), self) self._actions['binarization'].triggered.connect(self._binarization) self._actions['binarization'].setEnabled(False) # Intersection action self._actions['intersect'] = QAction(QIcon(os.path.join( self._icon_dir, 'intersect.png')), self.tr("Intersection"), self) self._actions['intersect'].triggered.connect(self._intersect) 
self._actions['intersect'].setEnabled(False) # Extract mean time course self._actions['meants'] = QAction(QIcon(os.path.join( self._icon_dir, 'voxel_curve.png')), self.tr("Extract Mean Time Course"), self) self._actions['meants'].triggered.connect(self._meants) self._actions['meants'].setEnabled(False) # Voxel Stats self._actions['voxelstats'] = QAction(self.tr("Voxel number stats"), self) self._actions['voxelstats'].triggered.connect(self._voxelstats) self._actions['voxelstats'].setEnabled(False) # Local Max action self._actions['localmax'] = QAction(QIcon(os.path.join( self._icon_dir, 'localmax.png')), self.tr("Local Max"), self) self._actions['localmax'].triggered.connect(self._local_max) self._actions['localmax'].setEnabled(False) # Inversion action self._actions['inverse'] = QAction(QIcon(os.path.join( self._icon_dir, 'inverse.png')), self.tr("Inversion"), self) self._actions['inverse'].triggered.connect(self._inverse) self._actions['inverse'].setEnabled(False) # Smoothing action self._actions['smoothing'] = QAction(QIcon(os.path.join( self._icon_dir, 'smoothing.png')), self.tr("Smoothing"), self) self._actions['smoothing'].triggered.connect(self._smooth) self._actions['smoothing'].setEnabled(False) # Concatenate overlays to one overlay self._actions['concatenate'] = QAction(self.tr('Concatenate'), self) self._actions['concatenate'].triggered.connect(self._concatenate) self._actions['concatenate'].setEnabled(False) # Calculate probability map action self._actions['probability_map'] = QAction(self.tr('ProbabilityMap'), self) self._actions['probability_map'].triggered.connect(self._prob_map) self._actions['probability_map'].setEnabled(False) # Region Growing action self._actions['region_grow'] = QAction(QIcon(os.path.join( self._icon_dir, 'grow.png')), self.tr("Region Growing"), self) self._actions['region_grow'].triggered.connect(self._region_grow) self._actions['region_grow'].setEnabled(False) # Lable Management action self._actions['label_management'] = 
        QAction(self.tr("Label Management"), self)
        self._actions['label_management'].triggered.connect(self._label_manage)
        self._actions['label_management'].setEnabled(False)
        # Snapshot
        self._actions['snapshot'] = QAction(self.tr("Snapshot"), self)
        self._actions['snapshot'].triggered.connect(self._snapshot)
        self._actions['snapshot'].setEnabled(False)
        # Watershed action
        self._actions['watershed'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'watershed.png')),
            self.tr("Watershed"), self)
        self._actions['watershed'].triggered.connect(self._watershed)
        self._actions['watershed'].setEnabled(False)
        # SLIC action
        self._actions['slic'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'slic.png')),
            self.tr("SLIC"), self)
        self._actions['slic'].triggered.connect(self._slic)
        self._actions['slic'].setEnabled(False)
        # Cluster action
        self._actions['cluster'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'cluster.png')),
            self.tr("Cluster"), self)
        self._actions['cluster'].triggered.connect(self._cluster)
        self._actions['cluster'].setEnabled(False)
        # Opening
        self._actions['opening'] = QAction(self.tr("Opening"), self)
        self._actions['opening'].triggered.connect(self._opening)
        self._actions['opening'].setEnabled(False)
        # Binary_erosion view action
        self._actions['binaryerosion'] = QAction(self.tr("Binary Erosion"), self)
        self._actions['binaryerosion'].triggered.connect(self._binaryerosion)
        self._actions['binaryerosion'].setEnabled(False)
        # Binary_dilation view action
        self._actions['binarydilation'] = QAction(self.tr("Binary Dilation"), self)
        self._actions['binarydilation'].triggered.connect(self._binarydilation)
        self._actions['binarydilation'].setEnabled(False)
        # grey_erosion view action
        self._actions['greyerosion'] = QAction(self.tr("Grey Erosion"), self)
        self._actions['greyerosion'].triggered.connect(self._greyerosion)
        self._actions['greyerosion'].setEnabled(False)
        # grey_dilation view action
        self._actions['greydilation'] = QAction(self.tr("Grey Dilation"), self)
        self._actions['greydilation'].triggered.connect(self._greydilation)
        self._actions['greydilation'].setEnabled(False)
        # About software
        self._actions['about_freeroi'] = QAction(self.tr("About FreeROI"), self)
        self._actions['about_freeroi'].triggered.connect(self._about_freeroi)
        # About Qt
        self._actions['about_qt'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'qt.png')),
            self.tr("About Qt"), self)
        self._actions['about_qt'].triggered.connect(qApp.aboutQt)
        # Hand
        # Checkable pan tool; starts unchecked and disabled until a volume
        # is loaded and orth view is active (see _orth_view).
        self._actions['hand'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'hand.png')),
            self.tr("Hand"), self)
        self._actions['hand'].triggered.connect(self._hand_enable)
        self._actions['hand'].setCheckable(True)
        self._actions['hand'].setChecked(False)
        self._actions['hand'].setEnabled(False)
        # Cursor
        # Default pointer tool: checkable and checked from the start.
        self._actions['cursor'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'cursor.png')),
            self.tr("Cursor"), self)
        self._actions['cursor'].triggered.connect(self._cursor_enable)
        self._actions['cursor'].setCheckable(True)
        self._actions['cursor'].setChecked(True)
        self._actions['cursor'].setEnabled(True)
        # Edit
        self._actions['edit'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'edit.png')),
            self.tr("Edit"), self)
        self._actions['edit'].triggered.connect(self._roidialog_enable)
        self._actions['edit'].setCheckable(True)
        self._actions['edit'].setChecked(False)
        # Undo
        self._actions['undo'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'undo.png')),
            self.tr("Undo"), self)
        self._actions['undo'].triggered.connect(self._undo)
        # Redo
        self._actions['redo'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'redo.png')),
            self.tr("Redo"), self)
        self._actions['redo'].triggered.connect(self._redo)
        # sphere and cube roi
        self._actions['regular_roi'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'sphere_and_cube.png')),
            self.tr("Regular ROI"), self)
        self._actions['regular_roi'].triggered.connect(self._regular_roi)
        self._actions['regular_roi'].setEnabled(False)
        # sphere and cube roi from csv file
        self._actions['regular_roi_from_csv'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'sphere_and_cube.png')),
            self.tr("Regular ROI From CSV File"), self)
        self._actions['regular_roi_from_csv'].triggered.connect(self._regular_roi_from_csv_file)
        self._actions['regular_roi_from_csv'].setEnabled(False)
        # ROI to Interface
        self._actions['r2i'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'r2i.png')),
            self.tr("ROI2Interface"), self)
        self._actions['r2i'].triggered.connect(self._r2i)
        self._actions['r2i'].setEnabled(False)
        # Edge detection for ROI
        self._actions['edge_dete'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'edge_detection.png')),
            self.tr("Edge Detection"), self)
        self._actions['edge_dete'].triggered.connect(self._label_edge_detection)
        self._actions['edge_dete'].setEnabled(False)
        # Atlas information
        self._actions['atlas'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'atlas.png')),
            self.tr("Candidate Label"), self)
        self._actions['atlas'].triggered.connect(self._atlas_dialog)
        self._actions['atlas'].setEnabled(False)
        # ROI Merging
        self._actions['roi_merge'] = QAction(QIcon(os.path.join(
            self._icon_dir, 'merging.png')),
            self.tr("ROI Merging"), self)
        self._actions['roi_merge'].triggered.connect(self._roi_merge)
        self._actions['roi_merge'].setEnabled(False)
        # ROI scribing
        self._actions['scribing'] = QAction(self.tr("scribing"), self)
        self._actions['scribing'].triggered.connect(self._roi_scribing)
        self._actions['scribing'].setEnabled(False)
        # surface region grow
        self._actions['surf_region_grow'] = QAction(self.tr("surf_RG"), self)
        self._actions['surf_region_grow'].triggered.connect(self._surf_rg)
        self._actions['surf_region_grow'].setEnabled(False)

    def _surf_rg(self):
        """Open the surface region-growing dialog (non-modal)."""
        new_dialog = SurfaceRGDialog(self.surface_model, self.surface_view, self)
        new_dialog.show()

    def _roi_scribing(self):
        """Open the surface ROI scribing dialog (non-modal)."""
        new_dialog = ScribingDialog(self.surface_view, self)
        new_dialog.show()

    def _add_toolbar(self):
        """Add toolbar."""
        # Initialize a spinbox for zoom-scale selection
        self._spinbox = QSpinBox()
        self._spinbox.setMaximum(500)
        # Zoom range 50%..500% in 10% steps; initial value mirrors the
        # default grid scale factor (factor is a fraction, spinbox shows %).
        self._spinbox.setMinimum(50)
        self._spinbox.setSuffix('%')
        self._spinbox.setSingleStep(10)
        self._spinbox.setValue(self.default_grid_scale_factor * 100)
        self._spinbox.valueChanged.connect(self._set_scale_factor)
        # Add a toolbar
        self._toolbar = self.addToolBar("Tools")
        #self._toolbar.setIconSize(QSize(38,38))
        # Add file actions
        self._toolbar.addAction(self._actions['add_image'])
        self._toolbar.addAction(self._actions['remove_image'])
        self._toolbar.addAction(self._actions['new_image'])
        self._toolbar.addAction(self._actions['save_image'])
        # Add view actions
        self._toolbar.addSeparator()
        self._toolbar.addAction(self._actions['grid_view'])
        self._toolbar.addAction(self._actions['orth_view'])
        self._toolbar.addAction(self._actions['original_view'])
        self._toolbar.addAction(self._actions['cross_hover_view'])
        # Add cursor status
        self._toolbar.addSeparator()
        self._toolbar.addAction(self._actions['hand'])
        self._toolbar.addAction(self._actions['cursor'])
        self._toolbar.addAction(self._actions['edit'])
        # Add undo redo
        self._toolbar.addSeparator()
        self._toolbar.addAction(self._actions['undo'])
        self._toolbar.addAction(self._actions['redo'])
        self._toolbar.addSeparator()
        self._toolbar.addWidget(self._spinbox)

    def _set_scale_factor(self, value):
        """Set scale factor."""
        # Spinbox reports a percentage; the model stores a fraction.
        value = float(value) / 100
        self.volume_model.set_scale_factor(value, self.volume_view.display_type())

    def _add_template(self):
        """Open a dialog window and select a template file."""
        template_dir = os.path.join(self.label_path, 'standard',
                                    'MNI152_T1_2mm_brain.nii.gz')
        template_name = QFileDialog.getOpenFileName(
            self,
            'Open standard file',
            template_dir,
            'Nifti files (*.nii.gz *.nii)')
        if not template_name == '':
            # On Windows, encode to gb2312 for filesystem access
            # (Python 2 / PyQt4 QString handling).
            if sys.platform == 'win32':
                template_path = unicode(template_name).encode('gb2312')
            else:
                template_path = str(template_name)
            self._add_volume_img(template_path)

    def _add_image(self):
        """Dispatch 'add image' to the volume or surface handler
        depending on which tab is active."""
        if self.tabWidget.currentWidget() == self.list_view:
            self._add_volume_image()
        else:
            self._add_surface_image()

    def _add_volume_image(self):
        """Add new item."""
        # NOTE(review): `== None` should be `is None` for identity checks
        # (kept as-is here; documentation-only pass).
        if self._temp_dir == None:
            temp_dir = QDir.currentPath()
        else:
            temp_dir = self._temp_dir
        file_name = QFileDialog.getOpenFileName(
            self,
            'Add new volume file',
            temp_dir,
            "Nifti files (*.nii *.nii.gz)")
        if file_name != '':
            if sys.platform == 'win32':
                file_path = unicode(file_name).encode('gb2312')
            else:
                file_path = str(file_name)
            self._add_volume_img(file_path)

    def _add_surface_image(self):
        """Add new surface image."""
        if self._temp_dir is None:
            temp_dir = QDir.currentPath()
        else:
            temp_dir = self._temp_dir
        file_name = QFileDialog.getOpenFileName(
            self,
            'Add new surface file',
            temp_dir)
        if file_name != '':
            if sys.platform == 'win32':
                file_path = unicode(file_name).encode('gb2312')
            else:
                file_path = str(file_name)
            self._add_surface_img(file_path)

    def _duplicate_image(self):
        """Duplicate image."""
        if self.tabWidget.currentWidget() is self.list_view:
            # Duplicate the currently-selected volume and select the copy.
            index = self.volume_model.currentIndex()
            dup_img = self.volume_model._data[index.row()].duplicate()
            self.volume_model.insertRow(0, dup_img)
            self.list_view.setCurrentIndex(self.volume_model.index(0))
            # change button status
            self._actions['remove_image'].setEnabled(True)
        elif self.tabWidget.currentWidget() is self.surface_tree_view:
            # Only depth-2 tree items are overlays; anything else is not
            # duplicable.
            index = self.surface_model.current_index()
            depth = self.surface_model.index_depth(index)
            if depth != 2:
                QMessageBox.warning(self,
                                    'Warning!',
                                    'Get overlay failed!\nYou may have not selected any overlay!',
                                    QMessageBox.Yes)
                return
            # Re-add the overlay with all of its display attributes copied
            # (Qt.UserRole offsets map to overlay properties on the model).
            self.surface_model.add_item(
                index,
                source=self.surface_model.data(index, Qt.UserRole + 5).copy(),
                vmin=self.surface_model.data(index, Qt.UserRole),
                vmax=self.surface_model.data(index, Qt.UserRole + 1),
                colormap=self.surface_model.data(index, Qt.UserRole + 3),
                alpha=self.surface_model.data(index, Qt.UserRole + 2),
                visible=self.surface_model.data(index, Qt.UserRole + 8),
                islabel=self.surface_model.data(index, Qt.UserRole + 7),
                name=self.surface_model.data(index, Qt.DisplayRole))

    def _add_volume_img(self, source, name=None,
                        header=None, view_min=None, view_max=None,
                        alpha=255, colormap='gray'):
        """ Add image."""
        # If model is NULL, then re-initialize it.
        if not self.volume_model:
            self._vol_label_config_center = self._init_label_config_center()
            self._vol_label_config_center.size_edit.setRange(1, 10)
            self._vol_label_config_center.size_edit.setValue(4)
            self.volume_model = VolumeListModel([], self._vol_label_config_center)
            self.volume_model.set_scale_factor(self.default_grid_scale_factor, 'grid')
            self.volume_model.set_scale_factor(self.default_orth_scale_factor, 'orth')
            self._init_vol_roidialog(self.volume_model)
        # Save previous opened directory (except `standard` directory)
        file_path = source
        if sys.platform == 'win32':
            # os.stat comparison is used on Windows because samefile is
            # unavailable there in Python 2.
            temp_dir = os.path.dirname(unicode(file_path, 'gb2312'))
            if not os.stat(temp_dir) == os.stat(os.path.join(self.label_path,
                                                             'standard')):
                self._temp_dir = temp_dir
        else:
            temp_dir = os.path.dirname(file_path)
            if not os.path.samefile(temp_dir, os.path.join(self.label_path,
                                                           'standard')):
                self._temp_dir = temp_dir
        if self.volume_model.addItem(file_path, None, name, header,
                                     view_min, view_max, alpha, colormap):
            # If only one data in VolumeList, then initialize views.
            if self.volume_model.rowCount() == 1:
                # initialize views
                self.list_view = LayerView(self._vol_label_config_center)
                self.list_view.setModel(self.volume_model)
                self.volume_view = GridView(self.volume_model, self.painter_status)
                # connect signals with slots
                self.list_view.current_changed.connect(self._update_undo)
                self.list_view.current_changed.connect(self._update_redo)
                self.list_view._list_view.selectionModel().currentChanged.connect(
                    self.vol_roidialog.clear_rois)
                self.volume_model.rowsInserted.connect(self._update_remove_image)
                self.volume_model.undo_stack_changed.connect(self._update_undo)
                self.volume_model.redo_stack_changed.connect(self._update_redo)
                # set current volume index
                self.list_view.setCurrentIndex(self.volume_model.index(0))
                # set crosshair as the center of the data
                self.volume_model.set_cross_pos([self.volume_model.getY()/2,
                                                 self.volume_model.getX()/2,
                                                 self.volume_model.getZ()/2])
                # Enable cursor tracking
                # self.list_view._list_view.selectionModel().currentChanged.connect(
                #     self._switch_cursor_status)
            if not self.tabWidget:
                self._init_tab_widget()
            # Ensure the Volume tab exists and is the active tab.
            if self.tabWidget.count() == 0:
                self.tabWidget.addTab(self.list_view, "Volume")
                self._init_vol_actions()
            elif self.tabWidget.count() == 1 and \
                    self.tabWidget.currentWidget() != self.list_view:
                self.tabWidget.addTab(self.list_view, "Volume")
                self.tabWidget.setCurrentIndex(1)
                self._init_vol_actions()
            elif self.tabWidget.count() == 2 and \
                    self.tabWidget.currentWidget() != self.list_view:
                # Flip to the other tab (index arithmetic toggles 0 <-> 1).
                self.tabWidget.setCurrentIndex(
                    self.tabWidget.count() - self.tabWidget.currentIndex() - 1)
            if self.centralWidget().layout().indexOf(self.volume_view) == -1:
                # Could not find the self.volume_view
                if self.centralWidget().layout().indexOf(self.surface_view) != -1:
                    self.centralWidget().layout().removeWidget(self.surface_view)
                    self.surface_view.setParent(None)
                self.centralWidget().layout().addWidget(self.volume_view)
            if self.volume_model.rowCount() > 1:
                self._actions['remove_image'].setEnabled(True)
            # set current volume index
            self.list_view.setCurrentIndex(self.volume_model.index(0))
            self.is_save_configure = True
        else:
            # addItem failed (size mismatch): offer registration instead.
            ret = QMessageBox.question(self,
                                       'FreeROI',
                                       'Cannot load ' + file_path +
                                       ': due to mismatch data size.\nNeed registration?',
                                       QMessageBox.Cancel,
                                       QMessageBox.Yes)
            if ret == QMessageBox.Yes:
                register_volume_dialog = RegisterVolumeDialog(self.volume_model,
                                                              file_path)
                register_volume_dialog.exec_()

    def _add_surface_img(self, source, index=None, offset=None,
                         vmin=None, vmax=None, colormap='jet',
                         alpha=1.0, visible=True, islabel=False):
        """ Add surface image."""
        # If model is NULL, then re-initialize it.
        if not self.surface_model:
            self._surf_label_config_center = self._init_label_config_center()
            self._surf_label_config_center.size_edit.setRange(0, 10)
            self._surf_label_config_center.size_edit.setValue(1)
            self.surface_model = TreeModel([])
            self.surface_tree_view = SurfaceTreeView(self.surface_model,
                                                     self._surf_label_config_center)
            self.surface_tree_view_control = self.surface_tree_view.get_treeview()
            self._init_surf_roidialog(self.surface_model)
        if index is None:
            index = self.surface_tree_view_control.currentIndex()
        # Save previous opened directory (except `standard` directory)
        file_path = source
        if sys.platform == 'win32':
            temp_dir, basename = os.path.split(unicode(file_path, 'gb2312'))
            if not os.stat(temp_dir) == os.stat(os.path.join(self.label_path,
                                                             'standard')):
                self._temp_dir = temp_dir
        else:
            temp_dir, basename = os.path.split(file_path)
            if not os.path.samefile(temp_dir, os.path.join(self.label_path,
                                                           'standard')):
                self._temp_dir = temp_dir
        # The first file loaded must be a surface geometry (by extension);
        # overlays can only be added onto an existing surface.
        ends = basename.split('.')[-1]
        if len(self.surface_model.get_data()) == 0 and \
                ends not in ('pial', 'white', 'inflated', 'gii'):
            QMessageBox.warning(self,
                                'Warning',
                                'You must choose the brain surface file first!',
                                QMessageBox.Yes)
        elif self.surface_model.add_item(index, file_path, vmin=vmin, vmax=vmax,
                                         alpha=alpha, colormap=colormap,
                                         visible=visible, islabel=islabel):
            # Initial the tabwidget.
            if not self.tabWidget:
                self._init_tab_widget()
            # Ensure the Surface tab exists and is the active tab.
            if self.tabWidget.count() == 0:
                self.tabWidget.addTab(self.surface_tree_view, "Surface")
                self._init_surf_actions()
            elif self.tabWidget.count() == 1 and \
                    self.tabWidget.currentWidget() != self.surface_tree_view:
                self.tabWidget.addTab(self.surface_tree_view, "Surface")
                self.tabWidget.setCurrentIndex(1)
                self._init_surf_actions()
            elif self.tabWidget.count() == 2 and \
                    self.tabWidget.currentWidget() != self.surface_tree_view:
                # Flip to the other tab (index arithmetic toggles 0 <-> 1).
                self.tabWidget.setCurrentIndex(
                    self.tabWidget.count() - self.tabWidget.currentIndex() - 1)
            # Initial surface_view
            if not self.surface_view:
                self.surface_view = SurfaceView()
                self.surface_view.set_model(self.surface_model)
                self.surface_view.set_painter_status(self.painter_status)
            if self.centralWidget().layout().indexOf(self.surface_view) == -1:
                # Could not find the self.surface_view
                if self.centralWidget().layout().indexOf(self.volume_view) != -1:
                    self.centralWidget().layout().removeWidget(self.volume_view)
                    self.volume_view.setParent(None)
                self.centralWidget().layout().addWidget(self.surface_view)
            self._actions['remove_image'].setEnabled(True)
        else:
            QMessageBox.question(self,
                                 'FreeROI',
                                 'Cannot load ' + file_path + ' !',
                                 QMessageBox.Yes)

    def _save_actions_status(self, actions_status):
        """Snapshot the enabled state of the toolbar actions into the
        given dict (used when switching between volume/surface tabs)."""
        actions_status['grid_view'] = self._actions['grid_view'].isEnabled()
        actions_status['orth_view'] = self._actions['orth_view'].isEnabled()
        actions_status['hand'] = self._actions['hand'].isEnabled()
        actions_status['snapshot'] = self._actions['snapshot'].isEnabled()
        # NOTE(review): duplicate write of 'orth_view' (already stored
        # above) — harmless but redundant.
        actions_status['orth_view'] = self._actions['orth_view'].isEnabled()
        actions_status['cross_hover_view'] = self._actions['cross_hover_view'].isEnabled()
        actions_status['original_view'] = self._actions['original_view'].isEnabled()
        actions_status['remove_image'] = self._actions['remove_image'].isEnabled()
        actions_status['undo'] = self._actions['undo'].isEnabled()
        actions_status['redo'] = self._actions['redo'].isEnabled()
        #
        # 'binarization' stands in for the whole functional-module group.
        actions_status['functional_module_set_enabled'] = \
            self._actions['binarization'].isEnabled()
        actions_status['atlas'] = self._actions['atlas'].isEnabled()

    def _disable_vol_actions(self):
        # set enabled status volume-specific actions
        self._actions['grid_view'].setEnabled(False)
        self._actions['orth_view'].setEnabled(False)
        self._actions['hand'].setEnabled(False)
        self._actions['snapshot'].setEnabled(False)
        # NOTE(review): 'orth_view' is disabled twice — redundant.
        self._actions['orth_view'].setEnabled(False)
        self._actions['cross_hover_view'].setEnabled(False)
        self._actions['original_view'].setEnabled(False)
        self._actions['undo'].setEnabled(False)
        self._actions['redo'].setEnabled(False)
        self._vol_func_module_set_enabled(False)
        self._spinbox.setEnabled(False)

    def _disable_surf_actions(self):
        # Disable surface-specific actions for volume
        self._surf_func_module_set_enabled(False)

    def _restore_actions_status(self, actions_status):
        # Restore all toolbar controls
        if actions_status:
            self._actions['grid_view'].setEnabled(actions_status['grid_view'])
            self._actions['hand'].setEnabled(actions_status['hand'])
            self._actions['snapshot'].setEnabled(actions_status['snapshot'])
            self._actions['orth_view'].setEnabled(actions_status['orth_view'])
            self._actions['cross_hover_view'].setEnabled(actions_status['cross_hover_view'])
            self._actions['original_view'].setEnabled(actions_status['original_view'])
            self._actions['remove_image'].setEnabled(actions_status['remove_image'])
            self._actions['undo'].setEnabled(actions_status['undo'])
            self._actions['redo'].setEnabled(actions_status['redo'])
            # Identity of the dict tells us which tab's snapshot this is.
            if actions_status == self.volume_actions_status:
                self._vol_func_module_set_enabled(True)
                self._spinbox.setEnabled(True)
                if not self.volume_model.is_mni_space():
                    self._actions['atlas'].setEnabled(actions_status['atlas'])
            else:
                self._surf_func_module_set_enabled(True)

    def _tabwidget_index_changed(self):
        """Swap the central view and action states when the user switches
        between the Volume and Surface tabs."""
        if self.tabWidget.count() == 2:
            if self.tabWidget.currentWidget() == self.list_view:
                self.centralWidget().layout().removeWidget(self.surface_view)
                self.surface_view.setParent(None)
                self.centralWidget().layout().addWidget(self.volume_view)
                # Save surface state, then bring volume state back.
                self._save_actions_status(self.surface_actions_status)
                self._disable_surf_actions()
                self._restore_actions_status(self.volume_actions_status)
            else:
                self.centralWidget().layout().removeWidget(self.volume_view)
                self.volume_view.setParent(None)
                self.centralWidget().layout().addWidget(self.surface_view)
                # Save volume state, then bring surface state back.
                self._save_actions_status(self.volume_actions_status)
                self._disable_vol_actions()
                self._restore_actions_status(self.surface_actions_status)
            self._roidialog_disable()

    def _new_image(self):
        """Create new image."""
        if self.tabWidget.currentWidget() == self.list_view:
            self.new_volume_image()
        else:
            self.new_surface_image()

    def _update_remove_image(self):
        """Update the display after removing an image."""
        # The last remaining volume cannot be removed.
        if self.volume_model.rowCount() == 1:
            self._actions['remove_image'].setEnabled(False)
        else:
            self._actions['remove_image'].setEnabled(True)

    def new_volume_image(self, data=None, name=None, colormap=None):
        """Create a new volume for brain parcellation."""
        if colormap is None:
            colormap = self._vol_label_config_center.get_first_label_config()
        self.volume_model.new_image(data, name, None, colormap)
        self.list_view.setCurrentIndex(self.volume_model.index(0))
        # change button status
        self._actions['remove_image'].setEnabled(True)

    def new_surface_image(self):
        """Create a new (empty) surface overlay under the current item."""
        self.surface_model.add_item(self.surface_tree_view_control.currentIndex())

    def new_image_action(self):
        """Change the related status of other actions after creating an image."""
        self._actions['remove_image'].setEnabled(True)

    def _remove_image(self):
        """Remove current image."""
        if self.tabWidget.currentWidget() == self.list_view:
            self._remove_volume_image()
        else:
            self._remove_surface_image()

    def _remove_volume_image(self):
        """Delete the currently-selected volume from the model."""
        row = self.list_view.currentRow()
        self.volume_model.delItem(row)
        if self.volume_model.rowCount() == 1:
            self._actions['remove_image'].setEnabled(False)

    def _remove_surface_image(self):
        """Delete the currently-selected surface item from the model."""
        self.surface_model.del_item(self.surface_tree_view_control.currentIndex())
        if self.surface_model.rowCount(QModelIndex()) == 0:
            self._actions['remove_image'].setEnabled(False)

    def _save_image(self):
        """Save overlay as a file."""
        if self._save_dir is not None:
            temp_dir = self._save_dir
        else:
            temp_dir = str(QDir.currentPath()) if self._temp_dir is None \
                else self._temp_dir
        if self.tabWidget.currentWidget() == self.list_view:
            index = self.volume_model.currentIndex()
            file_types = "Compressed NIFTI file(*.nii.gz);;NIFTI file(*.nii)"
            file_path = os.path.join(temp_dir,
                                     str(self.volume_model.data(index, Qt.DisplayRole)))
            overlay = self.volume_model._data[index.row()]
        else:
            # Only depth-2 tree items (overlays) can be saved; the root or a
            # top-level surface node is rejected.
            index = self.surface_tree_view_control.currentIndex()
            if not index.isValid():
                QMessageBox.warning(self,
                                    'Error',
                                    'You have not specified a overlay!',
                                    QMessageBox.Yes)
                return
            else:
                parent = index.parent()
                if not parent.isValid():
                    QMessageBox.warning(self,
                                        'Error',
                                        'You have not specified a overlay!',
                                        QMessageBox.Yes)
                    return
            file_types = "Compressed NIFTI file(*.nii.gz);;NIFTI file(*.nii);;FS label(*.label)"
            file_path = os.path.join(temp_dir,
                                     str(self.surface_model.data(index, Qt.DisplayRole)))
            overlay = index.internalPointer()
        path, filter = QFileDialog.getSaveFileNameAndFilter(self,
                                                            'Save image as...',
                                                            file_path,
                                                            file_types)
        if str(path) != '':
            if sys.platform == 'win32':
                path = unicode(path).encode('gb2312')
                self._temp_dir = os.path.dirname(unicode(path, 'gb2312'))
            else:
                path = str(path)
                self._temp_dir = os.path.dirname(path)
            if filter == 'FS label(*.label)':
                index = self.surface_model.get_surface_index()
                # FIXME coordinates in freesurfer-style label file should come from '.white' file
                coords = self.surface_model.data(index, Qt.UserRole + 6).coords
                overlay.save2label(path, hemi_coords=coords)
            else:
                overlay.save2nifti(path)

    def _close_display(self):
        """Close current display."""
        old_index = self.tabWidget.currentIndex()
        if self.tabWidget.count() == 1:
            # Last tab: tear down the whole central widget and toolbar.
            self.setCentralWidget(QWidget())
            self.removeToolBar(self._toolbar)
            if self.tabWidget.currentWidget() == self.list_view:
                self._set_scale_factor(self.default_grid_scale_factor)
                self.volume_model = None
                self.volume_view = None
                self.volume_actions_status.clear()
            else:
                self.surface_model = None
                self.surface_view = None
                self.surface_actions_status.clear()
            self._actions['add_image'].setEnabled(False)
            self._actions['remove_image'].setEnabled(False)
            self._actions['new_image'].setEnabled(False)
            self._actions['save_image'].setEnabled(False)
            #self._actions['ld_glbl'].setEnabled(False)
            #self._actions['ld_lbl'].setEnabled(False)
            self._actions['close'].setEnabled(False)
            self._disable_vol_actions()
            self._disable_surf_actions()
        elif self.tabWidget.count() == 2 and \
                self.tabWidget.currentWidget() == self.list_view:
            # Two tabs open, closing the volume one: switch to the other
            # tab first, then drop the volume tab and its state.
            self.tabWidget.setCurrentIndex(self.tabWidget.count() - old_index - 1)
            self.tabWidget.removeTab(old_index)
            self._set_scale_factor(self.default_grid_scale_factor)
            self.volume_model = None
            self.volume_view = None
            self.volume_actions_status.clear()
        elif self.tabWidget.count() == 2 and \
                self.tabWidget.currentWidget() == self.surface_tree_view:
            # Two tabs open, closing the surface one.
            self.tabWidget.setCurrentIndex(self.tabWidget.count() - old_index - 1)
            self.tabWidget.removeTab(old_index)
            self.surface_model = None
            self.surface_view = None
            self.surface_actions_status.clear()

    def _about_freeroi(self):
        """ About software."""
        QMessageBox.about(self,
                          self.tr("About FreeROI"),
                          self.tr("<p><b>FreeROI</b> is a versatile image "
                                  "processing software developed for "
                                  "neuroimaging data.</p>"
                                  "<p>Its goal is to provide a user-friendly "
                                  "interface for neuroimaging researchers "
                                  "to visualize and analyze their data, "
                                  "especially in defining region of interest "
                                  "(ROI) for ROI analysis.</p>"
                                  "<p>Version: " + __version__ + "</p>"
                                  "<p>Written by: Lijie Huang, Zetian Yang, "
                                  "Guangfu Zhou, Zhaoguo Liu, Xiaobin Dang, "
                                  "Xiangzhen Kong, Xu Wang, and Zonglei Zhen."
                                  "</p>"
                                  "<p><b>FreeROI</b> is under Revised BSD "
                                  "License.</p>"
                                  "<p>Copyright(c) 2012-2015 "
                                  "Neuroinformatic Team in LiuLab "
                                  "from Beijing Normal University</p>"
                                  "<p></p>"
                                  "<p>Please join and report bugs to:</p>"
                                  "<p><b>nitk-user@googlegroups.com</b></p>"))

    def _create_menus(self):
        """Create menus."""
        self.file_menu = self.menuBar().addMenu(self.tr("File"))
        self.file_menu.addAction(self._actions['add_volume_image'])
        self.file_menu.addAction(self._actions['add_template'])
        self.file_menu.addSeparator()
        self.file_menu.addAction(self._actions['add_surface_image'])
        self.file_menu.addSeparator()
        self.file_menu.addAction(self._actions['new_image'])
        self.file_menu.addAction(self._actions['remove_image'])
        self.file_menu.addAction(self._actions['duplicate_image'])
        self.file_menu.addAction(self._actions['save_image'])
        #self.file_menu.addAction(self._actions['ld_lbl'])
        #self.file_menu.addAction(self._actions['ld_glbl'])
        self.file_menu.addSeparator()
        self.file_menu.addAction(self._actions['close'])
        self.file_menu.addAction(self._actions['quit'])
        #self.volume_menu = self.menuBar().addMenu(self.tr("Volume"))
        #self.volume_menu.addAction(self._actions['new_image'])
        #self.volume_menu.addAction(self._actions['remove_image'])
        self.view_menu = self.menuBar().addMenu(self.tr("View"))
        self.view_menu.addAction(self._actions['grid_view'])
        self.view_menu.addAction(self._actions['orth_view'])
        self.view_menu.addAction(self._actions['original_view'])
        self.view_menu.addAction(self._actions['cross_hover_view'])
        self.tool_menu = self.menuBar().addMenu(self.tr("Tools"))
        # Basic tools
        basic_tools = self.tool_menu.addMenu(self.tr("Basic Tools"))
        basic_tools.addAction(self._actions['binarization'])
        basic_tools.addAction(self._actions['intersect'])
        basic_tools.addAction(self._actions['localmax'])
        basic_tools.addAction(self._actions['inverse'])
        basic_tools.addAction(self._actions['smoothing'])
        basic_tools.addAction(self._actions['concatenate'])
        basic_tools.addAction(self._actions['probability_map'])
        basic_tools.addAction(self._actions['meants'])
        basic_tools.addAction(self._actions['voxelstats'])
        # Segment tools
        segment_tools = self.tool_menu.addMenu(self.tr("Segmentation"))
        segment_tools.addAction(self._actions['region_grow'])
        segment_tools.addAction(self._actions['watershed'])
        segment_tools.addAction(self._actions['slic'])
        segment_tools.addAction(self._actions['cluster'])
        segment_tools.addAction(self._actions['surf_region_grow'])
        # ROI tools
        roi_tools = self.tool_menu.addMenu(self.tr("ROI Tools"))
        roi_tools.addAction(self._actions['edge_dete'])
        roi_tools.addAction(self._actions['roi_merge'])
        roi_tools.addAction(self._actions['regular_roi'])
        roi_tools.addAction(self._actions['regular_roi_from_csv'])
        roi_tools.addAction(self._actions['r2i'])
        roi_tools.addAction(self._actions['scribing'])
        # Morphological tools
        morphological_tools = self.tool_menu.addMenu(
            self.tr("Morphological Processing"))
        morphological_tools.addAction(self._actions['opening'])
        morphological_tools.addAction(self._actions['binarydilation'])
        morphological_tools.addAction(self._actions['binaryerosion'])
        morphological_tools.addAction(self._actions['greydilation'])
        morphological_tools.addAction(self._actions['greyerosion'])
        # label management
        self.tool_menu.addAction(self._actions['atlas'])
        self.tool_menu.addAction(self._actions['label_management'])
        self.tool_menu.addAction(self._actions['snapshot'])
        self.help_menu = self.menuBar().addMenu(self.tr("Help"))
        self.help_menu.addAction(self._actions['about_freeroi'])
        self.help_menu.addAction(self._actions['about_qt'])

    def _cursor_enable(self):
        """Cursor enabled."""
        if self._actions['cursor'].isChecked():
            self._actions['cursor'].setChecked(True)
            if self.tabWidget.currentWidget() is self.list_view:
                # Hand only makes sense in orth view; uncheck it there.
                if isinstance(self.volume_view, OrthView):
                    self._actions['hand'].setChecked(False)
                self.volume_view.set_cursor(Qt.ArrowCursor)
                self.volume_view.set_label_mouse_tracking(True)
            self._roidialog_disable()
            self.painter_status.set_draw_settings(ViewSettings())
        else:
            # Re-check: the cursor action behaves like a radio button and
            # cannot be toggled off directly.
            self._actions['cursor'].setChecked(True)

    def _voxel_edit_enable(self):
        """Voxel brush enabled."""
        self._vol_label_config_center.set_is_roi_edit(False)
        self.painter_status.set_draw_settings(self._vol_label_config_center)
        self.volume_view.set_cursor(Qt.CrossCursor)
        self.volume_view.set_label_mouse_tracking(False)

    def _vertex_edit_enable(self):
        """Vertex brush enabled."""
        self._surf_label_config_center.set_is_roi_edit(False)
        self.painter_status.set_draw_settings(self._surf_label_config_center)

    def _vol_roi_edit_enable(self):
        """Volume ROI brush enabled."""
        self._vol_label_config_center.set_is_roi_edit(True)
        self.painter_status.set_draw_settings(self._vol_label_config_center)
        self.volume_view.set_cursor(Qt.CrossCursor)
        self.volume_view.set_label_mouse_tracking(False)

    def _surf_roi_edit_enable(self):
        """Surface ROI brush enabled."""
        self._surf_label_config_center.set_is_roi_edit(True)
        self.painter_status.set_draw_settings(self._surf_label_config_center)

    def _vol_roi_batch_enable(self):
        """Volume ROI batch enabled."""
        self.volume_view.set_label_mouse_tracking(False)
        self._vol_label_config_center.set_is_roi_edit(False)
        self.painter_status.set_draw_settings(self.vol_roidialog)

    def _surf_roi_batch_enable(self):
        """Surface ROI batch enabled."""
        self._surf_label_config_center.set_is_roi_edit(False)
        self.painter_status.set_draw_settings(self.surf_roidialog)

    def _roidialog_enable(self):
        """ROI dialog enabled."""
        if self._actions['edit'].isChecked():
            self._actions['cursor'].setChecked(False)
            self._actions['edit'].setChecked(True)
            if self.tabWidget.currentWidget() is self.list_view:
                if isinstance(self.volume_view, OrthView):
                    self._actions['hand'].setChecked(False)
                self.vol_roidialog.show_dialog()
            elif self.tabWidget.currentWidget() is self.surface_tree_view:
                self.surf_roidialog.show_dialog()
        else:
            # Radio-like behavior: cannot toggle 'edit' off directly.
            self._actions['edit'].setChecked(True)

    def _roidialog_disable(self):
        """Disable the roi dialog."""
        # hasattr guards: the dialogs are created lazily in
        # _init_vol_roidialog / _init_surf_roidialog.
        if hasattr(self, "vol_roidialog"):
            if self.vol_roidialog.isVisible():
                self.vol_roidialog.hide_dialog()
        if hasattr(self, "surf_roidialog"):
            if self.surf_roidialog.isVisible():
                self.surf_roidialog.hide_dialog()
        self._actions['edit'].setChecked(False)

    def _atlas_dialog(self):
        """Atlas information dialog."""
        # Reuse a single dialog instance once created.
        if 'atlasdialog' in self.__dict__:
            self.atlasdialog.show()
        else:
            self.atlasdialog = AtlasDialog(self.volume_model, self)
            self.atlasdialog.show()

    def _hand_enable(self):
        """Hand enabled."""
        if self._actions['hand'].isChecked():
            self._actions['cursor'].setChecked(False)
            self._actions['hand'].setChecked(True)
            self._roidialog_disable()
            self.painter_status.set_draw_settings(MoveSettings())
            self.volume_view.set_cursor(Qt.OpenHandCursor)
            self.volume_view.set_label_mouse_tracking(True)
        else:
            # Radio-like behavior: cannot toggle 'hand' off directly.
            self._actions['hand'].setChecked(True)

    def _switch_cursor_status(self):
        """Change the cursor status."""
        self._actions['cursor'].setChecked(True)
        self._cursor_enable()

    def _update_undo(self):
        """Update the undo status."""
        if self.volume_model.current_undo_available():
            self._actions['undo'].setEnabled(True)
        else:
            self._actions['undo'].setEnabled(False)

    def _update_redo(self):
        """Update the redo status."""
        if self.volume_model.current_redo_available():
            self._actions['redo'].setEnabled(True)
        else:
            self._actions['redo'].setEnabled(False)

    def _init_vol_roidialog(self, model):
        """Initialize volume ROI Dialog."""
        self.vol_roidialog = VolROIDialog(model, self._vol_label_config_center, self)
        self.vol_roidialog.vx_edit_enabled.connect(self._voxel_edit_enable)
        self.vol_roidialog.roi_edit_enabled.connect(self._vol_roi_edit_enable)
        self.vol_roidialog.roi_batch_enabled.connect(self._vol_roi_batch_enable)

    def _init_surf_roidialog(self, model):
        """Initialize Surface ROI Dialog."""
        self.surf_roidialog = SurfROIDialog(model, self._surf_label_config_center, self)
        self.surf_roidialog.vx_edit_enabled.connect(self._vertex_edit_enable)
        self.surf_roidialog.roi_edit_enabled.connect(self._surf_roi_edit_enable)
        self.surf_roidialog.roi_batch_enabled.connect(self._surf_roi_batch_enable)

    def _init_label_config_center(self):
        """Initialize LabelConfigCenter."""
        # Discover all label-config files under the config directory.
        lbl_path = os.path.join(self.label_config_dir,
                                '*.' + self.label_config_suffix)
        label_configs = glob.glob(lbl_path)
        self.label_configs = map(LabelConfig, label_configs)
        self._list_view_model = QStandardItemModel()
        # _list_view_model.appendRow(QStandardItem("None"))
        for x in self.label_configs:
            self._list_view_model.appendRow(QStandardItem(x.get_name()))
        # Build one item model per config: each row is "<index> <label>"
        # with a color icon generated from the label's color.
        self._label_models = []
        for item in self.label_configs:
            model = QStandardItemModel()
            indexs = sorted(item.get_index_list())
            for index in indexs:
                text_index_icon_item = QStandardItem(
                    gen_label_color(item.get_label_color(item.get_index_label(index))),
                    str(index) + ' ' + item.get_index_label(index))
                model.appendRow(text_index_icon_item)
            self._label_models.append(model)
        return LabelConfigCenter(self.label_configs,
                                 self._list_view_model,
                                 self._label_models)

    def _get_label_config(self, file_path):
        """Get label config file."""
        # Get label config file
        # NOTE(review): `dir` and `file` shadow builtins, and `dir` is
        # never used after being assigned.
        dir = os.path.dirname(file_path)
        file = os.path.basename(file_path)
        split_list = file.split('.')
        nii_index = split_list.index('nii')
        file = ''.join(split_list[:nii_index])
        # NOTE(review): os.path.join(file, 'lbl') produces "<stem>/lbl"
        # (a path), not "<stem>.lbl" — this looks like a bug; the isfile
        # check below will then almost always fail. Verify intent before
        # changing.
        config_file = os.path.join(file, 'lbl')
        if os.path.isfile(config_file):
            label_config = LabelConfig(config_file, False)
        else:
            label_config = self.label_config
        return label_config

    def _undo(self):
        """The undo action."""
        self.volume_model.undo_current_image()

    def _redo(self):
        """The redo action."""
        self.volume_model.redo_current_image()

    def _regular_roi(self):
        """Generate regular(cube, sphere, etc.) roi dialog."""
        regular_roi_dialog = RegularROIDialog(self.volume_model)
        regular_roi_dialog.exec_()

    def _regular_roi_from_csv_file(self):
        """Generate regular(cube, sphere, etc.) roi from csv file."""
        regular_roi_from_csv_file = RegularROIFromCSVFileDialog(self.volume_model)
        regular_roi_from_csv_file.exec_()

    def _label_edge_detection(self):
        """edge detection for labels"""
        if self.tabWidget.currentWidget() is self.list_view:
            # get information from the model
            index = self.volume_model.currentIndex()
            data = self.volume_model.data(index, Qt.UserRole + 6)
            name = self.volume_model.data(index, Qt.DisplayRole)
            new_name = "edge_" + name
            # detect edges
            new_data = vol_label_edge_detection(data)
            # save result as a new overlay
            self.volume_model.addItem(new_data,
                                      None,
                                      new_name,
                                      self.volume_model.data(index, Qt.UserRole + 11),
                                      None, None, 255, 'green')
        elif self.tabWidget.currentWidget() is self.surface_tree_view:
            # get information from the model
            index = self.surface_model.current_index()
            depth = self.surface_model.index_depth(index)
            if depth != 2:
                QMessageBox.warning(self,
                                    'Warning!',
                                    'Get overlay failed!\nYou may have not selected any overlay!',
                                    QMessageBox.Yes)
                return
            if not self.surface_model.data(index, Qt.UserRole + 7):
                QMessageBox.warning(self,
                                    'Warning!',
                                    "Current overlay isn't for ROIs.\nThis tool should be used for ROIs",
                                    QMessageBox.Yes)
                return
            data = self.surface_model.data(index, Qt.UserRole + 10)
            name = self.surface_model.data(index, Qt.DisplayRole)
            new_name = "edge_" + name
            # detect the edges
            new_data = surf_label_edge_detection(
                data,
                self.surface_model.data(index.parent(), Qt.UserRole + 6).faces)
            # save result as a new overlay
            self.surface_model.add_item(index,
                                        source=new_data.astype(int),
                                        colormap=self.surface_model.data(index, Qt.UserRole + 3),
                                        islabel=True,
                                        name=new_name)
        else:
            return

    def _roi_merge(self):
        """ROI merge dialog."""
        new_dialog = ROIMergeDialog(self.volume_model)
        new_dialog.exec_()

    def _r2i(self):
        """ROI to gwmi dialog."""
        new_dialog = Roi2gwmiDialog(self.volume_model)
        new_dialog.exec_()

    def _opening(self):
        """Opening Dialog which using the opening algorithm to process the image."""
        new_dialog = \
OpenDialog(self.volume_model) new_dialog.exec_() def _voxelstats(self): """Voxel statistical analysis dialog.""" new_dialog = VoxelStatsDialog(self.volume_model, self) new_dialog.show() def _label_manage(self): """Label management dialog.""" self.label_manage_dialog = LabelManageDialog(self.label_configs, self._list_view_model, self._label_models, self.label_config_dir, self.label_config_suffix, self) self.label_manage_dialog.exec_() def _ld_lbl(self): """Local label config file.""" file_name = QFileDialog.getOpenFileName(self, 'Load Label File', QDir.currentPath(), "Label files (*.lbl)") if file_name: label_config = LabelConfig(str(file_name), False) self.volume_model.set_cur_label(label_config) def _ld_glbl(self): """Local global label config file.""" file_name = QFileDialog.getOpenFileName(self, 'Load Label File', QDir.currentPath(), "Label files (*.lbl)") if file_name: label_config = LabelConfig(str(file_name), True) self.volume_model.set_global_label(label_config) def _grid_view(self): """Grid view option.""" self._actions['grid_view'].setEnabled(False) self._actions['orth_view'].setEnabled(True) self._actions['hand'].setEnabled(False) self._actions['snapshot'].setEnabled(False) self._actions['cursor'].trigger() self.centralWidget().layout().removeWidget(self.volume_view) self.volume_view.set_display_type('grid') self.volume_model.scale_changed.disconnect() self.volume_model.repaint_slices.disconnect() self.volume_model.cross_pos_changed.disconnect(self.volume_view.update_cross_pos) self.volume_view.deleteLater() self._spinbox.setValue(100 * self.volume_model.get_scale_factor('grid')) self.volume_view = GridView(self.volume_model, self.painter_status, self._gridview_vertical_scrollbar_position) self.centralWidget().layout().addWidget(self.volume_view) def _orth_view(self): """Orth view option.""" self._actions['orth_view'].setEnabled(False) self._actions['grid_view'].setEnabled(True) self._actions['snapshot'].setEnabled(True) 
self._actions['hand'].setEnabled(True) self._actions['cursor'].trigger() self._gridview_vertical_scrollbar_position = \ self.volume_view.get_vertical_srollbar_position() self.centralWidget().layout().removeWidget(self.volume_view) self.volume_view.set_display_type('orth') self.volume_model.scale_changed.disconnect() self.volume_model.repaint_slices.disconnect() self.volume_model.cross_pos_changed.disconnect(self.volume_view.update_cross_pos) self.volume_view.deleteLater() self._spinbox.setValue(100 * self.volume_model.get_scale_factor('orth')) self.volume_view = OrthView(self.volume_model, self.painter_status) self.centralWidget().layout().addWidget(self.volume_view) def _display_cross_hover(self): """Display the cross hover on the image.""" if self.volume_model._display_cross: self.volume_model.set_cross_status(False) self._actions['cross_hover_view'].setText('Enable cross hover') self._actions['cross_hover_view'].setIcon(QIcon(os.path.join(self._icon_dir,'cross_hover_disable.png'))) else: self.volume_model.set_cross_status(True) self._actions['cross_hover_view'].setText('Disable cross hover') self._actions['cross_hover_view'].setIcon(QIcon(os.path.join(self._icon_dir,'cross_hover_enable.png'))) def _reset_view(self): """Reset view parameters.""" if self.volume_view.display_type() == 'orth': if not self.volume_model.get_scale_factor('orth') == \ self.default_orth_scale_factor: self._spinbox.setValue(100 * self.default_orth_scale_factor) self.volume_view.reset_view() elif self.volume_view.display_type() == 'grid': if not self.volume_model.get_scale_factor('grid') == \ self.default_grid_scale_factor: self._spinbox.setValue(100 * self.default_grid_scale_factor) def _binarization(self): """Image binarization dialog.""" if self.tabWidget.currentWidget() is self.list_view: binarization_dialog = VolBinarizationDialog(self.volume_model) elif self.tabWidget.currentWidget() is self.surface_tree_view: binarization_dialog = SurfBinarizationDialog(self.surface_model) else: 
return binarization_dialog.exec_() def _binaryerosion(self): """Image binary erosion dialog.""" if self.tabWidget.currentWidget() is self.list_view: binaryerosion_dialog = VolBinErosionDialog(self.volume_model) elif self.tabWidget.currentWidget() is self.surface_tree_view: binaryerosion_dialog = SurfBinErosionDialog(self.surface_model) else: return binaryerosion_dialog.exec_() def _binarydilation(self): """Image binarydilation dialog.""" if self.tabWidget.currentWidget() is self.list_view: binarydilation_dialog = VolBinDilationDialog(self.volume_model) elif self.tabWidget.currentWidget() is self.surface_tree_view: binarydilation_dialog = SurfBinDilationDialog(self.surface_model) else: return binarydilation_dialog.exec_() def _greyerosion(self): """Image greyerosion dialog.""" greyerosiondialog = GreyerosionDialog(self.volume_model) greyerosiondialog.exec_() def _greydilation(self): """Image greydilation dialog.""" greydilation_dialog = GreydilationDialog(self.volume_model) greydilation_dialog.exec_() def _intersect(self): """Image intersect dialog.""" if self.tabWidget.currentWidget() is self.list_view: intersect_dialog = VolIntersectDialog(self.volume_model) elif self.tabWidget.currentWidget() is self.surface_tree_view: intersect_dialog = SurfIntersectDialog(self.surface_model) else: return intersect_dialog.exec_() def _meants(self): """Image meants dialog.""" new_dialog = MeanTSDialog(self.volume_model) new_dialog.exec_() def _local_max(self): """Compute image local max value dialog.""" new_dialog = LocalMaxDialog(self.volume_model, self) new_dialog.exec_() def _inverse(self): """Inverse the given image.""" if self.tabWidget.currentWidget() is self.list_view: index = self.volume_model.currentIndex() data = self.volume_model.data(index, Qt.UserRole + 6) name = self.volume_model.data(index, Qt.DisplayRole) # inverse process new_data = inverse_transformation(data) new_name = 'inv_' + name # save result as a new image self.volume_model.addItem(new_data, None, 
new_name, self.volume_model.data(index, Qt.UserRole + 11)) elif self.tabWidget.currentWidget() is self.surface_tree_view: index = self.surface_model.current_index() depth = self.surface_model.index_depth(index) if depth != 2: QMessageBox.warning(self, 'Warning!', 'Get overlay failed!\nYou may have not selected any overlay!', QMessageBox.Yes) return data = self.surface_model.data(index, Qt.UserRole + 10) name = self.surface_model.data(index, Qt.DisplayRole) new_data = inverse_transformation(data) new_name = "inv_" + name # save result as a new overlay self.surface_model.add_item(index, source=new_data, name=new_name) else: return def _smooth(self): """Image smooth dialog.""" new_dialog = SmoothingDialog(self.volume_model) new_dialog.exec_() def _prob_map(self): """Calculate probability map""" dialog = SurfProbMapDialog(self.surface_model) dialog.exec_() def _concatenate(self): dialog = SurfConcatenateDialog(self.surface_model) dialog.exec_() def _region_grow(self): """Image region grow dialog.""" # new_dialog = GrowDialog(self.volume_model, self) new_dialog = VolumeRGDialog(self.volume_model) new_dialog.exec_() def _watershed(self): """Image watershed dialog.""" new_dialog = WatershedDialog(self.volume_model, self) new_dialog.exec_() def _slic(self): """Image supervoxel segmentation dialog.""" new_dialog = SLICDialog(self.volume_model, self) new_dialog.exec_() def _cluster(self): """Image cluster dialog.""" if self.tabWidget.currentWidget() is self.list_view: cluster_dialog = VolClusterDialog(self.volume_model) elif self.tabWidget.currentWidget() is self.surface_tree_view: cluster_dialog = SurfClusterDialog(self.surface_model) else: return cluster_dialog.exec_() def _vol_func_module_set_enabled(self, status): """ set enabled status for actions of volume functional module. 
""" self._actions['meants'].setEnabled(status) self._actions['voxelstats'].setEnabled(status) self._actions['localmax'].setEnabled(status) self._actions['smoothing'].setEnabled(status) self._actions['atlas'].setEnabled(status) self._actions['region_grow'].setEnabled(status) self._actions['watershed'].setEnabled(status) self._actions['slic'].setEnabled(status) self._actions['opening'].setEnabled(status) self._actions['greydilation'].setEnabled(status) self._actions['greyerosion'].setEnabled(status) self._actions['regular_roi'].setEnabled(status) self._actions['regular_roi_from_csv'].setEnabled(status) self._actions['r2i'].setEnabled(status) self._actions['roi_merge'].setEnabled(status) def _surf_func_module_set_enabled(self, status): """ set enabled status for actions of surface functional module. """ self._actions['scribing'].setEnabled(status) self._actions['surf_region_grow'].setEnabled(status) self._actions['concatenate'].setEnabled(status) self._actions['probability_map'].setEnabled(status) def _snapshot(self): """Capture images from OrthView.""" self.volume_view.save_image() def set_save_dir(self, path): self._save_dir = path
81,886
22,842
import yaml import json from pprint import pprint as pp my_list = [] while True: x = raw_input("Enter num or text to add to list or press enter to finish: ") if not x: break my_list.append(x) my_list.append({}) while True: y = raw_input("Enter the name of the key in the list or press enter to finish: ") z = raw_input("Enter the value of the key or press enter to finish: ") if not y: break if not z: break my_list[-1][y] = z print "----------------------------------" print "Importing this list to YAML format" print "----------------------------------" with open ("computer_details.yml", "w") as f: f.write(yaml.dump(my_list, default_flow_style=False)) print yaml.dump(my_list, default_flow_style=False) print "----------------------------------" print "Importing this list to JSON format" print "----------------------------------" with open("computer_details.json", "w") as f: json.dump(my_list, f) pp(my_list)
975
310
import colander
from sqlalchemy import Column, ForeignKey
from sqlalchemy.types import DateTime, Integer, Unicode, UnicodeText

from naki.model.meta import Base


class DigitalItem(Base):
    """ORM model for one digital item (table ``tDigitalItem``)."""

    __tablename__ = "tDigitalItem"

    # Primary key; the 'colanderalchemy' info dicts drive colander schema
    # generation ('missing' sets the schema default when the field is absent).
    id_item = Column('sID_Item', Unicode(64), primary_key = True, info={'colanderalchemy': {'missing': None}})
    # MIME type of the stored item.
    mime = Column('sMime', Unicode(64))
    # Creation timestamp.
    created = Column('dCreated', DateTime)
    description = Column('sDescription', UnicodeText, info={'colanderalchemy': {'missing': ''}})
    # Author identifier (column is named sAuthor in the table).
    id_user = Column('sAuthor', Unicode(64))
    rights = Column('sRights', Integer, info={'colanderalchemy': {'missing': 0}})

    def __init__(self, id_item, mime, created, description, id_user, rights):
        """Populate every column explicitly."""
        self.id_item = id_item
        self.mime = mime
        self.created = created
        self.description = description
        self.id_user = id_user
        self.rights = rights

    def get_dict(self):
        """Return a plain-dict representation; ``created`` is stringified."""
        return ({
            'id_item': self.id_item,
            'mime': self.mime,
            'created': str(self.created),
            'description': self.description,
            'id_user': self.id_user,
            'rights': self.rights,
        })

    def set_from_dict(self, d):
        """Update mutable fields from dict *d*.

        ``id_item`` and ``created`` are deliberately left untouched (see the
        commented-out assignments): key and creation time are immutable.
        """
        #self.id_item = d['id_item']
        self.mime = d['mime']
        #self.created = d['created']
        self.description = d['description']
        self.id_user = d['id_user']
        self.rights = d['rights']
1,475
458
#!/usr/bin/python -u # -*- coding: latin-1 -*- # # Labeled dice and Building block problems in Z3 # # * Labeled dice # # From Jim Orlin 'Colored letters, labeled dice: a logic puzzle' # http://jimorlin.wordpress.com/2009/02/17/colored-letters-labeled-dice-a-logic-puzzle/ # ''' # My daughter Jenn bough a puzzle book, and showed me a cute puzzle. There # are 13 words as follows: BUOY, CAVE, CELT, FLUB, FORK, HEMP, JUDY, # JUNK, LIMN, QUIP, SWAG, VISA, WISH. # # There are 24 different letters that appear in the 13 words. The question # is: can one assign the 24 letters to 4 different cubes so that the # four letters of each word appears on different cubes. (There is one # letter from each word on each cube.) It might be fun for you to try # it. I'll give a small hint at the end of this post. The puzzle was # created by Humphrey Dudley. # ''' # # Also, see Jim Orlin's followup 'Update on Logic Puzzle': # http://jimorlin.wordpress.com/2009/02/21/update-on-logic-puzzle/ # # # * Building Blocks puzzle (Dell Logic Puzzles) in MiniZinc. # # From http://brownbuffalo.sourceforge.net/BuildingBlocksClues.html # """ # Each of four alphabet blocks has a single letter of the alphabet on each # of its six sides. In all, the four blocks contain every letter but # Q and Z. By arranging the blocks in various ways, you can spell all of # the words listed below. Can you figure out how the letters are arranged # on the four blocks? 
# #  BAKE ONYX ECHO OVAL
# #  GIRD SMUG JUMP TORN
# #  LUCK VINY LUSH WRAP
# """
#
# This Z3 model was written by Hakan Kjellerstrand (hakank@gmail.com)
# See also my Z3 page: http://hakank.org/z3/
#
from __future__ import print_function
from z3_utils_hakank import *


def labeled_dice():
    """Solve the 'labeled dice' puzzle: assign 24 letters to 4 dice so the
    four letters of each of 13 words land on four different dice."""
    print("Labeled dice\n")
    n = 4   # number of dice
    m = 24  # number of distinct letters (X and Z unused)
    A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, Y = (
        list(range(m)))
    letters = [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T,
               U, V, W, Y]
    letters_s = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L",
                 "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "Y"]
    num_words = 13
    words = [
        [B,U,O,Y], [C,A,V,E], [C,E,L,T], [F,L,U,B], [F,O,R,K],
        [H,E,M,P], [J,U,D,Y], [J,U,N,K], [L,I,M,N], [Q,U,I,P],
        [S,W,A,G], [V,I,S,A], [W,I,S,H]
    ]
    solve_it(n, m, letters, letters_s, num_words, words)


def building_blocks():
    """Solve the Dell 'Building Blocks' puzzle (24 letters, no Q or Z)."""
    print("Building blocks\n")
    n = 4
    m = 24
    A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, R, S, T, U, V, W, X, Y = (
        list(range(m)))
    letters = [A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, R, S, T, U,
               V, W, X, Y]
    # BUG FIX: the original letters_s wrongly contained "Q" (25 names for
    # m == 24 indices), shifting every printed label from "R" onward.
    letters_s = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L",
                 "M", "N", "O", "P", "R", "S", "T", "U", "V", "W", "X", "Y"]
    num_words = 12
    words = [
        [B,A,K,E], [O,N,Y,X], [E,C,H,O], [O,V,A,L], [G,I,R,D], [S,M,U,G],
        [J,U,M,P], [T,O,R,N], [L,U,C,K], [V,I,N,Y], [L,U,S,H], [W,R,A,P]
    ]
    solve_it(n, m, letters, letters_s, num_words, words)


def solve_it(n, m, letters, letters_s, num_words, words):
    """Enumerate all assignments of m letters to n dice such that the letters
    of every word fall on distinct dice and each die carries exactly 6 letters.

    :param n: number of dice
    :param m: number of letters
    :param letters: letter indices (letters[i] == i)
    :param letters_s: printable letter names, parallel to `letters`
    :param num_words: number of words
    :param words: each word as a list of n letter indices
    """
    sol = Solver()

    # dice[i] = which die (0..n-1) letter i is placed on.
    dice = [makeIntVar(sol, "dice[%i]" % i, 0, n - 1) for i in range(m)]

    # The letters of a word must be on different dice.
    for i in range(num_words):
        sol.add(Distinct([dice[words[i][j]] for j in range(n)]))

    # There must be exactly 6 letters on each die.
    for i in range(n):
        sol.add(Sum([If(dice[j] == i, 1, 0) for j in range(m)]) == 6)

    num_solutions = 0
    while sol.check() == sat:
        num_solutions += 1
        mod = sol.model()
        for d in range(n):
            print("die %i:" % d, end=' ')
            for i in range(m):
                if mod.eval(dice[i]) == d:
                    # BUG FIX: the original printed letters[i], an int index;
                    # print the letter name instead.
                    print(letters_s[i], end=' ')
            print()
        print("The words with the cube label:")
        for i in range(num_words):
            for j in range(n):
                print("%s (%i)" % (letters_s[words[i][j]],
                                   mod.eval(dice[words[i][j]]).as_long()),
                      end=' ')
            print()
        # Block this model so the next check() finds a different solution.
        sol.add(Or([dice[i] != mod.eval(dice[i]) for i in range(m)]))
        print()

    print()
    print("num_solutions:", num_solutions)


if __name__ == "__main__":
    labeled_dice()
    print("\n\n\n")
    building_blocks()
4,389
1,968
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 Ben Lindsay <benjlindsay@gmail.com>

from job_tree import job_tree

# Tier 1: three VAR_1 values (1, 2, and 3) read from a CSV file.
tier_1_csv_file = 'tier_1.csv'


def tier_2_func(df, param_dict):
    """Derive two VAR_2 values from the current VAR_1: (VAR_1, VAR_1 + 1)."""
    base = int(param_dict['VAR_1'])
    return {'VAR_2': [base, base + 1]}


# Tier 3: two fixed VAR_3 values (100 and 200) given directly as a dict.
tier_3_dict = {'VAR_3': [100, 200]}

tier_list = [tier_1_csv_file, tier_2_func, tier_3_dict]
job_file_list = ['params.input', 'sub.sh']

# Build the flat job tree and submit the jobs (add submit=False to prevent
# submission). One directory is generated per parameter combination —
# 1-1-100, 1-1-200, ..., 3-4-200 — each receiving copies of params.input and
# sub.sh with {VAR_1}/{VAR_2}/{VAR_3} substituted, plus a files.log and the
# scheduler output files. {JOB_NAME} is replaced by the hyphen-separated
# string naming the directory the job runs in.
job_tree(job_file_list, tier_list)
2,741
1,489
import pickle
import random
from argparse import ArgumentParser

# Requires NLTK to be installed:
#   python3 -m pip install nltk
#   python3 -c 'import nltk;nltk.download("punkt")'
# May be slow at first start due to NLTK preparing its dependencies
from nltk.tokenize.treebank import TreebankWordDetokenizer
from nltk.lm import MLE

detokenize = TreebankWordDetokenizer().detokenize


def generate_sentence(model: MLE, length: int, seed=None):
    """Generate up to *length* tokens from *model* and detokenize them.

    BUG FIX: the original signature was ``seed=random.randint(0, 1e10)``,
    which (a) evaluated the default once at import time, so every call
    reused the same seed, and (b) passed a float upper bound, which
    ``random.randint`` rejects on modern Python (3.12+). A fresh integer
    seed is now drawn per call when none is supplied.
    """
    if seed is None:
        seed = random.randint(0, 10 ** 10)
    content = []
    for token in model.generate(length, random_seed=seed):
        if token == '<s>':
            continue  # skip sentence-start markers
        if token == '</s>':
            break     # stop at the sentence-end marker
        content.append(token)
    return detokenize(content)


def main() -> None:
    """Main entrypoint: score a word in context with a pickled model."""
    # Create an argument parser for parsing CLI arguments
    parser = ArgumentParser(description="A tool to train an AI to predict the probability of a word in a sentence")
    parser.add_argument("-i", "--input", required=True, type=str,
                        help="The serialized model previously trained")
    parser.add_argument("-w", "--word", required=True, type=str,
                        help="The word to check the probability for")
    parser.add_argument("-c", "--context", required=True, type=str,
                        help="The context / sentence for the word")
    options = parser.parse_args()

    # SECURITY NOTE: pickle.load executes arbitrary code — only load models
    # from trusted sources.
    with open(options.input, "rb") as file:
        model = pickle.load(file)

    print(model.logscore(options.word, options.context.split()))
    print(generate_sentence(model, 10))


if __name__ == '__main__':
    main()
1,602
484
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: ps-license@tuebingen.mpg.de

import torch
from torch import nn
import trimesh
import math
from typing import NewType

from pytorch3d.structures import Meshes
from pytorch3d.renderer.mesh import rasterize_meshes

Tensor = NewType('Tensor', torch.Tensor)


def solid_angles(points: Tensor, triangles: Tensor,
                 thresh: float = 1e-8) -> Tensor:
    ''' Compute solid angle between the input points and triangles

        Follows the method described in:
        The Solid Angle of a Plane Triangle
        A. VAN OOSTEROM AND J. STRACKEE
        IEEE TRANSACTIONS ON BIOMEDICAL ENGINEERING,
        VOL. BME-30, NO. 2, FEBRUARY 1983

        Parameters
        -----------
            points: BxQx3
                Tensor of input query points
            triangles: BxFx3x3
                Target triangles
            thresh: float
                float threshold
                NOTE(review): currently unused in the body — confirm whether
                a numerical clamp on the denominator was intended.
        Returns
        -------
            solid_angles: BxQxF
                A tensor containing the solid angle between all query points
                and input triangles
    '''
    # Center the triangles on the query points. Size should be BxQxFx3x3
    centered_tris = triangles[:, None] - points[:, :, None, None]
    # BxQxFx3: per-vertex distances from the query point
    norms = torch.norm(centered_tris, dim=-1)

    # Should be BxQxF
    cross_prod = torch.cross(centered_tris[:, :, :, 1],
                             centered_tris[:, :, :, 2], dim=-1)
    # Triple product (numerator of van Oosterom–Strackee's atan2 formula).
    numerator = (centered_tris[:, :, :, 0] * cross_prod).sum(dim=-1)
    del cross_prod

    # Pairwise dot products between the three centered vertices.
    dot01 = (centered_tris[:, :, :, 0] * centered_tris[:, :, :, 1]).sum(dim=-1)
    dot12 = (centered_tris[:, :, :, 1] * centered_tris[:, :, :, 2]).sum(dim=-1)
    dot02 = (centered_tris[:, :, :, 0] * centered_tris[:, :, :, 2]).sum(dim=-1)
    del centered_tris

    denominator = (norms.prod(dim=-1) +
                   dot01 * norms[:, :, :, 2] +
                   dot02 * norms[:, :, :, 1] +
                   dot12 * norms[:, :, :, 0])
    del dot01, dot12, dot02, norms

    # Should be BxQ; atan2 gives the half solid angle, hence the factor 2.
    solid_angle = torch.atan2(numerator, denominator)
    del numerator, denominator

    # Intermediates are deleted above to keep peak GPU memory low; this frees
    # the cached blocks as well (no-op on CPU).
    torch.cuda.empty_cache()

    return 2 * solid_angle


def winding_numbers(points: Tensor, triangles: Tensor,
                    thresh: float = 1e-8) -> Tensor:
    ''' Uses winding_numbers to compute inside/outside

        Robust inside-outside segmentation using generalized winding numbers
        Alec Jacobson, Ladislav Kavan, Olga Sorkine-Hornung
        Fast Winding Numbers for Soups and Clouds SIGGRAPH 2018
        Gavin Barill NEIL G. Dickson Ryan Schmidt David I.W. Levin
        and Alec Jacobson

        Parameters
        -----------
            points: BxQx3
                Tensor of input query points
            triangles: BxFx3x3
                Target triangles
            thresh: float
                float threshold (forwarded to solid_angles)
        Returns
        -------
            winding_numbers: BxQ
                A tensor containing the Generalized winding numbers
    '''
    # The generalized winding number is the sum of solid angles of the point
    # with respect to all triangles, normalized by the full sphere (4*pi).
    return 1 / (4 * math.pi) * solid_angles(points, triangles,
                                            thresh=thresh).sum(dim=-1)


def batch_contains(verts, faces, points):
    """Per-batch point containment via trimesh (CPU).

    Returns a BxN tensor of +1.0 for points inside the mesh, -1.0 outside
    (the 2*(contains-0.5) mapping below).
    """
    B = verts.shape[0]
    N = points.shape[1]

    # trimesh works on numpy/CPU data only.
    verts = verts.detach().cpu()
    faces = faces.detach().cpu()
    points = points.detach().cpu()

    contains = torch.zeros(B, N)

    for i in range(B):
        contains[i] = torch.as_tensor(
            trimesh.Trimesh(verts[i], faces[i]).contains(points[i]))

    # Map {0, 1} -> {-1, +1}.
    return 2.0 * (contains - 0.5)


def dict2obj(d):
    """Recursively convert a dict into an attribute-access object."""
    # if isinstance(d, list):
    #     d = [dict2obj(x) for x in d]
    if not isinstance(d, dict):
        return d

    class C(object):
        pass

    o = C()
    for k in d:
        o.__dict__[k] = dict2obj(d[k])
    return o


def face_vertices(vertices, faces):
    """
    :param vertices: [batch size, number of vertices, 3]
    :param faces: [batch size, number of faces, 3]
    :return: [batch size, number of faces, 3, 3]
    """
    bs, nv = vertices.shape[:2]
    bs, nf = faces.shape[:2]
    device = vertices.device
    # Offset face indices per batch so a single flat gather works below.
    faces = faces + (torch.arange(bs, dtype=torch.int32).to(device) * nv)[:, None, None]
    vertices = vertices.reshape((bs * nv, vertices.shape[-1]))
    return vertices[faces.long()]


class Pytorch3dRasterizer(nn.Module):
    """  Borrowed from https://github.com/facebookresearch/pytorch3d
    Notice:
        x,y,z are in image space, normalized
        can only render squared image now
    """

    def __init__(self, image_size=224):
        """
        use fixed raster_settings for rendering faces
        """
        super().__init__()
        raster_settings = {
            'image_size': image_size,
            'blur_radius': 0.0,
            'faces_per_pixel': 1,
            'bin_size': None,
            'max_faces_per_bin': None,
            'perspective_correct': True,
            'cull_backfaces': True,
        }
        # Stored as an attribute-access object so the fields can be read with
        # dot syntax in forward().
        raster_settings = dict2obj(raster_settings)
        self.raster_settings = raster_settings

    def forward(self, vertices, faces, attributes=None):
        """Rasterize per-face vertex attributes into an image.

        Returns a (B, D+1, H, W) tensor: interpolated attributes plus a
        visibility-mask channel appended last.
        """
        fixed_vertices = vertices.clone()
        # pytorch3d's NDC x/y axes are flipped relative to this codebase.
        fixed_vertices[..., :2] = -fixed_vertices[..., :2]
        meshes_screen = Meshes(verts=fixed_vertices.float(), faces=faces.long())
        raster_settings = self.raster_settings
        pix_to_face, zbuf, bary_coords, dists = rasterize_meshes(
            meshes_screen,
            image_size=raster_settings.image_size,
            blur_radius=raster_settings.blur_radius,
            faces_per_pixel=raster_settings.faces_per_pixel,
            bin_size=raster_settings.bin_size,
            max_faces_per_bin=raster_settings.max_faces_per_bin,
            perspective_correct=raster_settings.perspective_correct,
        )
        # Pixels with no face hit have pix_to_face == -1.
        vismask = (pix_to_face > -1).float()
        D = attributes.shape[-1]
        attributes = attributes.clone()
        attributes = attributes.view(attributes.shape[0] * attributes.shape[1],
                                     3, attributes.shape[-1])
        N, H, W, K, _ = bary_coords.shape
        mask = pix_to_face == -1
        pix_to_face = pix_to_face.clone()
        # Replace -1 with a valid index so gather() below is safe; masked
        # pixels are zeroed afterwards.
        pix_to_face[mask] = 0
        idx = pix_to_face.view(N * H * W * K, 1, 1).expand(N * H * W * K, 3, D)
        pixel_face_vals = attributes.gather(0, idx).view(N, H, W, K, 3, D)
        # Barycentric interpolation of the three vertex attribute vectors.
        pixel_vals = (bary_coords[..., None] * pixel_face_vals).sum(dim=-2)
        pixel_vals[mask] = 0  # Replace masked values in output.
        pixel_vals = pixel_vals[:, :, :, 0].permute(0, 3, 1, 2)
        pixel_vals = torch.cat(
            [pixel_vals, vismask[:, :, :, 0][:, None, :, :]], dim=1)
        return pixel_vals
7,529
2,518
######################### AABCAAADA
from collections import OrderedDict


def remove_duplicates(block):
    """Return *block* with duplicate characters removed, keeping first-seen order.

    BUG FIX: the original doctest wrote the expected value as a second
    ``>>>`` prompt line, so it never actually checked anything.

    >>> remove_duplicates('AAB')
    'AB'
    """
    # fromkeys dedupes in one pass while preserving insertion order; the
    # per-character counts the original accumulated were never used.
    return ''.join(OrderedDict.fromkeys(block))


def solve(text, block_size):
    """Split *text* into chunks of *block_size* and dedupe each, one per line."""
    chunks = (text[i:i + block_size] for i in range(0, len(text), block_size))
    return '\n'.join(remove_duplicates(chunk) for chunk in chunks)


print(solve('AABCAAADA', 3))
456
177
__all__ = 'TMP_NAME_FACTORY', 'tmp_file_name', 'str_uuid'

import os
from tempfile import NamedTemporaryFile
from uuid import uuid4


def tmp_file_name():
    """Return the basename of a freshly created (and auto-deleted) temp file."""
    with NamedTemporaryFile() as handle:
        full_path = handle.name
    return os.path.basename(full_path)


def str_uuid():
    """Return a random version-4 UUID as its canonical string form."""
    return '{}'.format(uuid4())


# Default factory used by callers that want an opaque unique name.
TMP_NAME_FACTORY = tmp_file_name
335
130
# Put the AppDaemon apps directory on the import path so modules under
# apps/controllerx can be imported directly (presumably run from the
# repository root — TODO confirm the working-directory assumption).
import sys
sys.path.append("apps/controllerx")
48
17
from django_unicorn.components import UnicornView
from django_unicorn.views.utils import _is_component_field_model_or_unicorn_field

from example.coffee.models import Flavor


class TypeHintView(UnicornView):
    """Component whose ``model`` field is declared only via a type hint."""

    model: Flavor = None


class ModelInstanceView(UnicornView):
    """Component whose ``model`` field is assigned a concrete Flavor instance."""

    model = Flavor()


def test_type_hint():
    """A type-hinted model field is detected and ends up as a Flavor instance."""
    component = TypeHintView(component_name="asdf", component_id="hjkl")
    name = "model"

    actual = _is_component_field_model_or_unicorn_field(component, name)

    assert actual
    assert component.model is not None
    # Exact-type check on purpose: a subclass would indicate wrong resolution.
    assert type(component.model) == Flavor


def test_model_instance():
    """An instance-assigned model field is likewise detected as a model field."""
    component = ModelInstanceView(component_name="asdf", component_id="hjkl")
    name = "model"

    actual = _is_component_field_model_or_unicorn_field(component, name)

    assert actual
    assert component.model is not None
    assert type(component.model) == Flavor
884
279
import pytest

# Table of (sample, expected parity) pairs driving the parametrized test.
testdata = [
    (2, True),
    (3, False),
    (4, True),
    (5, True)  # We expect this test to fail
]


def check_if_even(a):
    """ Return True when 'a' is an even number. """
    return not a % 2


@pytest.mark.parametrize('sample, expected_output', testdata)
def test_check_if_even(sample, expected_output):
    """ Verify check_if_even against each row of the table above. """
    assert check_if_even(sample) == expected_output
431
155
import numpy as np
from scipy.optimize import minimize

from intvalpy.MyClass import Interval
from intvalpy.intoper import zeros


def Uni(A, b, x=None, maxQ=False, x0=None, tol=1e-12, maxiter=1e3):
    """Compute the recognizing functional Uni; maximize it when maxQ=True.

    Parameters:
        A: Interval
            Interval matrix of the interval linear system.
        b: Interval
            Interval right-hand-side vector.
        x: float, array_like, optional
            Point at which the functional is evaluated (defaults to zeros).
        maxQ: bool
            If True, maximize the functional instead of evaluating it.
        x0: float, array_like, optional
            Initial guess for the optimizer.
        tol: float
            Tolerance for terminating the optimization.
        maxiter: int
            Maximum number of iterations.

    Returns:
        float or tuple: the functional's value at x; when maxQ=True, a tuple
        (optimizer success flag, argmax point, maximal value).
    """
    # Uni(x) = min_i ( rad(b_i) - mig(mid(b_i) - (A x)_i) ); negated so the
    # scipy minimizer can be used for maximization.
    __uni = lambda x: min(b.rad - (b.mid - A @ x).mig)
    __minus_uni = lambda x: -__uni(x)

    if maxQ==False:
        if x is None:
            x = np.zeros(A.shape[1])
        return __uni(x)
    else:
        # NOTE(review): redundant — minimize is already imported at module level.
        from scipy.optimize import minimize
        if x0 is None:
            x0 = np.zeros(A.shape[1])+1
        maximize = minimize(__minus_uni, x0, method='Nelder-Mead', tol=tol,
                            options={'maxiter': maxiter})
        return maximize.success, maximize.x, -maximize.fun


def Tol(A, b, x=None, maxQ=False, x0=None, tol=1e-12, maxiter=1e3):
    """Compute the recognizing functional Tol; maximize it when maxQ=True.

    Parameters:
        A: Interval
            Interval matrix of the interval linear system.
        b: Interval
            Interval right-hand-side vector.
        x: float, array_like, optional
            Point at which the functional is evaluated (defaults to zeros).
        maxQ: bool
            If True, maximize the functional instead of evaluating it.
        x0: float, array_like, optional
            Initial guess for the optimizer.
        tol: float
            Tolerance for terminating the optimization.
        maxiter: int
            Maximum number of iterations.

    Returns:
        float or tuple: the functional's value at x; when maxQ=True, a tuple
        (optimizer success flag, argmax point, maximal value).
    """
    # Tol(x) = min_i ( rad(b_i) - |mid(b_i) - (A x)_i| ).
    __tol = lambda x: min(b.rad - abs(b.mid - A @ x))
    __minus_tol = lambda x: -__tol(x)

    if maxQ==False:
        if x is None:
            x = np.zeros(A.shape[1])
        return __tol(x)
    else:
        # NOTE(review): redundant — minimize is already imported at module level.
        from scipy.optimize import minimize
        if x0 is None:
            x0 = np.zeros(A.shape[1])+1
        maximize = minimize(__minus_tol, x0, method='Nelder-Mead', tol=tol,
                            options={'maxiter': maxiter})
        return maximize.success, maximize.x, -maximize.fun


def ive(A, b, N=40):
    """Compute the IVE measure of variability of the parameter estimate.

    Parameters:
        A: Interval
            Interval matrix of the interval linear system.
        b: Interval
            Interval right-hand-side vector.
        N: int
            Number of random corner matrices sampled for the condition number.

    Returns:
        float: the IVE variability measure.
    """
    success, _arg_max, _max = Tol(A, b, maxQ=True)
    if not success:
        # "Optimization of the Tol functional finished incorrectly!"
        print('Оптимизация функционала Tol завершена некорректно!')

    _inf = A.a
    _sup = A.b

    cond = float('inf')
    angle_A = np.zeros(A.shape, dtype='float64')
    for _ in range(N):
        # Sample a random "corner" matrix: each entry at its lower or upper bound.
        for k in range(A.shape[0]):
            for l in range(A.shape[1]):
                angle_A[k, l] = np.random.choice([_inf[k,l], _sup[k,l]])
        tmp = np.linalg.cond(angle_A)
        # NOTE(review): starting from +inf and keeping `tmp if tmp<cond` takes
        # the MINIMUM condition number over sampled corners — confirm against
        # the IVE definition, which may call for the maximum instead.
        cond = tmp if tmp<cond else cond

    return np.sqrt(A.shape[1]) * _max * cond * \
           (np.linalg.norm(_arg_max, ord=2)/np.sqrt(sum(abs(b)**2)))
4,970
1,613
import math

R_EARTH = 6371000  # mean Earth radius in meters


def angle2radian(angle):
    """
    convert from an angle in degrees to a radian
    :param angle: (float) degrees
    :return: radian (float)
    """
    return math.radians(angle)


def radian2angle(radian):
    """
    convert from a radian to an angle in degrees
    :param radian: (float)
    :return: angle in degrees (float)
    """
    return math.degrees(radian)


def spherical_law_of_cosines(phi1, lambda1, phi2, lambda2):
    """
    calculate angular great circle distance with spherical law of cosines
    phi/lambda for latitude/longitude in radians
    :param phi1: point one's latitude in radians
    :param lambda1: point one's longitude in radians
    :param phi2: point two's latitude in radians
    :param lambda2: point two's longitude in radians
    :return: central angle between the two points (radians)
    """
    d_lambda = lambda2 - lambda1
    return math.acos(math.sin(phi1) * math.sin(phi2) +
                     math.cos(phi1) * math.cos(phi2) * math.cos(d_lambda))


def haversine(phi1, lambda1, phi2, lambda2):
    """
    calculate angular great circle distance with haversine formula
    (numerically stable for small distances)
    see parameters in spherical_law_of_cosines
    :return: central angle between the two points (radians)
    """
    d_phi = phi2 - phi1
    d_lambda = lambda2 - lambda1
    a = math.pow(math.sin(d_phi / 2), 2) + \
        math.cos(phi1) * math.cos(phi2) * math.pow(math.sin(d_lambda / 2), 2)
    c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    return c


def equirectangular_approximation(phi1, lambda1, phi2, lambda2):
    """
    calculate angular great circle distance with Pythagoras' theorem
    performed on an equirectangular projection (fast, approximate)
    see parameters in spherical_law_of_cosines
    :return: approximate central angle between the two points (radians)
    """
    x = (lambda2 - lambda1) * math.cos((phi1 + phi2) / 2)
    y = phi2 - phi1
    return math.sqrt(math.pow(x, 2) + math.pow(y, 2))


def dist(phi1, lambda1, phi2, lambda2, r=R_EARTH, method='hav'):
    """
    calculate great circle distance with given latitude and longitude.

    NOTE: all coordinates are expected in RADIANS (they are passed
    unchanged to the trigonometric helpers); use angle2radian() to
    convert degrees first.

    :param phi1: point one's latitude in radians
    :param lambda1: point one's longitude in radians
    :param phi2: point two's latitude in radians
    :param lambda2: point two's longitude in radians
    :param r: sphere radius (m), defaults to Earth's mean radius
    :param method: 'hav' means haversine,
        'LoC' means Spherical Law of Cosines,
        'approx' means Pythagoras' theorem performed on an
        equirectangular projection
    :return: distance (m)
    :raises ValueError: if method is not one of the supported names
    """
    return angular_dist(phi1, lambda1, phi2, lambda2, method) * r


def angular_dist(phi1, lambda1, phi2, lambda2, method='hav'):
    """
    calculate angular great circle distance with given latitude and
    longitude (radians)
    :param method: 'hav', 'LoC' or 'approx' (case-insensitive)
    :return: central angle (radians)
    :raises ValueError: if method is not one of the supported names
    """
    key = method.lower()
    if key == 'hav':
        return haversine(phi1, lambda1, phi2, lambda2)
    elif key == 'loc':
        return spherical_law_of_cosines(phi1, lambda1, phi2, lambda2)
    elif key == 'approx':
        return equirectangular_approximation(phi1, lambda1, phi2, lambda2)
    # raise instead of `assert False`: asserts are stripped under -O and
    # AssertionError tells the caller nothing about the bad argument
    raise ValueError(
        "unknown method %r; expected 'hav', 'LoC' or 'approx'" % method)


def destination(phi1, lambda1, brng, distance, r=R_EARTH):
    """
    destination point given start point, initial bearing and distance
    :param phi1: start latitude in radians
    :param lambda1: start longitude in radians
    :param brng: initial bearing in radians (clockwise from north)
    :param distance: distance travelled (m)
    :param r: sphere radius (m)
    :return: (phi2, lambda2) destination latitude/longitude in radians
    """
    delta = distance / r  # angular distance travelled
    phi2 = math.asin(math.sin(phi1) * math.cos(delta) +
                     math.cos(phi1) * math.sin(delta) * math.cos(brng))
    lambda2 = lambda1 + math.atan2(
        math.sin(brng) * math.sin(delta) * math.cos(phi1),
        math.cos(delta) - math.sin(phi1) * math.sin(phi2)
    )
    return phi2, lambda2


def init_bearing(phi1, lambda1, phi2, lambda2):
    """
    initial bearing of a great circle route (forward azimuth)
    :param phi1: start latitude in radians
    :param lambda1: start longitude in radians
    :param phi2: end latitude in radians
    :param lambda2: end longitude in radians
    :return: bearing in degrees, normalized to 0~360
    """
    y = math.sin(lambda2 - lambda1) * math.cos(phi2)
    x = math.cos(phi1) * math.sin(phi2) - \
        math.sin(phi1) * math.cos(phi2) * math.cos(lambda2 - lambda1)
    theta = math.atan2(y, x)
    brng = (theta * 180 / math.pi + 360) % 360
    return brng
3,648
1,300
""" Python Template for docstring ----------------------------- python.docstring is a package cotaining TEMPLATE for docstring in python. usage: >>> from python import docstring >>> help(docstring) >>> print(docstring.module_.__doc__) >>> print(docstring.class_.__doc__) >>> print(docstring.method_.__doc__) """ import importlib import os spec = importlib.util.spec_from_file_location( '.'.join([__name__, 'module']), os.path.join(os.path.dirname(__file__), 'module.py')) module_ = importlib.util.module_from_spec(spec) spec.loader.exec_module(module_) spec = importlib.util.spec_from_file_location( 'method', os.path.join(os.path.dirname(__file__), 'method.py')) method_ = importlib.util.module_from_spec(spec) spec.loader.exec_module(method_) method_ = method_.template_method method_.__module__ = __name__ spec = importlib.util.spec_from_file_location( 'class', os.path.join(os.path.dirname(__file__), 'class.py')) class_ = importlib.util.module_from_spec(spec) spec.loader.exec_module(class_) class_ = class_.TemplateClass class_.__module__ = __name__ del spec, os, importlib
1,154
398
import logging
import sys
import socket

from cliff.app import App
from cliff.command import Command
from cliff.commandmanager import CommandManager


class RegisteredCommand(Command):
    # Base command that can register itself with a cliff CommandManager
    # under the class attribute `_command_name` defined by subclasses.

    def __init__(self, app, app_args):
        super(RegisteredCommand, self).__init__(app, app_args)

    @classmethod
    def register_to(klass, command_manager):
        # Register this command class under its command name.
        command_manager.add_command(klass._command_name, klass)


class SingleCommand(RegisteredCommand):
    # Command that sends "<command name> <object>" via the app's
    # publish socket (see AgentApp.send).

    def take_action(self, parsed_args):
        self.app.send(self._command_name + ' ' + parsed_args.object[0])


class List(SingleCommand):
    "List something."

    log = logging.getLogger(__name__)
    # reply port; set (as a string) by AgentApp.initialize_app
    port = None
    # local address the server should reply to
    host = socket.gethostbyname(socket.getfqdn())

    def take_action(self, parsed_args):
        # send "<command> <host> <port>" then block for the reply on the
        # app's pull socket
        self.app.send(
            ' '.join((
                self._command_name,
                List.host,
                List.port)))
        message = self.app.receive()
        self.log.info(message)


class ListPlayer(List):
    "List all players."

    log = logging.getLogger(__name__)
    _command_name = 'list player'


class ListRobot(List):
    "List all robots."

    log = logging.getLogger(__name__)
    _command_name = 'list robot'


class Add(SingleCommand):
    "Add something."

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        # one positional argument: the name of the object to add
        parser = super(Add, self).get_parser(prog_name)
        parser.add_argument(
            'object',
            nargs=1)
        return parser


class AddPlayer(Add):
    "Add a player."

    log = logging.getLogger(__name__)
    _command_name = 'add player'


class AddRobot(Add):
    "Add a robot."

    log = logging.getLogger(__name__)
    _command_name = 'add robot'


class Remove(SingleCommand):
    "Remove something."

    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        # one positional argument: the name of the object to remove
        parser = super(Remove, self).get_parser(prog_name)
        parser.add_argument(
            'object',
            nargs=1)
        return parser


class RemovePlayer(Remove):
    "Remove a player."

    log = logging.getLogger(__name__)
    _command_name = 'remove player'


class RemoveRobot(Remove):
    "Remove a robot."

    log = logging.getLogger(__name__)
    _command_name = 'remove robot'


class Start(SingleCommand):
    "Start something."

    log = logging.getLogger(__name__)
    _command_name = 'start'

    def get_parser(self, prog_name):
        # only 'game' may be started
        parser = super(Start, self).get_parser(prog_name)
        parser.add_argument(
            'object',
            nargs=1,
            choices=('game',))
        return parser


class Stop(SingleCommand):
    "Stop something."

    log = logging.getLogger(__name__)
    _command_name = 'stop'

    def get_parser(self, prog_name):
        # either the whole application or just the game may be stopped
        parser = super(Stop, self).get_parser(prog_name)
        parser.add_argument(
            'object',
            nargs=1,
            choices=('application', 'game'))
        return parser


class AgentApp(App):
    # cliff application that talks to an Orwell server over ZeroMQ:
    # commands go out on a PUB socket, replies come back on a PULL socket.

    log = logging.getLogger(__name__)

    def __init__(self):
        command = CommandManager('orwell.agent')
        super(AgentApp, self).__init__(
            description='Orwell agent.',
            version='0.0.1',
            command_manager=command,
        )
        # register every concrete command with the manager
        Start.register_to(command)
        Stop.register_to(command)
        ListPlayer.register_to(command)
        ListRobot.register_to(command)
        AddPlayer.register_to(command)
        AddRobot.register_to(command)
        RemovePlayer.register_to(command)
        RemoveRobot.register_to(command)
        # ZeroMQ objects are created lazily in initialize_app
        self._zmq_context = None
        self._zmq_publish_socket = None
        self._zmq_pull_socket = None

    def build_option_parser(
            self,
            description,
            version,
            argparse_kwargs=None):
        # extend cliff's base option parser with the connection options
        parser = super(AgentApp, self).build_option_parser(
            description,
            version,
            argparse_kwargs)
        parser.add_argument(
            '-p', '--port',
            type=int,
            default=9003,
            help='The port to send commands to.')
        parser.add_argument(
            '-a', '--address',
            type=str,
            default='127.0.0.1',
            help='The address to send commands to.')
        parser.add_argument(
            '-l', '--listen',
            type=int,
            default=9004,
            help='The port to listen to for replies.')
        return parser

    def initialize_app(self, argv):
        # create the ZeroMQ context and both sockets once the command-line
        # options have been parsed
        self.log.debug('initialize_app')
        import zmq
        self._zmq_context = zmq.Context()
        self.log.debug('created context = %s' % self._zmq_context)
        self._zmq_publish_socket = self._zmq_context.socket(zmq.PUB)
        self.log.debug(
            'created publish socket = %s' % self._zmq_publish_socket)
        self._zmq_publish_socket.setsockopt(zmq.LINGER, 1)
        self._zmq_publish_socket.connect("tcp://%s:%i" % (
            self.options.address, self.options.port))
        self._zmq_pull_socket = self._zmq_context.socket(zmq.PULL)
        self.log.debug('created pull socket = %s' % self._zmq_pull_socket)
        self._zmq_pull_socket.setsockopt(zmq.LINGER, 1)
        self._zmq_pull_socket.bind("tcp://0.0.0.0:%i" % self.options.listen)
        # expose the reply port to List commands (sent as part of the
        # command text, hence the str conversion)
        List.port = str(self.options.listen)
        import time
        # NOTE(review): presumably gives the PUB connection a moment to
        # establish before the first send -- confirm; PUB drops messages
        # sent before subscribers connect
        time.sleep(0.001)

    def send(self, command):
        # publish a command string to the server
        self.log.debug('send command "%s"' % command)
        self.log.debug('call socket.send("%s")' % command)
        self._zmq_publish_socket.send(command)

    def receive(self):
        # blocking receive of one reply from the pull socket
        self.log.debug('try to receive a message')
        message = self._zmq_pull_socket.recv()
        self.log.debug('received: %s', message)
        return message

    def prepare_to_run_command(self, cmd):
        self.log.debug('prepare_to_run_command %s', cmd.__class__.__name__)

    def clean_up(self, cmd, result, err):
        self.log.debug('clean_up %s', cmd.__class__.__name__)
        if err:
            self.log.debug('got an error: %s', err)


def main(argv=sys.argv[1:]):
    # build and run the cliff application with the given arguments
    myapp = AgentApp()
    return myapp.run(argv)


if ("__main__" == __name__):
    sys.exit(main(sys.argv[1:]))  # pragma: no coverage
6,234
1,943
from format2 import Format2 from format40 import Format40 from format80 import Format80
88
26
# NOTE(review): this is a Python 2 script (uses `print` statements below).
import sys
import os
import gzip

import argparse
import numpy as np

import pysam

import util
import snptable

import tables

MAX_SEQS_DEFAULT = 64
MAX_SNPS_DEFAULT = 6


class DataFiles(object):
    """Object to hold names and filehandles for all input / output
    datafiles"""

    def __init__(self, bam_filename, is_sorted, output_dir=None,
                 snp_dir=None):
        # prefix for output files
        self.prefix = None

        # name of input BAM filename
        self.bam_filename = bam_filename

        # name of sorted input bam_filename
        # (new file is created if input file is not
        # already sorted)
        self.bam_sort_filename = None

        # pysam file handle for input BAM
        self.input_bam = None

        # name of output keep and to.remap BAM files
        self.maternal_filename = None
        self.paternal_filename = None
        self.hom_filename = None

        # pysam file handles for output BAM filenames
        self.maternal_bam = None
        self.paternal_bam = None
        self.hom_bam = None

        # name of directory to read SNPs from
        self.snp_dir = snp_dir

        # separate input directory and bam filename
        tokens = self.bam_filename.split("/")
        bam_dir = "/".join(tokens[:-1])
        filename = tokens[-1]

        if output_dir is None:
            # if no output dir specified, use same directory as input
            # bam file
            output_dir = bam_dir
        else:
            if output_dir.endswith("/"):
                # strip trailing '/' from output dir name
                output_dir = output_dir[:-1]

        # output prefix is the input filename without its last extension
        name_split = filename.split(".")
        if len(name_split) > 1:
            self.prefix = output_dir + "/" + ".".join(name_split[:-1])
        else:
            self.prefix = output_dir + "/" + name_split[0]

        # TODO: could allow names of output files to be specified
        # on command line rather than appending name to prefix
        sys.stderr.write("prefix: %s\n" % self.prefix)

        if not is_sorted:
            # coordinate-sort the input BAM first; output name is fixed
            # by util.sort_bam's convention
            util.sort_bam(self.bam_filename, self.prefix)
            self.bam_sort_filename = self.prefix + ".sort.bam"
        else:
            self.bam_sort_filename = self.bam_filename

        self.maternal_filename = self.prefix + ".maternal.bam"
        self.paternal_filename = self.prefix + ".paternal.bam"
        self.hom_filename = self.prefix + ".hom.bam"

        sys.stderr.write("reading reads from:\n %s\n" %
                         self.bam_sort_filename)

        sys.stderr.write("writing output files to:\n")

        # open input for reading and the three outputs with the same header
        self.input_bam = pysam.Samfile(self.bam_sort_filename, "rb")

        self.hom_bam = pysam.Samfile(self.hom_filename, "wb",
                                     template=self.input_bam)

        self.maternal_bam = pysam.Samfile(self.maternal_filename, "wb",
                                          template=self.input_bam)

        self.paternal_bam = pysam.Samfile(self.paternal_filename, "wb",
                                          template=self.input_bam)

        sys.stderr.write(" %s\n %s\n %s\n" % (self.hom_filename,
                                              self.maternal_filename,
                                              self.paternal_filename))

    def close(self):
        """close open filehandles"""
        filehandles = [self.maternal_bam,
                       self.paternal_bam,
                       self.hom_bam]

        for fh in filehandles:
            if fh:
                fh.close()


class ReadStats(object):
    """Track information about reads and SNPs that they overlap"""

    def __init__(self):
        # number of read matches to reference allele
        self.mat_count = 0

        # number of read matches to alternative allele
        self.pat_count = 0

        self.hom_count = 0

        # number of reads that overlap SNP but match neither allele
        self.other_count = 0

        # number of reads discarded becaused not mapped
        self.discard_unmapped = 0

        # number of reads discarded because overlap an indel
        self.discard_indel = 0

        # number of reads discarded because secondary match
        self.discard_secondary = 0

        # number of reads discarded because of too many overlapping SNPs
        self.discard_excess_snps = 0

        # number of reads discarded because too many allelic combinations
        self.discard_excess_reads = 0

        # number of maternal reads
        self.maternal_single = 0

        # number of paternal reads
        self.paternal_single = 0

        # number of homozygous reads
        self.hom_single = 0

    def write(self, file_handle):
        # summary counts go to stderr; per-category SNP-match totals go
        # to the provided file handle
        sys.stderr.write("DISCARD reads:\n"
                         " unmapped: %d\n"
                         " indel: %d\n"
                         " secondary alignment: %d\n"
                         " excess overlapping snps: %d\n"
                         " excess allelic combinations: %d\n"
                         "PO reads:\n"
                         " maternal: %d\n"
                         " paternal: %d\n"
                         "hom reads:\n"
                         " hom_single: %d\n" %
                         (self.discard_unmapped,
                          self.discard_indel,
                          self.discard_secondary,
                          self.discard_excess_snps,
                          self.discard_excess_reads,
                          self.maternal_single,
                          self.paternal_single,
                          self.hom_single))

        file_handle.write("read SNP mat matches: %d\n" % self.mat_count)
        file_handle.write("read SNP pat matches: %d\n" % self.pat_count)
        file_handle.write("read SNP mismatches: %d\n" % self.other_count)

        total = self.mat_count + self.pat_count + self.other_count
        if total > 0:
            mismatch_pct = 100.0 * float(self.other_count) / total
            if mismatch_pct > 10.0:
                sys.stderr.write("WARNING: many read SNP overlaps do not match "
                                 "either allele (%.1f%%). SNP coordinates "
                                 "in input file may be incorrect.\n" %
                                 mismatch_pct)


def parse_options():
    """Parse command-line options; enforces that either --snp_dir or all
    of (--snp_tab, --snp_index, --haplotype) are provided, but not both."""
    parser = argparse.ArgumentParser(description="Looks for SNPs and indels "
                                     "overlapping reads. If a read overlaps "
                                     "SNPs, alternative versions of the read "
                                     "containing different alleles are created "
                                     "and written to files for remapping. "
                                     "Reads that do not overlap SNPs or indels "
                                     "are written to a 'keep' BAM file."
                                     "Reads that overlap indels are presently "
                                     "discarded.")

    parser.add_argument("--is_paired_end", "-p", action='store_true',
                        dest='is_paired_end',
                        default=False,
                        help=("Indicates that reads are paired-end "
                              "(default is single)."))

    parser.add_argument("--is_sorted", "-s", action='store_true',
                        dest='is_sorted',
                        default=False,
                        help=('Indicates that the input BAM file'
                              ' is coordinate-sorted (default '
                              'is False).'))

    parser.add_argument("--max_seqs", type=int, default=MAX_SEQS_DEFAULT,
                        help="The maximum number of sequences with different "
                        "allelic combinations to consider remapping "
                        "(default=%d). Read pairs with more allelic "
                        "combinations than MAX_SEQs are discarded" %
                        MAX_SEQS_DEFAULT)

    parser.add_argument("--max_snps", type=int, default=MAX_SNPS_DEFAULT,
                        help="The maximum number of SNPs allowed to overlap "
                        "a read before discarding the read. Allowing higher "
                        "numbers will decrease speed and increase memory "
                        "usage (default=%d)." % MAX_SNPS_DEFAULT)

    parser.add_argument("--output_dir", default=None,
                        help="Directory to write output files to. If not "
                        "specified, output files are written to the "
                        "same directory as the input BAM file.")

    parser.add_argument("--snp_dir", action='store',
                        help="Directory containing SNP text files "
                        "This directory should contain one file per "
                        "chromosome named like chr<#>.snps.txt.gz. "
                        "Each file should contain 3 columns: position "
                        "RefAllele AltAllele. This option should "
                        "only be used if --snp_tab, --snp_index, "
                        "and --haplotype arguments are not used."
                        " If this argument is provided, all possible "
                        "allelic combinations are used (rather "
                        "than set of observed haplotypes).",
                        default=None)

    parser.add_argument("--snp_tab",
                        help="Path to HDF5 file to read SNP information "
                        "from. Each row of SNP table contains SNP name "
                        "(rs_id), position, allele1, allele2.",
                        metavar="SNP_TABLE_H5_FILE",
                        default=None)

    parser.add_argument("--snp_index",
                        help="Path to HDF5 file containing SNP index. The "
                        "SNP index is used to convert the genomic position "
                        "of a SNP to its corresponding row in the haplotype "
                        "and snp_tab HDF5 files.",
                        metavar="SNP_INDEX_H5_FILE",
                        default=None)

    parser.add_argument("--haplotype",
                        help="Path to HDF5 file to read phased haplotypes "
                        "from. When generating alternative reads "
                        "use known haplotypes from this file rather "
                        "than all possible allelic combinations.",
                        metavar="HAPLOTYPE_H5_FILE",
                        default=None)

    parser.add_argument("--samples",
                        help="Use only haplotypes and SNPs that are "
                        "polymorphic in these samples. "
                        "SAMPLES can either be a comma-delimited string "
                        "of sample names or a path to a file with one sample "
                        "name per line (file is assumed to be whitespace-"
                        "delimited and first column is assumed to be sample "
                        "name). Sample names should match those present in the "
                        "--haplotype file. Samples are ignored if no haplotype "
                        "file is provided.",
                        metavar="SAMPLES")

    parser.add_argument("bam_filename", action='store',
                        help="Coordinate-sorted input BAM file "
                        "containing mapped reads.")

    options = parser.parse_args()

    if options.snp_dir:
        if(options.snp_tab or options.snp_index or options.haplotype):
            parser.error("expected --snp_dir OR (--snp_tab, --snp_index and "
                         "--haplotype) arguments but not both")
    else:
        if not (options.snp_tab and options.snp_index and options.haplotype):
            parser.error("either --snp_dir OR (--snp_tab, "
                         "--snp_index AND --haplotype) arguments must be "
                         "provided")

    if options.samples and not options.haplotype:
        # warn because no way to use samples if haplotype file not specified
        sys.stderr.write("WARNING: ignoring --samples argument "
                         "because --haplotype argument not provided")

    return options


def count_ref_alt_matches(read, read_stats, snp_tab, snp_idx, read_pos,
                          files, cur_chrom):
    """Classify a read by the alleles it carries at the overlapping SNPs
    and write it to the hom / maternal / paternal output BAM.  One
    diagnostic line per informative SNP is printed to stdout.

    NOTE(review): read_pos appears to be 1-based (read_pos[i]-1 indexes
    the read sequence) -- confirm against snptable.get_overlapping_snps.
    """
    pat_alleles = snp_tab.snp_allele1[snp_idx]
    mat_alleles = snp_tab.snp_allele2[snp_idx]

    #if all SNPs in the read are equal th entire read will go to homozgyous count
    if np.array_equal(mat_alleles, pat_alleles):
        read_stats.hom_count +=1
        files.hom_bam.write(read)
        for i in range(len(snp_idx)):
            print len(snp_idx), cur_chrom, snp_idx[i], snp_tab.snp_pos[snp_idx][i], mat_alleles[i], pat_alleles[i], i, "hom", read_pos[i], read.query_sequence
    else:
        # if they are not equal it means there is a read that could be used to "assign" the read.
        # loop through the SNPs to see which one it is, and then assign it.
        # `assign` ensures the read is counted/written only once, at the
        # first heterozygous SNP.
        assign = 0
        for i in range(len(snp_idx)):
            if mat_alleles[i] != pat_alleles[i]:
                if pat_alleles[i] == read.query_sequence[read_pos[i]-1]:
                    # read matches reference allele
                    if assign==0:
                        read_stats.pat_count += 1
                        assign +=1
                        #output to paternal.bam file.
                        files.paternal_bam.write(read)
                    print len(snp_idx), cur_chrom, snp_idx[i], snp_tab.snp_pos[snp_idx][i], mat_alleles[i], pat_alleles[i], i, "pat", read_pos[i], read.query_sequence
                elif mat_alleles[i] == read.query_sequence[read_pos[i]-1]:
                    # read matches non-reference allele
                    if assign==0:
                        read_stats.mat_count += 1
                        assign+=1
                        #output to maternal.bam file.
                        files.maternal_bam.write(read)
                    print len(snp_idx), cur_chrom, snp_idx[i], snp_tab.snp_pos[snp_idx][i], mat_alleles[i], pat_alleles[i], i, "mat", read_pos[i], read.query_sequence
                else:
                    # read matches neither ref nor other
                    if assign==0:
                        assign +=1
                        read_stats.other_count += 1
                    print len(snp_idx), cur_chrom, snp_idx[i], snp_tab.snp_pos[snp_idx][i], mat_alleles[i], pat_alleles[i], i, "other", read_pos[i], read.query_sequence, read.query_sequence[read_pos[i]-1]


def filter_reads(files, max_seqs=MAX_SEQS_DEFAULT, max_snps=MAX_SNPS_DEFAULT,
                 samples=None):
    """Iterate over the (coordinate-sorted) input BAM, loading the SNP
    table per chromosome, and dispatch each primary mapped read to
    process_single_read.  Writes summary stats to stderr at the end."""
    cur_chrom = None
    cur_tid = None
    seen_chrom = set([])

    snp_tab = snptable.SNPTable()
    read_stats = ReadStats()
    read_pair_cache = {}
    cache_size = 0
    read_count = 0

    for read in files.input_bam:
        read_count += 1
        # if (read_count % 100000) == 0:
        #     sys.stderr.write("\nread_count: %d\n" % read_count)
        #     sys.stderr.write("cache_size: %d\n" % cache_size)

        # TODO: need to change this to use new pysam API calls
        # but need to check pysam version for backward compatibility
        if read.tid == -1:
            # unmapped read
            read_stats.discard_unmapped += 1
            continue

        if (cur_tid is None) or (read.tid != cur_tid):
            # this is a new chromosome
            cur_chrom = files.input_bam.getrname(read.tid)

            if len(read_pair_cache) != 0:
                sys.stderr.write("WARNING: failed to find pairs for %d "
                                 "reads on this chromosome\n" %
                                 len(read_pair_cache))

                # NOTE(review): ReadStats.__init__ never defines
                # discard_missing_pair, so this line would raise
                # AttributeError if ever reached (the cache is never
                # populated in this script) -- confirm and fix upstream.
                read_stats.discard_missing_pair += len(read_pair_cache)
            read_pair_cache = {}
            cache_size = 0
            read_count = 0

            if cur_chrom in seen_chrom:
                # sanity check that input bam file is sorted
                raise ValueError("expected input BAM file to be sorted "
                                 "but chromosome %s is repeated\n" % cur_chrom)
            seen_chrom.add(cur_chrom)
            cur_tid = read.tid
            sys.stderr.write("starting chromosome %s\n" % cur_chrom)

            # use HDF5 files if they are provided, otherwise use text
            # files from SNP dir
            snp_filename = "%s/%s.snps.txt.gz" % (files.snp_dir, cur_chrom)
            sys.stderr.write("reading SNPs from file '%s'\n" % snp_filename)
            snp_tab.read_file(snp_filename)

            sys.stderr.write("processing reads\n")

        if read.is_secondary:
            # this is a secondary alignment (i.e. read was aligned more than
            # once and this has align score that <= best score)
            read_stats.discard_secondary += 1
            continue

        process_single_read(read, read_stats, files, snp_tab,
                            max_seqs, max_snps, cur_chrom)

    read_stats.write(sys.stderr)


def process_single_read(read, read_stats, files, snp_tab, max_seqs,
                        max_snps, cur_chrom):
    """Check if a single read overlaps SNPs or indels, and writes
    this read (or generated read pairs) to appropriate output files"""

    # check if read overlaps SNPs or indels
    snp_idx, snp_read_pos, \
        indel_idx, indel_read_pos = snp_tab.get_overlapping_snps(read)

    if len(indel_idx) > 0:
        # for now discard this read, we want to improve this to handle
        # the indel reads appropriately
        read_stats.discard_indel += 1
        # TODO: add option to handle indels instead of throwing out reads
        return

    if len(snp_idx) > 0:
        mat_alleles = snp_tab.snp_allele1[snp_idx]
        pat_alleles = snp_tab.snp_allele2[snp_idx]

        count_ref_alt_matches(read, read_stats, snp_tab, snp_idx,
                              snp_read_pos, files, cur_chrom)

        # limit recursion here by discarding reads that
        # overlap too many SNPs
        if len(snp_read_pos) > max_snps:
            read_stats.discard_excess_snps += 1
            return

        # mat_seqs, pat_seqs = generate_reads(read.query_sequence, 0)

        # make set of unique reads, we don't want to remap
        # duplicates, or the read that matches original
        # unique_reads = set(read_seqs)
        # if read.query_sequence in unique_reads:
        #     unique_reads.remove(read.query_sequence)

        # if len(unique_reads) == 0:
        # only read generated matches original read,
        # so keep original
        # files.maternal_bam.write(mat_seqs)
        # read_stats.maternal_single += 1
        # elif len(unique_reads) < max_seqs:
        #     # write read to fastq file for remapping
        #     write_fastq(files.fastq_single, read, unique_reads)
        # write read to 'to remap' BAM
        # this is probably not necessary with new implmentation
        # but kept for consistency with previous version of script
        # files.paternal_bam.write(pat_seqs)
        # read_stats.paternal_single += 1
        # else:
        # discard read
        # read_stats.discard_excess_reads += 1
        # return
    else:
        # no SNPs overlap read, write to keep file
        files.hom_bam.write(read)
        read_stats.hom_single += 1


def main(bam_filenames, is_sorted=False,
         max_seqs=MAX_SEQS_DEFAULT, max_snps=MAX_SNPS_DEFAULT,
         output_dir=None, snp_dir=None):
    """Open input/output files, run the filtering pass, close files."""
    files = DataFiles(bam_filenames, is_sorted,
                      output_dir=output_dir,
                      snp_dir=snp_dir)

    filter_reads(files, max_seqs=max_seqs, max_snps=max_snps)

    files.close()


if __name__ == '__main__':
    sys.stderr.write("command line: %s\n" % " ".join(sys.argv))
    sys.stderr.write("python version: %s\n" % sys.version)
    sys.stderr.write("pysam version: %s\n" % pysam.__version__)

    util.check_pysam_version()

    options = parse_options()

    main(options.bam_filename,
         is_sorted=options.is_sorted,
         max_seqs=options.max_seqs,
         max_snps=options.max_snps,
         output_dir=options.output_dir,
         snp_dir=options.snp_dir)
20,705
5,987
# generated by update to not change manually import typing as t from bungieapi.base import BaseClient, clean_query_value from bungieapi.forge import forge from bungieapi.generated.components.responses import ( CEListOfGroupOptionalConversationClientResponse, DictionaryOfint32AndstringClientResponse, ListOfEntityActionResultClientResponse, ListOfGroupThemeClientResponse, ListOfGroupV2CardClientResponse, SearchResultOfGroupBanClientResponse, SearchResultOfGroupMemberApplicationClientResponse, SearchResultOfGroupMemberClientResponse, booleanClientResponse, int32ClientResponse, int64ClientResponse, ) from bungieapi.generated.components.responses.groups_v2 import ( GetGroupsForMemberClientResponse, GroupApplicationClientResponse, GroupClientResponse, GroupMemberLeaveResultClientResponse, GroupMembershipSearchClientResponse, GroupPotentialMembershipSearchClientResponse, GroupSearchClientResponse, ) from bungieapi.generated.components.schemas import BungieMembershipType from bungieapi.generated.components.schemas.groups_v2 import ( ClanBanner, GroupApplicationListRequest, GroupApplicationRequest, GroupBanRequest, GroupDateRange, GroupEditAction, GroupNameSearchRequest, GroupOptionalConversationAddRequest, GroupOptionalConversationEditRequest, GroupOptionsEditAction, GroupPotentialMemberStatus, GroupQuery, GroupsForMemberFilter, GroupType, RuntimeGroupMemberType, ) class Client(BaseClient): async def get_available_avatars( self, ) -> DictionaryOfint32AndstringClientResponse: """Returns a list of all available group avatars for the signed-in user.""" query = None result = await self.get( path="/GroupV2/GetAvailableAvatars/", query=query, ) return forge(DictionaryOfint32AndstringClientResponse, result) async def get_available_themes( self, ) -> ListOfGroupThemeClientResponse: """Returns a list of all available group themes.""" query = None result = await self.get( path="/GroupV2/GetAvailableThemes/", query=query, ) return forge(ListOfGroupThemeClientResponse, result) async 
def get_user_clan_invite_setting( self, m_type: "BungieMembershipType", ) -> booleanClientResponse: """Gets the state of the user's clan invite preferences for a particular membership type - true if they wish to be invited to clans, false otherwise. Parameters: m_type: The Destiny membership type of the account we wish to access settings.""" query = None result = await self.get( path=f"/GroupV2/GetUserClanInviteSetting/{clean_query_value(m_type)}/", query=query, ) return forge(booleanClientResponse, result) async def get_recommended_groups( self, create_date_range: "GroupDateRange", group_type: "GroupType", ) -> ListOfGroupV2CardClientResponse: """Gets groups recommended for you based on the groups to whom those you follow belong. Parameters: create_date_range: Requested range in which to pull recommended groups group_type: Type of groups requested """ query = None result = await self.post( path=f"/GroupV2/Recommended/{clean_query_value(group_type)}/{clean_query_value(create_date_range)}/", query=query, ) return forge(ListOfGroupV2CardClientResponse, result) async def group_search( self, request: "GroupQuery", ) -> GroupSearchClientResponse: """Search for Groups.""" query = None result = await self.post(path="/GroupV2/Search/", query=query, request=request) return forge(GroupSearchClientResponse, result) async def get_group( self, group_id: int, ) -> GroupClientResponse: """Get information about a specific group of the given ID. Parameters: group_id: Requested group's id. """ query = None result = await self.get( path=f"/GroupV2/{clean_query_value(group_id)}/", query=query, ) return forge(GroupClientResponse, result) async def get_group_by_name( self, group_name: str, group_type: "GroupType", ) -> GroupClientResponse: """Get information about a specific group with the given name and type. Parameters: group_name: Exact name of the group to find. group_type: Type of group to find. 
""" query = None result = await self.get( path=f"/GroupV2/Name/{clean_query_value(group_name)}/{clean_query_value(group_type)}/", query=query, ) return forge(GroupClientResponse, result) async def get_group_by_name_v2( self, request: "GroupNameSearchRequest", ) -> GroupClientResponse: """Get information about a specific group with the given name and type. The POST version. """ query = None result = await self.post(path="/GroupV2/NameV2/", query=query, request=request) return forge(GroupClientResponse, result) async def get_group_optional_conversations( self, group_id: int, ) -> CEListOfGroupOptionalConversationClientResponse: """Gets a list of available optional conversation channels and their settings. Parameters: group_id: Requested group's id. """ query = None result = await self.get( path=f"/GroupV2/{clean_query_value(group_id)}/OptionalConversations/", query=query, ) return forge(CEListOfGroupOptionalConversationClientResponse, result) async def edit_group( self, request: "GroupEditAction", group_id: int, ) -> int32ClientResponse: """Edit an existing group. You must have suitable permissions in the group to perform this operation. This latest revision will only edit the fields you pass in - pass null for properties you want to leave unaltered. Parameters: group_id: Group ID of the group to edit. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Edit/", query=query, request=request, ) return forge(int32ClientResponse, result) async def edit_clan_banner( self, request: "ClanBanner", group_id: int, ) -> int32ClientResponse: """Edit an existing group's clan banner. You must have suitable permissions in the group to perform this operation. All fields are required. Parameters: group_id: Group ID of the group to edit. 
""" query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/EditClanBanner/", query=query, request=request, ) return forge(int32ClientResponse, result) async def edit_founder_options( self, request: "GroupOptionsEditAction", group_id: int, ) -> int32ClientResponse: """Edit group options only available to a founder. You must have suitable permissions in the group to perform this operation. Parameters: group_id: Group ID of the group to edit. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/EditFounderOptions/", query=query, request=request, ) return forge(int32ClientResponse, result) async def add_optional_conversation( self, request: "GroupOptionalConversationAddRequest", group_id: int, ) -> int64ClientResponse: """Add a new optional conversation/chat channel. Requires admin permissions to the group. Parameters: group_id: Group ID of the group to edit. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/OptionalConversations/Add/", query=query, request=request, ) return forge(int64ClientResponse, result) async def edit_optional_conversation( self, request: "GroupOptionalConversationEditRequest", conversation_id: int, group_id: int, ) -> int64ClientResponse: """Edit the settings of an optional conversation/chat channel. Requires admin permissions to the group. Parameters: conversation_id: Conversation Id of the channel being edited. group_id: Group ID of the group to edit. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/OptionalConversations/Edit/{clean_query_value(conversation_id)}/", query=query, request=request, ) return forge(int64ClientResponse, result) async def get_members_of_group( self, currentpage: int, group_id: int, member_type: t.Optional["RuntimeGroupMemberType"] = None, name_search: t.Optional[str] = None, ) -> SearchResultOfGroupMemberClientResponse: """Get the list of members in a given group. 
Parameters: currentpage: Page number (starting with 1). Each page has a fixed size of 50 items per page. group_id: The ID of the group. member_type: Filter out other member types. Use None for all members. name_search: The name fragment upon which a search should be executed for members with matching display or unique names. """ query = {"memberType": member_type, "nameSearch": name_search} result = await self.get( path=f"/GroupV2/{clean_query_value(group_id)}/Members/", query=query, ) return forge(SearchResultOfGroupMemberClientResponse, result) async def get_admins_and_founder_of_group( self, currentpage: int, group_id: int, ) -> SearchResultOfGroupMemberClientResponse: """Get the list of members in a given group who are of admin level or higher. Parameters: currentpage: Page number (starting with 1). Each page has a fixed size of 50 items per page. group_id: The ID of the group. """ query = None result = await self.get( path=f"/GroupV2/{clean_query_value(group_id)}/AdminsAndFounder/", query=query, ) return forge(SearchResultOfGroupMemberClientResponse, result) async def edit_group_membership( self, group_id: int, membership_id: int, membership_type: "BungieMembershipType", member_type: "RuntimeGroupMemberType", ) -> int32ClientResponse: """Edit the membership type of a given member. You must have suitable permissions in the group to perform this operation. Parameters: group_id: ID of the group to which the member belongs. membership_id: Membership ID to modify. membership_type: Membership type of the provide membership ID. member_type: New membertype for the specified member. 
""" query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Members/{clean_query_value(membership_type)}/{clean_query_value(membership_id)}/SetMembershipType/{clean_query_value(member_type)}/", query=query, ) return forge(int32ClientResponse, result) async def kick_member( self, group_id: int, membership_id: int, membership_type: "BungieMembershipType", ) -> GroupMemberLeaveResultClientResponse: """Kick a member from the given group, forcing them to reapply if they wish to re-join the group. You must have suitable permissions in the group to perform this operation. Parameters: group_id: Group ID to kick the user from. membership_id: Membership ID to kick. membership_type: Membership type of the provided membership ID. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Members/{clean_query_value(membership_type)}/{clean_query_value(membership_id)}/Kick/", query=query, ) return forge(GroupMemberLeaveResultClientResponse, result) async def ban_member( self, request: "GroupBanRequest", group_id: int, membership_id: int, membership_type: "BungieMembershipType", ) -> int32ClientResponse: """Bans the requested member from the requested group for the specified period of time. Parameters: group_id: Group ID that has the member to ban. membership_id: Membership ID of the member to ban from the group. membership_type: Membership type of the provided membership ID. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Members/{clean_query_value(membership_type)}/{clean_query_value(membership_id)}/Ban/", query=query, request=request, ) return forge(int32ClientResponse, result) async def unban_member( self, group_id: int, membership_id: int, membership_type: "BungieMembershipType", ) -> int32ClientResponse: """Unbans the requested member, allowing them to re-apply for membership. 
Parameters: membership_id: Membership ID of the member to unban from the group membership_type: Membership type of the provided membership ID. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Members/{clean_query_value(membership_type)}/{clean_query_value(membership_id)}/Unban/", query=query, ) return forge(int32ClientResponse, result) async def get_banned_members_of_group( self, currentpage: int, group_id: int, ) -> SearchResultOfGroupBanClientResponse: """Get the list of banned members in a given group. Only accessible to group Admins and above. Not applicable to all groups. Check group features. Parameters: currentpage: Page number (starting with 1). Each page has a fixed size of 50 entries. group_id: Group ID whose banned members you are fetching """ query = None result = await self.get( path=f"/GroupV2/{clean_query_value(group_id)}/Banned/", query=query, ) return forge(SearchResultOfGroupBanClientResponse, result) async def abdicate_foundership( self, founder_id_new: int, group_id: int, membership_type: "BungieMembershipType", ) -> booleanClientResponse: """An administrative method to allow the founder of a group or clan to give up their position to another admin permanently. Parameters: founder_id_new: The new founder for this group. Must already be a group admin. group_id: The target group id. membership_type: Membership type of the provided founderIdNew. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Admin/AbdicateFoundership/{clean_query_value(membership_type)}/{clean_query_value(founder_id_new)}/", query=query, ) return forge(booleanClientResponse, result) async def get_pending_memberships( self, currentpage: int, group_id: int, ) -> SearchResultOfGroupMemberApplicationClientResponse: """Get the list of users who are awaiting a decision on their application to join a given group. Modified to include application info. Parameters: currentpage: Page number (starting with 1). 
Each page has a fixed size of 50 items per page. group_id: ID of the group. """ query = None result = await self.get( path=f"/GroupV2/{clean_query_value(group_id)}/Members/Pending/", query=query, ) return forge(SearchResultOfGroupMemberApplicationClientResponse, result) async def get_invited_individuals( self, currentpage: int, group_id: int, ) -> SearchResultOfGroupMemberApplicationClientResponse: """Get the list of users who have been invited into the group. Parameters: currentpage: Page number (starting with 1). Each page has a fixed size of 50 items per page. group_id: ID of the group. """ query = None result = await self.get( path=f"/GroupV2/{clean_query_value(group_id)}/Members/InvitedIndividuals/", query=query, ) return forge(SearchResultOfGroupMemberApplicationClientResponse, result) async def approve_all_pending( self, request: "GroupApplicationRequest", group_id: int, ) -> ListOfEntityActionResultClientResponse: """Approve all of the pending users for the given group. Parameters: group_id: ID of the group. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Members/ApproveAll/", query=query, request=request, ) return forge(ListOfEntityActionResultClientResponse, result) async def deny_all_pending( self, request: "GroupApplicationRequest", group_id: int, ) -> ListOfEntityActionResultClientResponse: """Deny all of the pending users for the given group. Parameters: group_id: ID of the group. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Members/DenyAll/", query=query, request=request, ) return forge(ListOfEntityActionResultClientResponse, result) async def approve_pending_for_list( self, request: "GroupApplicationListRequest", group_id: int, ) -> ListOfEntityActionResultClientResponse: """Approve all of the pending users for the given group. Parameters: group_id: ID of the group. 
""" query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Members/ApproveList/", query=query, request=request, ) return forge(ListOfEntityActionResultClientResponse, result) async def approve_pending( self, request: "GroupApplicationRequest", group_id: int, membership_id: int, membership_type: "BungieMembershipType", ) -> booleanClientResponse: """Approve the given membershipId to join the group/clan as long as they have applied. Parameters: group_id: ID of the group. membership_id: The membership id being approved. membership_type: Membership type of the supplied membership ID. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Members/Approve/{clean_query_value(membership_type)}/{clean_query_value(membership_id)}/", query=query, request=request, ) return forge(booleanClientResponse, result) async def deny_pending_for_list( self, request: "GroupApplicationListRequest", group_id: int, ) -> ListOfEntityActionResultClientResponse: """Deny all of the pending users for the given group that match the passed-in . Parameters: group_id: ID of the group. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Members/DenyList/", query=query, request=request, ) return forge(ListOfEntityActionResultClientResponse, result) async def get_groups_for_member( self, filter: "GroupsForMemberFilter", group_type: "GroupType", membership_id: int, membership_type: "BungieMembershipType", ) -> GetGroupsForMemberClientResponse: """Get information about the groups that a given member has joined. Parameters: filter: Filter apply to list of joined groups. group_type: Type of group the supplied member founded. membership_id: Membership ID to for which to find founded groups. membership_type: Membership type of the supplied membership ID. 
""" query = None result = await self.get( path=f"/GroupV2/User/{clean_query_value(membership_type)}/{clean_query_value(membership_id)}/{clean_query_value(filter)}/{clean_query_value(group_type)}/", query=query, ) return forge(GetGroupsForMemberClientResponse, result) async def recover_group_for_founder( self, group_type: "GroupType", membership_id: int, membership_type: "BungieMembershipType", ) -> GroupMembershipSearchClientResponse: """Allows a founder to manually recover a group they can see in game but not on bungie.net Parameters: group_type: Type of group the supplied member founded. membership_id: Membership ID to for which to find founded groups. membership_type: Membership type of the supplied membership ID.""" query = None result = await self.get( path=f"/GroupV2/Recover/{clean_query_value(membership_type)}/{clean_query_value(membership_id)}/{clean_query_value(group_type)}/", query=query, ) return forge(GroupMembershipSearchClientResponse, result) async def get_potential_groups_for_member( self, filter: "GroupPotentialMemberStatus", group_type: "GroupType", membership_id: int, membership_type: "BungieMembershipType", ) -> GroupPotentialMembershipSearchClientResponse: """Get information about the groups that a given member has applied to or been invited to. Parameters: filter: Filter apply to list of potential joined groups. group_type: Type of group the supplied member applied. membership_id: Membership ID to for which to find applied groups. membership_type: Membership type of the supplied membership ID. 
""" query = None result = await self.get( path=f"/GroupV2/User/Potential/{clean_query_value(membership_type)}/{clean_query_value(membership_id)}/{clean_query_value(filter)}/{clean_query_value(group_type)}/", query=query, ) return forge(GroupPotentialMembershipSearchClientResponse, result) async def individual_group_invite( self, request: "GroupApplicationRequest", group_id: int, membership_id: int, membership_type: "BungieMembershipType", ) -> GroupApplicationClientResponse: """Invite a user to join this group. Parameters: group_id: ID of the group you would like to join. membership_id: Membership id of the account being invited. membership_type: MembershipType of the account being invited. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Members/IndividualInvite/{clean_query_value(membership_type)}/{clean_query_value(membership_id)}/", query=query, request=request, ) return forge(GroupApplicationClientResponse, result) async def individual_group_invite_cancel( self, group_id: int, membership_id: int, membership_type: "BungieMembershipType", ) -> GroupApplicationClientResponse: """Cancels a pending invitation to join a group. Parameters: group_id: ID of the group you would like to join. membership_id: Membership id of the account being cancelled. membership_type: MembershipType of the account being cancelled. """ query = None result = await self.post( path=f"/GroupV2/{clean_query_value(group_id)}/Members/IndividualInviteCancel/{clean_query_value(membership_type)}/{clean_query_value(membership_id)}/", query=query, ) return forge(GroupApplicationClientResponse, result)
25,145
6,814
from causalprob import CausalProb

import unittest
import jax.numpy as jnp
import numpy as np


class TestNFConfounderModel(unittest.TestCase):
    """Checks the normalizing-flow confounder model: each flow must invert
    cleanly and report a log-det-Jacobian that matches autodiff."""

    def _build_state(self, dim=2):
        # Shared fixture: build the model, init parameters, and draw one
        # sample of the noise u and the induced variables v.
        from models.nf_confounder_model import define_model
        cp = CausalProb(model=define_model(dim=dim))
        theta = {name: cp.init_params[name](seed)
                 for seed, name in enumerate(cp.init_params)}
        noise = {name: cp.draw_u[name](1, theta, seed)
                 for seed, name in enumerate(cp.draw_u)}
        u, v = cp.fill(noise, {}, theta, cp.draw_u.keys())
        return cp, theta, u, v

    def test_is_inverse_function(self):
        """finv must undo f for every random variable."""
        cp, theta, u, v = self._build_state()
        for rv in cp.f:
            recovered = cp.finv[rv](cp.f[rv](u[rv], theta, v), theta, v)
            self.assertTrue(jnp.allclose(recovered, u[rv]))

    def test_determinant(self):
        """ldij must equal log|det(d finv / d v)| to 4 decimal places."""
        cp, theta, u, v = self._build_state()
        # dfinvv_dv expects the batch axis squeezed away.
        squeezed = {name: value.squeeze(0) for name, value in v.items()}
        for rv in cp.ldij:
            direct = cp.ldij[rv](v[rv], theta, v).squeeze()
            jacobian = cp.dfinvv_dv(rv, squeezed, theta)
            expected = jnp.log(jnp.abs(jnp.linalg.det(jacobian)))
            self.assertTrue(jnp.allclose(jnp.round(direct, 4),
                                         jnp.round(expected, 4)))
1,475
527
'''
Generate a bar chart with error bars -- might be easier to read than a line
graph with error bars.
'''
import os

import matplotlib.pyplot as plt
import pandas as pd
import numpy as np


def plot_result(idx, filename, xlabel, ylabel, linelabel, fig=None):
    """Add one bar series (with error bars) to the shared figure and save it.

    Relies on module globals assigned in the ``__main__`` block:
    ``mean_results`` / ``std_results`` (column 0 is the x position, column
    ``idx + 1`` this series' values) and ``figure_save_path``.

    :param idx: series index; shifts the bars by ``50 * idx`` so the groups
        for different C values sit side by side instead of overlapping.
    :param filename: output file name WITHOUT extension ('.pdf' is appended).
    :param xlabel: x-axis label.
    :param ylabel: y-axis label.
    :param linelabel: legend label for this series.
    :param fig: existing figure to draw into; a new one is created when None.
    :return: the figure, so successive calls accumulate series on it.
    """
    if fig is None:
        fig = plt.figure(figsize=(5, 4))
    else:
        # Re-activate the shared figure so pyplot draws into it.
        plt.figure(fig.number)

    plt.bar(mean_results[:, 0] + (50 * idx), mean_results[:, idx + 1],
            yerr=std_results[:, idx + 1], label=linelabel, width=50)
    plt.ylabel(ylabel)
    plt.xlabel(xlabel)
    plt.grid(True, axis='y')  # boolean form; the 'on' string is deprecated
    plt.legend(loc='best')
    plt.tight_layout()
    # BUG FIX: the caller previously passed 'r_pairs_bar.pdf' and this line
    # appended another '.pdf', writing 'r_pairs_bar.pdf.pdf'.
    plt.savefig(os.path.join(figure_save_path, '%s.pdf' % filename))
    return fig


if __name__ == '__main__':
    figure_save_path = './results/synth2/'
    filename = 'r_pairs_bar'  # extension is added by plot_result
    fig = None
    cs = [1, 3, 10, 20]
    markers = ['o', 'x', '+', '>', '<', '*']
    linestyles = [':', '-.', '--', '-']
    for idx, c in enumerate(cs):
        mean_results = np.genfromtxt('./results/synth_latent_mean_results_%i.csv' % c)
        std_results = np.genfromtxt('./results/synth_latent_std_results_%i.csv' % c)
        fig = plot_result(idx, filename, 'noise rate in pairwise training labels',
                          '$\\tau$ (on test set)', 'C=%i' % c, fig)
1,713
631
from .loader import * from .model import * from .train_and_test import *
74
24
# Program 77 : make Tkinter menu
from tkinter import *  # FIX: this wildcard import was duplicated twice

root = Tk()

# Top-level menu bar attached to the root window.
menu = Menu(root)
root.config(menu=menu)

# "File" dropdown: stub entries plus a working Exit command.
filemenu = Menu(menu)
menu.add_cascade(label='File', menu=filemenu)
filemenu.add_command(label='New')
filemenu.add_command(label='Open...')
filemenu.add_separator()
filemenu.add_command(label='Exit', command=root.quit)

# "Help" dropdown.
helpmenu = Menu(menu)
menu.add_cascade(label='Help', menu=helpmenu)
helpmenu.add_command(label='About')

# Explicit root.mainloop() instead of the bare mainloop() alias.
root.mainloop()
468
169
import factory

from chat.models import Chat, ChatRoom
from users.factories import UserFactory


class ChatRoomFactory(factory.django.DjangoModelFactory):
    """Builds ChatRoom instances with a random one-word name."""

    class Meta:
        model = ChatRoom

    name = factory.Faker('word')


class ChatFactory(factory.django.DjangoModelFactory):
    """Builds Chat messages with a generated author and room."""

    class Meta:
        model = Chat

    body = factory.Faker('sentence')
    author = factory.SubFactory(UserFactory)
    # BUG FIX: SubFactory must be given a *factory*, not a model class.
    # The original passed the ChatRoom model, which factory_boy cannot
    # evaluate as a sub-factory; use ChatRoomFactory (mirroring how
    # `author` correctly uses UserFactory).
    room = factory.SubFactory(ChatRoomFactory)
445
137
"""Downloads the training dataset and removes bad samples. """ import csv import os import urllib.request import tarfile import glob DATA_URL = 'http://download.tensorflow.org/data/speech_commands_v0.01.tar.gz' TRAIN_DIR = '../dataset/train/audio/' FILE_BAD = 'bad_samples.txt' def maybe_download(data_url, dest_directory): """Download and extract data set tar file. """ if not os.path.exists(dest_directory): os.makedirs(dest_directory) filename = data_url.split('/')[-1] filepath = os.path.join(dest_directory, filename) if not os.path.exists(filepath): print('Downloading %s ...' % filename) filepath, _ = urllib.request.urlretrieve(data_url, filepath) tarfile.open(filepath, 'r:gz').extractall(dest_directory) print('Successfully unzipped %s' % filename) def remove_bad(f_bad, train_dir): """Deletes bad samples in the dataset. """ num_bad = 0 with open(f_bad, 'r') as fp: for wav in csv.reader(fp, delimiter=','): try: os.remove(train_dir + wav[0]) num_bad += 1 except FileNotFoundError: pass print('bad_training_samples removed: %d' % num_bad) wav_paths = glob.glob(os.path.join(train_dir, '*', '*nohash*.wav')) print('num_training_samples = %d' % len(wav_paths)) maybe_download(DATA_URL, TRAIN_DIR) remove_bad(FILE_BAD, TRAIN_DIR)
1,335
487
from django.apps import AppConfig


class RemoteControlConfig(AppConfig):
    """Django application config for the remote_control app."""

    # Dotted path of the app this configuration applies to.
    name = 'remote_control'

    def ready(self):
        # Imported for its side effects only: loading the module registers
        # the app's signal handlers once the app registry is ready.
        import remote_control.signals
162
47
from http import HTTPStatus

from authlib.jose import jwt
from pytest import fixture

from .utils import get_headers
from api.errors import AUTH_ERROR


def routes():
    """Yield every API endpoint that must enforce JWT authorization."""
    yield '/health'
    yield '/deliberate/observables'
    yield '/observe/observables'
    yield '/refer/observables'
    yield '/respond/observables'
    yield '/respond/trigger'


@fixture(scope='module', params=routes(), ids=lambda route: f'POST {route}')
def route(request):
    # Parametrized fixture: every test below runs once per endpoint.
    return request.param


@fixture(scope='module')
def wrong_jwt_structure():
    # Not even a three-part dot-separated token.
    return 'wrong_jwt_structure'


@fixture(scope='module')
def wrong_payload_structure_jwt(client):
    # Correctly signed JWT whose payload lacks the key(s) the API expects.
    header = {'alg': 'HS256'}
    payload = {'not_key': 'something'}
    secret_key = client.application.secret_key
    return jwt.encode(header, payload, secret_key).decode('ascii')


@fixture(scope='session')
def invalid_jwt(valid_jwt):
    # Tamper with the payload while keeping the ORIGINAL signature, so the
    # token still has a valid structure but fails signature verification.
    header, payload, signature = valid_jwt.split('.')

    def jwt_decode(s: str) -> dict:
        from authlib.common.encoding import urlsafe_b64decode, json_loads
        return json_loads(urlsafe_b64decode(s.encode('ascii')))

    def jwt_encode(d: dict) -> str:
        from authlib.common.encoding import json_dumps, urlsafe_b64encode
        return urlsafe_b64encode(json_dumps(d).encode('ascii')).decode('ascii')

    payload = jwt_decode(payload)

    # Corrupt the valid JWT by tampering with its payload.
    payload['superuser'] = True

    payload = jwt_encode(payload)

    return '.'.join([header, payload, signature])


@fixture(scope='module')
def authorization_errors_expected_payload(route):
    # Returns a builder for the error body the API emits on auth failure.
    def _make_payload_message(message):
        payload = {
            'errors': [{
                'code': AUTH_ERROR,
                'message': f'Authorization failed: {message}',
                'type': 'fatal'}]
        }
        return payload
    return _make_payload_message


# NOTE: all failure tests expect HTTP 200 -- this API reports auth errors
# in the response body, not via the status code.
def test_call_with_authorization_header_failure(
        route, client, authorization_errors_expected_payload
):
    response = client.post(route)
    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        'Authorization header is missing'
    )


def test_call_with_wrong_authorization_type(
        route, client, valid_jwt, authorization_errors_expected_payload
):
    response = client.post(
        route, headers=get_headers(valid_jwt, auth_type='wrong_type')
    )
    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        'Wrong authorization type'
    )


def test_call_with_wrong_jwt_structure(
        route, client, wrong_jwt_structure, authorization_errors_expected_payload
):
    response = client.post(route, headers=get_headers(wrong_jwt_structure))
    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        'Wrong JWT structure'
    )


def test_call_with_jwt_encoded_by_wrong_key(
        route, client, invalid_jwt, authorization_errors_expected_payload
):
    response = client.post(route, headers=get_headers(invalid_jwt))
    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        'Failed to decode JWT with provided key'
    )


def test_call_with_wrong_jwt_payload_structure(
        route, client, wrong_payload_structure_jwt,
        authorization_errors_expected_payload
):
    response = client.post(route, headers=get_headers(wrong_payload_structure_jwt))
    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        'Wrong JWT payload structure'
    )


def test_call_with_missed_secret_key(
        route, client, valid_jwt, authorization_errors_expected_payload
):
    # Temporarily unset the app's secret key, then restore it so later
    # tests in the module still see a working app.
    right_secret_key = client.application.secret_key
    client.application.secret_key = None
    response = client.post(route, headers=get_headers(valid_jwt))
    client.application.secret_key = right_secret_key
    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        '<SECRET_KEY> is missing'
    )
4,232
1,269
#!/usr/bin/env python3
#
# Copyright 2020 IBM
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.IBM Confidential
#
"""Gunicorn configuration, driven entirely by environment variables."""
import os
from multiprocessing import cpu_count

# String values treated as "true" for boolean environment flags.
TRUE = ('TRUE', 'True', 'true', '1')


def positive_int_env(name, default):
    """Return env var *name* as an int when set and positive, else *default*.

    A non-integer value raises ValueError, preserving the original
    fail-fast behaviour. (Extracted because the original parsed each
    variable twice with a duplicated condition.)
    """
    value = os.getenv(name)
    if value and int(value) > 0:
        return int(value)
    return default


use_ssl = os.getenv('ENABLE_SSL') in TRUE
settings = os.getenv('SETTINGS')

# Gunicorn config variables
workers = positive_int_env('GUNICORN_WORKER_NUM', cpu_count() * 2 + 1)

# Gunicorn needs to store its temporary file in memory (e.g. /dev/shm)
worker_tmp_dir = '/dev/shm'

# Container schedulers typically expect logs to come out on stdout/stderr,
# thus gunicorn is configured to do so ('-').
log_file = '-'

# NOTE(review): passed as a string; confirm the deployed gunicorn version
# accepts TLS version *names* (older releases expect an ssl module constant).
ssl_version = 'TLSv1_2'
bind = ':8080'

# TLS material lives in the mounted settings directory; all three stay None
# unless ENABLE_SSL is truthy.
ca_certs = f'{settings}/ca.crt' if use_ssl else None
certfile = f'{settings}/server.crt' if use_ssl else None
keyfile = f'{settings}/server.key' if use_ssl else None

timeout = positive_int_env('GUNICORN_TIMEOUT', 30)
1,562
560
def sum_all_to(num: int) -> int:
    """Return the sum 1 + 2 + ... + num via Gauss's closed-form formula."""
    # num * (num + 1) is always even, so floor division is exact.
    return (num + 1) * num // 2


def square_pyramidal_number(num: int) -> int:
    """Return 1^2 + 2^2 + ... + num^2, the num-th square pyramidal number.

    See https://en.wikipedia.org/wiki/Square_pyramidal_number
    """
    product = num * (num + 1) * (2 * num + 1)
    # The product is always divisible by 6, so floor division is exact.
    return product // 6
394
130
import numpy as np


def place_camera(time, data, camera, camera_distance, view):
    """Position *camera* for animation frame *time* according to *view*.

    :param time: frame index into each body's ``path_loc`` / ``path_dir`` arrays.
    :param data: nested body records; ``data[0][0]`` is the chassis and
                 ``data[1][7]`` is wheel #7 — TODO confirm indexing against caller.
    :param camera: VTK-style camera exposing ``SetViewUp``, ``SetPosition``,
                   ``SetFocalPoint`` and ``Roll``.
    :param camera_distance: currently unused; kept for interface compatibility.
    :param view: 1=general, 2=rear, 3=wheel, 4=top (placeholder), 5=side.
    :raises ValueError: if *view* is not one of 1-5 (previously this surfaced
                        as an UnboundLocalError on ``camera_pos``).
    """
    # World Z is "up" for every view.
    camera.SetViewUp([0, 0, 1])

    if view == 1:
        # General view: fixed offset beside/above the chassis CG.
        chs_pos = data[0][0].path_loc[time]  # Chassis CG @ time
        cam_d = 12   # [m] lateral distance from chassis
        cam_h = 4.5  # [m] height above chassis
        chs2cam = [2, -cam_d, cam_h]  # vector from chassis to camera position
        camera_pos = chs_pos + chs2cam
        cam_focal_point = chs_pos

    elif view == 2:
        # Rear view: camera trails the chassis along its heading (yaw),
        # lifted by pitch; camera roll follows chassis roll.
        chassis_pos = data[0][0].path_loc[time]  # Chassis CG @ time
        chassis_dir = data[0][0].path_dir[time]
        cam_d = 10
        camera_pos = chassis_pos + [-cam_d * np.cos(chassis_dir[2]),
                                    -cam_d * np.sin(chassis_dir[2]),
                                    cam_d * np.sin(chassis_dir[1]) + 1.5]
        camera.Roll(np.rad2deg(chassis_dir[0]))
        cam_focal_point = chassis_pos

    elif view == 3:
        # Wheel view: camera locked beside wheel #7, following its heading.
        wheel_pos = data[1][7].path_loc[time]  # Wheel #7 CG @ time
        wheel_dir = data[1][7].path_dir[time]
        cam_d = 1.5
        camera_pos = wheel_pos + [cam_d * np.sin(wheel_dir[2]),
                                  -cam_d * np.cos(wheel_dir[2]),
                                  -np.sin(wheel_dir[0]) + 0.2]
        cam_focal_point = wheel_pos

    elif view == 4:
        # Top view — hard-coded placeholder position.  NEED TO FIX
        cam_focal_point = [0, 0, 0]
        camera_pos = [30, 4, 60]

    elif view == 5:
        # Side view: camera at a fixed chassis offset, focal point swung
        # around the chassis by its heading.
        chassis_pos = data[0][0].path_loc[time]  # Chassis CG @ time
        chs2cam = [-7, 0, -0.5]
        camera_pos = chassis_pos + chs2cam
        chassis_dir = data[0][0].path_dir[time]
        cam_d = 7
        cam_focal_point = chassis_pos + [cam_d * np.sin(chassis_dir[2]),
                                         -cam_d * np.cos(chassis_dir[2]),
                                         -np.sin(chassis_dir[0]) + 0.2]

    else:
        raise ValueError(f"Unknown view: {view!r} (expected 1-5)")

    # Place camera and set focal point:
    camera.SetPosition(camera_pos)
    camera.SetFocalPoint(cam_focal_point)
2,143
869
""" .. /------------------------------------------------------------------------------\ | -- FACADE TECHNOLOGIES INC. CONFIDENTIAL -- | |------------------------------------------------------------------------------| | | | Copyright [2019] Facade Technologies Inc. | | All Rights Reserved. | | | | NOTICE: All information contained herein is, and remains the property of | | Facade Technologies Inc. and its suppliers if any. The intellectual and | | and technical concepts contained herein are proprietary to Facade | | Technologies Inc. and its suppliers and may be covered by U.S. and Foreign | | Patents, patents in process, and are protected by trade secret or copyright | | law. Dissemination of this information or reproduction of this material is | | strictly forbidden unless prior written permission is obtained from Facade | | Technologies Inc. | | | \------------------------------------------------------------------------------/ This file contains the Compiler class - the part of Facile that interprets a user's work in the gui, and converts it into the desired API. 
""" import os import sys import json from subprocess import check_call, DEVNULL, STDOUT, check_output from shutil import copyfile, rmtree from PySide2.QtCore import QObject, Signal from PySide2.QtWidgets import QApplication import data.statemachine as sm from data.compilationprofile import CompilationProfile from tools.api_compiler.copy_file_manifest import compilation_copy_files from libs.logging import compiler_logger as logger from libs.logging import log_exceptions import libs.env as env from multiprocessing.pool import ThreadPool curPath = os.path.abspath(os.path.join(env.FACILE_DIR, "tools/api_compiler/compiler.py")) dir, filename = os.path.split(curPath) def nongui(fun): """Decorator running the function in non-gui thread while processing the gui events.""" def wrap(*args, **kwargs): pool = ThreadPool(processes=1) a_sync = pool.apply_async(fun, args, kwargs) while not a_sync.ready(): a_sync.wait(0.01) QApplication.processEvents() return a_sync.get() return wrap class Compiler(QObject): stepStarted = Signal(str) stepComplete = Signal() finished = Signal() def __init__(self, compProf: 'CompilationProfile' = None) -> None: """ Initializes the compiler with required information. 
:return: None """ logger.debug("Instantiating compiler") QObject.__init__(self) self.statem = sm.StateMachine.instance self._compProf = compProf self._name = self.statem._project.getName() self._apiName = self.statem._project.getAPIName() self._backend = self.statem._project.getBackend() self._exeLoc = self.statem._project.getExecutableFile() self._opts = compProf.compResOpts self._apim = self.statem._project.getAPIModel() self._tguim = self.statem._project.getTargetGUIModel() # Save Folders self._saveFolder = os.path.join(compProf.apiFolderDir, self._name + '_API_Files') self._srcFolder = os.path.join(self._saveFolder, self._apiName) self._docFolder = os.path.join(self._srcFolder, 'Documentation') # Make all save folders if they don't exist if not os.path.exists(self._saveFolder): # If the user enters a path that doesn't exist, it is created os.mkdir(self._saveFolder) # TODO: Should notify them of this in compiler dialog if not os.path.exists(self._srcFolder): os.mkdir(self._srcFolder) if not os.path.exists(self._docFolder): os.mkdir(self._docFolder) self._necessaryFiles = ['apicore.pyd'] # THIS IS WHEN OBFUSCATING ALL FILES INDEPENDENTLY # # if sys.executable.endswith('facile.exe'): # self._necessaryFiles = [filepath + 'd' for tmp, filepath in compilation_copy_files] # # # baseapplication is out of place when we make facile into an executable # for filepath in self._necessaryFiles: # if filepath.endswith('baseapplication.pyd'): # self._necessaryFiles.remove(filepath) # self._necessaryFiles.append('baseapplication.pyd') # break # # else: # self._necessaryFiles = [filepath for tmp, filepath in compilation_copy_files] @nongui def _dev_generateAPICore(self): """ Makes the api core file and places it in facile's root directory NOTE: Should only ever be called in a development setting, never by a facile executable. 
""" msg = 'Generating API core file, this will take a while' logger.info(msg) self.stepStarted.emit(msg) os.chdir(os.path.abspath(os.path.join(env.FACILE_DIR, '..', 'scripts', 'obfuscation'))) exit_code = check_call([sys.executable, "obfuscate_files.py"], stdout=DEVNULL, stderr=STDOUT) if exit_code != 0: logger.critical("File compilation was unsuccessful, which will cause the API not to work.") raise Exception("File compilation was unsuccessful, which will cause the API not to work.") copyfile(os.path.abspath(os.path.join('compiled', 'apicore.pyd')), os.path.join(env.FACILE_DIR, 'apicore.pyd')) rmtree('compiled') os.chdir(dir) logger.info("Finished compiling api core and moving it to facile directory.") self.stepComplete.emit() def generateCustomApp(self) -> None: """ Creates the custom application class/file. :return: None """ msg = "Generating custom application driver" logger.info(msg) self.stepStarted.emit(msg) with open(os.path.join(self._srcFolder, "application.py"), "w+") as f: # TODO: The Facade Tech watermark thing is a little intense when the user needs # to use it for their own purposes and may want to share their generated API online. # Could make a custom tag. I put the original in for the moment though. 
logger.debug("Reading application-unfilled.py") try: with open(os.path.join(dir, 'application-template.py'), 'r') as g: appStr = g.read() except Exception as e: appStr = 'There was an error generating your API.\n' logger.exception(e) logger.debug("Generating options set") optStr = '{' for opt in self._opts: optStr += str(opt) + ', ' optStr = optStr[:-2] + '}' logger.debug("Generating str of required compIDs") alreadyWritten = [] aps, cas = self._apim.getActionsByType() compIDs = '[' for action in cas: alreadyWritten.append(action.getTargetComponent().getId()) compIDs += str(action.getTargetComponent().getId()) + ', ' # We also want the visibilitybehaviors' triggeractions' components' IDs vbs = self._tguim.getVisibilityBehaviors() for id in vbs: vb = vbs[id] name = vb.methodName triggerAction = vb.getTriggerAction() if name not in alreadyWritten and triggerAction is not None: compIDs += str(triggerAction.getTargetComponent().getId()) + ', ' compIDs = compIDs[:-2] + ']' # remove the final ", " and close bracket logger.debug("Format BaseApp superclass call with necessary info") try: appStr = appStr.format(exeLoc="'" + self._exeLoc + "'", options=optStr, name="'" + self._name + "'", backend="'" + self._backend + "'", reqCompIDs=compIDs) except Exception as e: logger.exception(e) logger.debug("Writing BaseApp") f.write(appStr) logger.debug("Writing methods generated from actions that are used in action pipelines.") alreadyWritten = [] for action in cas: alreadyWritten.append(action.getMethodName()) f.write(action.getMethod()) logger.debug("Writing methods generated from actions that are used by visibility behaviors.") for id in vbs: vb = vbs[id] name = vb.methodName triggerAction = vb.getTriggerAction() if name not in alreadyWritten and triggerAction is not None: f.write(triggerAction.getMethod()) logger.debug("Writing methods generated from action pipelines.") for ap in aps: f.write(ap.getMethod()) logger.info("Finished generating custom application driver.") 
self.stepComplete.emit() def copyNecessaryFiles(self) -> None: """ Adds all necessary files for compiler to work into created directory :return: None """ self.stepStarted.emit("Copying necessary files") # Only necessary when using multiple files # # make necessary directories before copying files # targetDirs = ['data', 'data/tguim', 'tguiil', 'libs'] # 'data/apim', # for tdir in targetDirs: # tdir = os.path.join(self._srcFolder, tdir) # if not os.path.exists(tdir): # os.mkdir(tdir) for path in self._necessaryFiles: src = os.path.abspath(os.path.join(env.FACILE_SRC_DIR, path)) dest = os.path.abspath(os.path.join(self._srcFolder, path)) logger.info(f"Copying file: {src} -> {dest}") try: copyfile(src, dest) except Exception as e: logger.critical("Unable to copy file.") logger.exception(e) self.stepComplete.emit() def saveTGUIM(self): """ Saves the tguim in the API folder. Saves project as well. :return: None """ msg = "Saving target GUI model" self.stepStarted.emit(msg) logger.info(msg) self.statem._project.save() with open(os.path.join(self._srcFolder, "tguim.json"), "w+") as f: f.write(json.dumps(self._tguim.asDict())) self.stepComplete.emit() def generateSetupFile(self): """ Generates the setup file for installing the API """ # Create setup.py so user can install install API as a package with pip. msg = "Generating setup.py file" self.stepStarted.emit(msg) logger.info(msg) setupTempFile = open(os.path.join(dir, "setup-template.txt"), 'r') setupStr = setupTempFile.read().format(projectName=self.statem._project.getAPIName(), projectVersion='0.1.0') # TODO Add versioning setupTempFile.close() setupFile = open(os.path.join(self._saveFolder, 'setup.py'), 'w') setupFile.write(setupStr) setupFile.close() self.stepComplete.emit() def generateInitFile(self): """ Generates the init file so the package can be installed as an API """ # Create __init__.py so API is a package. 
msg = "Generating __init__.py file" self.stepStarted.emit(msg) logger.info(msg) with open(os.path.join(dir, "__init__template.txt"), 'r') as initTempFile: targetAppName = self.statem._project.getExecutableFile().split('/')[-1].split('.')[0] # '/app.exe' -> 'app' targetAppName = targetAppName[0].upper() + targetAppName[1:] # 'app' -> 'App' initStr = initTempFile.read().format(targetApplicationName=targetAppName) with open(os.path.join(self._srcFolder, '__init__.py'), 'w') as initFile: initFile.write(initStr) self.stepComplete.emit() def installAPI(self): """ Installs the generated API to PATH """ msg = "Installing as python package" self.stepStarted.emit(msg) logger.info(msg) os.chdir(self._saveFolder) os.system(self._compProf.interpExeDir + " -m pip install . 1>install.log 2>&1") rmtree('setup.py') # Delete setup.py after it's used logger.info("Finished installing python package") self.stepComplete.emit() def copyHelpFiles(self): """ Generates files that give the basic structure and outline of a functional script. Will only write them if they do not yet exist, to avoid overwriting any existing work in the automate.py file. 
""" msg = "Copying help files" self.stepStarted.emit(msg) logger.info(msg) if not os.path.exists(os.path.join(self._saveFolder, "automate.py")): with open(os.path.join(self._saveFolder, "automate.py"), "w+") as f: with open(os.path.join(dir, 'automate-template.txt'), 'r') as g: autoStr = g.read() targetAppName = self.statem._project.getExecutableFile().split('/')[-1].split('.')[0] targetAppName = targetAppName[0].upper() + targetAppName[1:] # 'app' -> 'App' f.write(autoStr.format(name=self._name, targetapp=targetAppName)) # Remove run script and rewrite every time so that interpreter gets written to it if os.path.exists(os.path.join(self._saveFolder, "run-script.bat")): os.remove(os.path.join(self._saveFolder, "run-script.bat")) with open(os.path.join(self._saveFolder, "run-script.bat"), "w+") as f: with open(os.path.join(dir, "run-script-template.bat"), 'r') as g: rsStr = g.read() f.write(rsStr.format(interpreterLocation=self._compProf.interpExeDir)) self.stepComplete.emit() @nongui def installRequirements(self): """ Installs the necessary requirements to the chosen python interpreter, if they aren't already installed. """ # Get currently installed packages in a list current = check_output([self._compProf.interpExeDir, '-m', 'pip', 'freeze']) installed = [r.decode().split('==')[0] for r in current.split()] # Get necessary packages in a list with open(os.path.join(dir, "api_requirements.txt"), 'r') as f: reqFile = f.read() required = [r.split('==')[0] for r in reqFile.split()] # Check for each package and install the missing ones diff = set(required) - set(installed) for package in diff: msg = "Installing package: " + package self.stepStarted.emit(msg) logger.info(msg) check_call([self._compProf.interpExeDir, '-m', 'pip', 'install', package], stdout=DEVNULL, stderr=STDOUT) self.stepComplete.emit() @log_exceptions(logger=logger) def compileAPI(self): """ Generates the functional API: the final result of compilation. 
""" logger.info("Compiling API") self.installRequirements() if not sys.executable.endswith('facile.exe'): self._dev_generateAPICore() self.copyNecessaryFiles() self.saveTGUIM() if self._compProf.installApi: self.generateSetupFile() self.generateInitFile() # We want this regardless of installing the api or not self.generateCustomApp() if self._compProf.installApi: self.installAPI() self.copyHelpFiles() if not sys.executable.endswith('facile.exe'): os.remove(os.path.join(env.FACILE_DIR, 'apicore.pyd')) self.finished.emit() logger.info("Finished compiling API")
16,405
4,590