text
stringlengths
0
1.05M
meta
dict
"""Assertions related to logical operations. """ import pydash from .base import Assertion, Comparator, Negate __all__ = ( 'Truthy', 'Falsy', 'Not', 'Predicate', 'All', 'NotAll', 'Any', 'NotAny', ) class Truthy(Assertion): """Asserts that `value` is truthy. Aliases: - ``to_be_truthy`` - ``is_truthy`` .. versionadded:: 0.0.1 """ #: reason = '{0} is not truthy' op = bool to_be_truthy = Truthy is_truthy = Truthy class Falsy(Assertion): """Asserts that `value` is falsy. Aliases: - ``to_be_falsy`` - ``is_falsy`` .. versionadded:: 0.0.1 """ #: reason = '{0} is not falsy' op = pydash.negate(bool) to_be_falsy = Falsy is_falsy = Falsy class Not(Comparator): """Asserts that `comparable` doesn't raise an ``AssertionError``. Can be used to create "opposite" comparators. Examples: >>> from verify import * >>> expect(5, Not(In([1, 2, 3]))) <expect(5)> >>> Not(5, In([1, 2, 3])) <Not()> >>> Not(In([1, 2, 3]))(5) True Aliases: - ``not_`` - ``does_not`` - ``to_fail`` - ``fails`` .. versionadded:: 0.0.1 """ #: reason = ('The negation of {comparable} should not be true ' 'when evaluated with {0}') def compare(self, *args, **opts): try: return not self.comparable(*args, **opts) except AssertionError: return True not_ = Not does_not = Not to_fail = Not fails = Not class Predicate(Comparator): """Asserts that `value` evaluated by the predicate `comparable` is ``True``. Aliases: - ``does`` - ``to_pass`` - ``passes`` .. versionadded:: 0.1.0 .. versionchanged:: 0.6.0 Catch ``AssertionError`` thrown by `comparable` and return ``False`` as comparison value instead. """ #: reason = 'The evaluation of {0} using {comparable} is false' def compare(self, *args, **opts): try: result = self.comparable(*args, **opts) except AssertionError as ex: # Catch AssertionError so that our class will emit it's own error # message when False is returned. result = False if result is None: # Consider predicates that return None to pass. 
This is done to # support predicates that assert internally but don't have a return # value. result = True return result does = Predicate to_pass = Predicate passes = Predicate class All(Comparator): """Asserts that `value` evaluates as truthy for **all** predicates in `comparable`. Aliases: - ``all_`` - ``does_all`` - ``passes_all`` .. versionadded:: 0.2.0 """ #: reason = '{0} is not true for all {comparable}' @staticmethod def op(value, comparable): """Return whether all results from evaluating `value` in `comparable` predicates return truthy. """ return all(pydash.juxtapose(*comparable)(value)) all_ = All does_all = All passes_all = All class NotAll(Negate, All): """Asserts that `value` evaluates as falsy for **all** predicates in `comparable`. Aliases: - ``to_be_not_all`` - ``does_not_all`` - ``fails_all`` .. versionadded:: 0.5.0 """ #: reason = '{0} is true for all {comparable}' not_all = NotAll does_not_all = NotAll fails_all = NotAll class Any(Comparator): """Asserts that `value` evaluates as truthy for **any** predicates in `comparable`. Aliases: - ``any_`` - ``does_any`` - ``passes_any`` .. versionadded:: 0.2.0 """ #: reason = '{0} is not true for any {comparable}' @staticmethod def op(value, comparable): """Return whether any results from evaluating `value` in `comparable` predicates return truthy. """ return any(pydash.juxtapose(*comparable)(value)) any_ = Any does_any = Any passes_any = Any class NotAny(Negate, Any): """Asserts that `value` evaluates as falsy for **any** predicates in `comparable`. Aliases: - ``not_any`` - ``does_not_any`` - ``fails_any`` .. versionadded:: 0.5.0 """ #: reason = '{0} is true for some {comparable}' not_any = NotAny does_not_any = NotAny fails_any = NotAny
{ "repo_name": "dgilland/verify", "path": "verify/logic.py", "copies": "1", "size": "4500", "license": "mit", "hash": 5598259363816018000, "line_mean": 18.9115044248, "line_max": 79, "alpha_frac": 0.5484444444, "autogenerated": false, "ratio": 3.5183737294761532, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9564806910802943, "avg_score": 0.0004022526146419952, "num_lines": 226 }
"""Assertions related to numbers. """ import operator import pydash from .base import Assertion, Comparator, Negate, NotSet __all__ = ( 'Greater', 'GreaterThan', 'GreaterEqual', 'GreaterOrEqual', 'Less', 'LessThan', 'LessEqual', 'LessOrEqual', 'Between', 'NotBetween', 'Positive', 'Negative', 'Even', 'Odd', 'Monotone', 'Increasing', 'StrictlyIncreasing', 'Decreasing', 'StrictlyDecreasing', ) class Greater(Comparator): """Asserts that `value` is greater than `comparable`. Aliases: - ``GreaterThan`` - ``to_be_greater`` - ``to_be_greater_than`` - ``is_greater`` - ``is_greater_than`` .. versionadded:: 0.0.1 """ #: reason = '{0} is not greater than {comparable}' op = operator.gt GreaterThan = Greater to_be_greater = Greater to_be_greater_than = Greater is_greater = Greater is_greater_than = Greater class GreaterEqual(Comparator): """Asserts that `value` is greater than or equal to `comparable`. Aliases: - ``GreaterThanEqual`` - ``to_be_greater_equal`` - ``to_be_greater_or_equal`` - ``is_greater_equal`` - ``is_greater_or_equal`` .. versionadded:: 0.0.1 """ #: reason = '{0} is not greater than or equal to {comparable}' op = operator.ge GreaterOrEqual = GreaterEqual to_be_greater_equal = GreaterEqual to_be_greater_or_equal = GreaterEqual is_greqter_equal = GreaterEqual is_greater_or_equal = GreaterEqual class Less(Comparator): """Asserts that `value` is less than `comparable`. Aliases: - ``LessThan`` - ``to_be_less`` - ``to_be_less_than`` - ``is_less`` - ``is_less_than`` .. versionadded:: 0.0.1 """ #: reason = '{0} is not less than {comparable}' op = operator.lt LessThan = Less to_be_less = Less to_be_less_than = Less is_less = Less is_less_than = Less class LessEqual(Comparator): """Asserts that `value` is less than or equal to `comparable`. Aliases: - ``LessThanEqual`` - ``to_be_less_equal`` - ``to_be_less_or_equal`` - ``is_less_equal`` - ``is_less_or_equal`` .. 
versionadded:: 0.0.1 """ #: reason = '{0} is not less than or equal to {comparable}' op = operator.le LessOrEqual = LessEqual to_be_less_equal = LessEqual to_be_less_or_equal = LessEqual is_less_equal = LessEqual is_less_or_equal = LessEqual class Between(Assertion): """Asserts that `value` is between `min` and `max` inclusively. Examples: These will pass: >>> assert Between(5, min=4, max=6) # 4 <= 5 <= 6 >>> assert Between(5, min=5, max=6) # 5 <= 5 <= 6 >>> assert Between(5, max=6) # 5 <= 6 >>> assert Between(5, min=4) # 5 >= 4 This will fail: >>> Between(5, max=4) # 5 <= 4 Traceback (most recent call last): ... AssertionError... Args: value (mixed, optional): Value to compare. Keyword Args: min (int, optional): Minimum value that `value` must be greater than or equal to. max (int, optional): Maximum value that `value` must be less than or equal to. Aliases: - ``to_be_between`` - ``is_between`` .. versionadded:: 0.2.0 .. versionchanged:: 0.4.0 Allow keyword arguments ``min`` and ``max`` to be used in place of positional tuple. .. versionchanged:: 1.0.0 Removed positional tuple argument and only support ``min`` and ``max`` keyword arguments. """ #: reason = '{0} is not between {min} and {max}' def set_options(self, opts): self.min = opts.pop('min', None) self.max = opts.pop('max', None) def compare(self, value): return self.op(value, self.min, self.max) @staticmethod def op(value, min=None, max=None): ge_min = value >= min if min is not None else True le_max = value <= max if max is not None else True return ge_min and le_max to_be_between = Between is_between = Between class NotBetween(Negate, Between): """Asserts that `value` is not between `min` and `max` inclusively. Aliases: - ``to_not_be_between`` - ``is_not_between`` .. versionadded:: 0.5.0 """ #: reason = '{0} is between {min} and {max}' to_not_be_between = NotBetween is_not_between = NotBetween class Positive(Assertion): """Asserts that `value` is a positive number. 
Aliases: - ``to_be_positive`` - ``is_positive`` .. versionadded:: 0.3.0 """ #: reason = '{0} is not a positive number' op = staticmethod(pydash.is_positive) to_be_positive = Positive is_positive = Positive class Negative(Assertion): """Asserts that `value` is a negative number. Aliases: - ``to_be_negative`` - ``is_negative`` .. versionadded:: 0.3.0 """ #: reason = '{0} is not a negative number' op = staticmethod(pydash.is_negative) to_be_negative = Negative is_negative = Negative class Even(Assertion): """Asserts that `value` is an even number. Aliases: - ``to_be_even`` - ``is_even`` .. versionadded:: 0.3.0 """ #: reason = '{0} is not an even number' op = staticmethod(pydash.is_even) to_be_even = Even is_even = Even class Odd(Assertion): """Asserts that `value` is an odd number. Aliases: - ``to_be_odd`` - ``is_odd`` .. versionadded:: 0.3.0 """ #: reason = '{0} is not an odd number' op = staticmethod(pydash.is_odd) to_be_odd = Odd is_odd = Odd class Monotone(Comparator): """Asserts that `value` is a monotonic with respect to `comparable`. Aliases: - ``to_be_monotone`` - ``is_monotone`` .. versionadded:: 0.3.0 """ #: reason = '{0} is not monotonic as evaluated by {comparable}' op = staticmethod(pydash.is_monotone) to_be_monotone = Monotone is_monotone = Monotone class Increasing(Assertion): """Asserts that `value` is monotonically increasing. Aliases: - ``to_be_increasing`` - ``is_increasing`` .. versionadded:: 0.3.0 """ #: reason = '{0} is not monotonically increasing' op = staticmethod(pydash.is_increasing) to_be_increasing = Increasing is_increasing = Increasing class StrictlyIncreasing(Assertion): """Asserts that `value` is strictly increasing. Aliases: - ``to_be_strictly_increasing`` - ``is_strictly_increasing`` .. 
versionadded:: 0.3.0 """ #: reason = '{0} is not strictly increasing' op = staticmethod(pydash.is_strictly_increasing) to_be_strictly_increasing = StrictlyIncreasing is_strictly_increasing = StrictlyIncreasing class Decreasing(Assertion): """Asserts that `value` is monotonically decreasing. Aliases: - ``to_be_decreasing`` - ``is_decreasing`` .. versionadded:: 0.3.0 """ #: reason = '{0} is not monotonically decreasing' op = staticmethod(pydash.is_decreasing) to_be_decreasing = Decreasing is_decreasing = Decreasing class StrictlyDecreasing(Assertion): """Asserts that `value` is strictly decreasing. Aliases: - ``to_be_strictly_decreasing`` - ``is_strictly_decreasing`` .. versionadded:: 0.3.0 """ #: reason = '{0} is not strictly decreasing' op = staticmethod(pydash.is_strictly_decreasing) to_be_strictly_decreasing = StrictlyDecreasing is_strictly_decreasing = StrictlyDecreasing
{ "repo_name": "dgilland/verify", "path": "verify/numbers.py", "copies": "1", "size": "7662", "license": "mit", "hash": -8531949127484391000, "line_mean": 19.7642276423, "line_max": 79, "alpha_frac": 0.5894022448, "autogenerated": false, "ratio": 3.341474051460968, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9430876296260968, "avg_score": 0, "num_lines": 369 }
assert not __debug__ # Run with -OO from panda3d.core import * from collections import OrderedDict import subprocess, glob, sys, os import niraimarshal import aes SOURCE_ROOT = os.path.dirname(os.path.abspath(__file__)) NIRAI_ROOT = os.path.abspath(os.path.join(SOURCE_ROOT, '..')) PYTHON_ROOT = os.path.join(NIRAI_ROOT, 'python') PANDA3D_ROOT = os.path.join(NIRAI_ROOT, 'panda3d') THIRDPARTY_ROOT = os.path.join(PANDA3D_ROOT, 'thirdparty') class NiraiCompilerBase: def __init__(self, output, outputdir='built', includedirs=set(), libs=set(), libpath=set()): self.output = output self.outputdir = outputdir self.includedirs = includedirs.copy() self.includedirs.add(os.path.join(PANDA3D_ROOT, 'built', 'include')) self.includedirs.add(os.path.join(PYTHON_ROOT, 'Include')) self.includedirs.add(SOURCE_ROOT) self.libs = libs.copy() self.libpath = libpath.copy() self.builtLibs = os.path.join(NIRAI_ROOT, 'panda3d', 'built', 'lib') self.libpath.add(self.builtLibs) self.libpath.add(os.path.join(NIRAI_ROOT, 'python')) self.sources = set() self._built = set() def add_source(self, filename): self.sources.add(filename) def add_library(self, lib, thirdparty=False): if thirdparty: root = os.path.normpath(lib).split(os.sep)[0] self.includedirs.add(os.path.join(self.thirdpartydir, root, 'include')) lib = os.path.join(self.thirdpartydir, lib) self.libs.add(lib) def add_nirai_files(self): for filename in ('aes.cxx', 'main.cxx'): self.add_source(os.path.join(SOURCE_ROOT, filename)) self.add_library('pythonembed') def _run_command(self, cmd): p = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, shell=True) v = p.wait() if v != 0: print 'The following command returned non-zero value (%d): %s' % (v, cmd[:100] + '...') sys.exit(1) def run(self): print 'Compiling CXX codes...' for filename in self.sources: self.compile(filename) print 'Linking...' 
self.link() class NiraiCompilerWindows(NiraiCompilerBase): def add_nirai_files(self): NiraiCompilerBase.add_nirai_files(self) self.thirdpartydir = os.path.join(THIRDPARTY_ROOT, 'win-libs-vc10') self.libs |= set(glob.glob(os.path.join(self.builtLibs, '*.lib'))) self.add_library('ws2_32') self.add_library('shell32') self.add_library('advapi32') self.add_library('gdi32') self.add_library('user32') self.add_library('oleaut32') self.add_library('ole32') self.add_library('iphlpapi') self.add_library('shell32') self.add_library('wsock32') self.add_library('opengl32') self.add_library('imm32') self.add_library('crypt32') self.add_library('fftw\\lib\\fftw', thirdparty=True) self.add_library('fftw\\lib\\rfftw', thirdparty=True) self.add_library('freetype\\lib\\freetype', thirdparty=True) self.add_library('jpeg\\lib\\jpeg-static', thirdparty=True) self.add_library('nvidiacg\\lib\\cgGL', thirdparty=True) self.add_library('nvidiacg\\lib\\cgD3D9', thirdparty=True) self.add_library('nvidiacg\\lib\\cg', thirdparty=True) self.add_library('ode\\lib\\ode_single', thirdparty=True) self.add_library('openal\\lib\\OpenAL32', thirdparty=True) self.add_library('openssl\\lib\\libpandaeay', thirdparty=True) self.add_library('openssl\\lib\\libpandassl', thirdparty=True) self.add_library('png\\lib\\libpng_static', thirdparty=True) self.add_library('squish\\lib\\squish', thirdparty=True) self.add_library('tiff\\lib\\libtiff', thirdparty=True) self.add_library('zlib\\lib\\zlibstatic', thirdparty=True) self.add_library('vorbis\\lib\\libogg_static', thirdparty=True) self.add_library('vorbis\\lib\\libvorbis_static', thirdparty=True) self.add_library('vorbis\\lib\\libvorbisfile_static', thirdparty=True) def compile(self, filename): out = '%s/%s.obj' % (self.outputdir, os.path.basename(filename).rsplit('.', 1)[0]) cmd = 'cl /c /GF /MP4 /DPy_BUILD_CORE /DNTDDI_VERSION=0x0501 /wd4996 /wd4275 /wd4267 /wd4101 /wd4273 /nologo /EHsc /MD /Zi /O2' for ic in self.includedirs: cmd += ' /I"%s"' % ic cmd += ' 
/Fo%s "%s"' % (out, filename) self._run_command(cmd) self._built.add(out) def link(self): cmd = 'link /LTCG /LTCG:STATUS /nologo /out:%s/%s' % (self.outputdir, self.output) for obj in self._built: cmd += ' "%s"' % obj for lib in self.libs: if not lib.endswith('.lib'): lib += '.lib' cmd += ' "%s"' % lib for path in self.libpath: cmd += ' /LIBPATH:"%s"' % path cmd += ' /RELEASE /nodefaultlib:python27.lib /nodefaultlib:libcmt /ignore:4049 /ignore:4006 /ignore:4221' self._run_command(cmd) class NiraiCompilerDarwin(NiraiCompilerBase): def __init__(self, *args, **kwargs): self.frameworks = kwargs.pop('frameworks', set()).copy() self.frameworkDirs = kwargs.pop('frameworkDirs', set()).copy() NiraiCompilerBase.__init__(self, *args, **kwargs) def add_library(self, lib, thirdparty=False): if thirdparty: root = os.path.normpath(lib).split(os.sep)[0] self.includedirs.add(os.path.join(self.thirdpartydir, root, 'include')) lib = os.path.join(self.thirdpartydir, lib) self.libpath.add(os.path.join(self.thirdpartydir, root, 'lib')) self.libs.add(lib) def add_nirai_files(self): NiraiCompilerBase.add_nirai_files(self) self.thirdpartydir = os.path.join(THIRDPARTY_ROOT, 'darwin-libs-a') self.libpath.add(self.builtLibs) self.libs |= set(glob.glob(os.path.join(self.builtLibs, '*.a'))) self.add_library('crypto') self.add_library('z') self.add_library('ssl') self.add_library('freetype/lib/freetype', thirdparty=True) self.add_library('jpeg/lib/jpeg', thirdparty=True) self.add_library('png/lib/png', thirdparty=True) self.add_library('ode/lib/ode', thirdparty=True) self.add_library('squish/lib/squish', thirdparty=True) self.add_library('tiff/lib/pandatiff', thirdparty=True) self.add_library('tiff/lib/pandatiffxx', thirdparty=True) self.add_library('vorbis/lib/ogg', thirdparty=True) self.add_library('vorbis/lib/vorbis', thirdparty=True) self.add_library('vorbis/lib/vorbisenc', thirdparty=True) self.add_library('vorbis/lib/vorbisfile', thirdparty=True) 
self.frameworkDirs.add(os.path.join(PANDA3D_ROOT, 'built', 'Frameworks')) self.frameworks.add('AppKit') self.frameworks.add('OpenAL') self.frameworks.add('OpenGL') self.frameworks.add('Cg') self.frameworks.add('AGL') self.frameworks.add('Carbon') self.frameworks.add('Cocoa') def compile(self, filename): print filename out = '%s/%s.o' % (self.outputdir, os.path.basename(filename).rsplit('.', 1)[0]) cmd = 'g++ -c -DPy_BUILD_CORE -ftemplate-depth-70 -fPIC -O2 -Wno-deprecated-declarations -pthread' for ic in self.includedirs: cmd += ' -I"%s"' % ic cmd += ' -o "%s" "%s"' % (out, filename) self._run_command(cmd) self._built.add(out) def link(self): cmd = 'g++ -o %s/%s' % (self.outputdir, self.output) for path in self.libpath: cmd += ' -L"%s"' % path for path in self.frameworkDirs: cmd += ' -F"%s"' % path for framework in self.frameworks: cmd += ' -framework %s' % framework for lib in self.libs: lib = os.path.basename(lib) if lib.startswith('lib'): lib = lib[3:] if lib.endswith('.a'): lib = lib[:-2] cmd += ' -l%s' % lib for obj in self._built: cmd += ' "%s"' % obj cmd += ' -dylib_file /System/Library/Frameworks/OpenGL.framework/Versions/A/Libraries/libGL.dylib' cmd += ':/System/Library/Frameworks/OpenGL.framework/Versions/A/Libraries/libGL.dylib' self._run_command(cmd) class NiraiPackager: HEADER = 'NRI\n' def __init__(self, outfile): self.modules = OrderedDict() self.outfile = outfile def __read_file(self, filename, mangler=None): with open(filename, 'rb') as f: data = f.read() base = filename.rsplit('.', 1)[0].replace('\\', '/').replace('/', '.') pkg = base.endswith('.__init__') moduleName = base.rsplit('.', 1)[0] if pkg else base name = moduleName if mangler is not None: name = mangler(name) if not name: return '', ('', 0) try: data = self.compile_module(name, data) except: print 'WARNING: Failed to compile', filename return '', ('', 0) size = len(data) * (-1 if pkg else 1) return name, (data, size) def compile_module(self, name, data): return 
niraimarshal.dumps(compile(data, name, 'exec')) def add_module(self, moduleName, data, size=None, compile=False, negSize=False): if compile: data = self.compile_module(moduleName, data) if size is None: size = len(data) if negSize: size = -size self.modules[moduleName] = (data, size) def add_file(self, filename, mangler=None): print 'Adding file', filename moduleName, (data, size) = self.__read_file(filename, mangler) if moduleName: moduleName = os.path.basename(filename).rsplit('.', 1)[0] self.add_module(moduleName, data, size) def add_directory(self, dir, mangler=None): print 'Adding directory', dir def _recurse_dir(dir): for f in os.listdir(dir): f = os.path.join(dir, f) if os.path.isdir(f): _recurse_dir(f) elif f.endswith('py'): moduleName, (data, size) = self.__read_file(f, mangler) if moduleName: self.add_module(moduleName, data, size) _recurse_dir(dir) def get_mangle_base(self, *path): return len(os.path.join(*path).rsplit('.', 1)[0].replace('\\', '/').replace('/', '.')) + 1 def add_panda3d_dirs(self): manglebase = self.get_mangle_base(NIRAI_ROOT, 'panda3d', 'built') def _mangler(name): name = name[manglebase:].strip('.') # Required hack if name == 'direct.extensions_native.extension_native_helpers': name = 'extension_native_helpers' return name self.add_directory(os.path.join(NIRAI_ROOT, 'panda3d', 'built', 'direct'), mangler=_mangler) self.add_directory(os.path.join(NIRAI_ROOT, 'panda3d', 'built', 'pandac'), mangler=_mangler) self.add_directory(os.path.join(NIRAI_ROOT, 'panda3d', 'built', 'panda3d'), mangler=_mangler) def add_default_lib(self): manglebase = self.get_mangle_base(NIRAI_ROOT, 'python', 'Lib') def _mangler(name): name = name[manglebase:] return name.strip('.') self.add_directory(os.path.join(NIRAI_ROOT, 'python', 'Lib'), mangler=_mangler) def write_out(self): f = open(self.outfile, 'wb') f.write(self.HEADER) f.write(self.process_modules()) f.close() def generate_key(self, size=256): return os.urandom(size) def dump_key(self, key): for k in 
key: print ord(k), print def process_modules(self): # Pure virtual raise NotImplementedError('process_modules') def get_file_contents(self, filename, encrypt=False): with open(filename, 'rb') as f: data = f.read() if encrypt: iv = self.generate_key(16) key = self.generate_key(16) data = iv + key + aes.encrypt(data, key, iv) return data if sys.platform.startswith('win'): NiraiCompiler = NiraiCompilerWindows elif sys.platform == 'darwin': NiraiCompiler = NiraiCompilerDarwin else: class NiraiCompiler: def __init__(self, *args, **kw): raise RuntimeError('Attempted to use NiraiCompiler on unsupported platform: %s' % sys.platform)
{ "repo_name": "sctigercat1/src", "path": "niraitools.py", "copies": "1", "size": "12905", "license": "bsd-3-clause", "hash": -7961749429924829000, "line_mean": 34.0679347826, "line_max": 135, "alpha_frac": 0.580705153, "autogenerated": false, "ratio": 3.399631190727081, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9449497769926148, "avg_score": 0.006167714760186559, "num_lines": 368 }
# assert repeated exports of the same plot produce the same output file
import subprocess
import sys
import tempfile

# We create the tikz files in separate subprocesses, as when producing those in
# the same process, the order of axis parameters is deterministic.
plot_code = """
import sys
import numpy as np
from matplotlib import pyplot as plt
import tikzplotlib

t = np.arange(0.0, 2.0, 0.1)
s = np.sin(2 * np.pi * t)
plt.plot(t, s, label="a")
plt.legend()
tikzplotlib.save(sys.argv[1])
"""


def test():
    """Export the same plot several times (each in a fresh subprocess) and
    assert every produced tikz file is byte-identical.
    """
    _, tmp_base = tempfile.mkstemp()
    # trade-off between test duration and probability of false negative
    n_tests = 4
    tikzs = []

    for _ in range(n_tests):
        tikz_file = tmp_base + "_tikz.tex"
        try:
            # BUG FIX: the plot script was previously executed TWICE per
            # iteration (once via check_output with sys.executable and a
            # second, redundant time via Popen with a hard-coded "python3"
            # that clobbered the first result). Run it exactly once, using
            # the same interpreter that runs this test.
            subprocess.check_output(
                [sys.executable, "-", tikz_file],
                input=plot_code.encode(),
                stderr=subprocess.STDOUT,
            )
        except subprocess.CalledProcessError as e:
            # Surface the child process output before re-raising so the
            # failure is diagnosable from the test log.
            print("Command output:")
            print("=" * 70)
            print(e.output)
            print("=" * 70)
            raise

        with open(tikz_file) as f:
            tikzs.append(f.read())

    # All exports must match the first one exactly.
    for t in tikzs[1:]:
        assert t == tikzs[0]
{ "repo_name": "m-rossi/matplotlib2tikz", "path": "test/test_deterministic_output.py", "copies": "1", "size": "1525", "license": "mit", "hash": 7901895924169194000, "line_mean": 28.3269230769, "line_max": 79, "alpha_frac": 0.5718032787, "autogenerated": false, "ratio": 3.7195121951219514, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4791315473821951, "avg_score": null, "num_lines": null }
import re
import stat

from nose.tools import (
    assert_equal,
    assert_not_equal,
    assert_raises,
    raises,
    assert_true,
    assert_false
)

from mock import patch

__all__ = ['assert_instance_of', 'assert_not_instance_of',
           'assert_none', 'assert_not_none',
           'assert_match', 'assert_not_match', 'assert_mode_644',
           'assert_mode_755',
           'assert_equal', 'assert_not_equal', 'assert_raises', 'patch',
           'raises', 'assert_true', 'assert_false']


def assert_instance_of(expected, actual, msg=None):
    """Verify that object is an instance of expected """
    assert isinstance(actual, expected), msg


def assert_not_instance_of(expected, actual, msg=None):
    """Verify that object is not an instance of expected """
    # BUG FIX: ``isinstance`` takes exactly two arguments; the previous code
    # passed ``msg`` as a third argument, raising TypeError on every call.
    # The message belongs on the assert statement instead.
    assert not isinstance(actual, expected), msg


def assert_none(actual, msg=None):
    """verify that item is None"""
    assert actual is None, msg


def assert_not_none(actual, msg=None):
    """verify that item is not None"""
    assert actual is not None, msg


def assert_match(pattern, string, msg=None):
    """verify that the pattern matches the string"""
    assert_not_none(re.search(pattern, string), msg)


def assert_not_match(pattern, string, msg=None):
    """verify that the pattern does not match the string"""
    assert_none(re.search(pattern, string), msg)


def assert_mode_644(mode):
    """Verify given mode is 644 (rw-r--r--: owner writable, all readable,
    nobody executable)"""
    assert (mode & stat.S_IROTH) and (mode & stat.S_IRGRP)
    assert (mode & stat.S_IWUSR) and (mode & stat.S_IRUSR) and not (mode & stat.S_IXUSR)


def assert_mode_755(mode):
    """Verify given mode is 755 (rwxr-xr-x: owner writable, all readable and
    executable)"""
    assert (mode & stat.S_IROTH) and (mode & stat.S_IRGRP) and (mode & stat.S_IXOTH) and (mode & stat.S_IXGRP)
    assert (mode & stat.S_IWUSR) and (mode & stat.S_IRUSR) and (mode & stat.S_IXUSR)
{ "repo_name": "StyXman/GitPython", "path": "git/test/lib/asserts.py", "copies": "14", "size": "2048", "license": "bsd-3-clause", "hash": 5544488482593953000, "line_mean": 29.1176470588, "line_max": 110, "alpha_frac": 0.6625976562, "autogenerated": false, "ratio": 3.2979066022544283, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": null, "num_lines": null }
import re
import stat

from nose.tools import (
    assert_equal,  # @UnusedImport
    assert_not_equal,  # @UnusedImport
    assert_raises,  # @UnusedImport
    raises,  # @UnusedImport
    assert_true,  # @UnusedImport
    assert_false  # @UnusedImport
)

try:
    from unittest.mock import patch
except ImportError:
    from mock import patch  # @NoMove @UnusedImport

__all__ = ['assert_instance_of', 'assert_not_instance_of',
           'assert_none', 'assert_not_none',
           'assert_match', 'assert_not_match', 'assert_mode_644',
           'assert_mode_755',
           'assert_equal', 'assert_not_equal', 'assert_raises', 'patch',
           'raises', 'assert_true', 'assert_false']


def assert_instance_of(expected, actual, msg=None):
    """Verify that object is an instance of expected """
    assert isinstance(actual, expected), msg


def assert_not_instance_of(expected, actual, msg=None):
    """Verify that object is not an instance of expected """
    # BUG FIX: ``isinstance`` takes exactly two arguments; the previous code
    # passed ``msg`` as a third argument, raising TypeError on every call.
    # The message belongs on the assert statement instead.
    assert not isinstance(actual, expected), msg


def assert_none(actual, msg=None):
    """verify that item is None"""
    assert actual is None, msg


def assert_not_none(actual, msg=None):
    """verify that item is not None"""
    assert actual is not None, msg


def assert_match(pattern, string, msg=None):
    """verify that the pattern matches the string"""
    assert_not_none(re.search(pattern, string), msg)


def assert_not_match(pattern, string, msg=None):
    """verify that the pattern does not match the string"""
    assert_none(re.search(pattern, string), msg)


def assert_mode_644(mode):
    """Verify given mode is 644 (rw-r--r--: owner writable, all readable,
    nobody executable)"""
    assert (mode & stat.S_IROTH) and (mode & stat.S_IRGRP)
    assert (mode & stat.S_IWUSR) and (mode & stat.S_IRUSR) and not (mode & stat.S_IXUSR)


def assert_mode_755(mode):
    """Verify given mode is 755 (rwxr-xr-x: owner writable, all readable and
    executable)"""
    assert (mode & stat.S_IROTH) and (mode & stat.S_IRGRP) and (mode & stat.S_IXOTH) and (mode & stat.S_IXGRP)
    assert (mode & stat.S_IWUSR) and (mode & stat.S_IRUSR) and (mode & stat.S_IXUSR)
{ "repo_name": "jeblair/GitPython", "path": "git/test/lib/asserts.py", "copies": "6", "size": "2273", "license": "bsd-3-clause", "hash": -7670994152401468000, "line_mean": 31.014084507, "line_max": 110, "alpha_frac": 0.6572811263, "autogenerated": false, "ratio": 3.37741456166419, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.703469568796419, "avg_score": null, "num_lines": null }
import re
import unittest

from nose import tools
from nose.tools import *

__all__ = ['assert_instance_of', 'assert_not_instance_of',
           'assert_none', 'assert_not_none',
           'assert_match', 'assert_not_match'] + tools.__all__


def assert_instance_of(expected, actual, msg=None):
    """Verify that object is an instance of expected """
    assert isinstance(actual, expected), msg


def assert_not_instance_of(expected, actual, msg=None):
    """Verify that object is not an instance of expected """
    # BUG FIX: ``isinstance`` takes exactly two arguments; the previous code
    # passed ``msg`` as a third argument, raising TypeError on every call.
    # The message belongs on the assert statement instead.
    assert not isinstance(actual, expected), msg


def assert_none(actual, msg=None):
    """verify that item is None"""
    assert_equal(None, actual, msg)


def assert_not_none(actual, msg=None):
    """verify that item is not None"""
    assert_not_equal(None, actual, msg)


def assert_match(pattern, string, msg=None):
    """verify that the pattern matches the string"""
    assert_not_none(re.search(pattern, string), msg)


def assert_not_match(pattern, string, msg=None):
    """verify that the pattern does not match the string"""
    assert_none(re.search(pattern, string), msg)
{ "repo_name": "directeur/git-python", "path": "test/testlib/asserts.py", "copies": "1", "size": "1315", "license": "bsd-3-clause", "hash": 5967770841612742000, "line_mean": 33.6315789474, "line_max": 70, "alpha_frac": 0.6942965779, "autogenerated": false, "ratio": 3.573369565217391, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4767666143117391, "avg_score": null, "num_lines": null }
"""Asserts to use in energy tests.""" from physalia.models import Measurement def consumption_below(sample, energy_consumption_baseline): """Test for energy consumption lower than a given value in Joules (avg). Args: sample (list of Measurement): sample of measurements energy_consumption (number): baseline energy consumption in Joules. """ energy_consumption_mean = Measurement.mean_energy_consumption(sample) assert energy_consumption_mean < energy_consumption_baseline def consumption_lower_than_app(sample, app, use_case=None): """Test that a given sample spends less energy than a known app. Args: sample (list of Measurement): sample of measurements app (string): identifier/package of the app to be compared use_case (string): select only data from a given use case """ baseline_measurements = Measurement.get_all_entries_of_app(app, use_case) baseline_consumption = Measurement.mean_energy_consumption( baseline_measurements ) consumption_below(sample, baseline_consumption) def top_percentile(sample, nth): """Test that a given sample is in the top nth percentile. Args: sample (list of Measurement): sample of measurements nth (number): percentage of the position in which the sample should fit app (string): identifier of the application within the sample should be compared use_case (string: identifier of the use case used to create the ranking """ position, total = Measurement.get_position_in_ranking(sample) percentile_position = float(position)/total*100 assert percentile_position <= nth,\ ("Given sample is not on {:.1f}% top percentile " "(Position: {:.1f}%)".format( nth, percentile_position ))
{ "repo_name": "TQRG/physalia", "path": "physalia/asserts.py", "copies": "1", "size": "1844", "license": "mit", "hash": 7109295954098101000, "line_mean": 39.0869565217, "line_max": 88, "alpha_frac": 0.6849240781, "autogenerated": false, "ratio": 4.288372093023256, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5473296171123255, "avg_score": null, "num_lines": null }
"""Assert that arguments meet specific criteria This module organizes functions that assert whether a criteria for their arguments is met. All functions within this module raise one or more errors on certain conditions not being bet. If the assertion is met, the functions will return True """ import shutil import os from inspect import isclass def has_file_extension(filepath, ext_required): '''Assert that a filepath has the required file extension :param filepath: string filepath presumably containing a file extension :param ext_required: the expected file extension examples: ".pdf", ".html", ".tex" ''' ext = os.path.splitext(filepath)[-1] if ext != ext_required: msg_tmpl = "The extension for {}, which is {}, does not equal {}" msg_format = msg_tmpl.format(filepath, ext, ext_required) raise ValueError(msg_format) return True def is_binary(system_binary_str): '''Assert that a string represents a system binary Return true if a string represents a system binary Raise TypeError if the system_binary_str is not a string Raise ValueError if the system_binary_str is not a system binary :param system_binary_str: STR string representing a system binary ''' if not isinstance(system_binary_str, str): raise TypeError("{} must be of type STR".format(system_binary_str)) binary_str = shutil.which(system_binary_str) if not binary_str: msg = "{} is not valid system binary".format(system_binary_str) raise ValueError(msg) return True def list_is_type(ls, t): '''Assert that a list contains only elements of type t Return True if list contains elements of type t Raise TypeError if t is not a class Raise TypeError if ls is not a list Raise TypeError if ls contains non-t elements :param ls: LIST :param t: python class ''' if not isclass(t): raise TypeError("{} is not a class".format(t)) elif not isinstance(ls, list): raise TypeError("{} is not a list".format(ls)) else: ls_bad_types = [i for i in ls if not isinstance(i, t)] if len(ls_bad_types) > 0: raise TypeError("{} are not 
{}".format(ls_bad_types, t)) return True
{ "repo_name": "pappasam/latexbuild", "path": "latexbuild/assertions.py", "copies": "1", "size": "2253", "license": "mit", "hash": -1703761920197350400, "line_mean": 34.7619047619, "line_max": 75, "alpha_frac": 0.6813138038, "autogenerated": false, "ratio": 4.141544117647059, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.001432461873638344, "num_lines": 63 }
# Assess graph connectivity in terms of a threshold distance (edge # weight) or sequence of these, to look at how a graph connects # as the threshold distance is systematically increased. # # Contains 4 functions, as they would be used in sequence, # along with some helper functions. # # THIS VERSION IS FOR NETWORKX V 0.99 AND ABOVE __version__ = 1.1 def edge_threshold(G, max_wt): import networkx as nx """ Accepts a (dense) weighted graph (XGraph) and a threshold weight, returnsa new graph with only edges for which the weight is less than the threshold. The weights are general but this has been designed for (and tested with) distances. Usage: if G is a XGraph with edges < 5000 m, >>> G2 = edge_threshold(G, 3000) returns a new graph with edges < 3000 m. DL Urban (22 Feb 2007) Added NetworkX version checking - 26 May 2009; JP Fay """ tG = nx.Graph() G_edges = G.edges(data=True) nbunch = G.nodes() tG.add_nodes_from(nbunch) # copy the nodes for edge in G_edges: (tn, fn, w) = edge if w <= max_wt: tG.add_edge(tn, fn, w) return tG def edge_threshold_sequence(G, min_wt, max_wt, winc): import networkx as nx """ Accepts a (dense) graph and systematically redefine its edges by edge-thresholding it in a loop of calls to edge_threshold (above), the loop provided by a min, max, and increment. Note (below) that the increment is added to the max_wt to make sure max_wt is included in the range (this is because of the way python does loops). Returns a dictionary of graphs keyed by the threshold weights. Usage: if G is a dense XGraph with edge weights <= 10000 m, >>> Gts = edge_threshold_sequence(G,1000,10000,1000) returns a dictionary of of 10 new graphs keyed by the numbers 1000-10000. 
To grab one: >>> G4000 = Gts[4000] DL Urban (22 Feb 2007) Added NetworkX version checking - 26 May 2009; JP Fay """ Gts = {} nbunch = G.nodes() edges = G.edges(data=True) for wt in range(min_wt, max_wt+winc, winc): tGw = nx.Graph() tGw.add_nodes_from(nbunch) for e in edges: (n1, n2, w) = e if w['weight'] <= wt: tGw.add_edge(n1, n2, w) Gts[wt] = tGw return Gts # Assess a dictionary of graphs keyed by dispersal distance # threshold, in terms of number of components and diameter. def graph_comp_sequence(Gts): import networkx as nx """ Gts is a graph thresholding sequence, a dictionary of graphs keyed by threshold distance, see edge_threshold_sequence(). This function takes that sequence and returns the number of components in each graph, along with the diameter of the largest component in each graph. The output is a dictionary of tuples (NC, D(G)) keyed by threshold distance. Requires: x_diameter(G), local function. Usage: The output is intended to be printed to a file (see write_table.txt for syntax), so that a plot can be constructed that illustrates the number of components and graph diameter as a function of distance. DL Urban (22 Feb 2007) """ seq = Gts.keys() gcs = {} for d in seq: g = Gts[d] if nx.is_connected(g): nc = 1 diam = x_diameter(g) else: nc = nx.number_connected_components(g) # the largest connected component, #0 in the list: gc = nx.connected_component_subgraphs(g)[0] diam = x_diameter(gc) gcs[d] = (nc, diam) return gcs # Write these out to a file: def write_graph_comp_sequence(gcs, path): """ Accept a graph component sequence from edge-thresholding, and write the output as a table to a file. 
Usage: >>> Gts = edge_threshold_sequence(G, min, max, inc), >>> gcs = graph_conn_sequence(Gts) >>> write_graph_conn_sequence(gcs, path) DL Urban (22 Feb 2007) """ f = open(path, 'w') f.write('%s\n' % 'Distance, NComps, Diameter') for k,v in gcs.iteritems(): (nc, diam) = v f.write('%4d, %5d, %10.3f\n' % (k, nc, diam)) f.close() # x_eccentricity and x_diameter correspond to the NX functions # but use weighted edges instead of tallying the number of links. def x_diameter(G, e=None): """Return the diameter of the graph G. The diameter is the maximum of all pairs shortest path. This version calls x_eccentricity (above). """ if e is None: e=x_eccentricity(G,with_labels=True) return max(e.values()) def x_eccentricity(G, v=None, sp=None, with_labels=False): import networkx as nx """ Return the eccentricity of node v in G (or all nodes if v is None). The eccentricity is the maximum of shortest paths to all other nodes. This X version is the same as the original eccentricity and related functions, but replaces the call to the single_source functions with calls to the corresponding Diijkstra functions. Note the native functions are for unweighted graphs, while the Dijkstra functions are for weighted graphs. Even so, the edge weights should be non-negative and not floating point. (copied and altered by DL Urban, Feb 2007) The optional keyword sp must be a dict of dicts of shortest_path_length keyed by source and target. That is, sp[v][t] is the length from v to t. If with_labels=True return dict of eccentricities keyed by vertex. 
""" nodes=[] if v is None: # none, use entire graph nodes=G.nodes() elif isinstance(v, list): # check for a list nodes=v else: # assume it is a single value nodes=[v] e={} for v in nodes: if sp is None: length=nx.single_source_dijkstra_path_length(G,v) else: length=sp[v] try: assert len(length)==G.number_of_nodes() except: raise nx.NetworkXError,\ "Graph not connected: infinite path length" e[v]=max(length.values()) if with_labels: return e else: if len(e)==1: return e.values()[0] # return single value return e.values() # Sensi_diameter computes the change in graph diameter # on the removal of each node--a way to find cut-nodes # that are also central to the graph. # Its helper function (following) writes the output. def sensi_diameter(G): import networkx as nx """ Compute graph sensitivity to node removal, in terms of the difference in graph diameter on the removal of each node in turn. This uses local function x_diameter(G), which is modified from networkx.diamter(G) to work on XGraphs. DL Urban (9 Feb 2007) """ # Starting diameter for full graph: if nx.is_connected(G): d0 = x_diameter(G) else: G0 = nx.connected_component_subgraphs(G) [0] # the largest subgraph d0 = x_diameter(G0) nc = nx.number_connected_components(G) # how many are there? 
sensi = {} for node in G.nodes(): ex = G.edges(node) # a set of edges adjacent to node; G.delete_edges_from(ex) # remove all of these, G.delete_node(node) # and then kill the node, too if nx.is_connected(G): dx = x_diameter(G) cuts = 0 else: Gx = nx.connected_component_subgraphs(G) [0] # the biggest ncx = nx.number_connected_components(G) if nc == ncx: cuts = 0 else: cuts = 1 dx = x_diameter(Gx) delta = d0 - dx G.add_node(node) # put the node and edges back again G.add_edges_from(ex) sensi[node] = (cuts, delta) # create and return a tuple (cuts, delta) return sensi # Write this output to a CSV file: def write_sensi_diameter(sensi, path): f = open(path, 'w') f.write('Node, Cuts, deltaD\n') for k,v in sensi.iteritems(): (cuts, delta) = v f.write('%4d, %3d, %10.2f\n' % (k, cuts, delta)) f.close()
{ "repo_name": "Duke-NSOE/GeoHAT", "path": "GeoHat_V10/Scripts/DU_GraphTools99.py", "copies": "1", "size": "8152", "license": "cc0-1.0", "hash": 6811382821546239000, "line_mean": 29.6466165414, "line_max": 75, "alpha_frac": 0.6225466143, "autogenerated": false, "ratio": 3.4852501068832833, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4607796721183283, "avg_score": null, "num_lines": null }
"""Assessment Engine API view functions""" from datetime import datetime from flask import ( Blueprint, abort, current_app, flash, jsonify, redirect, request, session, url_for, ) from flask_babel import gettext as _ from flask_user import roles_required import jsonschema import requests from ..audit import auditable_event from ..database import db from ..date_tools import FHIR_datetime from ..extensions import oauth from ..models.client import validate_origin from ..models.encounter import EC from ..models.fhir import bundle_results from ..models.identifier import Identifier from ..models.intervention import INTERVENTION from ..models.qb_timeline import invalidate_users_QBT from ..models.questionnaire import Questionnaire from ..models.questionnaire_response import ( NoFutureDates, QuestionnaireResponse, ) from ..models.role import ROLE from ..models.user import User, current_user, get_user_or_abort from ..trace import dump_trace, establish_trace from ..type_tools import check_int from .crossdomain import crossdomain assessment_engine_api = Blueprint('assessment_engine_api', __name__) @assessment_engine_api.route( '/api/patient/<int:patient_id>/assessment', defaults={'instrument_id': None}, ) @assessment_engine_api.route( '/api/patient/<int:patient_id>/assessment/<string:instrument_id>' ) @crossdomain() @oauth.require_oauth() def assessment(patient_id, instrument_id): """Return a patient's responses to questionnaire(s) Retrieve a minimal FHIR doc in JSON format including the 'QuestionnaireResponse' resource type. If 'instrument_id' is excluded, the patient's QuestionnaireResponses for all instruments are returned. 
--- operationId: getQuestionnaireResponse tags: - Assessment Engine produces: - application/json parameters: - name: patient_id in: path description: TrueNTH patient ID required: true type: integer format: int64 - name: instrument_id in: path description: ID of the instrument, eg "epic26", "eq5d" required: true type: string enum: - epic26 - eq5d - name: patch_dstu2 in: query description: whether or not to make bundles DTSU2 compliant required: false type: boolean default: false responses: 200: description: successful operation schema: id: assessment_bundle required: - type properties: type: description: Indicates the purpose of this bundle- how it was intended to be used. type: string enum: - document - message - transaction - transaction-response - batch - batch-response - history - searchset - collection link: description: A series of links that provide context to this bundle. items: properties: relation: description: A name which details the functional use for this link - see [[http://www.iana.org/assignments/link-relations/link-relations.xhtml]]. url: description: The reference details for the link. total: description: If a set of search matches, this is the total number of matches for the search (as opposed to the number of results in this bundle). 
type: integer entry: type: array items: $ref: "#/definitions/QuestionnaireResponse" example: entry: - resourceType: QuestionnaireResponse authored: '2016-01-22T20:32:17Z' status: completed identifier: value: '101.0' use: official label: cPRO survey session ID subject: display: patient demographics reference: https://stg.us.truenth.org/api/demographics/10015 author: display: patient demographics reference: https://stg.us.truenth.org/api/demographics/10015 source: display: patient demographics reference: https://stg.us.truenth.org/api/demographics/10015 group: question: - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.1.5 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 5 linkId: epic26.1 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.2.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.2 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.3.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.3 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.4.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.4 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.5.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 0 linkId: epic26.5 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.6.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.6 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.7.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value 
valueDecimal: 2 linkId: epic26.7 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.8.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 3 linkId: epic26.8 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.9.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.9 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.10.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 0 linkId: epic26.10 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.11.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.11 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.12.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.12 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.13.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 3 linkId: epic26.13 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.14.5 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.14 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.15.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.15 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.16.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.16 - answer: - valueCoding: system: 
https://stg.us.truenth.org/api/codings/assessment code: epic26.17.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.17 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.18.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.18 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.19.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 3 linkId: epic26.19 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.20.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.20 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.21.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.21 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.22.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 0 linkId: epic26.22 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.23.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.23 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.24.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.24 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.25.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.25 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.26.3 extension: url: 
https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.26 questionnaire: display: EPIC 26 Short Form reference: https://stg.us.truenth.org/api/questionnaires/epic26 - resourceType: QuestionnaireResponse authored: '2016-03-11T23:47:28Z' status: completed identifier: value: '119.0' use: official label: cPRO survey session ID subject: display: patient demographics reference: https://stg.us.truenth.org/api/demographics/10015 author: display: patient demographics reference: https://stg.us.truenth.org/api/demographics/10015 source: display: patient demographics reference: https://stg.us.truenth.org/api/demographics/10015 group: question: - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.1.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.1 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.2.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.2 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.3.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.3 - answer: [] linkId: epic26.4 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.5.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 3 linkId: epic26.5 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.6.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.6 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.7.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.7 - answer: [] linkId: epic26.8 - answer: - 
valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.9.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 3 linkId: epic26.9 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.10.5 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.10 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.11.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.11 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.12.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.12 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.13.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 3 linkId: epic26.13 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.14.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 0 linkId: epic26.14 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.15.5 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 5 linkId: epic26.15 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.16.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.16 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.17.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.17 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.18.4 extension: url: 
https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.18 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.19.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.19 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.20.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.20 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.21.5 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 5 linkId: epic26.21 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.22.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 0 linkId: epic26.22 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.23.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.23 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.24.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.24 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.25.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 3 linkId: epic26.25 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.26.5 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.26 questionnaire: display: EPIC 26 Short Form reference: https://stg.us.truenth.org/api/questionnaires/epic26 link: href: https://stg.us.truenth.org/api/patient/10015/assessment/epic26 rel: self resourceType: Bundle total: 2 
type: searchset updated: '2016-03-14T20:47:26.282263Z' 401: description: if missing valid OAuth token or logged-in user lacks permission to view requested patient security: - ServiceToken: [] """ current_user().check_role(permission='view', other_id=patient_id) patient = get_user_or_abort(patient_id) questionnaire_responses = QuestionnaireResponse.query.filter_by( subject_id=patient.id).order_by(QuestionnaireResponse.authored.desc()) instrument_id = request.args.get('instrument_id', instrument_id) if instrument_id is not None: questionnaire_responses = questionnaire_responses.filter( QuestionnaireResponse.document[ ("questionnaire", "reference") ].astext.endswith(instrument_id) ) documents = [] for qnr in questionnaire_responses: for question in qnr.document_answered['group']['question']: for answer in question['answer']: # Hack: Extensions should be a list, correct in-place if need be # todo: migrate towards FHIR spec in persisted data if ( 'extension' in answer.get('valueCoding', {}) and not isinstance(answer['valueCoding']['extension'], (tuple, list)) ): answer['valueCoding']['extension'] = [answer['valueCoding']['extension']] # Hack: add missing "resource" wrapper for DTSU2 compliance # Remove when all interventions compliant if request.args.get('patch_dstu2'): qnr.document = { 'resource': qnr.document, 'fullUrl': request.url, } documents.append(qnr.document) link = {'rel': 'self', 'href': request.url} return jsonify(bundle_results(elements=documents, links=[link])) @assessment_engine_api.route('/api/patient/assessment') @crossdomain() @roles_required( [ROLE.STAFF_ADMIN.value, ROLE.STAFF.value, ROLE.RESEARCHER.value]) @oauth.require_oauth() def get_assessments(): """ Return multiple patient's responses to all questionnaires NB list of patient's returned is limited by current_users implicit permissions, typically controlled through organization affiliation. 
--- operationId: getQuestionnaireResponses tags: - Assessment Engine parameters: - name: format in: query description: format of file to download (CSV or JSON) required: false type: string enum: - json - csv default: json - name: patch_dstu2 in: query description: whether or not to make bundles DTSU2 compliant required: false type: boolean default: false - name: instrument_id in: query description: ID of the instrument, eg "epic26", "eq5d" required: false type: array items: type: string enum: - epic26 - eq5d collectionFormat: multi produces: - application/json responses: 200: description: successful operation schema: id: assessments_bundle required: - type properties: type: description: Indicates the purpose of this bundle- how it was intended to be used. type: string enum: - document - message - transaction - transaction-response - batch - batch-response - history - searchset - collection link: description: A series of links that provide context to this bundle. items: properties: relation: description: A name which details the functional use for this link - see [[http://www.iana.org/assignments/link-relations/link-relations.xhtml]]. url: description: The reference details for the link. total: description: If a set of search matches, this is the total number of matches for the search (as opposed to the number of results in this bundle). type: integer entry: type: array items: $ref: "#/definitions/FHIRPatient" 401: description: if missing valid OAuth token or logged-in user lacks permission to view requested patient security: - ServiceToken: [] - OAuth2AuthzFlow: [] """ from ..tasks import research_report_task # This frequently takes over a minute to produce. Generate a serializable # form of all args for reliable hand off to a background task. 
kwargs = { 'instrument_ids': request.args.getlist('instrument_id'), 'acting_user_id': current_user().id, 'patch_dstu2': request.args.get('patch_dstu2'), 'request_url': request.url, 'response_format': request.args.get('format', 'json').lower() } # Hand the task off to the job queue, and return 202 with URL for # checking the status of the task task = research_report_task.apply_async(kwargs=kwargs) return jsonify({}), 202, {'Location': url_for( 'portal.task_status', task_id=task.id, _external=True)} @assessment_engine_api.route( '/api/patient/<int:patient_id>/assessment', methods=('PUT',), ) @crossdomain() @oauth.require_oauth() def assessment_update(patient_id): """Update an existing questionnaire response on a patient's record Submit a minimal FHIR doc in JSON format including the 'QuestionnaireResponse' resource type. --- operationId: updateQuestionnaireResponse tags: - Assessment Engine produces: - application/json parameters: - name: patient_id in: path description: TrueNTH patient ID required: true type: integer format: int64 - in: body name: body schema: $ref: "#/definitions/QuestionnaireResponse" responses: 401: description: if missing valid OAuth token or logged-in user lacks permission to view requested patient 404: description: existing QuestionnaireResponse not found security: - ServiceToken: [] """ if not hasattr(request, 'json') or not request.json: return jsonify(message='Invalid request - requires JSON'), 400 if request.json.get('resourceType') != 'QuestionnaireResponse': return jsonify( message='Requires resourceType of "QuestionnaireResponse"'), 400 # Verify the current user has permission to edit given patient current_user().check_role(permission='edit', other_id=patient_id) patient = get_user_or_abort(patient_id) response = { 'ok': False, 'message': 'error updating questionnaire response', 'valid': False, } updated_qnr = request.json try: QuestionnaireResponse.validate_document(updated_qnr) QuestionnaireResponse.validate_authored( 
FHIR_datetime.parse(updated_qnr.get('authored'))) except (jsonschema.ValidationError, NoFutureDates) as e: return jsonify({ 'ok': False, 'message': str(e), 'reference': getattr(e, 'schema', ''), }), 400 else: response.update({ 'ok': True, 'message': 'questionnaire response valid', 'valid': True, }) try: identifier = Identifier.from_fhir(updated_qnr.get('identifier')) except ValueError as e: response['message'] = str(e) return jsonify(response), 400 existing_qnr = QuestionnaireResponse.by_identifier(identifier) if not existing_qnr: current_app.logger.warning( "attempted update on QuestionnaireResponse with unknown " "identifier {}".format(identifier)) response['message'] = "existing QuestionnaireResponse not found" return jsonify(response), 404 if len(existing_qnr) > 1: msg = ("can't update; multiple QuestionnaireResponses found with " "identifier {}".format(identifier)) current_app.logger.warning(msg) response['message'] = msg return jsonify(msg), 409 response.update({'message': 'previous questionnaire response found'}) existing_qnr = existing_qnr[0] existing_qnr.status = updated_qnr["status"] existing_qnr.document = updated_qnr db.session.add(existing_qnr) db.session.commit() existing_qnr.assign_qb_relationship(acting_user_id=current_user().id) auditable_event( "updated {}".format(existing_qnr), user_id=current_user().id, subject_id=patient.id, context='assessment', ) response.update({'message': 'questionnaire response updated successfully'}) invalidate_users_QBT(patient.id) return jsonify(response) @assessment_engine_api.route( '/api/patient/<int:patient_id>/assessment', methods=('POST',)) @crossdomain() @oauth.require_oauth() def assessment_add(patient_id): """Add a questionnaire response to a patient's record Submit a minimal FHIR doc in JSON format including the 'QuestionnaireResponse' resource type. NB, updates are only possible on QuestionnaireResponses for which a well defined ``identifer`` is included. 
If included, this value must be distinct over (``system``, ``value``). A duplicate submission will result in a ``409: conflict`` response, and refusal to retain the submission. --- operationId: addQuestionnaireResponse tags: - Assessment Engine definitions: - schema: id: Question description: An individual question and related attributes type: object externalDocs: url: http://hl7.org/implement/standards/fhir/DSTU2/questionnaireresponse-definitions.html#QuestionnaireResponse.group.question additionalProperties: false properties: text: description: Question text type: string linkId: description: Corresponding question within Questionnaire type: string answer: description: The respondent's answer(s) to the question externalDocs: url: http://hl7.org/implement/standards/fhir/DSTU2/questionnaireresponse-definitions.html#QuestionnaireResponse.group.question.answer type: array items: $ref: "#/definitions/Answer" - schema: id: Answer description: An individual answer to a question and related attributes. 
May only contain a single value[x] attribute type: object externalDocs: url: http://hl7.org/implement/standards/fhir/DSTU2/questionnaireresponse-definitions.html#QuestionnaireResponse.group.question.answer.value_x_ additionalProperties: false properties: valueBoolean: description: Boolean value answer to a question type: boolean valueDecimal: description: Decimal value answer to a question type: number valueInteger: description: Integer value answer to a question type: integer valueDate: description: Date value answer to a question type: string format: date valueDateTime: description: Datetime value answer to a question type: string format: date-time valueInstant: description: Instant value answer to a question type: string format: date-time valueTime: description: Time value answer to a question type: string valueString: description: String value answer to a question type: string valueUri: description: URI value answer to a question type: string valueAttachment: description: Attachment value answer to a question $ref: "#/definitions/ValueAttachment" valueCoding: description: Coding value answer to a question, may include score as FHIR extension $ref: "#/definitions/ValueCoding" valueQuantity: description: Quantity value answer to a question $ref: "#/definitions/Quantity" valueReference: description: Reference value answer to a question $ref: "#/definitions/Reference" group: description: Nested questionnaire group $ref: "#/definitions/Group" - schema: id: Group description: A structured set of questions and their answers. The questions are ordered and grouped into coherent subsets, corresponding to the structure of the grouping of the questionnaire being responded to. type: object additionalProperties: false properties: linkId: description: The item from the Questionnaire that corresponds to this item in the QuestionnaireResponse resource. 
type: string title: description: Name for this group type: string text: description: Text that is displayed above the contents of the group or as the text of the question being answered. type: string question: description: Questions in this group. items: $ref: "#/definitions/Question" type: array group: description: Questions or sub-groups nested beneath a question or group. items: $ref: "#/definitions/Group" type: array - schema: id: Quantity description: A measured amount (or an amount that can potentially be measured). Note that measured amounts include amounts that are not precisely quantified, including amounts involving arbitrary units and floating currencies. type: object additionalProperties: false properties: id: description: Unique id for the element within a resource (for internal references). This may be any string value that does not contain spaces. type: string value: description: The value of the measured amount. The value includes an implicit precision in the presentation of the value. type: number comparator: description: How the value should be understood and represented - whether the actual value is greater or less than the stated value due to measurement issues; e.g. if the comparator is \"\u003c\" , then the real value is \u003c stated value. type: string enum: - "\u003c" - "\u003c\u003d" - "\u003e\u003d" - "\u003e" unit: description: A human-readable form of the unit. type: string system: description: The identification of the system that provides the coded form of the unit. type: string code: description: A computer processable form of the unit in some unit representation system. 
type: string - schema: id: Questionnaire type: object additionalProperties: false properties: display: description: Name of Questionnaire type: string reference: description: URI uniquely defining the Questionnaire type: string - schema: id: QuestionnaireResponse type: object required: - resourceType - status additionalProperties: false properties: identifier: description: A business identifier assigned to a particular completed (or partially completed) questionnaire. $ref: "#/definitions/Identifier" questionnaire: description: The Questionnaire that defines and organizes the questions for which answers are being provided. $ref: "#/definitions/Questionnaire" resourceType: description: defines FHIR resource type, must be QuestionnaireResponse type: string status: externalDocs: url: http://hl7.org/implement/standards/fhir/DSTU2/questionnaireresponse-definitions.html#QuestionnaireResponse.status description: The lifecycle status of the questionnaire response as a whole. If submitting a QuestionnaireResponse with status "in-progress", the ``identifier`` must also be well defined. Without it, there's no way to reference it for updates. type: string enum: - in-progress - completed subject: description: The subject of the questionnaire response. This could be a patient, organization, practitioner, device, etc. This is who/what the answers apply to, but is not necessarily the source of information. $ref: "#/definitions/Reference" author: description: Person who received the answers to the questions in the QuestionnaireResponse and recorded them in the system. $ref: "#/definitions/Reference" authored: externalDocs: url: http://hl7.org/implement/standards/fhir/DSTU2/questionnaireresponse-definitions.html#QuestionnaireResponse.authored description: The datetime this resource was last updated type: string format: date-time source: $ref: "#/definitions/Reference" group: description: A group or question item from the original questionnaire for which answers are provided. 
type: object $ref: "#/definitions/Group" example: resourceType: QuestionnaireResponse authored: '2016-03-11T23:47:28Z' status: completed identifier: value: '119.0' use: official label: cPRO survey session ID system: 'https://ae.us.truenth.org/eproms' subject: display: patient demographics reference: https://stg.us.truenth.org/api/demographics/10015 author: display: patient demographics reference: https://stg.us.truenth.org/api/demographics/10015 source: display: patient demographics reference: https://stg.us.truenth.org/api/demographics/10015 group: question: - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.1.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.1 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.2.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.2 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.3.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.3 - answer: [] linkId: epic26.4 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.5.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 3 linkId: epic26.5 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.6.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.6 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.7.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.7 - answer: [] linkId: epic26.8 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.9.3 extension: url: 
https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 3 linkId: epic26.9 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.10.5 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.10 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.11.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.11 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.12.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.12 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.13.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 3 linkId: epic26.13 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.14.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 0 linkId: epic26.14 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.15.5 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 5 linkId: epic26.15 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.16.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.16 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.17.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.17 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.18.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.18 - answer: - 
valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.19.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.19 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.20.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.20 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.21.5 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 5 linkId: epic26.21 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.22.1 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 0 linkId: epic26.22 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.23.2 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 1 linkId: epic26.23 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.24.3 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 2 linkId: epic26.24 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.25.4 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 3 linkId: epic26.25 - answer: - valueCoding: system: https://stg.us.truenth.org/api/codings/assessment code: epic26.26.5 extension: url: https://hl7.org/fhir/StructureDefinition/iso21090-CO-value valueDecimal: 4 linkId: epic26.26 questionnaire: display: EPIC 26 Short Form reference: https://stg.us.truenth.org/api/questionnaires/epic26 - schema: id: Reference description: link to an internal or external resource type: object additionalProperties: false properties: reference: description: Relative, internal or absolute URL reference type: string display: 
description: Text alternative for the resource type: string - schema: id: ValueAttachment description: For referring to data content defined in other formats type: object additionalProperties: false properties: contentType: description: Identifies the type of the data in the attachment and allows a method to be chosen to interpret or render the data. Includes mime type parameters such as charset where appropriate. type: string language: description: The human language of the content. The value can be any valid value according to BCP 47. type: string data: description: The actual data of the attachment - a sequence of bytes, base64 encoded. type: string format: byte url: description: A location where the data can be accessed. type: string size: description: The number of bytes of data that make up this attachment (before base64 encoding, if that is done). type: integer hash: description: The calculated hash of the data using SHA-1. Represented using base64. type: string format: byte title: description: A label or set of text to display in place of the data. type: string creation: description: The date that the attachment was first created. 
type: string format: date-time - schema: id: ValueCoding type: object additionalProperties: false properties: system: description: Identity of the terminology system type: string format: uri version: description: Version of the system - if relevant type: string code: description: Symbol in syntax defined by the system type: string display: description: Representation defined by the system type: string userSelected: description: If this coding was chosen directly by the user type: boolean extension: description: Extension - Numerical value associated with the code $ref: "#/definitions/ValueDecimalExtension" - schema: id: ValueDecimalExtension type: object additionalProperties: false properties: url: description: Hardcoded reference to extension type: string format: uri valueDecimal: description: Numeric score value type: number produces: - application/json parameters: - name: patient_id in: path description: TrueNTH patient ID required: true type: integer format: int64 - name: entry_method in: query description: Entry method such as `paper` if known required: false type: string - in: body name: body schema: $ref: "#/definitions/QuestionnaireResponse" responses: 401: description: if missing valid OAuth token or logged-in user lacks permission to view requested patient security: - ServiceToken: [] """ from ..models.qb_timeline import invalidate_users_QBT # avoid cycle if not hasattr(request, 'json') or not request.json: return jsonify(message='Invalid request - requires JSON'), 400 if request.json.get('resourceType') != 'QuestionnaireResponse': return jsonify( message='Requires resourceType of "QuestionnaireResponse"'), 400 # Verify the current user has permission to edit given patient current_user().check_role(permission='edit', other_id=patient_id) patient = get_user_or_abort(patient_id) response = { 'ok': False, 'message': 'error saving questionnaire response', 'valid': False, } try: QuestionnaireResponse.validate_document(request.json) 
QuestionnaireResponse.validate_authored( FHIR_datetime.parse(request.json.get('authored'))) except (jsonschema.ValidationError, NoFutureDates) as e: response = { 'ok': False, 'message': str(e), 'reference': getattr(e, 'schema', ''), } return jsonify(response), 400 identifier = None if 'identifier' in request.json: # Confirm it's unique, or raise 409 try: identifier = Identifier.from_fhir(request.json['identifier']) except ValueError as e: response['message'] = str(e) return jsonify(response), 400 existing_qnr = QuestionnaireResponse.by_identifier(identifier) if len(existing_qnr): msg = ("QuestionnaireResponse with matching {} already exists; " "must be unique over (system, value)".format(identifier)) current_app.logger.warning(msg) response['message'] = msg return jsonify(response), 409 if request.json.get('status') == 'in-progress' and not identifier: msg = "Status {} received without the required identifier".format( request.json.get('status')) current_app.logger.warning(msg) response['message'] = msg return jsonify(response), 400 response.update({ 'ok': True, 'message': 'questionnaire response valid', 'valid': True, }) encounter = current_user().current_encounter if 'entry_method' in request.args: encounter_type = getattr( EC, request.args['entry_method'].upper()).codings[0] encounter.type.append(encounter_type) questionnaire_response = QuestionnaireResponse( subject_id=patient_id, status=request.json["status"], document=request.json, encounter=encounter, ) db.session.add(questionnaire_response) db.session.commit() questionnaire_response.assign_qb_relationship( acting_user_id=current_user().id) auditable_event("added {}".format(questionnaire_response), user_id=current_user().id, subject_id=patient_id, context='assessment') response.update({'message': 'questionnaire response saved successfully'}) invalidate_users_QBT(patient.id) return jsonify(response) @assessment_engine_api.route('/api/invalidate/<int:user_id>') @oauth.require_oauth() def invalidate(user_id): 
from ..models.qb_timeline import invalidate_users_QBT # avoid cycle user = get_user_or_abort(user_id) invalidate_users_QBT(user_id) return jsonify(invalidated=user.as_fhir()) @assessment_engine_api.route('/api/present-needed') @roles_required([ROLE.STAFF_ADMIN.value, ROLE.STAFF.value, ROLE.PATIENT.value]) @oauth.require_oauth() def present_needed(): """Look up needed and in process q's for user and then present_assessment Takes the same attributes as present_assessment. If `authored` date is different from utcnow(), any instruments found to be in an `in_progress` state will be treated as if they haven't been started. """ from ..models.qb_status import QB_Status # avoid cycle subject_id = request.args.get('subject_id') or current_user().id subject = get_user_or_abort(subject_id) if subject != current_user(): current_user().check_role(permission='edit', other_id=subject_id) as_of_date = FHIR_datetime.parse( request.args.get('authored'), none_safe=True) if not as_of_date: as_of_date = datetime.utcnow() assessment_status = QB_Status(subject, as_of_date=as_of_date) if assessment_status.overall_status == 'Withdrawn': abort(400, 'Withdrawn; no pending work found') args = dict(request.args.items()) args['instrument_id'] = ( assessment_status.instruments_needing_full_assessment( classification='all')) # Instruments in progress need special handling. Assemble # the list of external document ids for reliable resume # behavior at external assessment intervention. 
resume_ids = assessment_status.instruments_in_progress( classification='all') if resume_ids: args['resume_identifier'] = resume_ids if not args.get('instrument_id') and not args.get('resume_identifier'): flash(_('All available questionnaires have been completed')) current_app.logger.debug('no assessments needed, redirecting to /') return redirect('/') url = url_for('.present_assessment', **args) current_app.logger.debug('present assessment url, redirecting to: %s', url) return redirect(url, code=302) @assessment_engine_api.route('/api/present-assessment') @crossdomain() @roles_required([ROLE.STAFF_ADMIN.value, ROLE.STAFF.value, ROLE.PATIENT.value]) @oauth.require_oauth() def present_assessment(instruments=None): """Request that TrueNTH present an assessment via the assessment engine Redirects to the first assessment engine instance that is capable of administering the requested assessment --- operationId: present_assessment tags: - Assessment Engine produces: - text/html parameters: - name: instrument_id in: query description: ID of the instrument, eg "epic26", "eq5d" required: true type: array items: type: string enum: - epic26 - eq5d collectionFormat: multi - name: resume_instrument_id in: query description: ID of the instrument, eg "epic26", "eq5d" required: true type: array items: type: string enum: - epic26 - eq5d collectionFormat: multi - name: next in: query description: Intervention URL to return to after assessment completion required: true type: string format: url - name: subject_id in: query description: User ID to Collect QuestionnaireResponses as required: false type: integer - name: authored in: query description: Override QuestionnaireResponse.authored with given datetime required: false type: string format: date-time responses: 303: description: successful operation headers: Location: description: URL registered with assessment engine used to provide given assessment type: string format: url 401: description: if missing valid OAuth token or bad 
`next` parameter security: - ServiceToken: [] - OAuth2AuthzFlow: [] """ queued_instruments = request.args.getlist('instrument_id') resume_instruments = request.args.getlist('resume_instrument_id') resume_identifiers = request.args.getlist('resume_identifier') # Hack to allow deprecated API to piggyback # Remove when deprecated_present_assessment() is fully removed if instruments is not None: queued_instruments = instruments # Combine requested instruments into single list, maintaining order common_instruments = resume_instruments + queued_instruments common_instruments = sorted( set(common_instruments), key=lambda x: common_instruments.index(x) ) configured_instruments = Questionnaire.questionnaire_codes() if set(common_instruments) - set(configured_instruments): abort( 404, "No matching assessment found: %s" % ( ", ".join(set(common_instruments) - set(configured_instruments)) ) ) assessment_params = { "project": ",".join(common_instruments), "resume_instrument_id": ",".join(resume_instruments), "resume_identifier": ",".join(resume_identifiers), "subject_id": request.args.get('subject_id'), "authored": request.args.get('authored'), "entry_method": request.args.get('entry_method'), } # Clear empty querystring params assessment_params = {k: v for k, v in assessment_params.items() if v} assessment_url = "".join(( INTERVENTION.ASSESSMENT_ENGINE.link_url, "/surveys/new_session?", requests.compat.urlencode(assessment_params), )) if 'next' in request.args: next_url = request.args.get('next') # Validate next URL the same way CORS requests are validate_origin(next_url) current_app.logger.debug('storing session[assessment_return]: %s', next_url) session['assessment_return'] = next_url return redirect(assessment_url, code=303) @assessment_engine_api.route('/api/present-assessment/<instrument_id>') @oauth.require_oauth() def deprecated_present_assessment(instrument_id): current_app.logger.warning( "use of depricated API %s from referer %s", request.url, 
request.headers.get('Referer'), ) return present_assessment(instruments=[instrument_id]) @assessment_engine_api.route('/api/complete-assessment') @crossdomain() @oauth.require_oauth() def complete_assessment(): """Return to the last intervention that requested an assessment be presented Redirects to the URL passed to TrueNTH when present-assessment was last called (if valid) or TrueNTH home --- operationId: complete_assessment tags: - Internal produces: - text/html responses: 303: description: successful operation headers: Location: description: URL passed to TrueNTH when present-assessment was last called (if valid) or TrueNTH home type: string format: url 401: description: if missing valid OAuth token security: - ServiceToken: [] - OAuth2AuthzFlow: [] """ next_url = session.pop("assessment_return", "/") # Logout Assessment Engine after survey completion for token in INTERVENTION.ASSESSMENT_ENGINE.client.tokens: if token.user != current_user(): continue current_app.logger.debug( "assessment complete, logging out user: %s", token.user.id) INTERVENTION.ASSESSMENT_ENGINE.client.notify({ 'event': 'logout', 'user_id': token.user.id, 'refresh_token': token.refresh_token, 'info': 'complete-assessment', }) db.session.delete(token) db.session.commit() current_app.logger.debug("assessment complete, redirect to: %s", next_url) return redirect(next_url, code=303) @assessment_engine_api.route('/api/consent-assessment-status') @crossdomain() @oauth.require_oauth() def batch_assessment_status(): """Return a batch of consent and assessment states for list of users --- operationId: batch_assessment_status tags: - Internal parameters: - name: user_id in: query description: TrueNTH user ID for assessment status lookup. 
Any number of IDs may be provided required: true type: array items: type: integer format: int64 collectionFormat: multi produces: - application/json responses: 200: description: successful operation schema: id: batch_assessment_response properties: status: type: array items: type: object required: - user_id - consents properties: user_id: type: integer format: int64 description: TrueNTH ID for user consents: type: array items: type: object required: - consent - assessment_status properties: consent: type: string description: Details of the consent assessment_status: type: string description: User's assessment status 401: description: if missing valid OAuth token security: - ServiceToken: [] """ from ..models.qb_timeline import qb_status_visit_name acting_user = current_user() user_ids = request.args.getlist('user_id') if not user_ids: abort(400, "Requires at least one user_id") results = [] for uid in user_ids: check_int(uid) users = User.query.filter(User.id.in_(user_ids)) for user in users: if not acting_user.check_role('view', user.id): continue details = [] status, _ = qb_status_visit_name(user.id, datetime.utcnow()) for consent in user.all_consents: details.append( {'consent': consent.as_json(), 'assessment_status': str(status)}) results.append({'user_id': user.id, 'consents': details}) return jsonify(status=results) @assessment_engine_api.route( '/api/patient/<int:patient_id>/assessment-status') @crossdomain() @oauth.require_oauth() def patient_assessment_status(patient_id): """Return current assessment status for a given patient --- operationId: patient_assessment_status tags: - Assessment Engine parameters: - name: patient_id in: path description: TrueNTH patient ID required: true type: integer format: int64 - name: as_of_date in: query description: Optional UTC datetime for times other than ``utcnow`` required: false type: string format: date-time - name: purge in: query description: Optional trigger to purge any cached data for given user before 
(re)calculating assessment status required: false type: string produces: - application/json responses: 200: description: return current assessment status of given patient 401: description: if missing valid OAuth token or logged-in user lacks permission to view requested patient 404: description: if patient id is invalid security: - ServiceToken: [] """ from ..models.qb_status import QB_Status patient = get_user_or_abort(patient_id) current_user().check_role(permission='view', other_id=patient_id) date = request.args.get('as_of_date') date = FHIR_datetime.parse(date) if date else datetime.utcnow() trace = request.args.get('trace', False) if trace: establish_trace( "BEGIN trace for assessment-status on {}".format(patient_id)) if request.args.get('purge', 'false').lower() == 'true': invalidate_users_QBT(patient_id) assessment_status = QB_Status(user=patient, as_of_date=date) # indefinite assessments don't affect overall status, but need to # be available if unfinished outstanding_indefinite_work = len( assessment_status.instruments_needing_full_assessment( classification='indefinite') + assessment_status.instruments_in_progress(classification='indefinite') ) qbd = assessment_status.current_qbd() qb_name = qbd.questionnaire_bank.name if qbd else None response = { 'assessment_status': str(assessment_status.overall_status), 'outstanding_indefinite_work': outstanding_indefinite_work, 'questionnaires_ids': ( assessment_status.instruments_needing_full_assessment( classification='all')), 'resume_ids': assessment_status.instruments_in_progress( classification='all'), 'completed_ids': assessment_status.instruments_completed( classification='all'), 'qb_name': qb_name } if trace: response['trace'] = dump_trace() return jsonify(response)
{ "repo_name": "uwcirg/true_nth_usa_portal", "path": "portal/views/assessment_engine.py", "copies": "1", "size": "75011", "license": "bsd-3-clause", "hash": -78243782092636270, "line_mean": 36.9802531646, "line_max": 154, "alpha_frac": 0.5310021197, "autogenerated": false, "ratio": 4.625454769686132, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5656456889386132, "avg_score": null, "num_lines": null }
" assess mesh quality of howorka geometry and how it changes with uniform refinement and snapping " from nanopores import * from nanopores.geometries.curved import Cylinder, Sphere, Circle from dolfin import * from matplotlib import pyplot geo_name = "H_cyl_geo" nm = import_vars("nanopores.geometries.%s.params_geo" %geo_name)["nm"] add_params( h = 3., z0 = 2., ratio = .1, nref = 1, ) geo_params = dict( x0 = [0., 0., nm*z0], rMolecule = nm*0.5, lcCenter = nm*0.1, #1, lcMolecule = nm*0.05, #025, ) generate_mesh(h, geo_name, optimize=True, **geo_params) geo = geo_from_name(geo_name, **geo_params) print geo._bou2phys #plot(geo.submesh("pore")) plot_sliced(geo) # define sphere for molecule molec = Sphere(R=geo.params["rMolecule"], center=geo.params["x0"]) # define cylinders for inner and outer DNA boundary and side boundary innerdna = Cylinder(R=geo.params["r0"], L=geo.params["l0"]) outerdna = Cylinder(R=geo.params["r1"], L=geo.params["l0"]) side = Cylinder(R=geo.params["R"], L=2.*geo.params["Rz"]) curved = dict( moleculeb = molec.snap, innerdnab = innerdna.snap, outerdnab = outerdna.snap, membranednab = outerdna.snap, sideb = side.snap, outermembraneb = side.snap, ) def mesh_quality(mesh, oldmesh=None, ratio=1e-1): #vertex = VertexFunction("bool", mesh, False) dgncells = CellFunction("size_t", mesh, 0) for c in cells(mesh): if c.radius_ratio() < ratio: dgncells[c] = 1 #if c.radius_ratio() < 1e-5: #print 'Degenerate cell', c.index(), ', radius ratio', c.radius_ratio() #for v in vertices(c): #vertex[v] = True #if c.radius_ratio() < 1e-6: # print ' ', v.point().str() minrr = MeshQuality.radius_ratio_min_max(mesh)[0] print "Minimal radius ratio of mesh:", minrr pyplot.figure() exec(MeshQuality.radius_ratio_matplotlib_histogram(mesh, 200), locals()) # plot degenerate cells if minrr < ratio: submesh = SubMesh(mesh, dgncells, 1) title = "degenerate N=%s" %mesh.num_cells() #plot(submesh, title=title) geo_sub = geo_from_subdomains(submesh, "nanopores.geometries.%s.subdomains" 
%geo.params["name"], **geo.params) plot(geo_sub.boundaries, title="boundaries "+title) # find degenerate cells before snapping if oldmesh is not None: oldmesh = refine(oldmesh) oldcells = CellFunction("size_t", oldmesh, 0) oldcells.array()[:] = dgncells.array() plot(SubMesh(oldmesh, oldcells, 1), "old degenerate cells N=%s" %mesh.num_cells()) # mesh quality before refinement mesh = geo.mesh print "Number of cells:", mesh.num_cells() mesh_quality(mesh, ratio=ratio) #interactive() for i in range(nref): # Mark cells for refinement markers = CellFunction("bool", mesh, True) # Refine mesh mesh = refine(mesh, markers) print "Number of cells:", mesh.num_cells() geo.adapt(mesh) mesh_quality(mesh, ratio=ratio) # snap curved boundaries for boundary, snap in curved.items(): print "Adapting curved boundary '%s'." % boundary geo.snap_to_boundary(boundary, snap) mesh_quality(mesh, ratio=ratio) #areCh = assemble(Constant(1.)*geo.dS("dnab")) #print "Area (approx):", areCh #print "Error A:", abs(areCh - areC) print "hmin [nm]: ", geo.mesh.hmin()/nm plot_sliced(geo) interactive()
{ "repo_name": "mitschabaude/nanopores", "path": "scripts/numerics/H3Drefine.py", "copies": "1", "size": "3496", "license": "mit", "hash": 6749357663890491000, "line_mean": 30.7818181818, "line_max": 99, "alpha_frac": 0.6287185355, "autogenerated": false, "ratio": 3.110320284697509, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9046312436926827, "avg_score": 0.03854527665413647, "num_lines": 110 }
"""Assess phenotype recall relative to known HPO-PMID map.""" import collections import random import sys sys.path.append('../code') import extractor_util as util import data_util as dutil NUM_ERRORS_TO_SAMPLE = 50 def main(id_file, candidate_file): # Load list of all pubmed IDs in the dataset print >> sys.stderr, 'Loading list of pubmed IDs from doc ID list.' doi_to_pmid = dutil.read_doi_to_pmid() pmids_in_data = set() num_docs = 0 with open(id_file) as f: for line in f: doc_id = line.strip() pmid = dutil.get_pubmed_id_for_doc(doc_id, doi_to_pmid=doi_to_pmid) if pmid: pmids_in_data.add(pmid) num_docs += 1 print >> sys.stderr, '%d/%d documents have PubMed IDs.' % ( len(pmids_in_data), num_docs) # Load map from Pubmed ID to HPO term via MeSH print >> sys.stderr, 'Loading supervision data via MeSH' mesh_supervision = collections.defaultdict(set) with open('%s/onto/data/hpo_to_pmid_via_mesh.tsv' % util.APP_HOME) as f: for line in f: hpo_id, pmid = line.strip().split('\t') if pmid in pmids_in_data: mesh_supervision[pmid].add(hpo_id) # Identify all true pairs from MeSH true_pairs = set() for pmid in pmids_in_data: for hpo in mesh_supervision[pmid]: true_pairs.add((pmid, hpo)) # Load map from Pubmed ID to HPO term based on extracted candidates print >> sys.stderr, 'Loading extracted pheno candidates' candidates = collections.defaultdict(set) with open(candidate_file) as f: for line in f: doc_id, hpo_id = line.strip().split('\t') pmid = dutil.get_pubmed_id_for_doc(doc_id, doi_to_pmid=doi_to_pmid) if pmid: candidates[pmid].add(hpo_id) # Load HPO DAG # We say we found a HPO term if we find either the exact HPO term # or one of its children hpo_dag = dutil.read_hpo_dag() # Determine which true pairs had candidate mentions for them found_pairs = set() missed_pairs = set() for pmid, hpo in true_pairs: found_hpo_ids = candidates[pmid] for cand_hpo in found_hpo_ids: if cand_hpo == '\N': continue if hpo_dag.has_child(hpo, cand_hpo): found_pairs.add((pmid, hpo)) break else: 
missed_pairs.add((pmid, hpo)) # Compute recall num_true = len(true_pairs) num_found = len(found_pairs) print >> sys.stderr, 'Recall: %d/%d = %g' % ( num_found, num_true, float(num_found) / num_true) # Compute other statistics num_article = len(pmids_in_data) num_annotated = sum(1 for x in pmids_in_data if len(mesh_supervision[x]) > 0) print >> sys.stderr, '%d/%d = %g pubmed articles had HPO annotation' % ( num_annotated, num_article, float(num_annotated) / num_article) # Read in HPO information hpo_info_dict = dict() with open('%s/onto/data/hpo_phenotypes.tsv' % util.APP_HOME) as f: for line in f: toks = line.strip('\r\n').split('\t') hpo_id = toks[0] hpo_info_dict[hpo_id] = toks[0:3] # Sample some error cases missed_sample = random.sample(list(missed_pairs), 100) for pmid, hpo in missed_sample: hpo_info = hpo_info_dict[hpo] pubmed_url = 'http://www.ncbi.nlm.nih.gov/pubmed/%s' % pmid hpo_url = 'www.human-phenotype-ontology.org/hpoweb/showterm?id=%s' % hpo toks = [pubmed_url, hpo_url] + hpo_info print '\t'.join(toks) if __name__ == '__main__': if len(sys.argv) < 3: print >> sys.stderr, 'Usage: %s doc_ids.tsv candidates.tsv' % sys.argv[0] print >> sys.stderr, '' print >> sys.stderr, 'doc_ids.tsv should be list of doc ids' print >> sys.stderr, ' e.g. /lfs/raiders2/0/robinjia/data/genomics_sentences_input_data/50k_doc_ids.tsv' print >> sys.stderr, 'candidates.tsv should have rows doc_id, hpo_id.' print >> sys.stderr, ' e.g. result of SELECT doc_id, entity FROM pheno_mentions' print >> sys.stderr, ' or SELECT doc_id, entity FROM pheno_mentions_is_correct_inference WHERE expectation > 0.9' sys.exit(1) main(*sys.argv[1:])
{ "repo_name": "HazyResearch/dd-genomics", "path": "eval/pheno_recall.py", "copies": "1", "size": "3975", "license": "apache-2.0", "hash": 8585529978542728000, "line_mean": 34.8108108108, "line_max": 118, "alpha_frac": 0.6500628931, "autogenerated": false, "ratio": 2.853553481694185, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.40036163747941855, "avg_score": null, "num_lines": null }
"""Assess the accuracy of the reference values using the Wronskian relations. This follows the methodology of Liang-Wu Cai (2011), eqs. (9) and (10). It should give me more confidence in the reference values. At the moment, I only implement a single Wronskian relation (between jn and yn), but once the code is vetted I should implement more. """ import cPickle as pickle from itertools import izip, imap import numpy as np import mpmath from matplotlib import pyplot as plt from reference_points import reference_points from reference_values import mpc_to_np from config import (MAX_ORDER, RADIAL_POINTS, ANGULAR_POINTS, INNER_RADIUS, OUTER_RADIUS, STARTING_PRECISION) def sig_figs_jy(jn, jn1, yn, yn1, z): """Check relation http://dlmf.nist.gov/10.50 . Parameters ---------- jn : mpf The value of j_n(x). jn1 : mpf The value of j_{n + 1}(x). yn : mpf The value of y_n(x). yn1 : mpf The value of y_{n + 1}(x). Returns ------- The estimated number of significant digits to which the computation of the passed Bessel functions is correct. """ w = mpmath.fabs(z**2*(jn1*yn - jn*yn1) - 1) if not mpmath.isfinite(w): return w if w > 0: return 1 - mpmath.log10(w) else: return mpmath.mp.dps def reference_data(): with open("jn.pickle", "rb") as f: jn = pickle.load(f) with open("yn.pickle", "rb") as f: yn = pickle.load(f) return jn, yn def reference_sig_figs(points, jn, yn): """Yield significant figures of jn, yn at point.""" for idx, p in enumerate(points): z, order = p if order < MAX_ORDER: yield sig_figs_jy(jn[idx], jn[idx+1], yn[idx], yn[idx+1], z) else: # Can't compute Wronskian, but need to yield something to stay # in step with the points generator. 
yield mpmath.nan def plots_from_generators(pointgen, valgen, title): p = point_arrays_from_generator(pointgen) v = data_arrays_from_generator(valgen) make_plot(p[0], v[0], "{}_real.png".format(title), title) for point, value, idx in izip(p[1:], v[1:], xrange(ANGULAR_POINTS)): make_plot(point, value, "{}_complex_{}.png".format(title, idx), r"{}, $\exp(2\pi\imath*{}/{})$ line".format(title, idx + 1, ANGULAR_POINTS + 1)) def point_arrays_from_generator(g): return np.split(np.array(list(g), dtype=[('z', 'c16'), ('n', 'u8')]), ANGULAR_POINTS + 1) def data_arrays_from_generator(g): return np.split(np.array(list(imap(mpc_to_np, g))), ANGULAR_POINTS + 1) def make_plot(point, value, filename, title): z = np.reshape(point['z'], (RADIAL_POINTS, MAX_ORDER + 1)) n = np.reshape(point['n'], (RADIAL_POINTS, MAX_ORDER + 1)) v = np.reshape(value, (RADIAL_POINTS, MAX_ORDER + 1)) imdata = np.ma.masked_invalid(v) cmap = plt.cm.Greys cmap.set_bad('r', 1) fig, ax = plt.subplots() im = ax.pcolormesh(np.log10(np.abs(z.transpose())), n.transpose(), imdata.transpose(), cmap=cmap, vmin=0, vmax=15) plt.colorbar(im) ax.set_xlim((INNER_RADIUS, OUTER_RADIUS)) ax.set_ylim((0, imdata.shape[1])) ax.set_xlabel(r"$\log_{10}(|z|)$") ax.set_ylabel("order") if title: ax.set_title(title) plt.savefig(filename) plt.close(fig) if __name__ == '__main__': mpmath.mp.dps = STARTING_PRECISION jn, yn = reference_data() sig_figs = reference_sig_figs(reference_points(), jn, yn) plots_from_generators(reference_points(), sig_figs, "jnyn")
{ "repo_name": "tpudlik/sbf", "path": "reference/wronskian.py", "copies": "1", "size": "3646", "license": "mit", "hash": -8244892156947000000, "line_mean": 29.3833333333, "line_max": 98, "alpha_frac": 0.607515085, "autogenerated": false, "ratio": 3.095076400679117, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9200630856618823, "avg_score": 0.0003921258120586228, "num_lines": 120 }
"""Assess transcript abundance in RNA-seq experiments using Cufflinks. http://cufflinks.cbcb.umd.edu/manual.html """ import os import shutil import tempfile import pandas as pd from bcbio import bam from bcbio.utils import get_in, file_exists, safe_makedir from bcbio.distributed.transaction import file_transaction from bcbio.log import logger from bcbio.pipeline import config_utils from bcbio.provenance import do from bcbio.rnaseq import gtf, annotate_gtf def run(align_file, ref_file, data): align_file = bam.convert_cufflinks_mapq(align_file) config = data["config"] cmd = _get_general_options(align_file, config) cmd.extend(_get_no_assembly_options(ref_file, data)) out_dir = _get_output_dir(align_file, data) tracking_file = os.path.join(out_dir, "genes.fpkm_tracking") fpkm_file = os.path.join(out_dir, data['rgnames']['sample']) + ".fpkm" tracking_file_isoform = os.path.join(out_dir, "isoforms.fpkm_tracking") fpkm_file_isoform = os.path.join(out_dir, data['rgnames']['sample']) + ".isoform.fpkm" if not file_exists(fpkm_file): with file_transaction(data, out_dir) as tmp_out_dir: safe_makedir(tmp_out_dir) cmd.extend(["--output-dir", tmp_out_dir]) cmd.extend([align_file]) cmd = list(map(str, cmd)) do.run(cmd, "Cufflinks on %s." % (align_file)) fpkm_file = gene_tracking_to_fpkm(tracking_file, fpkm_file) fpkm_file_isoform = gene_tracking_to_fpkm(tracking_file_isoform, fpkm_file_isoform) return out_dir, fpkm_file, fpkm_file_isoform def gene_tracking_to_fpkm(tracking_file, out_file): """ take a gene-level tracking file from Cufflinks and output a two column table with the first column as IDs and the second column as FPKM for the sample. 
combines FPKM from the same genes into one FPKM value to fix this bug: http://seqanswers.com/forums/showthread.php?t=5224&page=2 """ if file_exists(out_file): return out_file df = pd.io.parsers.read_csv(tracking_file, sep="\t", header=0) df = df[['tracking_id', 'FPKM']] df = df.groupby(['tracking_id']).sum() df.to_csv(out_file, sep="\t", header=False, index_label=False) return out_file def _get_general_options(align_file, config): options = [] cufflinks = config_utils.get_program("cufflinks", config) options.extend([cufflinks]) options.extend(["--num-threads", config["algorithm"].get("num_cores", 1)]) options.extend(["--quiet"]) options.extend(["--no-update-check"]) options.extend(["--max-bundle-frags", 2000000]) options.extend(_get_stranded_flag(config)) return options def _get_no_assembly_options(ref_file, data): options = [] options.extend(["--frag-bias-correct", ref_file]) options.extend(["--multi-read-correct"]) options.extend(["--upper-quartile-norm"]) gtf_file = data["genome_resources"]["rnaseq"].get("transcripts", "") if gtf_file: options.extend(["--GTF", gtf_file]) mask_file = data["genome_resources"]["rnaseq"].get("transcripts_mask", "") if mask_file: options.extend(["--mask-file", mask_file]) return options def _get_stranded_flag(config): strand_flag = {"unstranded": "fr-unstranded", "firststrand": "fr-firststrand", "secondstrand": "fr-secondstrand"} stranded = get_in(config, ("algorithm", "strandedness"), "unstranded").lower() assert stranded in strand_flag, ("%s is not a valid strandedness value. 
" "Valid values are 'firststrand', " "'secondstrand' and 'unstranded" % (stranded)) flag = strand_flag[stranded] return ["--library-type", flag] def _get_output_dir(align_file, data, sample_dir=True): config = data["config"] name = data["rgnames"]["sample"] if sample_dir else "" return os.path.join(get_in(data, ("dirs", "work")), "cufflinks", name) def assemble(bam_file, ref_file, num_cores, out_dir, data): out_dir = os.path.join(out_dir, data["rgnames"]["sample"]) safe_makedir(out_dir) out_file = os.path.join(out_dir, "cufflinks-assembly.gtf") cufflinks_out_file = os.path.join(out_dir, "transcripts.gtf") library_type = " ".join(_get_stranded_flag(data["config"])) if file_exists(out_file): return out_file bam_file = bam.convert_cufflinks_mapq(bam_file) with file_transaction(data, out_dir) as tmp_out_dir: cmd = ("cufflinks --output-dir {tmp_out_dir} --num-threads {num_cores} " "--frag-bias-correct {ref_file} " "--quiet " "{library_type} --multi-read-correct --upper-quartile-norm {bam_file}") cmd = cmd.format(**locals()) do.run(cmd, "Assembling transcripts with Cufflinks using %s." % bam_file) shutil.move(cufflinks_out_file, out_file) return out_file def clean_assembly(gtf_file, clean=None, dirty=None): """ clean the likely garbage transcripts from the GTF file including: 1. any novel single-exon transcripts 2. 
any features with an unknown strand """ base, ext = os.path.splitext(gtf_file) db = gtf.get_gtf_db(gtf_file, in_memory=True) clean = clean if clean else base + ".clean" + ext dirty = dirty if dirty else base + ".dirty" + ext if file_exists(clean): return clean, dirty logger.info("Cleaning features with an unknown strand from the assembly.") with open(clean, "w") as clean_handle, open(dirty, "w") as dirty_handle: for gene in db.features_of_type('gene'): for transcript in db.children(gene, level=1): if is_likely_noise(db, transcript): write_transcript(db, dirty_handle, transcript) else: write_transcript(db, clean_handle, transcript) return clean, dirty def write_transcript(db, handle, transcript): for feature in db.children(transcript): handle.write(str(feature) + "\n") def is_likely_noise(db, transcript): if is_novel_single_exon(db, transcript): return True if strand_unknown(db, transcript): return True def strand_unknown(db, transcript): """ for unstranded data with novel transcripts single exon genes will have no strand information. 
single exon novel genes are also a source of noise in the Cufflinks assembly so this removes them """ features = list(db.children(transcript)) strand = features[0].strand if strand == ".": return True else: return False def is_novel_single_exon(db, transcript): features = list(db.children(transcript)) exons = [x for x in features if x.featuretype == "exon"] class_code = features[0].attributes.get("class_code", None)[0] if len(exons) == 1 and class_code == "u": return True return False def fix_cufflinks_attributes(ref_gtf, merged_gtf, data, out_file=None): """ replace the cufflinks gene_id and transcript_id with the gene_id and transcript_id from ref_gtf, where available """ base, ext = os.path.splitext(merged_gtf) fixed = out_file if out_file else base + ".clean.fixed" + ext if file_exists(fixed): return fixed ref_db = gtf.get_gtf_db(ref_gtf) merged_db = gtf.get_gtf_db(merged_gtf, in_memory=True) ref_tid_to_gid = {} for gene in ref_db.features_of_type('gene'): for transcript in ref_db.children(gene, level=1): ref_tid_to_gid[transcript.id] = gene.id ctid_to_cgid = {} ctid_to_oid = {} for gene in merged_db.features_of_type('gene'): for transcript in merged_db.children(gene, level=1): ctid_to_cgid[transcript.id] = gene.id feature = list(merged_db.children(transcript))[0] oid = feature.attributes.get("oId", [None])[0] if oid: ctid_to_oid[transcript.id] = oid cgid_to_gid = {} for ctid, oid in ctid_to_oid.items(): cgid = ctid_to_cgid.get(ctid, None) oid = ctid_to_oid.get(ctid, None) gid = ref_tid_to_gid.get(oid, None) if oid else None if cgid and gid: cgid_to_gid[cgid] = gid with file_transaction(data, fixed) as tmp_fixed_file: with open(tmp_fixed_file, "w") as out_handle: for gene in merged_db.features_of_type('gene'): for transcript in merged_db.children(gene, level=1): for feature in merged_db.children(transcript): cgid = feature.attributes.get("gene_id", [None])[0] gid = cgid_to_gid.get(cgid, None) ctid = None if gid: feature.attributes["gene_id"][0] = gid ctid = 
feature.attributes.get("transcript_id", [None])[0] tid = ctid_to_oid.get(ctid, None) if tid: feature.attributes["transcript_id"][0] = tid if "nearest_ref" in feature.attributes: del feature.attributes["nearest_ref"] if "oId" in feature.attributes: del feature.attributes["oId"] out_handle.write(str(feature) + "\n") return fixed def merge(assembled_gtfs, ref_file, gtf_file, num_cores, data): """ run cuffmerge on a set of assembled GTF files """ assembled_file = tempfile.NamedTemporaryFile(delete=False).name with open(assembled_file, "w") as temp_handle: for assembled in assembled_gtfs: temp_handle.write(assembled + "\n") out_dir = os.path.join("assembly", "cuffmerge") merged_file = os.path.join(out_dir, "merged.gtf") out_file = os.path.join(out_dir, "assembled.gtf") if file_exists(out_file): return out_file if not file_exists(merged_file): with file_transaction(data, out_dir) as tmp_out_dir: cmd = ("cuffmerge -o {tmp_out_dir} --ref-gtf {gtf_file} " "--num-threads {num_cores} --ref-sequence {ref_file} " "{assembled_file}") cmd = cmd.format(**locals()) message = ("Merging the following transcript assemblies with " "Cuffmerge: %s" % ", ".join(assembled_gtfs)) do.run(cmd, message) clean, _ = clean_assembly(merged_file) fixed = fix_cufflinks_attributes(gtf_file, clean, data) classified = annotate_gtf.annotate_novel_coding(fixed, gtf_file, ref_file, data) filtered = annotate_gtf.cleanup_transcripts(classified, gtf_file, ref_file) shutil.move(filtered, out_file) return out_file
{ "repo_name": "vladsaveliev/bcbio-nextgen", "path": "bcbio/rnaseq/cufflinks.py", "copies": "4", "size": "10844", "license": "mit", "hash": 1915336726007167700, "line_mean": 39.920754717, "line_max": 91, "alpha_frac": 0.609738104, "autogenerated": false, "ratio": 3.4079195474544313, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6017657651454431, "avg_score": null, "num_lines": null }
"""Assess transcript abundance in RNA-seq experiments using Cufflinks. http://cufflinks.cbcb.umd.edu/manual.html """ import os import tempfile from bcbio.utils import get_in, file_exists, safe_makedir from bcbio.distributed.transaction import file_transaction from bcbio.pipeline import config_utils from bcbio.provenance import do from bcbio.rnaseq import gtf, annotate_gtf import pandas as pd def run(align_file, ref_file, data): config = data["config"] cmd = _get_general_options(align_file, config) cmd.extend(_get_no_assembly_options(ref_file, data)) out_dir = _get_output_dir(align_file, data) tracking_file = os.path.join(out_dir, "genes.fpkm_tracking") fpkm_file = os.path.join(out_dir, data['rgnames']['sample']) + ".fpkm" tracking_file_isoform = os.path.join(out_dir, "isoforms.fpkm_tracking") fpkm_file_isoform = os.path.join(out_dir, data['rgnames']['sample']) + ".isoform.fpkm" if not file_exists(fpkm_file): with file_transaction(data, out_dir) as tmp_out_dir: cmd.extend(["--output-dir", tmp_out_dir]) cmd.extend([align_file]) cmd = map(str, cmd) do.run(cmd, "Cufflinks on %s." % (align_file)) fpkm_file = gene_tracking_to_fpkm(tracking_file, fpkm_file) fpkm_file_isoform = gene_tracking_to_fpkm(tracking_file_isoform, fpkm_file_isoform) return out_dir, fpkm_file, fpkm_file_isoform def gene_tracking_to_fpkm(tracking_file, out_file): """ take a gene-level tracking file from Cufflinks and output a two column table with the first column as IDs and the second column as FPKM for the sample. 
combines FPKM from the same genes into one FPKM value to fix this bug: http://seqanswers.com/forums/showthread.php?t=5224&page=2 """ if file_exists(out_file): return out_file df = pd.io.parsers.read_table(tracking_file, sep="\t", header=0) df = df[['tracking_id', 'FPKM']] df = df.groupby(['tracking_id']).sum() df.to_csv(out_file, sep="\t", header=False, index_label=False) return out_file def _get_general_options(align_file, config): options = [] cufflinks = config_utils.get_program("cufflinks", config) options.extend([cufflinks]) options.extend(["--num-threads", config["algorithm"].get("num_cores", 1)]) options.extend(["--quiet"]) options.extend(["--no-update-check"]) options.extend(["--max-bundle-frags", 2000000]) options.extend(_get_stranded_flag(config)) return options def _get_no_assembly_options(ref_file, data): options = [] options.extend(["--frag-bias-correct", ref_file]) options.extend(["--multi-read-correct"]) options.extend(["--upper-quartile-norm"]) gtf_file = data["genome_resources"]["rnaseq"].get("transcripts", "") if gtf_file: options.extend(["--GTF", gtf_file]) mask_file = data["genome_resources"]["rnaseq"].get("transcripts_mask", "") if mask_file: options.extend(["--mask-file", mask_file]) return options def _get_stranded_flag(config): strand_flag = {"unstranded": "fr-unstranded", "firststrand": "fr-firststrand", "secondstrand": "fr-secondstrand"} stranded = get_in(config, ("algorithm", "strandedness"), "unstranded").lower() assert stranded in strand_flag, ("%s is not a valid strandedness value. 
" "Valid values are 'firststrand', " "'secondstrand' and 'unstranded" % (stranded)) flag = strand_flag[stranded] return ["--library-type", flag] def _get_output_dir(align_file, data, sample_dir=True): config = data["config"] name = data["rgnames"]["sample"] if sample_dir else "" return os.path.join(get_in(data, ("dirs", "work")), "cufflinks", name) def assemble(bam_file, ref_file, num_cores, out_dir, data): out_dir = os.path.join(out_dir, data["rgnames"]["sample"]) safe_makedir(out_dir) out_file = os.path.join(out_dir, data["rgnames"]["sample"], "transcripts.gtf") if file_exists(out_file): return out_file with file_transaction(data, out_dir) as tmp_out_dir: cmd = ("cufflinks --output-dir {tmp_out_dir} --num-threads {num_cores} " "--frag-bias-correct {ref_file} " "--multi-read-correct --upper-quartile-norm {bam_file}") cmd = cmd.format(**locals()) do.run(cmd, "Assembling transcripts with Cufflinks using %s." % bam_file) return out_file def clean_assembly(gtf_file, clean=None, dirty=None): """ clean the likely garbage transcripts from the GTF file including: 1. any novel single-exon transcripts 2. 
any features with an unknown strand """ base, ext = os.path.splitext(gtf_file) db = gtf.get_gtf_db(gtf_file, in_memory=True) clean = clean if clean else base + ".clean" + ext dirty = dirty if dirty else base + ".dirty" + ext if file_exists(clean): return clean, dirty with open(clean, "w") as clean_handle, open(dirty, "w") as dirty_handle: for gene in db.features_of_type('gene'): for transcript in db.children(gene, level=1): if is_likely_noise(db, transcript): write_transcript(db, dirty_handle, transcript) else: write_transcript(db, clean_handle, transcript) return clean, dirty def write_transcript(db, handle, transcript): for feature in db.children(transcript): handle.write(str(feature) + "\n") def is_likely_noise(db, transcript): if is_novel_single_exon(db, transcript): return True if strand_unknown(db, transcript): return True def strand_unknown(db, transcript): """ for unstranded data with novel transcripts single exon genes will have no strand information. single exon novel genes are also a source of noise in the Cufflinks assembly so this removes them """ features = list(db.children(transcript)) strand = features[0].strand if strand == ".": return True else: return False def is_novel_single_exon(db, transcript): features = list(db.children(transcript)) exons = [x for x in features if x.featuretype == "exon"] class_code = features[0].attributes.get("class_code", None)[0] if len(exons) == 1 and class_code == "u": return True return False def fix_cufflinks_attributes(ref_gtf, merged_gtf, data, out_file=None): """ replace the cufflinks gene_id and transcript_id with the gene_id and transcript_id from ref_gtf, where available """ ref_db = gtf.get_gtf_db(ref_gtf, in_memory=True) merged_db = gtf.get_gtf_db(merged_gtf, in_memory=True) base, ext = os.path.splitext(merged_gtf) fixed = out_file if out_file else base + ".clean.fixed" + ext if file_exists(fixed): return fixed ref_tid_to_gid = {} for gene in ref_db.features_of_type('gene'): for transcript in 
ref_db.children(gene, level=1): ref_tid_to_gid[transcript.id] = gene.id ctid_to_cgid = {} ctid_to_oid = {} for gene in merged_db.features_of_type('gene'): for transcript in merged_db.children(gene, level=1): ctid_to_cgid[transcript.id] = gene.id feature = list(merged_db.children(transcript))[0] oid = feature.attributes.get("oId", [None])[0] if oid: ctid_to_oid[transcript.id] = oid cgid_to_gid = {} for ctid, oid in ctid_to_oid.items(): cgid = ctid_to_cgid.get(ctid, None) oid = ctid_to_oid.get(ctid, None) gid = ref_tid_to_gid.get(oid, None) if oid else None if cgid and gid: cgid_to_gid[cgid] = gid with file_transaction(data, fixed) as tmp_fixed_file: with open(tmp_fixed_file, "w") as out_handle: for gene in merged_db.features_of_type('gene'): for transcript in merged_db.children(gene, level=1): for feature in merged_db.children(transcript): cgid = feature.attributes.get("gene_id", [None])[0] gid = cgid_to_gid.get(cgid, None) ctid = None if gid: feature.attributes["gene_id"][0] = gid ctid = feature.attributes.get("transcript_id", [None])[0] tid = ctid_to_oid.get(ctid, None) if tid: feature.attributes["transcript_id"][0] = tid if "nearest_ref" in feature.attributes: del feature.attributes["nearest_ref"] if "oId" in feature.attributes: del feature.attributes["oId"] out_handle.write(str(feature) + "\n") return fixed def merge(assembled_gtfs, ref_file, gtf_file, num_cores, data): """ run cuffmerge on a set of assembled GTF files """ assembled_file = tempfile.NamedTemporaryFile(delete=False).name with open(assembled_file, "w") as temp_handle: for assembled in assembled_gtfs: temp_handle.write(assembled + "\n") out_dir = os.path.join("assembly", "cuffmerge") merged_file = os.path.join(out_dir, "merged.gtf") out_file = os.path.join(out_dir, "assembled.gtf") if file_exists(out_file): return out_file with file_transaction(data, out_dir) as tmp_out_dir: cmd = ("cuffmerge -o {tmp_out_dir} --ref-gtf {gtf_file} " "--num-threads {num_cores} --ref-sequence {ref_file} " 
"{assembled_file}") cmd = cmd.format(**locals()) do.run(cmd, "Merging transcript assemblies with reference.") clean, _ = clean_assembly(merged_file) fixed = fix_cufflinks_attributes(gtf_file, clean, data) classified = annotate_gtf.annotate_novel_coding(fixed, gtf_file, ref_file) filtered = annotate_gtf.cleanup_transcripts(classified, gtf_file, ref_file) os.rename(filtered, out_file) return out_file
{ "repo_name": "SciLifeLab/bcbio-nextgen", "path": "bcbio/rnaseq/cufflinks.py", "copies": "1", "size": "10133", "license": "mit", "hash": -382943128400980350, "line_mean": 41.3974895397, "line_max": 91, "alpha_frac": 0.6129477943, "autogenerated": false, "ratio": 3.3923669233344493, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.45053147176344494, "avg_score": null, "num_lines": null }
# asset class import os, sys, pygame from logging import debug, info, warn tile_size = 16 class Assets: def __init__(self, data_dir='data', data_file='data/assets.txt'): self.data_dir = data_dir self.data_file = data_file self._assets = {} def load_all(self): # load all assets debug('Assets.load_all started') surf_cache = {} with open(self.data_file, 'r') as index_file: for line_no, line in enumerate(index_file.readlines()): line = line.strip() debug(line) if line.startswith('#'): continue # Floor.png, tile1, 1, 4 try: fn, id, ix, iy = (s.strip() for s in line.split(',')) ix = int(ix) iy = int(iy) if fn not in surf_cache: surf_cache[fn] = pygame.image.load(os.path.join(self.data_dir, fn)) debug('loaded %s into loading cache' % fn) self._assets[id] = surf_cache[fn].subsurface(pygame.Rect(ix*tile_size, iy*tile_size, tile_size, tile_size)) debug('loaded %s as subsurface' % id) except ValueError: warn('ValueError when loading line %d in the %s' % (line_no + 1, self.data_file)) debug('Assets.load_all done') def __getitem__(self, *args, **kwargs): return self._assets.__getitem__(*args, **kwargs)
{ "repo_name": "SafPlusPlus/pyweek19", "path": "pw19/assets.py", "copies": "1", "size": "1566", "license": "apache-2.0", "hash": 3407339189766433000, "line_mean": 36.3095238095, "line_max": 127, "alpha_frac": 0.4872286079, "autogenerated": false, "ratio": 3.9645569620253163, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4951785569925316, "avg_score": null, "num_lines": null }
"""assethub URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf import settings from django.conf.urls import url, include from django.conf.urls.static import static from django.contrib import admin import django.contrib.auth.views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^accounts/login/$', django.contrib.auth.views.login, name='login'), url(r'^accounts/logout/$', django.contrib.auth.views.logout, name='logout'), url(r'^accounts/changepassword/$', django.contrib.auth.views.password_change, name='chane_password'), url(r'^accounts/changepassword/done/$', django.contrib.auth.views.password_change_done, name='password_change_done'), url(r'^accounts/resetpassword/$', django.contrib.auth.views.password_reset, name='reset_password'), url(r'^accounts/resetpassword/done/$', django.contrib.auth.views.password_reset_done, name='password_reset_done'), #url(r'^accounts/', include('django.contrib.auth.urls')), url(r'^accounts/', include('registration.backends.hmac.urls')), url(r'^taggit_autosuggest/', include('taggit_autosuggest.urls')), url(r'^comments/', include('django_comments.urls')), url('', include('social_django.urls', namespace='social')), url(r'^', include('assets.urls')), ] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
{ "repo_name": "portnov/assethub", "path": "assethub/assethub/urls.py", "copies": "1", "size": "1918", "license": "bsd-3-clause", "hash": -3170563905660059600, "line_mean": 50.8378378378, "line_max": 121, "alpha_frac": 0.712721585, "autogenerated": false, "ratio": 3.5650557620817844, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.47777773470817847, "avg_score": null, "num_lines": null }
# Asset import : Assimp library # from pyassimp import pyassimp from ctypes import * import cPickle import logging from pogle_math import Matrix4x4, AABB, Vector, Transform from pogle_scene import SceneNode from pogle_bufferobject import BufferObject from pogle_opengl import * from pogle_stats import Stats import pyassimp __author__ = 'Clement JACOB' __copyright__ = "Copyright 2013, The Python OpenGL Engine" __license__ = "Closed Source" __version__ = "0.0.1" __email__ = "clems71@gmail.com" __status__ = "Prototype" # Not defined in base library GL_PATCHES = 0x000E class Vec2(Structure): _fields_ = [ ('x', GLfloat), ('y', GLfloat), ] _attribs_ = [ (2, GL_FLOAT, GL_FALSE), ] class Vec3(Structure): _fields_ = [ ('x', GLfloat), ('y', GLfloat), ('z', GLfloat), ] class DefaultAttribStruct(Structure): """ Describe per-vertex data (attributes in OpenGL terms) """ _fields_ = [ ('position', Vec3), ('normal', Vec3), ('tangent', Vec3), ('bitangent', Vec3), ('uv0', Vec2), ] _attribs_ = [ (3, GL_FLOAT, GL_FALSE ), (3, GL_FLOAT, GL_TRUE ), (3, GL_FLOAT, GL_TRUE ), (3, GL_FLOAT, GL_TRUE ), (2, GL_FLOAT, GL_FALSE ), ] class AttribStruct2D(Structure): """ Describe per-vertex data (attributes in OpenGL terms) """ _fields_ = [ ('position', Vec2), ('uv0', Vec2), ] _attribs_ = [ (2, GL_FLOAT, GL_FALSE), (2, GL_FLOAT, GL_FALSE), ] class GeometryNode(SceneNode): def __init__(self, geom, transform=None, material=None): super(GeometryNode, self).__init__(transform, SceneNode.NODE_HAS_GEOMETRY) self.geom = geom # Default engine material self._material = None self.material = material @property def material(self): return self._material @material.setter def material(self, val): if self._material != val: self._material = val if self.scene != None: self.scene.mark_renderlist_as_dirty() def render(self, renderer): self.geom.draw(renderer) @staticmethod def load_from_file(path): return GeometryNode(Geometry.load_from_file(path)) class VAO(object): _current = None def __init__(self): self.glid = 
glGenVertexArrays(1) self.bind() def bind(self): """ Bind this VAO """ if VAO._current != self: VAO._current = self glBindVertexArray(self.glid) @staticmethod def unbind(): glBindVertexArray(0) VAO._current = None class DynamicGeomRef(object): def __init__(self, other, mode): self.root = other self.drawmode = mode def draw(self, renderer): self.root.draw(renderer, self.drawmode) class DynamicGeom(object): def __init__(self, attrib_type, count, mode=GL_LINE_STRIP): """ attrib_type -- A Structure inherited type defining per vertex attribs """ self._changed = False self._count = 0 self.drawmode = mode self.vao = VAO() self.vbo = BufferObject(GL_ARRAY_BUFFER, sizeof(attrib_type) * count, GL_DYNAMIC_DRAW) self._client_mem_object = (attrib_type * count)() attrib_type_size = sizeof(attrib_type) attrib_id = 0 offset = 0 for details in attrib_type._attribs_: glEnableVertexAttribArray(attrib_id) attrib_size = details[0] attrib_gltype = details[1] attrib_normalized = details[2] glVertexAttribPointer(attrib_id, attrib_size, attrib_gltype, attrib_normalized, attrib_type_size, offset) attrib_id += 1 print('') print('>>>> WARNING : Get real size instead of assuming 32 bits') print('') offset += 4 * attrib_size def append(self, data): self._client_mem_object[self._count] = data self._count += 1 self._changed = True def __getitem__(self, idx): return self._client_mem_object[idx] def __setitem__(self, idx, val): self._client_mem_object[idx] = val @property def count(self): return self._count @count.setter def count(self, val): self._count = val self._changed = True def clear(self): self._count = 0 def draw(self, renderer, mode=None): self.vao.bind() if self._changed: self.vbo.fill(self._client_mem_object) self._changed = False mode = self.drawmode if mode is None else mode glDrawArrays(mode, 0, self._count) Stats.drawcalls += 1 class Geometry(object): """ Raw geometry, with no transform applied on it """ def __init__(self, attribs, indices, aabb=None): attrib_type = type(attribs[0]) 
# NOTE(review): this chunk begins mid-way through Geometry.__init__; the
# enclosing "def __init__(...)" line lies before this excerpt.
attrib_type_size = sizeof(attrib_type)

        self.aabb = aabb
        self.idx_count = len(indices)
        # NOTE(review): the use of cPickle below suggests Python 2, where "/"
        # on ints is integer division; on Python 3 this would become a float.
        self.tri_count = self.idx_count / 3

        # Create a container for all Buffer Objects
        self.vao = VAO()

        # Mesh Buffer Objects
        self.indices_vbo = BufferObject(GL_ELEMENT_ARRAY_BUFFER, indices, GL_STATIC_DRAW)
        self.vbo = BufferObject(GL_ARRAY_BUFFER, attribs, GL_STATIC_DRAW)

        # Describe the interleaved vertex layout to OpenGL: one GL vertex
        # attribute per field of the ctypes attrib struct, using the field's
        # byte offset within the struct.
        attrib_id = 0
        for attrib, details in zip(attrib_type._fields_, attrib_type._attribs_):
            glEnableVertexAttribArray(attrib_id)
            attrib_name = attrib[0]
            attrib_size = details[0]
            attrib_gltype = details[1]
            attrib_normalized = details[2]
            glVertexAttribPointer(attrib_id, attrib_size, attrib_gltype,
                                  attrib_normalized, attrib_type_size,
                                  ctypes.c_void_p(getattr(attrib_type, attrib_name).offset))
            attrib_id += 1

        # VAO.unbind()

    def draw(self, renderer):
        # Bind the VAO and issue a single indexed draw call.
        self.vao.bind()

        # If tessellation is enabled, it has to be rendered as patch
        if renderer.current_material._shader.has_tessellation:
            glDrawElements(GL_PATCHES, self.idx_count, GL_UNSIGNED_INT, None)
        # Else, as simple triangles
        else:
            glDrawElements(GL_TRIANGLES, self.idx_count, GL_UNSIGNED_INT, None)

        Stats.drawcalls += 1

    @staticmethod
    def load_from_file(path):
        # Load geometry from ``path``, preferring the pickled ``.geomcache``
        # sidecar file; on any cache failure, fall back to a full assimp
        # import and rewrite the cache.
        path_cache = path + '.geomcache'

        try:
            # NOTE(review): fcache is never explicitly closed on the success
            # path (no context manager here).
            fcache = open(path_cache, 'rb')
            attribs_bytes, indices_bytes, aabb_min, aabb_max, numverts, numtris = cPickle.load(fcache)
            attribs = (DefaultAttribStruct * numverts).from_buffer(attribs_bytes)
            # NOTE(review): "numtris" is pickled below as len(mesh.faces) * 3,
            # so it is actually an index count, not a triangle count.
            indices = (GLuint * numtris).from_buffer(indices_bytes)
            aabb_min = Vector(*aabb_min)
            aabb_max = Vector(*aabb_max)
            return Geometry(attribs, indices, AABB(aabb_min, aabb_max))
        except :
            # Bare except: any failure (missing or corrupt cache) falls
            # through to the full re-import below.
            logging.warn('Failed to load geometry ' + path + ' from cache')

        scene = pyassimp.load(path, pyassimp.postprocess.aiProcessPreset_TargetRealtime_Quality)
        mesh = scene.meshes[0]

        if len(mesh.vertices) == 0:
            return None

        attribs = (DefaultAttribStruct * len(mesh.vertices))()
        indices = (GLuint * (len(mesh.faces) * 3))()

        # UV layers
        uv0 = (0.0, 0.0)
        uvlayers = 0
        if mesh.numuvcomponents[0] >= 2:
            uvlayers += 1

        # Seed the AABB with the first vertex, then grow it in the loop.
        v0 = mesh.vertices[0]
        aabb_min = Vector(v0[0], v0[1], v0[2])
        aabb_max = Vector(v0[0], v0[1], v0[2])

        # Fill attributes
        for idx in range(len(mesh.vertices)):
            vertex = mesh.vertices[idx]
            normal = mesh.normals[idx]
            tangent = mesh.tangents[idx]
            bitangent = mesh.bitangents[idx]
            if uvlayers != 0:
                uv0 = mesh.texturecoords[0][idx]

            # Build the AABB
            if vertex[0] < aabb_min.x: aabb_min.x = vertex[0]
            if vertex[1] < aabb_min.y: aabb_min.y = vertex[1]
            if vertex[2] < aabb_min.z: aabb_min.z = vertex[2]
            if vertex[0] > aabb_max.x: aabb_max.x = vertex[0]
            if vertex[1] > aabb_max.y: aabb_max.y = vertex[1]
            if vertex[2] > aabb_max.z: aabb_max.z = vertex[2]

            attribs[idx].position.x = vertex[0]
            attribs[idx].position.y = vertex[1]
            attribs[idx].position.z = vertex[2]
            attribs[idx].normal.x = normal[0]
            attribs[idx].normal.y = normal[1]
            attribs[idx].normal.z = normal[2]
            attribs[idx].tangent.x = tangent[0]
            attribs[idx].tangent.y = tangent[1]
            attribs[idx].tangent.z = tangent[2]
            attribs[idx].bitangent.x = bitangent[0]
            attribs[idx].bitangent.y = bitangent[1]
            attribs[idx].bitangent.z = bitangent[2]
            attribs[idx].uv0.x = uv0[0]
            attribs[idx].uv0.y = uv0[1]

        # Create indices array
        for idx, f in enumerate(mesh.faces):
            assert len(f) == 3
            indices[idx * 3 + 0] = f[0]
            indices[idx * 3 + 1] = f[1]
            indices[idx * 3 + 2] = f[2]

        # Persist the raw buffers and AABB so the next load skips assimp.
        with open(path_cache, 'wb') as fcache:
            geom_cache = (bytearray(attribs),
                          bytearray(indices),
                          aabb_min.vals,
                          aabb_max.vals,
                          len(mesh.vertices),
                          len(mesh.faces)*3,
                          )
            cPickle.dump(geom_cache, fcache, -1)

        return Geometry(attribs, indices, AABB(aabb_min, aabb_max))


class FullScreenQuad(Geometry):
    # A quad spanning the whole screen in normalized device coordinates
    # ([-1, 1] on both axes) with UVs in [0, 1].
    def __init__(self):
        attribs = (AttribStruct2D * 4)()
        attribs[0].position = Vec2(x=-1, y=-1)
        attribs[1].position = Vec2(x=1, y=-1)
        attribs[2].position = Vec2(x=1, y=1)
        attribs[3].position = Vec2(x=-1, y=1)
        attribs[0].uv0 = Vec2(x=0, y=0)
        attribs[1].uv0 = Vec2(x=1, y=0)
        attribs[2].uv0 = Vec2(x=1, y=1)
        attribs[3].uv0 = Vec2(x=0, y=1)
        # Two triangles: (0,1,3) and (3,1,2).
        indices = (GLuint * 6)(0, 1, 3, 3, 1, 2)
        super(FullScreenQuad, self).__init__(attribs, indices)
{ "repo_name": "clems71/pogle", "path": "pogle/pogle_mesh.py", "copies": "1", "size": "10505", "license": "mit", "hash": 4080257619794925000, "line_mean": 27.7592067989, "line_max": 136, "alpha_frac": 0.541170871, "autogenerated": false, "ratio": 3.5828785811732606, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9595130409042789, "avg_score": 0.005783808626094437, "num_lines": 353 }
"""Asset Item Dialog""" import logging import sys from PyQt5 import QtWidgets import ui.functions from config import configurations from database.db import Database from helpers.functions import ham from ui.window.ui_asset_item import ( Ui_AssetItemDialog, ) logger = logging.getLogger(__name__) class AssetItemDialog(QtWidgets.QDialog, Ui_AssetItemDialog): def __init__(self, parent=None): super(AssetItemDialog, self).__init__(parent) self.setupUi(self) self.db = Database(use_default_db=True) ui.functions.set_window_icon(self) self._setup_ui_buttons() self._setup_asset_combobox() self._setup_format_combobox() def _setup_asset_combobox(self): """Setup Asset combobox""" asset_categories = configurations.get_setting('Assets', 'CategoryList') for asset_category in asset_categories: self.assetComboBox.addItem(asset_category) def _setup_format_combobox(self): """Setup Asset Format combobox""" asset_categories = configurations.get_setting('Assets', 'CategoryList') for asset_category in asset_categories: self.formatComboBox.addItem(asset_category) def _setup_ui_buttons(self): self.btnCreate.setDisabled(True) self.btnCreate.clicked.connect(ham) self.btnCancel.clicked.connect(self.close) def show_dialog(): dialog = AssetItemDialog() if dialog.exec_(): logger.debug('Creating new asset item...') else: logger.debug('Aborting Create New Asset Item...') if __name__ == '__main__': app = QtWidgets.QApplication(sys.argv) window = AssetItemDialog() window.show() sys.exit(app.exec_())
{ "repo_name": "hueyyeng/AssetsBrowser", "path": "ui/dialog/asset_item.py", "copies": "1", "size": "1716", "license": "mit", "hash": 8341241874736016000, "line_mean": 28.5862068966, "line_max": 79, "alpha_frac": 0.668997669, "autogenerated": false, "ratio": 3.926773455377574, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5095771124377574, "avg_score": null, "num_lines": null }
"""Asset Item Format Dialog""" import logging import sys from PyQt5 import QtWidgets import ui.functions from database.db import Database from helpers.functions import ham from ui.window.ui_asset_item_format import ( Ui_AssetItemFormatDialog, ) logger = logging.getLogger(__name__) class AssetItemFormatDialog(QtWidgets.QDialog, Ui_AssetItemFormatDialog): def __init__(self, parent=None): super(AssetItemFormatDialog, self).__init__(parent) self.setupUi(self) self.db = Database(use_default_db=True) ui.functions.set_window_icon(self) self._setup_ui_buttons() def _setup_ui_buttons(self): self.btnPushAdd.clicked.connect(ham) self.btnPushRemove.clicked.connect(ham) self.btnPushClear.clicked.connect(ham) def show_dialog(): dialog = AssetItemFormatDialog() if not dialog.exec_(): logger.debug('Aborting Manage Asset Item Format...') if __name__ == '__main__': app = QtWidgets.QApplication(sys.argv) window = AssetItemFormatDialog() window.show() sys.exit(app.exec_())
{ "repo_name": "hueyyeng/AssetsBrowser", "path": "ui/dialog/asset_item_format.py", "copies": "1", "size": "1086", "license": "mit", "hash": -2140980871963230200, "line_mean": 25.487804878, "line_max": 73, "alpha_frac": 0.6887661142, "autogenerated": false, "ratio": 3.719178082191781, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4907944196391781, "avg_score": null, "num_lines": null }
#Asset LOAN_PORTFOLIO_CONTROL_ACCOUNT = "14100000" LOAN_FUND_SOURCE = "11220000" LOANS_RECOVERED = "14400000" LOAN_ALLOWANCE_FOR_LOSS_ACCOUNT = "14300000" #Income Receivable LOAN_INCOME_RECEIVABLE_ACCOUNT = "15100000" INTEREST_INCOME_RECEIVABLE_ACCOUNT = "15110000" FEE_INCOME_RECEIVABLE_ACCOUNT = "15120000" PENALTY_INCOME_RECEIVABLE_ACCOUNT = "15130000" #Income INTEREST_INCOME_ACCOUNT = "41100000" FEE_INCOME_ACCOUNT = "41200000" PENALTY_INCOME_ACCOUNT = "41300000" #Expense LOAN_WRITE_OFF_EXPENSE_ACCOUNT = "51200000" LOAN_CHART_OF_ACCOUNTS = ( (LOAN_PORTFOLIO_CONTROL_ACCOUNT, "Portfolio Control"), (LOAN_FUND_SOURCE, "Fund Source"), (LOANS_RECOVERED, "Loans Recovered"), (INTEREST_INCOME_ACCOUNT, "Interest Income Account"), (FEE_INCOME_ACCOUNT, "Fee Income Account"), (PENALTY_INCOME_ACCOUNT, "Penalties Income Account"), (LOAN_ALLOWANCE_FOR_LOSS_ACCOUNT, "Allowance for Loan Loss"), (INTEREST_INCOME_RECEIVABLE_ACCOUNT, "Interest Receivable Account"), (FEE_INCOME_RECEIVABLE_ACCOUNT, "Fee Receivable Account"), (PENALTY_INCOME_RECEIVABLE_ACCOUNT, "Penalty Receivable Account"), (LOAN_WRITE_OFF_EXPENSE_ACCOUNT, "Write-off Account"), ) #Transactions #Withdrawal TRANSACTION_TYPE_LOAN_DISBURSAL = 1 #Income TRANSACTION_TYPE_INTEREST_APPLY = 2 TRANSACTION_TYPE_FEE_APPLY = 3 TRANSACTION_TYPE_PENALTY_APPLY = 4 #Deposit/Repayments TRANSACTION_TYPE_PRINCIPAL_POSTING = 5 TRANSACTION_TYPE_INTEREST_POSTING = 6 TRANSACTION_TYPE_FEE_POSTING = 7 TRANSACTION_TYPE_PENALTY_POSTING = 8 #Expenses TRANSACTION_TYPE_PRINCIPAL_WRITE_OFF = 9 TRANSACTION_TYPE_INTEREST_WRITE_OFF = 10 TRANSACTION_TYPE_FEE_WRITE_OFF = 11 TRANSACTION_TYPE_PENALTY_WRITE_OFF = 12
{ "repo_name": "AjabWorld/ajabsacco", "path": "ajabsacco/core/codes/loans.py", "copies": "1", "size": "1701", "license": "apache-2.0", "hash": 7722886778613149000, "line_mean": 31.7115384615, "line_max": 72, "alpha_frac": 0.7589653145, "autogenerated": false, "ratio": 2.62095531587057, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8694991159256888, "avg_score": 0.03698589422273633, "num_lines": 52 }
ASSET_PATH = "__assets" SEARCH_INDEX_PATH = "__bock_search_index" # Give the "/" of the article root a special name ROOT_NAME = "ROOT" MARKDOWN_FILE_EXTENSION = "md" ABBREVIATED_SHA_SIZE = 8 MAX_LENGTH_OF_LATEST_ARTICLES = 10 MAX_DEPTH_OF_FOLDERS = 3 WORDS_IN_EXCERPT = 64 # Search MAX_SEARCH_RESULTS = 100 MAX_CHARS_IN_SEARCH_RESULTS = 500 MAX_CHARS_SURROUNDING_SEARCH_HIGHLIGHT = 150 MIN_CHARS_IN_SEARCH_TERM = 3 # Server. If not set, (2n + 1) workers are used per the Gunicorn docs MAX_NUMBER_OF_WORKERS = 2 DEFAULT_PORT = 8000 # https://github.com/github/gitignore/blob/master/Global/macOS.gitignore PATHS_TO_REMOVE_MACOS = [ ".apdisk", ".AppleDB", ".AppleDesktop", ".AppleDouble", ".com.apple.timemachine.donotpresent", ".DS_Store", ".fseventsd", ".LSOverride", ".Spotlight-V100", ".TemporaryItems", ".Trashes", ".VolumeIcon.icns", "Icon" ".DocumentRevisions-V100", "Network Trash Folder", "Temporary Items", ] # https://github.com/github/gitignore/blob/master/Global/Windows.gitignore PATHS_TO_REMOVE_WINDOWS = [ "Thumbs.db", "Thumbs.db:encryptable", "ehthumbs.db", "ehthumbs_vista.db", "Desktop.ini", "desktop.ini", ] PATHS_TO_REMOVE = ( PATHS_TO_REMOVE_MACOS # I fucking love Python for this... + PATHS_TO_REMOVE_WINDOWS + [ # This is our stuff ".git", ".gitignore", "node_modules", ASSET_PATH, SEARCH_INDEX_PATH, ] ) # Extensions used to render our articles # TODO: Revisit these... 
MARKDOWN_EXTENSIONS = [ "markdown.extensions.admonition", "markdown.extensions.extra", "markdown.extensions.meta", "markdown.extensions.sane_lists", "markdown.extensions.smarty", "markdown.extensions.toc", "markdown.extensions.wikilinks", "pymdownx.arithmatex", "pymdownx.caret", "pymdownx.critic", "pymdownx.emoji", "pymdownx.extra", "pymdownx.highlight", "pymdownx.inlinehilite", "pymdownx.keys", "pymdownx.magiclink", "pymdownx.mark", "pymdownx.smartsymbols", "pymdownx.tasklist", ] MARKDOWN_EXTENSION_CONFIG = { "pymdownx.highlight": { "css_class": "code-highlight", }, } LOGGING_FORMAT = "%(asctime)s - %(levelname)s - %(message)s" # These need to be unique. Could use @unique Enum's but no need to be that # fancy. Underscores are a sufficient 'namespace' delineator. EXIT_CODE_ARTICLE_ROOT_NOT_FOUND = 3 EXIT_CODE_NOT_A_GIT_REPOSITORY = 2 EXIT_CODE_NOT_AN_ABSOLUTE_PATH = 4 EXIT_CODE_OTHER = 6 # `None` works too. In that case, the `Authorization` header is checked for the # refresh key <3 VALID_REFRESH_ORIGINS = [ "github", ]
{ "repo_name": "afreeorange/bock", "path": "bock/constants.py", "copies": "1", "size": "2689", "license": "mit", "hash": 4135760028740059000, "line_mean": 23.4454545455, "line_max": 79, "alpha_frac": 0.6578653775, "autogenerated": false, "ratio": 2.935589519650655, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4093454897150655, "avg_score": null, "num_lines": null }
"""Assets Browser Custom Widgets""" import logging import platform from pathlib import Path from PyQt5 import QtCore, QtGui, QtWidgets from config.configurations import get_setting from config.constants import IMAGE_FORMAT from helpers.enums import FileManager from helpers.utils import get_file_size, open_file, reveal_in_os from ui.enums import IconRadio, PreviewRadio logger = logging.getLogger(__name__) ICON_THUMBNAILS_MODE = IconRadio(get_setting('Advanced', 'IconThumbnails')) ICON_THUMBNAILS_SIZE = QtCore.QSize(32, 32) MAX_SIZE = PreviewRadio(get_setting('Advanced', 'Preview')).size() class ColumnViewFileIcon(QtWidgets.QFileIconProvider): def icon(self, file_info: QtCore.QFileInfo): if ICON_THUMBNAILS_MODE.value == -3: return QtGui.QIcon() path = file_info.filePath() icon = super().icon(file_info) if path.lower().endswith(IMAGE_FORMAT): file_icon = QtGui.QPixmap(ICON_THUMBNAILS_SIZE) file_icon.load(path) icon = QtGui.QIcon(file_icon) return icon class ColumnViewWidget(QtWidgets.QColumnView): def __init__(self, category, project): super().__init__() default_path = Path(get_setting('Settings', 'ProjectPath')) / project / "Assets" / category logger.debug("Load... 
%s", default_path) self.setAlternatingRowColors(False) self.setResizeGripsVisible(True) self.setColumnWidths([200] * 9) # Column width multiply by the amount of columns self.setEnabled(True) self.fsm = QtWidgets.QFileSystemModel() self.fsm.setReadOnly(False) self.fsm.setIconProvider(ColumnViewFileIcon()) self.setModel(self.fsm) self.setRootIndex(self.fsm.setRootPath(str(default_path))) self.clicked.connect(self.get_file_info) self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) self.customContextMenuRequested.connect(self.context_menu) # File Category Labels self.preview_category_name = QtWidgets.QLabel('Name:') self.preview_category_size = QtWidgets.QLabel('Size:') self.preview_category_type = QtWidgets.QLabel('Type:') self.preview_category_date = QtWidgets.QLabel('Modified:') # Align Right for Prefix Labels align_right = QtCore.Qt.AlignRight | QtCore.Qt.AlignVCenter self.preview_category_name.setAlignment(align_right) self.preview_category_size.setAlignment(align_right) self.preview_category_type.setAlignment(align_right) self.preview_category_date.setAlignment(align_right) # File Attributes Labels self.preview_file_name = QtWidgets.QLabel() self.preview_file_size = QtWidgets.QLabel() self.preview_file_type = QtWidgets.QLabel() self.preview_file_date = QtWidgets.QLabel() # File Attributes Layout and Value for Preview Pane self.sublayout_text = QtWidgets.QGridLayout() self.sublayout_text.addWidget(self.preview_category_name, 0, 0) self.sublayout_text.addWidget(self.preview_category_size, 1, 0) self.sublayout_text.addWidget(self.preview_category_type, 2, 0) self.sublayout_text.addWidget(self.preview_category_date, 3, 0) self.sublayout_text.addWidget(self.preview_file_name, 0, 1) self.sublayout_text.addWidget(self.preview_file_size, 1, 1) self.sublayout_text.addWidget(self.preview_file_type, 2, 1) self.sublayout_text.addWidget(self.preview_file_date, 3, 1) self.sublayout_text.setRowStretch(4, 1) # Arrange layout to upper part of widget # Preview Thumbnails 
self.preview = QtWidgets.QLabel() self.sublayout_thumbnail = QtWidgets.QVBoxLayout() self.sublayout_thumbnail.addWidget(self.preview) self.sublayout_thumbnail.setAlignment(QtCore.Qt.AlignCenter) # Set Preview Pane for QColumnView self.preview_widget = QtWidgets.QWidget() self.preview_pane = QtWidgets.QVBoxLayout(self.preview_widget) self.preview_pane.addLayout(self.sublayout_thumbnail) self.preview_pane.addLayout(self.sublayout_text) self.setPreviewWidget(self.preview_widget) # Custom context menu handling for directory or file def context_menu(self, pos): """Custom context menu. Display different set of menu actions if directory or file. Parameters ---------- pos : QtCore.QPoint """ menu = QtWidgets.QMenu() idx = self.indexAt(pos) is_selection = idx.isValid() # Only show context menu if the cursor position is over a valid item if is_selection: selected_item = self.fsm.index(idx.row(), 0, idx.parent()) file_name = str(self.fsm.fileName(selected_item)) file_name = file_name[:50] + '...' if len(file_name) > 50 else file_name file_path = str(self.fsm.filePath(selected_item)) is_dir = self.fsm.isDir(selected_item) if not is_dir: open_action = menu.addAction('Open ' + file_name) open_action.triggered.connect(lambda: open_file(file_path)) reveal_action = menu.addAction( 'Reveal in ' + getattr(FileManager, platform.system().upper()).value ) reveal_action.triggered.connect(lambda: reveal_in_os(file_path)) menu.exec_(self.mapToGlobal(pos)) self.clearSelection() # Return selected item attributes in Model View for Preview Pane def get_file_info(self, idx): """Get file info. Retrieve file information for display in Preview tab. Parameters ---------- idx : QtCore.QModelIndex QModelIndex using decorator method. Returns ------- str File path. 
""" selected_item = self.fsm.index(idx.row(), 0, idx.parent()) # Retrieve File Attributes file_name = self.fsm.fileName(selected_item) file_size = self.fsm.size(selected_item) file_type = self.fsm.type(selected_item).split(' ')[0] file_date = self.fsm.lastModified(selected_item) file_path = self.fsm.filePath(selected_item) # Assign the File Attributes' string into respective labels self.preview_file_name.setText(file_name) self.preview_file_size.setText(get_file_size(file_size)) self.preview_file_type.setText(file_type.upper() + ' file') self.preview_file_date.setText(file_date.toString('yyyy/MM/dd h:m AP')) # Retrieve image path for Thumbnail Preview image_path = self.fsm.filePath(selected_item) # Generate thumbnails for Preview Pane if image_path.lower().endswith(IMAGE_FORMAT): image = QtGui.QImageReader() image.setFileName(image_path) scaled_size = image.size() scaled_size.scale(MAX_SIZE, MAX_SIZE, QtCore.Qt.KeepAspectRatio) image.setScaledSize(scaled_size) thumbnail = QtGui.QPixmap.fromImage(image.read()) else: file_info = QtCore.QFileInfo(image_path) # Retrieve info like icons, path, etc file_icon = QtWidgets.QFileIconProvider().icon(file_info) thumbnail = file_icon.pixmap(48, 48, QtGui.QIcon.Normal, QtGui.QIcon.Off) self.preview.setPixmap(thumbnail) return file_path
{ "repo_name": "hueyyeng/AssetsBrowser", "path": "helpers/widgets.py", "copies": "1", "size": "7421", "license": "mit", "hash": -1376616365287477200, "line_mean": 40, "line_max": 99, "alpha_frac": 0.6534159817, "autogenerated": false, "ratio": 3.8391101914123125, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4992526173112313, "avg_score": null, "num_lines": null }
"""Assets can be filtered through one or multiple filters, modifying their contents (think minification, compression). """ from __future__ import with_statement import os import subprocess import inspect import shlex import tempfile from .. import six from ..six.moves import map from ..six.moves import zip try: frozenset except NameError: from sets import ImmutableSet as frozenset from ..exceptions import FilterError from ..importlib import import_module from ..utils import hash_func __all__ = ('Filter', 'CallableFilter', 'get_filter', 'register_filter', 'ExternalTool', 'JavaTool') def freezedicts(obj): """Recursively iterate over ``obj``, supporting dicts, tuples and lists, and freeze ``dicts`` such that ``obj`` can be used with hash(). """ if isinstance(obj, (list, tuple)): return type(obj)([freezedicts(sub) for sub in obj]) if isinstance(obj, dict): return frozenset(six.iteritems(obj)) return obj def smartsplit(string, sep): """Split while allowing escaping. So far, this seems to do what I expect - split at the separator, allow escaping via \, and allow the backslash itself to be escaped. One problem is that it can raise a ValueError when given a backslash without a character to escape. I'd really like a smart splitter without manually scan the string. But maybe that is exactly what should be done. """ assert string is not None # or shlex will read from stdin if not six.PY3: # On 2.6, shlex fails miserably with unicode input is_unicode = isinstance(string, unicode) if is_unicode: string = string.encode('utf8') l = shlex.shlex(string, posix=True) l.whitespace += ',' l.whitespace_split = True l.quotes = '' if not six.PY3 and is_unicode: return map(lambda s: s.decode('utf8'), list(l)) else: return list(l) class option(tuple): """Micro option system. I want this to remain small and simple, which is why this class is lower-case. See ``parse_options()`` and ``Filter.options``. 
""" def __new__(cls, initarg, configvar=None, type=None): # If only one argument given, it is the configvar if configvar is None: configvar = initarg initarg = None return tuple.__new__(cls, (initarg, configvar, type)) def parse_options(options): """Parses the filter ``options`` dict attribute. The result is a dict of ``option`` tuples. """ # Normalize different ways to specify the dict items: # attribute: option() # attribute: ('__init__ arg', 'config variable') # attribute: ('config variable,') # attribute: 'config variable' result = {} for internal, external in options.items(): if not isinstance(external, option): if not isinstance(external, (list, tuple)): external = (external,) external = option(*external) result[internal] = external return result class Filter(object): """Base class for a filter. Subclasses should allow the creation of an instance without any arguments, i.e. no required arguments for __init__(), so that the filter can be specified by name only. In fact, the taking of arguments will normally be the exception. """ # Name by which this filter can be referred to. name = None # Options the filter supports. The base class will ensure that # these are both accepted by __init__ as kwargs, and may also be # defined in the environment config, or the OS environment (i.e. # a setup() implementation will be generated which uses # get_config() calls). # # Can look like this: # options = { # 'binary': 'COMPASS_BINARY', # 'plugins': option('COMPASS_PLUGINS', type=list), # } options = {} # The maximum debug level under which this filter should run. # Most filters only run in production mode (debug=False), so this is the # default value. However, a filter like ``cssrewrite`` needs to run in # ``merge`` mode. Further, compiler-type filters (like less/sass) would # say ``None``, indicating that they have to run **always**. # There is an interesting and convenient twist here: If you use such a # filter, the bundle will automatically be merged, even in debug mode. 
# It couldn't work any other way of course, the output needs to be written # somewhere. If you have other files that do not need compiling, and you # don't want them pulled into the merge, you can use a nested bundle with # it's own output target just for those files that need the compilation. max_debug_level = False def __init__(self, **kwargs): self.ctx = None self._options = parse_options(self.__class__.options) # Resolve options given directly to the filter. This # allows creating filter instances with options that # deviate from the global default. # TODO: can the metaclass generate a init signature? for attribute, (initarg, _, _) in self._options.items(): arg = initarg if initarg is not None else attribute if arg in kwargs: setattr(self, attribute, kwargs.pop(arg)) else: setattr(self, attribute, None) if kwargs: raise TypeError('got an unexpected keyword argument: %s' % list(kwargs.keys())[0]) def __eq__(self, other): if isinstance(other, Filter): return self.id() == other.id() return NotImplemented def set_context(self, ctx): """This is called before the filter is used.""" self.ctx = ctx def get_config(self, setting=False, env=None, require=True, what='dependency', type=None): """Helper function that subclasses can use if they have dependencies which they cannot automatically resolve, like an external binary. Using this function will give the user the ability to resolve these dependencies in a common way through either a Django setting, or an environment variable. You may specify different names for ``setting`` and ``env``. If only the former is given, the latter is considered to use the same name. If either argument is ``False``, the respective source is not used. By default, if the value is not found, an error is raised. If ``required`` is ``False``, then ``None`` is returned instead. ``what`` is a string that is used in the exception message; you can use it to give the user an idea what he is lacking, i.e. 'xyz filter binary'. 
Specifying values via the OS environment is obviously limited. If you are expecting a special type, you may set the ``type`` argument and a value from the OS environment will be parsed into that type. Currently only ``list`` is supported. """ assert type in (None, list), "%s not supported for type" % type if env is None: env = setting assert setting or env value = None if not setting is False: value = self.ctx.get(setting, None) if value is None and not env is False: value = os.environ.get(env) if value is not None: if not six.PY3: # TODO: What charset should we use? What does Python 3 use? value = value.decode('utf8') if type == list: value = smartsplit(value, ',') if value is None and require: err_msg = '%s was not found. Define a ' % what options = [] if setting: options.append('%s setting' % setting) if env: options.append('%s environment variable' % env) err_msg += ' or '.join(options) raise EnvironmentError(err_msg) return value def unique(self): """This function is used to determine if two filter instances represent the same filter and can be merged. Only one of the filters will be applied. If your filter takes options, you might want to override this and return a hashable object containing all the data unique to your current instance. This will allow your filter to be applied multiple times with differing values for those options. """ return False def id(self): """Unique identifier for the filter instance. Among other things, this is used as part of the caching key. It should therefore not depend on instance data, but yield the same result across multiple python invocations. """ # freezedicts() allows filters to return dict objects as part # of unique(), which are not per-se supported by hash(). return hash_func((self.name, freezedicts(self.unique()),)) def setup(self): """Overwrite this to have the filter do initial setup work, like determining whether required modules are available etc. 
Since this will only be called when the user actually attempts to use the filter, you can raise an error here if dependencies are not matched. Note: In most cases, it should be enough to simply define the ``options`` attribute. If you override this method and want to use options as well, don't forget to call super(). Note: This may be called multiple times if one filter instance is used with different asset environment instances. """ for attribute, (_, configvar, type) in self._options.items(): if not configvar: continue if getattr(self, attribute) is None: # No value specified for this filter instance , # specifically attempt to load it from the environment. setattr(self, attribute, self.get_config(setting=configvar, require=False, type=type)) def input(self, _in, out, **kw): """Implement your actual filter here. This will be called for every source file. """ def output(self, _in, out, **kw): """Implement your actual filter here. This will be called for every output file. """ def open(self, out, source_path, **kw): """Implement your actual filter here. This is like input(), but only one filter may provide this. Use this if your filter needs to read from the source file directly, and would ignore any processing by earlier filters. """ def concat(self, out, hunks, **kw): """Implement your actual filter here. Will be called once between the input() and output() steps, and should concat all the source files (given as hunks) together, writing the result to the ``out`` stream. Only one such filter is allowed. """ def get_additional_cache_keys(self, **kw): """Additional cache keys dependent on keyword arguments. If your filter's output is dependent on some or all of the keyword arguments, you can return these arguments here as a list. This will make sure the caching behavior is correct. For example, the CSSRewrite filter depends not only on the contents of the file it applies to, but also the output path of the final file. 
If the CSSRewrite filter doesn't correctly override this method, a certain output file with a certain base directory might potentially get a CSSRewriten file from cache that is meant for an output file in a different base directory. """ return [] # We just declared those for demonstration purposes del input del output del open del concat class CallableFilter(Filter): """Helper class that create a simple filter wrapping around callable. """ def __init__(self, callable): super(CallableFilter, self).__init__() self.callable = callable def unique(self): # XXX This means the cache will never work for those filters. # This is actually a deeper problem: Originally unique() was # used to remove duplicate filters. Now it is also for the cache # key. The latter would benefit from ALL the filter's options being # included. Possibly this might just be what we should do, at the # expense of the "remove duplicates" functionality (because it # is never really needed anyway). It's also illdefined when a filter # should be a removable duplicate - most options probably SHOULD make # a filter no longer being considered duplicate. return self.callable def output(self, _in, out, **kw): return self.callable(_in, out) class ExternalToolMetaclass(type): def __new__(cls, name, bases, attrs): # First, determine the method defined for this very class. We # need to pop the ``method`` attribute from ``attrs``, so that we # create the class without the argument; allowing us then to look # at a ``method`` attribute that parents may have defined. # # method defaults to 'output' if argv is set, to "implement # no default method" without an argv. if not 'method' in attrs and 'argv' in attrs: chosen = 'output' else: chosen = attrs.pop('method', False) # Create the class first, since this helps us look at any # method attributes defined in the parent hierarchy. 
klass = type.__new__(cls, name, bases, attrs) parent_method = getattr(klass, 'method', None) # Assign the method argument that we initially popped again. klass.method = chosen try: # Don't do anything for this class itself ExternalTool except NameError: return klass # If the class already has a method attribute, this indicates # that a parent class already dealt with it and enabled/disabled # the methods, and we won't again. if parent_method is not None: return klass methods = ('output', 'input', 'open') if chosen is not None: assert not chosen or chosen in methods, \ '%s not a supported filter method' % chosen # Disable those methods not chosen. for m in methods: if m != chosen: # setdefault = Don't override actual methods the # class has in fact provided itself. if not m in klass.__dict__: setattr(klass, m, None) return klass class ExternalTool(six.with_metaclass(ExternalToolMetaclass, Filter)): """Subclass that helps creating filters that need to run an external program. You are encouraged to use this when possible, as it helps consistency. In the simplest possible case, subclasses only have to define one or more of the following attributes, without needing to write any code: ``argv`` The command line that will be passed to subprocess.Popen. New-style format strings can be used to access all kinds of data: The arguments to the filter method, as well as the filter instance via ``self``: argv = ['{self.binary}', '--input', '{source_path}', '--cwd', '{self.env.directory}'] ``method`` The filter method to implement. One of ``input``, ``output`` or ``open``. 
""" argv = [] method = None def open(self, out, source_path, **kw): self._evaluate([out, source_path], kw, out) def input(self, _in, out, **kw): self._evaluate([_in, out], kw, out, _in) def output(self, _in, out, **kw): self._evaluate([_in, out], kw, out, _in) def _evaluate(self, args, kwargs, out, data=None): # For now, still support Python 2.5, but the format strings in argv # are not supported (making the feature mostly useless). For this # reason none of the builtin filters is using argv currently. if hasattr(str, 'format'): # Add 'self' to the keywords available in format strings kwargs = kwargs.copy() kwargs.update({'self': self}) # Resolve all the format strings in argv def replace(arg): try: return arg.format(*args, **kwargs) except KeyError as e: # Treat "output" and "input" variables special, they # are dealt with in :meth:`subprocess` instead. if e.args[0] not in ('input', 'output'): raise return arg argv = list(map(replace, self.argv)) else: argv = self.argv self.subprocess(argv, out, data=data) @classmethod def subprocess(cls, argv, out, data=None): """Execute the commandline given by the list in ``argv``. If a byestring is given via ``data``, it is piped into data. ``argv`` may contain two placeholders: ``{input}`` If given, ``data`` will be written to a temporary file instead of data. The placeholder is then replaced with that file. ``{output}`` Will be replaced by a temporary filename. The return value then will be the content of this file, rather than stdout. 
""" class tempfile_on_demand(object): def __repr__(self): if not hasattr(self, 'filename'): fd, self.filename = tempfile.mkstemp() os.close(fd) return self.filename @property def created(self): return hasattr(self, 'filename') # Replace input and output placeholders input_file = tempfile_on_demand() output_file = tempfile_on_demand() if hasattr(str, 'format'): # Support Python 2.5 without the feature argv = list(map(lambda item: item.format(input=input_file, output=output_file), argv)) try: data = (data.read() if hasattr(data, 'read') else data) if data is not None: data = data.encode('utf-8') if input_file.created: if data is None: raise ValueError( '{input} placeholder given, but no data passed') with open(input_file.filename, 'wb') as f: f.write(data) # No longer pass to stdin data = None try: proc = subprocess.Popen( argv, # we cannot use the in/out streams directly, as they might be # StringIO objects (which are not supported by subprocess) stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, shell=os.name == 'nt') except OSError: raise FilterError('Program file not found: %s.' % argv[0]) stdout, stderr = proc.communicate(data) if proc.returncode: raise FilterError( '%s: subprocess returned a non-success result code: ' '%s, stdout=%s, stderr=%s' % ( cls.name or cls.__name__, proc.returncode, stdout, stderr)) else: if output_file.created: with open(output_file.filename, 'rb') as f: out.write(f.read().decode('utf-8')) else: out.write(stdout.decode('utf-8')) finally: if output_file.created: os.unlink(output_file.filename) if input_file.created: os.unlink(input_file.filename) class JavaTool(ExternalTool): """Helper class for filters which are implemented as Java ARchives (JARs). The subclass is expected to define a ``jar`` attribute in :meth:`setup`. If the ``argv`` definition is used, it is expected to contain only the arguments to be passed to the Java tool. The path to the java binary and the jar file are added by the base class. 
""" method = None def setup(self): super(JavaTool, self).setup() # We can reasonably expect that java is just on the path, so # don't require it, but hope for the best. path = self.get_config(env='JAVA_HOME', require=False) if path is not None: self.java_bin = os.path.join(path, 'bin/java') else: self.java_bin = 'java' def subprocess(self, args, out, data=None): ExternalTool.subprocess( [self.java_bin, '-jar', self.jar] + args, out, data) _FILTERS = {} def register_filter(f): """Add the given filter to the list of know filters. """ if not issubclass(f, Filter): raise ValueError("Must be a subclass of 'Filter'") if not f.name: raise ValueError('Must have a name') _FILTERS[f.name] = f def get_filter(f, *args, **kwargs): """Resolves ``f`` to a filter instance. Different ways of specifying a filter are supported, for example by giving the class, or a filter name. *args and **kwargs are passed along to the filter when it's instantiated. """ if isinstance(f, Filter): # Don't need to do anything. assert not args and not kwargs return f elif isinstance(f, six.string_types): if f in _FILTERS: klass = _FILTERS[f] else: raise ValueError('No filter \'%s\'' % f) elif inspect.isclass(f) and issubclass(f, Filter): klass = f elif callable(f): assert not args and not kwargs return CallableFilter(f) else: raise ValueError('Unable to resolve to a filter: %s' % f) return klass(*args, **kwargs) CODE_FILES = ['.py', '.pyc', '.so'] def is_module(name): """Is this a recognized module type? Does this name end in one of the recognized CODE_FILES extensions? The file is assumed to exist, as unique_modules has found it using an os.listdir() call. returns the name with the extension stripped (the module name) or None if the name does not appear to be a module """ for ext in CODE_FILES: if name.endswith(ext): return name[:-len(ext)] def is_package(directory): """Is the (fully qualified) directory a python package? 
""" for ext in ['.py', '.pyc']: if os.path.exists(os.path.join(directory, '__init__'+ext)): return True def unique_modules(directory): """Find all unique module names within a directory For each entry in the directory, check if it is a source code file-type (using is_code(entry)), or a directory with a source-code file-type at entry/__init__.py[c]? Filter the results to only produce a single entry for each module name. Filter the results to not include '_' prefixed names. yields each entry as it is encountered """ found = {} for entry in sorted(os.listdir(directory)): if entry.startswith('_'): continue module = is_module(entry) if module: if module not in found: found[module] = entry yield module elif is_package(os.path.join(directory, entry)): if entry not in found: found[entry] = entry yield entry def load_builtin_filters(): from os import path import warnings current_dir = path.dirname(__file__) for name in unique_modules(current_dir): module_name = 'weppy_assets.webassets.filter.%s' % name try: module = import_module(module_name) except Exception as e: warnings.warn('Error while loading builtin filter ' 'module \'%s\': %s' % (module_name, e)) else: for attr_name in dir(module): attr = getattr(module, attr_name) if inspect.isclass(attr) and issubclass(attr, Filter): if not attr.name: # Skip if filter has no name; those are # considered abstract base classes. continue register_filter(attr) load_builtin_filters()
{ "repo_name": "gi0baro/weppy-assets", "path": "weppy_assets/webassets/filter/__init__.py", "copies": "1", "size": "24918", "license": "bsd-3-clause", "hash": 248853785013087680, "line_mean": 35.3766423358, "line_max": 81, "alpha_frac": 0.5971586805, "autogenerated": false, "ratio": 4.5578928114139385, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0031069830556850067, "num_lines": 685 }
"""Assets can be filtered through one or multiple filters, modifying their contents (think minification, compression). """ from __future__ import with_statement import os, subprocess import inspect import shlex import tempfile try: frozenset except NameError: from sets import ImmutableSet as frozenset from webassets.exceptions import FilterError from webassets.importlib import import_module __all__ = ('Filter', 'CallableFilter', 'get_filter', 'register_filter', 'ExternalTool', 'JavaTool') def freezedicts(obj): """Recursively iterate over ``obj``, supporting dicts, tuples and lists, and freeze ``dicts`` such that ``obj`` can be used with hash(). """ if isinstance(obj, (list, tuple)): return type(obj)([freezedicts(sub) for sub in obj]) if isinstance(obj, dict): return frozenset(obj.iteritems()) return obj def smartsplit(string, sep): """Split while allowing escaping. So far, this seems to do what I expect - split at the separator, allow escaping via \, and allow the backslash itself to be escaped. One problem is that it can raise a ValueError when given a backslash without a character to escape. I'd really like a smart splitter without manually scan the string. But maybe that is exactly what should be done. """ assert string is not None # or shlex will read from stdin # shlex fails miserably with unicode input is_unicode = isinstance(sep, unicode) if is_unicode: string = string.encode('utf8') l = shlex.shlex(string, posix=True) l.whitespace += ',' l.whitespace_split = True l.quotes = '' if is_unicode: return map(lambda s: s.decode('utf8'), list(l)) else: return list(l) class option(tuple): """Micro option system. I want this to remain small and simple, which is why this class is lower-case. See ``parse_options()`` and ``Filter.options``. 
""" def __new__(cls, initarg, configvar=None, type=None): if configvar is None: # If only one argument given, it is the configvar configvar = initarg initarg = None return tuple.__new__(cls, (initarg, configvar, type)) def parse_options(options): """Parses the filter ``options`` dict attribute. The result is a dict of ``option`` tuples. """ # Normalize different ways to specify the dict items: # attribute: option() # attribute: ('__init__ arg', 'config variable') # attribute: ('config variable,') # attribute: 'config variable' result = {} for internal, external in options.items(): if not isinstance(external, option): if not isinstance(external, (list, tuple)): external = (external,) external = option(*external) result[internal] = external return result class Filter(object): """Base class for a filter. Subclasses should allow the creation of an instance without any arguments, i.e. no required arguments for __init__(), so that the filter can be specified by name only. In fact, the taking of arguments will normally be the exception. """ # Name by which this filter can be referred to. name = None # Options the filter supports. The base class will ensure that # these are both accepted by __init__ as kwargs, and may also be # defined in the environment config, or the OS environment (i.e. # a setup() implementation will be generated which uses # get_config() calls). # # Can look like this: # options = { # 'binary': 'COMPASS_BINARY', # 'plugins': option('COMPASS_PLUGINS', type=list), # } options = {} # The maximum debug level under which this filter should run. # Most filters only run in production mode (debug=False), so this is the # default value. However, a filter like ``cssrewrite`` needs to run in # ``merge`` mode. Further, compiler-type filters (like less/sass) would # say ``None``, indicating that they have to run **always**. # There is an interesting and convenient twist here: If you use such a # filter, the bundle will automatically be merged, even in debug mode. 
# It couldn't work any other way of course, the output needs to be written # somewhere. If you have other files that do not need compiling, and you # don't want them pulled into the merge, you can use a nested bundle with # it's own output target just for those files that need the compilation. max_debug_level = False def __init__(self, **kwargs): self.env = None self._options = parse_options(self.__class__.options) # Resolve options given directly to the filter. This # allows creating filter instances with options that # deviate from the global default. # TODO: can the metaclass generate a init signature? for attribute, (initarg, _, _) in self._options.items(): arg = initarg if initarg is not None else attribute if arg in kwargs: setattr(self, attribute, kwargs.pop(arg)) else: setattr(self, attribute, None) if kwargs: raise TypeError('got an unexpected keyword argument: %s' % kwargs.keys()[0]) def __hash__(self): return self.id() def __cmp__(self, other): if isinstance(other, Filter): return cmp(self.id(), other.id()) return NotImplemented def set_environment(self, env): """This is called before the filter is used.""" self.env = env def get_config(self, setting=False, env=None, require=True, what='dependency', type=None): """Helper function that subclasses can use if they have dependencies which they cannot automatically resolve, like an external binary. Using this function will give the user the ability to resolve these dependencies in a common way through either a Django setting, or an environment variable. You may specify different names for ``setting`` and ``env``. If only the former is given, the latter is considered to use the same name. If either argument is ``False``, the respective source is not used. By default, if the value is not found, an error is raised. If ``required`` is ``False``, then ``None`` is returned instead. ``what`` is a string that is used in the exception message; you can use it to give the user an idea what he is lacking, i.e. 
'xyz filter binary'. Specifying values via the OS environment is obviously limited. If you are expecting a special type, you may set the ``type`` argument and a value from the OS environment will be parsed into that type. Currently only ``list`` is supported. """ assert type in (None, list), "%s not supported for type" % type if env is None: env = setting assert setting or env value = None if not setting is False: value = self.env.config.get(setting, None) if value is None and not env is False: value = os.environ.get(env) if value and type == list: value = smartsplit(value, ',') if value is None and require: err_msg = '%s was not found. Define a ' % what options = [] if setting: options.append('%s setting' % setting) if env: options.append('%s environment variable' % env) err_msg += ' or '.join(options) raise EnvironmentError(err_msg) return value def unique(self): """This function is used to determine if two filter instances represent the same filter and can be merged. Only one of the filters will be applied. If your filter takes options, you might want to override this and return a hashable object containing all the data unique to your current instance. This will allow your filter to be applied multiple times with differing values for those options. """ return False def id(self): """Unique identifier for the filter instance. Among other things, this is used as part of the caching key. It should therefore not depend on instance data, but yield the same result across multiple python invocations. """ # freezedicts() allows filters to return dict objects as part # of unique(), which are not per-se supported by hash(). return hash((self.name, freezedicts(self.unique()),)) def setup(self): """Overwrite this to have the filter do initial setup work, like determining whether required modules are available etc. Since this will only be called when the user actually attempts to use the filter, you can raise an error here if dependencies are not matched. 
Note: In most cases, it should be enough to simply define the ``options`` attribute. If you override this method and want to use options as well, don't forget to call super(). Note: This may be called multiple times if one filter instance is used with different asset environment instances. """ for attribute, (_, configvar, type) in self._options.items(): if not configvar: continue if getattr(self, attribute) is None: # No value specified for this filter instance , # specifically attempt to load it from the environment. setattr(self, attribute, self.get_config(setting=configvar, require=False, type=type)) def input(self, _in, out, **kw): """Implement your actual filter here. This will be called for every source file. """ def output(self, _in, out, **kw): """Implement your actual filter here. This will be called for every output file. """ def open(self, out, source_path, **kw): """Implement your actual filter here. This is like input(), but only one filter may provide this. Use this if your filter needs to read from the source file directly, and would ignore any processing by earlier filters. """ def concat(self, out, hunks, **kw): """Implement your actual filter here. Will be called once between the input() and output() steps, and should concat all the source files (given as hunks) together, and return a string. Only one such filter is allowed. """ # We just declared those for demonstration purposes del input del output del open del concat class CallableFilter(Filter): """Helper class that create a simple filter wrapping around callable. """ def __init__(self, callable): super(CallableFilter, self).__init__() self.callable = callable def unique(self): # XXX This means the cache will never work for those filters. # This is actually a deeper problem: Originally unique() was # used to remove duplicate filters. Now it is also for the cache # key. The latter would benefit from ALL the filter's options being # included. 
Possibly this might just be what we should do, at the # expense of the "remove duplicates" functionality (because it # is never really needed anyway). It's also illdefined when a filter # should be a removable duplicate - most options probably SHOULD make # a filter no longer being considered duplicate. return self.callable def output(self, _in, out, **kw): return self.callable(_in, out) class ExternalTool(Filter): """Subclass that helps creating filters that need to run an external program. You are encouraged to use this when possible, as it helps consistency. In the simplest possible case, subclasses only have to define one or more of the following attributes, without needing to write any code: ``argv`` The command line that will be passed to subprocess.Popen. New-style format strings can be used to access all kinds of data: The arguments to the filter method, as well as the filter instance via ``self``: argv = ['{self.binary}', '--input', '{source_path}', '--cwd', '{self.env.directory}'] ``method`` The filter method to implement. One of ``input``, ``output`` or ``open``. """ argv = [] method = None class __metaclass__(type): def __new__(cls, name, bases, attrs): # First, determine the method defined for this very class. We # need to pop the ``method`` attribute from ``attrs``, so that we # create the class without the argument; allowing us then to look # at a ``method`` attribute that parents may have defined. # # method defaults to 'output' if argv is set, to "implement # no default method" without an argv. if not 'method' in attrs and 'argv' in attrs: chosen = 'output' else: chosen = attrs.pop('method', False) # Create the class first, since this helps us look at any # method attributes defined in the parent hierarchy. klass = type.__new__(cls, name, bases, attrs) parent_method = getattr(klass, 'method', None) # Assign the method argument that we initially popped again. 
klass.method = chosen try: # Don't do anything for this class itself ExternalTool except NameError: return klass # If the class already has a method attribute, this indicates # that a parent class already dealt with it and enabled/disabled # the methods, and we won't again. if parent_method is not None: return klass methods = ('output', 'input', 'open') if chosen is not None: assert not chosen or chosen in methods, \ '%s not a supported filter method' % chosen # Disable those methods not chosen. for m in methods: if m != chosen: # setdefault = Don't override actual methods the # class has in fact provided itself. if not m in klass.__dict__: setattr(klass, m, None) return klass def open(self, out, source_path, **kw): self._evaluate([out, source_path], kw, out) def input(self, _in, out, **kw): self._evaluate([_in, out], kw, out, _in) def output(self, _in, out, **kw): self._evaluate([_in, out], kw, out, _in) def _evaluate(self, args, kwargs, out, data=None): # For now, still support Python 2.5, but the format strings in argv # are not supported (making the feature mostly useless). For this # reason none of the builtin filters is using argv currently. if hasattr(str, 'format'): # Add 'self' to the keywords available in format strings kwargs = kwargs.copy() kwargs.update({'self': self}) # Resolve all the format strings in argv def replace(arg): try: return arg.format(*args, **kwargs) except KeyError, e: # Treat "output" and "input" variables special, they # are dealt with in :meth:`subprocess` instead. if e.args[0] not in ('input', 'output'): raise return arg argv = map(replace, self.argv) else: argv = self.argv self.subprocess(argv, out, data=data) @classmethod def subprocess(cls, argv, out, data=None): """Execute the commandline given by the list in ``argv``. If a byestring is given via ``data``, it is piped into data. ``argv`` may contain two placeholders: ``{input}`` If given, ``data`` will be written to a temporary file instead of data. 
The placeholder is then replaced with that file. ``{output}`` Will be replaced by a temporary filename. The return value then will be the content of this file, rather than stdout. """ class tempfile_on_demand(object): def __repr__(self): if not hasattr(self, 'filename'): self.fd, self.filename = tempfile.mkstemp() return self.filename @property def created(self): return hasattr(self, 'filename') # Replace input and output placeholders input_file = tempfile_on_demand() output_file = tempfile_on_demand() if hasattr(str, 'format'): # Support Python 2.5 without the feature argv = map(lambda item: item.format(input=input_file, output=output_file), argv) try: if input_file.created: if not data: raise ValueError( '{input} placeholder given, but no data passed') with os.fdopen(input_file.fd, 'wb') as f: f.write(data.read() if hasattr(data, 'read') else data) # No longer pass to stdin data = None proc = subprocess.Popen( argv, # we cannot use the in/out streams directly, as they might be # StringIO objects (which are not supported by subprocess) stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = proc.communicate( data.read() if hasattr(data, 'read') else data) if proc.returncode: raise FilterError( '%s: subprocess returned a non-success result code: ' '%s, stdout=%s, stderr=%s' % ( cls.name or cls.__name__, proc.returncode, stdout, stderr)) else: if output_file.created: with os.fdopen(output_file.fd, 'rb') as f: out.write(f.read()) else: out.write(stdout) finally: if output_file.created: os.unlink(output_file.filename) if input_file.created: os.unlink(input_file.filename) class JavaTool(ExternalTool): """Helper class for filters which are implemented as Java ARchives (JARs). The subclass is expected to define a ``jar`` attribute in :meth:`setup`. If the ``argv`` definition is used, it is expected to contain only the arguments to be passed to the Java tool. The path to the java binary and the jar file are added by the base class. 
""" method = None def setup(self): super(JavaTool, self).setup() # We can reasonably expect that java is just on the path, so # don't require it, but hope for the best. path = self.get_config(env='JAVA_HOME', require=False) if path is not None: self.java_bin = os.path.join(path, 'bin/java') else: self.java_bin = 'java' def subprocess(self, args, out, data=None): ExternalTool.subprocess( [self.java_bin, '-jar', self.jar] + args, out, data) _FILTERS = {} def register_filter(f): """Add the given filter to the list of know filters. """ if not issubclass(f, Filter): raise ValueError("Must be a subclass of 'Filter'") if not f.name: raise ValueError('Must have a name') _FILTERS[f.name] = f def get_filter(f, *args, **kwargs): """Resolves ``f`` to a filter instance. Different ways of specifying a filter are supported, for example by giving the class, or a filter name. *args and **kwargs are passed along to the filter when it's instantiated. """ if isinstance(f, Filter): # Don't need to do anything. 
assert not args and not kwargs return f elif isinstance(f, basestring): if f in _FILTERS: klass = _FILTERS[f] else: raise ValueError('No filter \'%s\'' % f) elif inspect.isclass(f) and issubclass(f, Filter): klass = f elif callable(f): assert not args and not kwargs return CallableFilter(f) else: raise ValueError('Unable to resolve to a filter: %s' % f) return klass(*args, **kwargs) def load_builtin_filters(): from os import path import warnings current_dir = path.dirname(__file__) for entry in os.listdir(current_dir): if entry.endswith('.py'): name = path.splitext(entry)[0] elif path.exists(path.join(current_dir, entry, '__init__.py')): name = entry else: continue module_name = 'webassets.filter.%s' % name try: module = import_module(module_name) except Exception, e: warnings.warn('Error while loading builtin filter ' 'module \'%s\': %s' % (module_name, e)) else: for attr_name in dir(module): attr = getattr(module, attr_name) if inspect.isclass(attr) and issubclass(attr, Filter): if not attr.name: # Skip if filter has no name; those are # considered abstract base classes. continue register_filter(attr) load_builtin_filters()
{ "repo_name": "torchbox/webassets", "path": "src/webassets/filter/__init__.py", "copies": "1", "size": "22108", "license": "bsd-2-clause", "hash": -6268759008490456000, "line_mean": 35.8466666667, "line_max": 83, "alpha_frac": 0.5933598697, "autogenerated": false, "ratio": 4.585770587015142, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0022587157787714086, "num_lines": 600 }
"""Assets can be filtered through one or multiple filters, modifying their contents (think minification, compression). """ import os, re import inspect __all__ = ('Filter', 'CallableFilter', 'get_filter', 'register_filter',) class NameGeneratingMeta(type): """Metaclass that will generate a "name" attribute based on the class name if none is given. """ def __new__(cls, name, bases, attrs): try: Filter except NameError: # Don't generate a name for the baseclass itself. pass else: if not 'name' in attrs: filter_name = name if name.endswith('Filter'): filter_name = filter_name[:-6] filter_name = filter_name.lower() attrs['name'] = filter_name return type.__new__(cls, name, bases, attrs) class Filter(object): """Base class for a filter. Subclasses should allow the creation of an instance without any arguments, i.e. no required arguments for __init__(), so that the filter can be specified by name only. In fact, the taking of arguments will normally be the exception. """ __metaclass__ = NameGeneratingMeta # Name by which this filter can be referred to. Will be generated # automatically for subclasses if not explicitly given. name = None def __init__(self): self.env = None def __hash__(self): return self.id() def __cmp__(self, other): if isinstance(other, Filter): return cmp(self.id(), other.id()) return NotImplemented def set_environment(self, env): """This is called just before the filter is used. """ if not self.env or self.env != env: self.env = env self.setup() def get_config(self, setting=False, env=None, require=True, what='dependency'): """Helper function that subclasses can use if they have dependencies which they cannot automatically resolve, like an external binary. Using this function will give the user the ability to resolve these dependencies in a common way through either a Django setting, or an environment variable. You may specify different names for ``setting`` and ``env``. If only the former is given, the latter is considered to use the same name. 
If either argument is ``False``, the respective source is not used. By default, if the value is not found, an error is raised. If ``required`` is ``False``, then ``None`` is returned instead. ``what`` is a string that is used in the exception message; you can use it to give the user an idea what he is lacking, i.e. 'xyz filter binary' """ if env is None: env = setting assert setting or env value = None if not setting is False: value = self.env.config.get(setting, None) if value is None and not env is False: value = os.environ.get(env) if value is None and require: err_msg = '%s was not found. Define a ' % what options = [] if setting: options.append('%s setting' % setting) if env: options.append('%s environment variable' % env) err_msg += ' or '.join(options) raise EnvironmentError(err_msg) return value def unique(self): """This function is used to determine if two filter instances represent the same filter and can be merged. Only one of the filters will be applied. If your filter takes options, you might want to override this and return a hashable object containing all the data unique to your current instance. This will allow your filter to be applied multiple times with differing values for those options. """ return False def id(self): """Unique identifer for the filter instance. Among other things, this is used as part of the caching key. It should therefore not depend on instance data, but yield the same result across multiple python invocations. """ return hash((self.name, self.unique(),)) def setup(self): """Overwrite this to have the filter to initial setup work, like determining whether required modules are available etc. Since this will only be called when the user actually attempts to use the filter, you can raise an error here if dependencies are not matched. Note: This may be called multiple times if one filter instance is used with different asset environment instances. """ def input(self, _in, out): """Implement your actual filter here. 
This will be called for every source file. """ def output(self, _in, out): """Implement your actual filter here. This will be called for every output file. """ # We just declared those for demonstration purposes del input del output class CallableFilter(Filter): """Helper class that create a simple filter wrapping around callable. """ def __init__(self, callable): self.callable = callable def unique(self): return self.callable def output(self, _in, out): return self.callable(_in, out) _FILTERS = {} def register_filter(f): """Add the given filter to the list of know filters. """ if not issubclass(f, Filter): raise ValueError("Must be a subclass of 'Filter'") if not f.name: raise ValueError('Must have a name') if f.name in _FILTERS: raise KeyError('Filter with name %s already registered' % f.name) if not hasattr(f, 'input') and not hasattr(f, 'output'): raise TypeError('Filter lacks both an input() and output() method: %s' % f) _FILTERS[f.name] = f def get_filter(f, *args, **kwargs): """Resolves ``f`` to a filter instance. Different ways of specifying a filter are supported, for example by giving the class, or a filter name. *args and **kwargs are passed along to the filter when it's instantiated. """ if isinstance(f, Filter): # Don't need to do anything. 
assert not args and not kwargs return f elif isinstance(f, basestring): if f in _FILTERS: klass = _FILTERS[f] else: raise ValueError('No filter \'%s\'' % f) elif inspect.isclass(f) and issubclass(f, Filter): klass = f elif callable(f): assert not args and not kwargs return CallableFilter(f) else: raise ValueError('Unable to resolve to a filter: %s' % f) return klass(*args, **kwargs) def load_builtin_filters(): from os import path import warnings current_dir = path.dirname(__file__) for entry in os.listdir(current_dir): if entry.endswith('.py'): name = path.splitext(entry)[0] elif path.exists(path.join(current_dir, entry, '__init__.py')): name = entry else: continue module_name = 'webassets.filter.%s' % name try: module = __import__(module_name, {}, {}, ['']) except Exception, e: warnings.warn('Error while loading builtin filter ' 'module \'%s\': %s' % (module_name, e)) else: for attr_name in dir(module): attr = getattr(module, attr_name) if inspect.isclass(attr) and issubclass(attr, Filter): if not attr.name: # Skip if filter has no name; those are # considered abstract base classes. continue register_filter(attr) load_builtin_filters()
{ "repo_name": "rs/webassets", "path": "src/webassets/filter/__init__.py", "copies": "1", "size": "8215", "license": "bsd-2-clause", "hash": -2822253581852480000, "line_mean": 30.9919678715, "line_max": 83, "alpha_frac": 0.5776019477, "autogenerated": false, "ratio": 4.683580387685291, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5761182335385291, "avg_score": null, "num_lines": null }
# Asset selection window

from gi.repository import Gtk, Gdk


class AssetSelectionWindow(Gtk.Window):
    # Three linked lists (base -> quote -> exchange): selecting a row in one
    # list repopulates the next one via the "changed" signal handlers below.

    def __init__(self, parent):
        """Build the three-pane picker; *parent* is the indicator object."""
        Gtk.Window.__init__(self, title="Select Asset")
        self.parent = parent
        self.set_keep_above(True)
        self.set_border_width(5)
        self.set_position(Gtk.WindowPosition.MOUSE)
        self.connect('key-release-event', self._on_key_release)
        # self.set_modal(True)

        grid = Gtk.Grid()
        grid.set_column_homogeneous(True)
        grid.set_row_homogeneous(True)
        self.add(grid)

        # Bases are known up-front; quote and exchange stores are filled
        # lazily by the selection-changed handlers.
        self.base_store = Gtk.ListStore(str)
        for item in self.parent.coin.bases:
            self.base_store.append([item])
        self.base_store.set_sort_column_id(0, Gtk.SortType.ASCENDING)

        self.quote_store = Gtk.ListStore(str)
        self.ex_store = Gtk.ListStore(str, str)  # (display name, exchange code)

        self.view_bases = Gtk.TreeView(self.base_store)
        self.view_quotes = Gtk.TreeView(self.quote_store)
        self.view_exchanges = Gtk.TreeView(self.ex_store)

        self.view_bases.get_selection().connect("changed", self._base_changed)
        self.view_quotes.get_selection().connect("changed", self._quote_changed)
        self.view_exchanges.get_selection().connect("changed", self._exchange_changed)

        rend_base = Gtk.CellRendererText()
        rend_quote = Gtk.CellRendererText()
        rend_exchange = Gtk.CellRendererText()

        col_base = Gtk.TreeViewColumn("Base", rend_base, text=0)
        col_base.set_sort_column_id(0)
        col_quote = Gtk.TreeViewColumn("Quote", rend_quote, text=0)
        col_quote.set_sort_column_id(0)
        col_exchange = Gtk.TreeViewColumn("Exchange", rend_exchange, text=0)
        col_exchange.set_sort_column_id(0)

        self.view_bases.append_column(col_base)
        self.view_quotes.append_column(col_quote)
        self.view_exchanges.append_column(col_exchange)

        # Double-clicking an exchange row applies the selection immediately.
        self.view_exchanges.connect("row-activated", self._update_indicator)

        self.set_focus_child(self.view_bases)

        sw = Gtk.ScrolledWindow()
        sw.set_vexpand(True)
        sw.add(self.view_bases)
        grid.attach(sw, 0, 0, 200, 400)

        sw2 = Gtk.ScrolledWindow()
        sw2.set_vexpand(True)
        sw2.add(self.view_quotes)
        grid.attach(sw2, 200, 0, 200, 400)

        sw3 = Gtk.ScrolledWindow()
        sw3.set_vexpand(True)
        sw3.add(self.view_exchanges)
        grid.attach(sw3, 400, 0, 200, 400)

        lbl_hint = Gtk.Label("Hint: Start typing in a list to search.")
        grid.attach(lbl_hint, 100, 400, 400, 25)

        buttonbox = Gtk.Box(spacing=2)
        button_set_close = Gtk.Button('Set and Close')
        button_set_close.connect("clicked", self._update_indicator_close)
        button_set = Gtk.Button('Set')
        button_set.connect("clicked", self._update_indicator)
        button_set.set_can_default(True)
        button_set.get_style_context().add_class(Gtk.STYLE_CLASS_SUGGESTED_ACTION)
        button_cancel = Gtk.Button('Close')
        button_cancel.connect("clicked", self._close)
        buttonbox.pack_start(button_set_close, True, True, 0)
        buttonbox.pack_start(button_set, True, True, 0)
        buttonbox.pack_start(button_cancel, True, True, 0)
        grid.attach(buttonbox, 0, 425, 600, 50)

        self._select_currents()

        self.show_all()
        self.present()

    def _base_changed(self, selection):
        """Repopulate the quote list when the base selection changes."""
        (model, iter) = selection.get_selected()
        if iter is None:
            return
        self.quote_store.clear()
        self.current_base = model[iter][0]
        for quote in self.parent.coin.bases[self.current_base]:
            self.quote_store.append([quote])
        self.quote_store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
        self.view_quotes.set_cursor(0)  # chains into _quote_changed

    def _quote_changed(self, selection):
        """Repopulate the exchange list when the quote selection changes."""
        (model, iter) = selection.get_selected()
        if iter is None:
            return
        self.ex_store.clear()
        self.current_quote = model[iter][0]
        for exchange in self.parent.coin.bases[self.current_base][self.current_quote]:
            self.ex_store.append([exchange.get_name(), exchange.get_code()])
        self.ex_store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
        self.view_exchanges.set_cursor(0)  # chains into _exchange_changed

    def _exchange_changed(self, selection):
        """Remember the code of the currently highlighted exchange."""
        (model, iter) = selection.get_selected()
        if iter is None:
            return
        self.current_exchange = model[iter][1]  # column 1 holds the code

    ##
    # Select the currently active values and scroll them into view
    #
    def _select_currents(self):
        def _select_and_scroll(store, view, current_value):
            # Linear scan; the stores are small enough that this is fine.
            for row in store:
                if row[0] == current_value:
                    view.set_cursor(row.path)
                    view.scroll_to_cell(row.path)
                    break

        _select_and_scroll(self.base_store, self.view_bases,
                           self.parent.exchange.asset_pair.get('base'))
        _select_and_scroll(self.quote_store, self.view_quotes,
                           self.parent.exchange.asset_pair.get('quote'))
        _select_and_scroll(self.ex_store, self.view_exchanges,
                           self.parent.exchange.get_name())

    def _update_indicator_close(self, widget):
        """Apply the current selection, then close the window."""
        self._update_indicator(widget)
        self._close(widget)

    def _update_indicator(self, widget, *args):
        """Push the picked base/quote/exchange back to the indicator."""
        exchange = self.parent.coin.find_exchange_by_code(self.current_exchange)
        self.parent.change_assets(self.current_base, self.current_quote, exchange)

    def _on_key_release(self, widget, ev, data=None):
        # Escape closes the window without applying anything.
        if ev.keyval == Gdk.KEY_Escape:
            self._close()

    def _close(self, widget=None):
        self.destroy()
{ "repo_name": "nilgradisnik/coinprice-indicator", "path": "coin/asset_selection.py", "copies": "1", "size": "5672", "license": "mit", "hash": -7949906752368417000, "line_mean": 34.8987341772, "line_max": 108, "alpha_frac": 0.6232369535, "autogenerated": false, "ratio": 3.4797546012269938, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.46029915547269934, "avg_score": null, "num_lines": null }
"""Assets module.""" import os import logging import pygame import pyscroll import pytmx from .locals import * # noqa from .utils import ContainerAware logger = logging.getLogger(__name__) class AssetManager(ContainerAware): """Asset manager.""" def __init__(self): """Constructor.""" logger.debug('Initializing AssetManager') self.configuration = self.container.get(Configuration) self.base_path = self.configuration.get('akurra.assets.base_path', 'assets') def get_path(self, asset_path): """ Return a path to an asset while taking distributions and base paths into account. :param asset_path: Relative path of asset to process. """ return os.path.join(self.base_path, asset_path) def get_sound(self, asset_path): """ Return an sfx object (OGG only for now). :param asset_path: Relative path of asset to process. """ path = self.get_path(asset_path) sound = pygame.mixer.Sound(path) return sound def get_image(self, asset_path, colorkey=None, alpha=False): """ Return an image by processing an asset. :param asset_path: Relative path of asset to process. """ path = self.get_path(asset_path) image = pygame.image.load(path) image = image.convert_alpha() if alpha else image.convert() if colorkey: image.set_colorkey(colorkey) return image def get_tmx_data(self, asset_path): """ Return TMX data by processing an asset. :param asset_path: Relative path of asset to process. """ path = self.get_path(asset_path) tmx_data = pytmx.load_pygame(path) return tmx_data def get_map_data(self, asset_path): """ Return map data by processing an asset. :param asset_path: Relative path of asset to process. """ tmx_data = self.get_tmx_data(asset_path) map_data = pyscroll.data.TiledMapData(tmx_data) return map_data
{ "repo_name": "multatronic/akurra", "path": "akurra/assets.py", "copies": "1", "size": "2087", "license": "mit", "hash": 8102100851264531000, "line_mean": 23.5529411765, "line_max": 89, "alpha_frac": 0.6114039291, "autogenerated": false, "ratio": 3.9377358490566037, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5049139778156604, "avg_score": null, "num_lines": null }
# Assets from flask.ext.assets import (Environment, Bundle) from app import app assets = Environment() assets.init_app(app) # Assets js_base = Bundle( 'bower_components/jquery/jquery.js', 'bower_components/lodash/dist/lodash.js', 'bower_components/bootstrap-sass/js/bootstrap-affix.js', 'bower_components/bootstrap-sass/js/bootstrap-alert.js', 'bower_components/bootstrap-sass/js/bootstrap-dropdown.js', 'bower_components/bootstrap-sass/js/bootstrap-tooltip.js', 'bower_components/bootstrap-sass/js/bootstrap-modal.js', 'bower_components/bootstrap-sass/js/bootstrap-transition.js', 'bower_components/bootstrap-sass/js/bootstrap-button.js', 'bower_components/bootstrap-sass/js/bootstrap-popover.js', 'bower_components/bootstrap-sass/js/bootstrap-typeahead.js', 'bower_components/bootstrap-sass/js/bootstrap-carousel.js', 'bower_components/bootstrap-sass/js/bootstrap-scrollspy.js', 'bower_components/bootstrap-sass/js/bootstrap-collapse.js', 'bower_components/bootstrap-sass/js/bootstrap-tab.js', 'bower_components/bootstrap-select/js/bootstrap-select.js', #filters='jsmin', output='gen/packed_base.js') assets.register('js_base', js_base) js_ie9 = Bundle( 'bower_components/es5-shim/es5-shim.js', 'bower_components/json3/lib/json3.min.js', filters='jsmin', output='gen/packed_ie9.js') assets.register('js_ie9', js_ie9) js_angular = Bundle( 'bower_components/angular/angular.js', 'bower_components/angular-resource/angular-resource.js', 'bower_components/ng-grid/ng-grid-2.0.13.debug.js', 'bower_components/ng-grid/plugins/ng-grid-flexible-height.js', 'bower_components/CodeMirror/lib/codemirror.js', 'bower_components/d3/d3.js', 'bower_components/angular-strap/dist/angular-strap.js', 'vendor/angular-d3/angular-d3.js', 'vendor/redcode_cm.js', 'vendor/mars.js', 'scripts/controllers/navigation.js', 'scripts/directives/navigation.js', 'scripts/app.js', 'scripts/directives/codemirror.js', 'scripts/services/accounts.js', 'scripts/services/warriors.js', 'scripts/services/machines.js', 
'scripts/services/queues.js', 'scripts/services/matches.js', 'scripts/services/submissions.js', 'scripts/controllers/main.js', 'scripts/controllers/developList.js', 'scripts/controllers/developEdit.js', 'scripts/controllers/matchesList.js', 'scripts/controllers/matchView.js', 'scripts/controllers/adminWarriors.js', 'scripts/controllers/accounts.js', 'scripts/controllers/machines.js', 'scripts/controllers/queues.js', 'scripts/controllers/accountEdit.js', #filters='jsmin', output='gen/packed_angular.js') assets.register('js_angular', js_angular) css_base = Bundle( 'bower_components/bootstrap-sass/bootstrap-2.3.2.css', 'bower_components/bootstrap-select/dist/css/bootstrap-select.css', #'bower_components/flatui/css/flat-ui.css', 'bower_components/ng-grid/ng-grid.min.css', 'bower_components/CodeMirror/lib/codemirror.css', 'bower_components/CodeMirror/theme/eclipse.css', filters='cssmin', output='gen/packed_base.css') assets.register('css_base', css_base) css_main = Bundle( 'styles/main.css', output='gen/packed_main.css') assets.register('css_main', css_main)
{ "repo_name": "mahrz/kernkrieg", "path": "assets.py", "copies": "1", "size": "3316", "license": "mit", "hash": 4167858654769972000, "line_mean": 37.5697674419, "line_max": 70, "alpha_frac": 0.7198431846, "autogenerated": false, "ratio": 3.059040590405904, "config_test": false, "has_no_keywords": true, "few_assignments": false, "quality_score": 0.4278883775005904, "avg_score": null, "num_lines": null }
# assets
# to import: from config.assets import patterns
from config.assets import default, production, test, development


def patterns(version):
    """Return the asset-pattern dict for *version*.

    The named environment's exports are merged on top of the defaults.
    Unknown versions fall back to 'development'.
    """
    if version == 'production':
        return merge(default.exports, production.exports)
    elif version == 'test':
        return merge(default.exports, test.exports)
    else:
        # default to development
        return merge(default.exports, development.exports)


def merge(dict1, dict2):
    """Recursively merge *dict2* on top of *dict1* and return a new dict.

    Semantics per key of dict2:
      - both values are dicts -> merged recursively
      - both values are lists -> concatenated (dict1's items first)
      - otherwise             -> dict2's value wins

    Neither input is mutated.  (The previous implementation called
    m[k].extend(v) on a shallow copy, which appended dict2's items into
    the list object still owned by dict1 -- so repeated calls kept
    growing the shared defaults.)
    """
    # dict(dict1) is a shallow copy and, unlike dict(**dict1), also
    # accepts non-string keys.
    m = dict(dict1)
    for k, v in dict2.items():
        if k in m:
            if isinstance(v, dict):
                m[k] = merge(m[k], v)
            elif isinstance(v, list):
                # Build a NEW list instead of extending dict1's in place.
                m[k] = m[k] + v
            else:
                m[k] = v
        else:
            if isinstance(v, dict):
                # Copy nested dicts so m never aliases dict2's containers.
                m[k] = merge(dict(), v)
            else:
                m[k] = v
    return m
{ "repo_name": "buckbaskin/flask-vertical", "path": "config/assets/__init__.py", "copies": "1", "size": "1031", "license": "mit", "hash": 2465864327749391000, "line_mean": 27.6388888889, "line_max": 79, "alpha_frac": 0.6314258002, "autogenerated": false, "ratio": 2.8324175824175826, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.39638433826175823, "avg_score": null, "num_lines": null }
"""Assets represent Financial Assets, these are claims based on a contract. These are often referred to as Securities or Products in other libraries. """ from __future__ import absolute_import, division, print_function import pandas as pd from pandas import DataFrame from numbers import Number from pandas.tseries.offsets import DateOffset, CustomBusinessDay from pennies.time import daycounter RATETYPES = ['FIXED', 'IBOR'] class Asset(object): """Base class of all Financial Assets""" def __init__(self): self.frame = pd.DataFrame() def accept(self, visitor, *args, **kwargs): """Accepts visitors that calculate various measures on the Asset. Stub if we wish to use visitor pattern. Currently using multipledispatch """ return visitor.visit(Asset, *args, **kwargs) def __eq__(self, other): return self.frame.equals(other.frame) def all_assets(): """Provides a list of all available Assets""" return Asset.__subclasses__() class ZeroCouponBond(Asset): """A single payment of an amount of currency on a given date. This has a number of aliases: ZCB, Zero, DiscountBond, Bullet By default, the amount is $1 received. Attributes ---------- dt_payment: datetime Date (and time) on which amount is received currency : str Currency code of amount received notional: float Notional in given currency. Received if positive, else paid. notional: float Notional in given currency. Received if positive, else paid. """ def __init__(self, dt_payment, currency='USD', notional=1.0, bday=None): """ Parameters ---------- dt_payment: datetime Date (and time) on which notional is received currency : str, optional Currency code of notional received notional: float, optional Currency Amount. Received if positive, else paid. bday: str, optional Rule to adjust dates that fall on weekends and holidays. 
""" super(ZeroCouponBond, self).__init__() self.dt_payment = dt_payment self.currency = currency self.notional = notional self.frame = pd.DataFrame({ 'pay': dt_payment, 'notional': notional, 'currency': currency}, index=[0]) class Annuity(Asset): """Fixed Rate Annuity. This is used as the fixed leg of a Swap, as the core of fixed rate Bonds, and the natural Numeraire when pricing Swaptions. The primary representation of the asset is a dataframe where each row is a single cashflow. """ # TODO Capture additional cases outlined below # TODO Stubs: Short and Long, Front and Back # TODO Daycount conventions: add more # TODO Business day adjustments conventions: add more # TODO Holiday calendars: add def __init__(self, df, notl_exchange=True): """Create Annuity from DataFrame. Not meant to be the primary constructor. Instead, calls like Annuity.from_tenor will be more common. This is here because classmethods must return a call to constructor so that return type is known. Parameters ---------- df: DataFrame Required columns = ['start','end', 'pay', 'fixing', 'period', 'frequency', 'notional', 'dcc','lag_pay', 'bday_adj', 'stub'] notl_exchange: bool If true, notional is paid at the final pay date """ super(Annuity, self).__init__() # Primary representation self.frame = df # Scalar Metadata self.notl_exchange = notl_exchange try: vals = set(df.currency) assert len(vals) == 1, ('currency column should have just one ' 'value: Found {}'.format(vals)) self.currency = vals.pop() except KeyError: print('Required key, currency, not contained in frame') raise try: vals = set(df.frequency) assert len(vals) == 1, ('frequency column should have just one ' 'value: Found {}'.format(vals)) self.frequency = vals.pop() except KeyError: print('Required key, frequency, not contained in frame') raise try: vals = set(df.type) assert len(vals) == 1, ('type column should have just one ' 'value: Found {}'.format(vals)) self.type = vals.pop() except KeyError: print('Optional key, type, not 
contained in frame. Set to None') self.type = None @classmethod def from_tenor(cls, dt_settlement, tenor, frequency, rate=1.0, dcc=None, notional=1.0, currency='USD', receive=True, payment_lag=0, bday=None, stub='front', notl_exchange=True, rate_type='FIXED'): """Construct a fixed rate Annuity from start date, length and frequency. Parameters ---------- dt_settlement: datetime Date (and time) on which leg begins to accrue interest tenor: int Length of the entire leg, as number of months frequency: int Number of months between cash flows dcc: str, optional Daycount Convention for computing accrual of interest rate: float, optional Rate of interest accrual. Simple accrual, no compounding in period. notional: float, optional Notional amount. Received if positive, else paid. currency : str, optional Currency code of amount received receive: bool, optional Alternative method of specifying sign of notional. Multiplies given notional by -1 if False payment_lag: int, optional Number of days after accrual end dates that payments are made. bday: str, optional Rule to adjust dates that fall on weekends and holidays. stub: str, optional If schedule building leads to one period of different length, this decides if it is the first ('front') or last period ('back'). notl_exchange: bool If true, notional is paid at the final pay date rate_type: str, optional Defines whether the rate being paid is fixed, or of some floating index such as an IBOR. """ # TODO: Check behavior when stubs implied dt_maturity = dt_settlement + DateOffset(months=tenor) period = DateOffset(months=frequency) sched_end = pd.date_range(dt_settlement, dt_maturity, freq=period, closed='right') sched_start = sched_end - period # TODO Test stub cases. 
start[i] should be end[i-1] if bday or payment_lag: sched_pay = sched_end + CustomBusinessDay(payment_lag, holidays=None) else: sched_pay = sched_end # Primary representation of leg as Pandas DataFrame assert rate_type in RATETYPES frame = pd.DataFrame({ 'start': sched_start, 'end': sched_end, 'pay': sched_pay, 'rate': rate, 'notional': notional, 'frequency': frequency, 'currency': currency, 'dcc': dcc, 'pay_lag': payment_lag, 'bday_adj': bday, 'stub': stub, 'type': rate_type}) year_frac = daycounter(dcc)(frame.start, frame.end) frame['period'] = year_frac return Annuity(frame, notl_exchange=notl_exchange) @classmethod def from_frame(cls, df, notl_exchange=True): return Annuity(df, notl_exchange=notl_exchange) def __str__(self): return str(self.frame) def __eq__(self, other): return (isinstance(other, Annuity) and super(Annuity, self).__eq__(other)) class FixedLeg(Annuity): def __init__(self, df, fixed_rate=None, notl_exchange=True): super(FixedLeg, self).__init__(df, notl_exchange=notl_exchange) self.type = 'FIXED' self.frame['type'] = self.type if fixed_rate: self.frame['rate'] = fixed_rate def __eq__(self, other): return (isinstance(other, FixedLeg) and super(FixedLeg, self).__eq__(other)) @classmethod def from_tenor(cls, dt_settlement, tenor, frequency, rate=1.0, dcc='30360', notional=1.0, currency='USD', receive=True, payment_lag=0, bday=None, stub='front', notl_exchange=True): annuity = Annuity.from_tenor(dt_settlement, tenor, frequency, rate, dcc, notional, currency, receive, payment_lag, bday, stub, notl_exchange, rate_type='FIXED') if isinstance(rate, Number): return FixedLeg(annuity.frame, fixed_rate=rate) else: raise NotImplementedError("FixedLeg requires scalar rate.") @classmethod def from_frame(cls, df, fixed_rate=1.0, notl_exchange=True): return FixedLeg(df, fixed_rate=fixed_rate, notl_exchange=notl_exchange) class IborLeg(Annuity): """Series of coupons based on fixings of an IBOR. 
IBOR = Inter-Bank Offered Rate, eg 3M USD LIBOR (3-month dollar Libor) Used as Floating Leg of a Swap or Floating Rate Note. """ def __init__(self, df, notl_exchange=True): """Compute from DataFrame. This is unlikely to be the primary constructor, but classmethods must return a call to constructor so that type is known. Parameters ---------- df: DataFrame Required columns = ['start','end', 'pay', 'fixing', 'period', 'frequency', 'notional', 'dcc','lag_pay', 'bday_adj', 'stub'] """ # Primary representation super(IborLeg, self).__init__(df, notl_exchange=notl_exchange) self.type = 'IBOR' self.frame['rate_type'] = self.type @classmethod def from_tenor(cls, dt_settlement, tenor, frequency, rate=None, dcc=None, notional=1.0, currency='USD', receive=True, payment_lag=0, fixing_lag=0, bday=None, stub='front', notl_exchange=True): annuity = Annuity.from_tenor(dt_settlement, tenor, frequency, rate, dcc, notional, currency, receive, payment_lag, bday, stub, notl_exchange, rate_type='IBOR') df = annuity.frame if bday or fixing_lag: df['fixing'] = df['start'] + CustomBusinessDay(fixing_lag, holidays=None) else: df['fixing'] = df['start'] return IborLeg(df, notl_exchange=notl_exchange) @classmethod def from_frame(cls, df, notl_exchange=True): return IborLeg(df, notl_exchange=notl_exchange) def __eq__(self, other): return (isinstance(other, IborLeg) and super(IborLeg, self).__eq__(other)) class CompoundAsset(Asset): """This Asset is composed of a list of Assets. This is a convenient way to structure a bespoke trade that contains numerous parts, like embedded options, or different first coupons. 
Attributes ---------- underlying_contracts: list List of instances of Assets """ def __init__(self, underlying_contracts): """ Parameters ---------- underlying_contracts: list of Asset's """ super(CompoundAsset, self).__init__() self.underlying_contracts = underlying_contracts class Swap(CompoundAsset): def __init__(self, receive_leg, pay_leg): """ This takes two frames""" self.underlying_contracts = [receive_leg, pay_leg] self.leg_receive = receive_leg self.leg_pay = pay_leg def __eq__(self, other): return (isinstance(other, Swap) and self.leg_pay == other.leg_pay and self.leg_receive == other.leg_receive) def __str__(self): return ('\nPay Leg:\n' + str(self.leg_pay) + '\nReceive Leg:\n' + str(self.leg_receive)) class VanillaSwap(Swap): def __init__(self, fixed_leg, floating_leg): assert isinstance(fixed_leg, FixedLeg) assert isinstance(floating_leg, IborLeg) assert fixed_leg.currency == floating_leg.currency, \ 'Currencies differ in legs of VanillaSwap' assert fixed_leg.type == 'FIXED' self.leg_fixed = fixed_leg assert floating_leg.type == 'IBOR' self.leg_float = floating_leg initial_notl_fixed = fixed_leg.frame.notional.iloc[0] initial_notl_float = floating_leg.frame.notional.iloc[0] if initial_notl_fixed * initial_notl_float > 0.0: raise ValueError("Notional values of both legs have same sign") elif initial_notl_fixed >= 0.0: super(VanillaSwap, self).__init__(receive_leg=fixed_leg, pay_leg=floating_leg) else: super(VanillaSwap, self).__init__(receive_leg=floating_leg, pay_leg=fixed_leg) def __eq__(self, other): return (isinstance(other, VanillaSwap) and self.leg_fixed == other.leg_fixed and self.leg_float == other.leg_float) class FRA(Asset): """Forward Rate Agreement""" def __init__(self, fixed_rate, dt_fixing, dt_payment, dt_accrual_start=None, dt_accrual_end=None, daycount=None, notional=1.0, pay_upfront=True): raise NotImplementedError class StirFuture(Asset): """Short term interest rate Future""" def __init__(self): raise NotImplementedError class 
Deposit(Asset): """Short term cash deposit paying simple (not compounded) interest""" def __init__(self): raise NotImplementedError class IborFixing(Asset): """Current Fixing of an Inter-Bank Offered Rate Used to calibrate yield curves. Not an asset per-se. """ def __init__(self): raise NotImplementedError class TenorSwap(Swap): """Swap with two floating legs, each of different rate tenors""" def __init__(Asset): raise NotImplementedError class CurrencySwap(Swap): """Swap with two floating legs, each of different currencies and often rate and payment frequency""" def __init__(self): raise NotImplementedError # TODO: Check whether this sort of aliasing is a good idea Zero = ZeroCouponBond """Alias for a ZeroCouponBond""" ZCB = ZeroCouponBond """Alias for a ZeroCouponBond""" DiscountBond = ZeroCouponBond """Alias for a ZeroCouponBond""" BulletPayment = ZeroCouponBond """Alias for a ZeroCouponBond""" SettlementPayment = ZeroCouponBond """BulletPayment used to settle trades"""
{ "repo_name": "caseyclements/pennies", "path": "pennies/trading/assets.py", "copies": "1", "size": "15097", "license": "apache-2.0", "hash": -7175903032183199000, "line_mean": 33.9490740741, "line_max": 93, "alpha_frac": 0.5938265881, "autogenerated": false, "ratio": 4.049624463519313, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.00035102836502663097, "num_lines": 432 }
"""A SSH Interface class. An interface to ssh on posix systems, and plink (part of the Putty suite) on Win32 systems. By Rasjid Wilcox. Copyright (c) 2002. Version: 0.2 Last modified 4 September 2002. Drawing on ideas from work by Julian Schaefer-Jasinski, Guido's telnetlib and version 0.1 of pyssh (http://pyssh.sourceforge.net) by Chuck Esterbrook. Licenced under a Python 2.2 style license. See License.txt. """ DEBUG_LEVEL = 0 import os, getpass import signal # should cause all KeyboardInterrupts to go to the main thread # try for Linux, does not seem to be try under Cygwin import nbpipe import time # Constants SSH_PORT=22 SSH_PATH='' CTRL_C=chr(3) READ_LAZY=0 READ_SOME=1 READ_ALL=2 # set the path to ssh / plink, and chose the popen2 funciton to use if os.name=='posix': import fssa # we can look for ssh-agent on posix # XXX Can we on Win32/others? import ptyext # if my patch gets accepted, change this to check for a # sufficiently high version of python, and assign ptyext=pty # if sufficient. sshpopen2=ptyext.popen2 CLOSE_STR='~.' tp=os.popen('/usr/bin/which ssh') SSH_PATH=tp.read().strip() try: tp.close() except IOError: # probably no child process pass if SSH_PATH == '': tp=os.popen('command -v ssh') # works in bash, ash etc, not csh etc. SSH_PATH=tp.read().strip() tp.close() if SSH_PATH == '': check = ['/usr/bin/ssh', '/usr/local/bin/ssh', '/bin/ssh'] for item in check: if os.path.isfile(item): SSH_PATH=item break PORT_STR='-p ' else: sshpopen2=os.popen2 CLOSE_STR=CTRL_C # FIX-ME: This does not work. # I think I need to implement a 'kill' component # to the close function using win32api. SSH_PATH='' PORT_STR='-P ' class mysshError(Exception): """Error class for myssh.""" pass # Helper functions def _prompt(prompt): """Print the message as the prompt for input. Return the text entered.""" noecho = (prompt.lower().find('password:') >= 0) or \ (prompt.lower().find('passphrase:') >=0) print """User input required for ssh connection. 
(Type Ctrl-C to abort connection.)""" abort = 0 try: if noecho: response = getpass.getpass(prompt) else: response = raw_input(prompt) except KeyboardInterrupt: response = '' abort = 1 return response, abort class Ssh: """A SSH connection class.""" def __init__(self, username=None, host='localhost', port=None): """Constructor. This does not try to connect.""" self.debuglevel = DEBUG_LEVEL self.sshpath = SSH_PATH self.username = username self.host = host self.port = port self.isopen = 0 self.sshpid = 0 # perhaps merge this with isopen self.old_handler = signal.getsignal(signal.SIGCHLD) sig_handler = signal.signal(signal.SIGCHLD, self.sig_handler) def __del__(self): """Destructor -- close the connection.""" if self.isopen: self.close() def sig_handler(self, signum, stack): """ Handle SIGCHLD signal """ if signum == signal.SIGCHLD: try: os.waitpid(self.sshpid, 0) except: pass if self.old_handler != signal.SIG_DFL: self.old_handler(signum, stack) def attach_agent(self, key=None): if os.name != 'posix': # only posix support at this time return if 'SSH_AUTH_SOCK' not in os.environ.keys(): fssa.fssa(key) def set_debuglevel(self, debuglevel): """Set the debug level.""" self.debuglevel = debuglevel def set_sshpath(self, sshpath): """Set the ssh path.""" self.sshpath=sshpath # Low level functions def open(self, cmd=None): """Opens a ssh connection. Raises an mysshError if myssh.sshpath is not a file. Raises an error if attempting to open an already open connection. """ self.attach_agent() if not os.path.isfile(self.sshpath): raise mysshError, \ "Path to ssh or plink is not defined or invalid.\nsshpath='%s'" \ % self.sshpath if self.isopen: raise mysshError, "Connection already open." 
sshargs = '' if self.sshpath.lower().find('plink') != -1: sshargs = '-ssh ' if self.port and self.port != '': sshargs += PORT_STR + `self.port` + ' ' if self.username and self.username !='': sshargs += self.username + '@' sshargs += self.host if cmd: sshargs += ' ' + cmd if self.debuglevel: print ">> Running %s %s." % (self.sshpath, sshargs) # temporary workaround until I get pid's working under win32 if os.name == 'posix': self.sshin, self.sshoutblocking, self.sshpid = \ sshpopen2(self.sshpath + ' ' + sshargs) else: self.sshin, self.sshoutblocking = \ sshpopen2(self.sshpath + ' ' + sshargs) self.sshout = nbpipe.nbpipe(self.sshoutblocking) self.isopen = 1 if self.debuglevel: print ">> ssh pid is %s." % self.sshpid def close(self, addnewline=1): """Close the ssh connection by closing the input and output pipes. Returns the closing messages. On Posix systems, by default it adds a newline before sending the disconnect escape sequence. Turn this off by setting addnewline=0. """ if os.name == 'posix': try: if addnewline: self.write('\n') self.write(CLOSE_STR) except (OSError, IOError, mysshError): pass output = self.read_lazy() try: self.sshin.close() self.sshoutblocking.close() except: pass if os.name == 'posix': try: os.kill(self.sshpid, signal.SIGHUP) except: pass self.isopen = 0 if self.debuglevel: print ">> Connection closed." return output def write(self, text): """Send text to the ssh process.""" # May block?? Probably not in practice, as ssh has a large input buffer. if self.debuglevel: print ">> Sending %s" % text if self.isopen: while len(text): numtaken = os.write(self.sshin.fileno(),text) if self.debuglevel: print ">> %s characters taken" % numtaken text = text[numtaken:] else: raise mysshError, "Attempted to write to closed connection." # There is a question about what to do with connections closed by the other # end. Should write and read check for this, and force proper close? def read_very_lazy(self): """Very lazy read from sshout. 
Just reads from text already queued.""" return self.sshout.read_very_lazy() def read_lazy(self): """Lazy read from sshout. Waits a little, but does not block.""" return self.sshout.read_lazy() def read_some(self): """Always read at least one block, unless the connection is closed. My block.""" if self.isopen: return self.sshout.read_some() else: return self.sshout.read_very_lazy() def read_all(self): """Reads until end of file hit. May block.""" if self.isopen: return self.sshout.read_all() else: return self.sshout.read_very_lazy() # High level funcitons def login(self, logintext='Last login:', prompt_callback=_prompt): """Logs in to the ssh host. Checks for standard prompts, and calls the function passed as promptcb to process them. Returns the login banner, or 'None' if login process aborted. """ self.open() banner = self.read_some() if self.debuglevel: print ">> 1st banner read is: %s" % banner while banner.find(logintext) == -1: response, abort = prompt_callback(banner) if abort: return self.close() self.write(response + '\n') banner = self.read_some() return banner def logout(self): """Logs out the session.""" self.close() def sendcmd(self, cmd, readtype=READ_SOME): """Sends the command 'cmd' over the ssh connection, and returns the result. By default it uses read_some, which may block. """ if cmd[-1] != '\n': cmd += '\n' self.write(cmd) if readtype == READ_ALL: return self.read_all() elif readtype == READ_LAZY: return self.read_lazy() else: return self.read_some() def test(): """Test routine for myssh. Usage: python myssh.py [-d] [-sshp path-to-ssh] [username@host | host] [port] Default host is localhost, default port is 22. 
""" import sys debug = 0 if sys.argv[1:] and sys.argv[1] == '-d': debug = 1 del sys.argv[1] testsshpath = SSH_PATH if sys.argv[1:] and sys.argv[1] == '-sshp': testsshpath = sys.argv[2] del sys.argv[1] del sys.argv[1] testusername = None testhost = 'localhost' testport = '22' if sys.argv[1:]: testhost = sys.argv[1] if testhost.find('@') != -1: testusername, testhost = testhost.split('@') if sys.argv[2:]: testport = sys.argv[2] testcon = Ssh(testusername, testhost, testport) testcon.set_debuglevel(debug) testcon.set_sshpath(testsshpath) testcon.login() cmd = None while (cmd != 'exit') and testcon.isopen: cmd = raw_input("Enter command to send: ") print testcon.sendcmd(cmd) testcon.close() if __name__ == '__main__': test()
{ "repo_name": "isislovecruft/torflow", "path": "NetworkScanners/libs/Pyssh/pyssh.py", "copies": "2", "size": "10720", "license": "bsd-3-clause", "hash": -8387606040305135000, "line_mean": 31.5, "line_max": 81, "alpha_frac": 0.5393656716, "autogenerated": false, "ratio": 3.9703703703703703, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.550973604197037, "avg_score": null, "num_lines": null }
# assign articles import pickle from collections import defaultdict stoplist = set("a about above after again against all am an and any are aren't as at be because been before being below between both but by can't cannot could couldn't did didn't do does doesn't doing don't down during each few for from further had hadn't has hasn't have haven't having he he'd he'll he's her here here's hers herself him himself his how how's i i'd i'll i'm i've if in into is isn't it it's its itself let's me more most mustn't my myself no nor not of off on once only or other ought our ours ourselves out over own same shan't she she'd she'll she's should shouldn't so some such than that that's the their theirs them themselves then there there's these they they'd they'll they're they've this those through to too under until up very was wasn't we we'd we'll we're we've were weren't what what's when when's where where's which while who who's whom why why's with won't would wouldn't you you'd you'll you're you've your yours yourself yourselves".split()) replacelist = "\" ( ) . ".split() with open('data/simple/articles.pickle', 'rb') as articles_file: articles = pickle.load(articles_file) frequency = defaultdict(int) for key, article in articles.iteritems(): # Separate the title from the article lines = articles[key].split('\n\n') title = lines[0].replace('\n', '') # Remove quotes words = str.join('\n', lines[1:]).lower().translate(None, ''.join(replacelist)).split() # Remove common words words = [word for word in words if word not in stoplist] # Count each word occurence # for word in words: # frequency[word] += 1 # Save the new dict item articles[key] = str.join(' ', words) # Only keep words that occur more than once # for key, (title, article) in articles.iteritems(): # articles[key] = (title, [word for word in article if frequency[word] > 1]) with open('data/simple/models/articles_parsed.pickle', 'wb') as handle: pickle.dump(articles, handle, protocol=pickle.HIGHEST_PROTOCOL)
{ "repo_name": "Humblehound/WikiSpatialTree", "path": "prepare_dictionary.py", "copies": "1", "size": "2061", "license": "mit", "hash": 3678021013947525000, "line_mean": 63.4375, "line_max": 979, "alpha_frac": 0.7151868025, "autogenerated": false, "ratio": 3.7955801104972378, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5010766912997238, "avg_score": null, "num_lines": null }
"""Assign a WFO to sites in the metadata tables that have no WFO set.""" from pyiem.util import get_dbconn, logger LOG = logger() def main(): """Go Main""" mesosite = get_dbconn("mesosite") postgis = get_dbconn("postgis") mcursor = mesosite.cursor() mcursor2 = mesosite.cursor() pcursor = postgis.cursor() # Find sites we need to check on mcursor.execute( "select s.id, s.iemid, s.network, st_x(geom) as lon, " "st_y(geom) as lat from stations s WHERE " "(s.wfo IS NULL or s.wfo = '') and s.country = 'US'" ) for row in mcursor: sid = row[0] iemid = row[1] network = row[2] # Look for WFO that pcursor.execute( "select wfo from cwa WHERE " "ST_Contains(the_geom, " " ST_SetSrid(ST_GeomFromEWKT('POINT(%s %s)'), 4326)) ", (row[3], row[4]), ) if pcursor.rowcount == 0: LOG.info( "IEMID: %s ID: %s NETWORK: %s not within CWAs, calc dist", iemid, sid, network, ) pcursor.execute( "SELECT wfo, ST_Distance(the_geom, " " ST_SetSrid(ST_GeomFromEWKT('POINT(%s %s)'), 4326)) as dist " "from cwa ORDER by dist ASC LIMIT 1", (row[3], row[4]), ) wfo, dist = pcursor.fetchone() if dist > 3: LOG.info( " closest CWA %s found >3 degrees away %.2f", wfo, dist, ) continue else: row2 = pcursor.fetchone() wfo = row2[0][:3] LOG.info( "Assinging WFO: %s to IEMID: %s ID: %s NETWORK: %s", wfo, iemid, sid, network, ) mcursor2.execute( "UPDATE stations SET wfo = %s WHERE iemid = %s", (wfo, iemid) ) mcursor.close() mcursor2.close() mesosite.commit() mesosite.close() if __name__ == "__main__": main()
{ "repo_name": "akrherz/iem", "path": "scripts/dbutil/set_wfo.py", "copies": "1", "size": "2127", "license": "mit", "hash": 5840720261777957000, "line_mean": 26.9868421053, "line_max": 79, "alpha_frac": 0.463093559, "autogenerated": false, "ratio": 3.464169381107492, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4427262940107492, "avg_score": null, "num_lines": null }
"""Assign block storage subnets to the given host id.""" # :license: MIT, see LICENSE for more details. import click import SoftLayer from SoftLayer.CLI import environment @click.command() @click.argument('access_id', type=int) @click.option('--subnet-id', multiple=True, type=int, help="ID of the subnets to assign; e.g.: --subnet-id 1234") @environment.pass_env def cli(env, access_id, subnet_id): """Assign block storage subnets to the given host id. access_id is the host_id obtained by: slcli block access-list <volume_id> SoftLayer_Account::iscsiisolationdisabled must be False to use this command """ try: subnet_ids = list(subnet_id) block_manager = SoftLayer.BlockStorageManager(env.client) assigned_subnets = block_manager.assign_subnets_to_acl(access_id, subnet_ids) for subnet in assigned_subnets: message = "Successfully assigned subnet id: {} to allowed host id: {}".format(subnet, access_id) click.echo(message) failed_to_assign_subnets = list(set(subnet_ids) - set(assigned_subnets)) for subnet in failed_to_assign_subnets: message = "Failed to assign subnet id: {} to allowed host id: {}".format(subnet, access_id) click.echo(message) except SoftLayer.SoftLayerAPIError as ex: message = "Unable to assign subnets.\nReason: {}".format(ex.faultString) click.echo(message)
{ "repo_name": "allmightyspiff/softlayer-python", "path": "SoftLayer/CLI/block/subnets/assign.py", "copies": "2", "size": "1445", "license": "mit", "hash": -8689241738293449000, "line_mean": 38.0540540541, "line_max": 108, "alpha_frac": 0.676816609, "autogenerated": false, "ratio": 3.8533333333333335, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0014894313930655584, "num_lines": 37 }
"""Assign chores Write a program that takes a list of people’s email addresses and a list of chores that need to be done and randomly assigns chores to people. Email each person their assigned chores. If you’re feeling ambitious, keep a record of each person’s previously assigned chores so that you can make sure the program avoids assigning anyone the same chore they did last time. For another possible feature, schedule the program to run once a week automatically. Notes: * ``smtp_info`` file has each item on a separate line. * Use :func:`input` for password to prevent storing in unencrypted file. * It may be easier to: * Setup a crontab to run weekly. * Store `saved_time` and `prev_chores` in a :py:mod:`shelve` database. """ def main(): import openpyxl, random, smtplib, datetime # Open the spreadsheet and get the lists of data. wb = openpyxl.load_workbook('choresList.xlsx') sheet = wb['Sheet1'] names, emails, chores, prev_chores = [], [], [], [] for row in range(2, sheet.max_row + 1): # skip title row name = sheet['A' + str(row)].value email = sheet['B' + str(row)].value chore = sheet['C' + str(row)].value prev_chore = sheet['D' + str(row)].value names.append(name) emails.append(email) chores.append(chore) prev_chores.append(prev_chore) # Run weekly saved_time = sheet['E2'].value interval = datetime.timedelta(days=7) now = datetime.datetime.now() if saved_time is None: saved_time = now - interval # First run, so it's been a week timedelta = saved_time + interval if timedelta > now: time_left = round((timedelta - now).total_seconds()/60, 2) print(f"RuntimeError: Need to wait {time_left} minutes before running again.") raise RuntimeError else: sheet['E2'].value = now # save to spreadsheet # Log in to email account. 
with open('../smtp_info') as config: myEmail, password, server, port = config.read().splitlines() smtpObj = smtplib.SMTP_SSL(server, port) # Using port 465 smtpObj.ehlo() smtpObj.login(myEmail, password) # Randomly assign chores for i in range(0, len(names)): random_chore = random.choice(chores) # Check previous chore before assignment while random_chore == prev_chores[i] and len(chores) > 1: random_chore = random.choice(chores) # Keep track of chores assigned sheet['D' + str(i + 2)].value = random_chore chores.remove(random_chore) # remove assigned chore from pool # Send email. body = "Subject: Chore for the Week: %s.\nDear %s,\n\nThis week, you're in charge of:\n%s. " \ "\n\nThank you in advance for your efforts!" % (random_chore, names[i], random_chore) print(f'Sending email to {emails[i]}...') sendmailStatus = smtpObj.sendmail(myEmail, emails[i], body) if sendmailStatus != {}: print(f'There was a problem sending email to {emails[i]}: {sendmailStatus}') smtpObj.quit() wb.save('choresList.xlsx') if __name__ == '__main__': main()
{ "repo_name": "JoseALermaIII/python-tutorials", "path": "pythontutorials/books/AutomateTheBoringStuff/Ch16/Projects/P1_assignChores.py", "copies": "1", "size": "3180", "license": "mit", "hash": 7366868954102329000, "line_mean": 35.0681818182, "line_max": 102, "alpha_frac": 0.6379962193, "autogenerated": false, "ratio": 3.5783540022547915, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.47163502215547914, "avg_score": null, "num_lines": null }
# - Assign kafka cluster and topic to send enent # - Assign one stock and curl one information per second # - AAPL, GOOG symbol of stock # - good practive provide default argument from googlefinance import getQuotes from kafka import KafkaProducer from kafka.errors import KafkaError,KafkaTimeoutError #- raise exception to handle kafka error import argparse # use argparse to set commandline arguements import json import time import logging import schedule # -set job run auto # action on exit import atexit topic_name = 'stock-analyzer' kafka_broker = '127.0.0.1:9002' logger_format = '%(asctime)-15s %(message)s' logging.basicConfig(format=logger_format) logger = logging.getLogger('data-producer') # - TRACE DEBUG INFO WARNING ERROR logger.setLevel(logging.DEBUG) def fetch_price(producer, symbol): """ helper function to get stock data and send to kafka @param producer - instance of a kafka producer @param symbol - symbol of the stock, string type @return None """ logger.debug('Start to fetch stock price for %s', symbol) # Start debug try: price = json.dumps(getQuotes(symbol)) logger.debug('Get stock info %s',price) # Get debug producer.send(topic=topic_name,value=price, timestamp_ms= time.time()) logger.debug('Sent stock price for %s to kafka',symbol) #Sent debug except KafkaTimeoutError as timeout_error: logger.warn('Failed to send stock price for %s to kafka, caused by: %s',(symbol,timeout_error)) #use warning cause assume lose one does not matter, however if it is what customer required then # use error except Exception: logger.warn('Failed to get stock price for %s', symbol) def shutdown_hook(producer): try: producer.flush(10) logger.info('Finished flushing pending messages') except KafkaError as KafkaError: logger.warn('Failed to flush pending messages to kafka') finally: try: producer.close() logger.info('Kafka connection closed ') except Exception as e: logger.warn('Failed to close kafka connection') if __name__ == '__main__': # Python enter point argument is 
main # - set commandline arguments parser = argparse.ArgumentParser() parser.add_argument('symbol', help ='the symbol of the stock') parser.add_argument('topic_name', help = 'the kafka topic') parser.add_argument('kafka_broker',help= 'the location of kafka broker') # - parse argument args = parser.parse_args() symbol = args.symbol topic_name = args.topic_name kafka_broker = args.kafka_broker # need to talk to kafka server to produce message thus require us to have # kafka producer # - initiate a kafka producer # bootstrap -server need to contact server to get data producer = KafkaProducer( bootstrap_servers=kafka_broker ) fetch_price(producer,symbol) # - schdule to run every 1 sec schedule.every(1).second.do(fetch_price, producer,symbol) atexit.register(shutdown_hook, producer) while True: # Always true if never finish loop schedule.run_pending() time.sleep(1)
{ "repo_name": "samli6479/bigdata", "path": "pybuild/data-producer.py", "copies": "2", "size": "2967", "license": "apache-2.0", "hash": -2326645250609006600, "line_mean": 29.2857142857, "line_max": 99, "alpha_frac": 0.7435119649, "autogenerated": false, "ratio": 3.5195729537366547, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5263084918636655, "avg_score": null, "num_lines": null }
"""Assign load balancer rule or list of load balancer rules to a global load balancer rules.""" from baseCmd import * from baseResponse import * class assignToGlobalLoadBalancerRuleCmd (baseCmd): typeInfo = {} def __init__(self): self.isAsync = "true" """the ID of the global load balancer rule""" """Required""" self.id = None self.typeInfo['id'] = 'uuid' """the list load balancer rules that will be assigned to global load balancer rule""" """Required""" self.loadbalancerrulelist = [] self.typeInfo['loadbalancerrulelist'] = 'list' """Map of LB rule id's and corresponding weights (between 1-100) in the GSLB rule, if not specified weight of a LB rule is defaulted to 1. Specified as 'gslblbruleweightsmap[0].loadbalancerid=UUID&gslblbruleweightsmap[0].weight=10'""" self.gslblbruleweightsmap = [] self.typeInfo['gslblbruleweightsmap'] = 'map' self.required = ["id", "loadbalancerrulelist", ] class assignToGlobalLoadBalancerRuleResponse (baseResponse): typeInfo = {} def __init__(self): """any text associated with the success or failure""" self.displaytext = None self.typeInfo['displaytext'] = 'string' """true if operation is executed successfully""" self.success = None self.typeInfo['success'] = 'boolean'
{ "repo_name": "MissionCriticalCloud/marvin", "path": "marvin/cloudstackAPI/assignToGlobalLoadBalancerRule.py", "copies": "1", "size": "1391", "license": "apache-2.0", "hash": -3113933261371510000, "line_mean": 38.7428571429, "line_max": 242, "alpha_frac": 0.6534867002, "autogenerated": false, "ratio": 4.008645533141211, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.02929057688218445, "num_lines": 35 }
# Assignment 12 Analysis -- Emily ''' -pull out spike and y-layers in image -(spike) look at distributions of synapse density in the spike across x,y,z -make old plots prettier/writeup this weekend -(spike) in old hist graphs, make bin size a function of sample size -extract image(s) in dataset of y-layers -talk about it, analyze spatial distribution -heat maps/hex plots of y-layers, how is density distributed across them? ''' # Setup from mpl_toolkits.mplot3d import axes3d import matplotlib.pyplot as plt #%matplotlib inline import numpy as np import urllib2 import scipy.stats as stats np.set_printoptions(precision=3, suppress=True) url = ('https://raw.githubusercontent.com/Upward-Spiral-Science' '/data/master/syn-density/output.csv') data = urllib2.urlopen(url) csv = np.genfromtxt(data, delimiter=",")[1:] # don't want first row (labels) # chopping data based on thresholds on x and y coordinates x_bounds = (409, 3529) y_bounds = (1564, 3124) def check_in_bounds(row, x_bounds, y_bounds): if row[0] < x_bounds[0] or row[0] > x_bounds[1]: return False if row[1] < y_bounds[0] or row[1] > y_bounds[1]: return False if row[3] == 0: return False return True indices_in_bound, = np.where(np.apply_along_axis(check_in_bounds, 1, csv, x_bounds, y_bounds)) data_thresholded = csv[indices_in_bound] n = data_thresholded.shape[0] def synapses_over_unmasked(row): s = (row[4]/row[3])*(64**3) return [row[0], row[1], row[2], s] syn_unmasked = np.apply_along_axis(synapses_over_unmasked, 1, data_thresholded) syn_normalized = syn_unmasked # Extract images in dataset of y-layers from image_scraping_jay import get_image_url from PIL import Image from cStringIO import StringIO import requests import urllib import io ''' y_bounds = [(1564,1837), (1837,2071), (2071,2305), (2305,2539), (2539,3124)] for _, bounds in enumerate(y_bounds): new_im = Image.new('RGB',(1000,1000)) temp = data_thresholded[:,1] temp = temp[np.logical_and(temp>=bounds[0], temp<bounds[1])] ys = np.unique(temp) i = 0 ''' def 
get_image(xrange,yrange,xs,ys): spacing = 100 new_im = Image.new('RGB',(spacing*(10+len(xs[xrange[0]:xrange[1]])),spacing*(10+len(ys[yrange[0]:yrange[1]])))) i = 0 for y in ys[yrange[0]:yrange[1]]: j = 0 for x in xs[xrange[0]:xrange[1]]: z = 1054 im_url = get_image_url(x,y,z,1) opener = urllib2.build_opener() opener.addheaders = [('User-agent', 'Mozilla/5.0')] response = opener.open(im_url) img_file = StringIO(response.read()) im = Image.open(img_file) #size = (int(np.floor(1000/len(xs))),int(np.floor(500/len(ys)))) #size = (int(4000),int(np.floor(4000/len(ys)))) size = (spacing,spacing) im.thumbnail(size, Image.ANTIALIAS) new_im.paste(im,(j,i)) j += spacing + 10 i += spacing + 10 new_im.save('new_im'+str(i)+'.bmp') new_im.show() xs = np.unique(data_thresholded[:,0]) ys = np.unique(data_thresholded[:,1]) get_image((0,1),(0,len(ys)-1),xs,ys)
{ "repo_name": "Upward-Spiral-Science/team1", "path": "code/assignment12_emily.py", "copies": "1", "size": "3046", "license": "apache-2.0", "hash": 6111872903007349000, "line_mean": 29.7676767677, "line_max": 112, "alpha_frac": 0.6628365069, "autogenerated": false, "ratio": 2.7565610859728507, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8728173116902578, "avg_score": 0.03824489519405467, "num_lines": 99 }
# Assignment 1 # 18 January 2016 # Team Rython - Dainius Masiliunas - Tim Weerman # Apache license 2.0 # Import packages import os,os.path import mapnik # Set working directory ####### Change this for your own system ####### # os.chdir('YOURWORKINGDIRECTORY') os.chdir('/home/tim/geoscripting/Assignment1') print os.getcwd() os.chdir('data') # Loading osgeo try: from osgeo import ogr, osr print 'Import of ogr and osr from osgeo worked. Hurray!\n' except: print 'Import of ogr and osr from osgeo failed\n\n' # Load driver driverName = "ESRI Shapefile" drv = ogr.GetDriverByName( driverName ) # Set layer name and file name fn = "map.shp" layername = "locations" # Create shape file ds = drv.CreateDataSource(fn) print ds.GetRefCount() # Set spatial reference spatialReference = osr.SpatialReference() spatialReference.ImportFromProj4('+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs') # Create Layer layer=ds.CreateLayer(layername, spatialReference, ogr.wkbPoint) layerDefinition = layer.GetLayerDefn() # Make a coordinate list coordinate_list = [[5.655246,51.989645, "exportfile1.kml"], \ [6.552223,53.212979, "exportfile2.kml"]] # For loop to go through the points within the coordinate list for coordinates in coordinate_list: point = ogr.Geometry(ogr.wkbPoint) point.SetPoint(0, coordinates[0], coordinates[1]) feature = ogr.Feature(layerDefinition) feature.SetGeometry(point) layer.CreateFeature(feature) # Here a .kml file is created per point from the coordinate list f = open(coordinates[2], "w+") f.write("<Placemark>" + point.ExportToKML() + "</Placemark>") f.close() print "The new extent" print layer.GetExtent() # Saving the object by destroying it ds.Destroy() # Set working directory os.chdir('..') # File with symbol for point file_symbol=os.path.join("figs","mm_20_white.png") # Create a map map = mapnik.Map(800, 400) #This is the image final image size # Background for the map map.background = mapnik.Color("steelblue") # Create the rule and style obj r = mapnik.Rule() s = 
mapnik.Style() # Set the land polygone polyStyle= mapnik.PolygonSymbolizer(mapnik.Color("darkred")) pointStyle = mapnik.PointSymbolizer(mapnik.PathExpression(file_symbol)) r.symbols.append(polyStyle) r.symbols.append(pointStyle) s.rules.append(r) map.append_style("mapStyle", s) # Adding point layer layerPoint = mapnik.Layer("pointLayer") layerPoint.datasource = mapnik.Shapefile(file=os.path.join("data","map.shp")) layerPoint.styles.append("mapStyle") # Adding polygon layerPoly = mapnik.Layer("polyLayer") layerPoly.datasource = mapnik.Shapefile(file=os.path.join("data","ne_110m_land.shp")) layerPoly.styles.append("mapStyle") # Add layers to map map.layers.append(layerPoly) map.layers.append(layerPoint) # Set boundaries for the Netherlands boundsLL = (5 , 51, 7, 54.5) #(minx, miny, maxx,maxy) map.zoom_to_box(mapnik.Box2d(*boundsLL)) # zoom to bbox mapnik.render_to_file(map, os.path.join("figs","map3.png"), "png") print "All done - check content" # Deleting the created shapefiles (map.shp etc.) os.system('./clean.sh')
{ "repo_name": "GreatEmerald/geoscripting", "path": "Assignment1/Assignment1.py", "copies": "1", "size": "3124", "license": "apache-2.0", "hash": -2835492597362380000, "line_mean": 26.1739130435, "line_max": 85, "alpha_frac": 0.7282330346, "autogenerated": false, "ratio": 3.093069306930693, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9265877978571335, "avg_score": 0.011084872591871657, "num_lines": 115 }
import httplib2 #Needed to download file from internet, not simply reading a file on the hard drive. YearsOfAge=0 #global variables Workclass=1 #fnlwgt=2 Not needed. #Education=3 Not needed. Education_Number=4 Marital_Status=5 Occupation=6 Relationship=7 Race=8 Sex=9 Capital_Gain=10 Capital_Loss=11 Hours_Per_Week=12 #Native_Country=13 Not needed. Outcome=14 #Start of functions. #Start of counting(). def counting(number): for row in file: filearray.append(row) number+=1 #Incrememnt by 1 for each record in the file. return(number) #End of counting(). #Start of weights(). def weights(docarray,number,position): counter=0 #A simple counter. ref={} #A dictionary (Currently empty). attArray = [] while(counter<number): split=docarray[counter].split(", "); if split[position] in ref: ref[split[position]]+=1 else: ref[split[position]]=1 attArray.append(position) counter+=1 counter=0 #Reset the counter to 0. for attArray[counter] in ref: ref[attArray[counter]]=ref[attArray[counter]]/sev return(ref) #End of weights(). #Start of separateXYnum(). def separateXYnum(records,attributepos): X=0 Y=0 i=0 countU=0 countO=0 while(i<sev): record=records[i] recordarray=record.split(", ") if recordarray[Outcome].startswith('>'): X+=int(recordarray[attributepos]) #Earns more. countO+=1 else: Y+=int(recordarray[attributepos]) #Earns less. countU+=1 i+=1 average_X=X/countO average_Y=Y/countU midpoint=average_X+average_Y midpoint = midpoint/2 return(midpoint) #End of separateXYnum(). #Start of separate(). def separate(diction,file,number,n): i=0 pos=0 neg=0 midp=0 midn=0 above={} below={} while(i<number): line=file[i].split(', ') weight=diction[line[n]] if(file[i].find('>50K')!=-1): midp=midp + weight pos+=1 elif(file[i].find('<=50K')!=-1): midn=midn+weight neg+=1 i+=1 midpoint=((midp/pos)+(midn/neg))/2 return(midpoint) #End of separate(). #End of functions. #Start of Main(). 
filedown = "http://archive.ics.uci.edu/ml/machine-learning-databases/adult/adult.data" #Download the file h = httplib2.Http(".cache") file_headers, file = h.request(filedown) file = file.decode() file = file.split('\n') filearray=[] count=0 print('Calculating...') #So the user knows the program is working, if it is running slowly. count=counting(count) #Run the function counting(). sev=int(count*0.50) #Trainging set. #The following are all text based data. workweight=weights(filearray,sev,Workclass) maritalweight=weights(filearray,sev,Marital_Status) occuweight=weights(filearray,sev,Occupation) raceweight=weights(filearray,sev,Race) sexweight=weights(filearray,sev,Sex) relationweight=weights(filearray,sev,Relationship) #The following are all integer based data. age_mid=separateXYnum(filearray,YearsOfAge) work_mid=separate(workweight,filearray,sev,Workclass) edu_mid=separateXYnum(filearray,Education_Number) marital_mid=separate(maritalweight,filearray,sev,Marital_Status) occu_mid=separate(occuweight,filearray,sev,Occupation) relation_mid=separate(relationweight,filearray,sev,Relationship) race_mid=separate(raceweight,filearray,sev,Race) sex_mid=separate(sexweight,filearray,sev,Sex) gain_mid=separateXYnum(filearray,Capital_Gain) loss_mid=separateXYnum(filearray,Capital_Loss) hrs_mid=separateXYnum(filearray,Hours_Per_Week) #Testing set counter = 0 correct = 0 while(sev<count-2): #Errors resulted if it wasn't at -2. More=0 Less=0 attribute=filearray[sev].split(", ") #print("Check?:",type(attribute[age]),attribute[age]) Print until error, program was hitting the end of the file. Fixed now. 
if (int(attribute[YearsOfAge]))>age_mid: More+=-2 else: #I know these two are a little hardcoded, but it gave higher accuracy :) Less+=2 if int(attribute[Education_Number])>edu_mid: More+=1 else: Less+=1 if int(attribute[Hours_Per_Week])>hrs_mid: More+=1 else: Less+=1 if int(attribute[Capital_Gain])>gain_mid: More+=1 else: Less+=1 if int(attribute[Capital_Loss])>loss_mid: More+=1 else: Less+=1 if (float(workweight[attribute[Workclass]])<work_mid): More+=1 else: Less+=1 if (float(maritalweight[attribute[Marital_Status]])>marital_mid): More+=1 else: Less+=1 if (float(occuweight[attribute[Occupation]])>occu_mid): More+=1 else: Less+=1 if (float(raceweight[attribute[Race]])>race_mid): More+=1 else: Less+=1 if (float(sexweight[attribute[Sex]])>sex_mid): More+=1 else: Less+=1 if (float(relationweight[attribute[Relationship]])>relation_mid): More+=1 else: Less+=1 if (More>Less): answer='>50K' else: answer='<=50K' if(filearray[sev].find(answer) != -1): correct +=1 else: correct +=0 counter+=1 sev+=1 accuracy = ((correct/counter)*100) #Claculate the accuracy. total = 100 #Provide a total % value to compare the output against. char = '/' #Used to separate the output value and the total value. print('Accuracy is:',accuracy, char, total) #Print out the accuracy. Final program output. #End of Main().
{ "repo_name": "Dylan-Kingston/Python", "path": "Assignment1OOP.py", "copies": "1", "size": "5686", "license": "mit", "hash": -3051187094330681000, "line_mean": 22.8907563025, "line_max": 128, "alpha_frac": 0.6470277875, "autogenerated": false, "ratio": 3.1729910714285716, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.43200188589285715, "avg_score": null, "num_lines": null }
# =============================== ASSIGNMENT 2 USE PICKLED DATA FROM ASS 1 ON A NN =============================== # These are all the modules we'll be using later. Make sure you can import them # before proceeding further. from __future__ import print_function import numpy as np import tensorflow as tf from six.moves import cPickle as pickle from six.moves import range import time # First reload the data we generated in 1_notmnist.ipynb. pickle_file = '../../udacity/notMNIST.pickle' #with is just a safe way of dealing with resources. handles correct closing if exceptions, etc with open(pickle_file, 'rb') as f: save = pickle.load(f) train_dataset = save['train_dataset'] train_labels = save['train_labels'] valid_dataset = save['valid_dataset'] valid_labels = save['valid_labels'] test_dataset = save['test_dataset'] test_labels = save['test_labels'] del save # hint to help gc free up memory print('Training set', train_dataset.shape, train_labels.shape) print('Validation set', valid_dataset.shape, valid_labels.shape) print('Test set', test_dataset.shape, test_labels.shape) #Reformat into a shape that's more adapted to the models we're going to train: # data as a flat matrix, # labels as float 1-hot encodings. image_size = 28 flat_img_size = image_size * image_size num_labels = 10 def reformat(dataset, labels): #-1 in reshape means 'use whatever makes sense', either flatten the whole thing or keep previous dimensions. # here it keeps the previous dimension print(labels.shape) dataset = dataset.reshape((-1, flat_img_size)).astype(np.float32) # Map 0 to [1.0, 0.0, 0.0 ...], 1 to [0.0, 1.0, 0.0 ...] #the first part of this, np.arange(num_labels), creates an array that is [0,1,2...9] #we then check, for all rows of labels, if that arange is equal to the content of the row #so if labels[3] = 1, we get something like [false,true,false...], and this is converted to [0.0,1.0,0.0...] #None is actually optional, it just means don't bother about this dimension or something. 
#or actually, we need None if labels has more than 1 column labels = (np.arange(num_labels) == labels[:,None]).astype(np.float32) return dataset, labels train_dataset, train_labels = reformat(train_dataset, train_labels) valid_dataset, valid_labels = reformat(valid_dataset, valid_labels) test_dataset, test_labels = reformat(test_dataset, test_labels) print('Training set', train_dataset.shape, train_labels.shape) print('Validation set', valid_dataset.shape, valid_labels.shape) print('Test set', test_dataset.shape, test_labels.shape) #================================================== PROBLEM ================================================ #Turn the logistic regression example with SGD into a 1-hidden layer neural network with rectified linear units # (nn.relu()) and 1024 hidden nodes. This model should improve your validation / test accuracy. def accuracy(predictions, labels): #argmax returns the indices of the maximum values across dimension 1 ie colums #predictions has shape (10000,10). this == tests if the max from predictions matches the max (ie the only non-null) label sum_all_correct = np.sum(np.argmax(predictions, 1) == np.argmax(labels, 1)) sum_all = predictions.shape[0] return 100.0 * sum_all_correct / sum_all # import math batch_size = 128 #this random number of training patterns will be used graph = tf.Graph() hidden1_units = 1024 #initialize everything with graph.as_default(): # Input data. 
The training data is currently empty, but a random minibatch will be fed in the placeholder during training tf_train_dataset = tf.placeholder(tf.float32, shape=(batch_size, flat_img_size)) tf_train_labels = tf.placeholder(tf.float32, shape=(batch_size, num_labels)) tf_valid_dataset = tf.constant(valid_dataset) tf_test_dataset = tf.constant(test_dataset) # Input layer weights = tf.Variable(tf.truncated_normal([flat_img_size, hidden1_units])) biases = tf.Variable(tf.zeros([hidden1_units])) InputLayerOutput = tf.matmul(tf_train_dataset, weights) + biases # 1st hidden layer hidden1_input = tf.nn.relu(InputLayerOutput) weights1= tf.Variable(tf.truncated_normal([hidden1_units, num_labels])) biases1 = tf.Variable(tf.zeros([num_labels])) # Training computation. # logits = tf.matmul(tf_train_dataset, weights) + biases logits = tf.matmul(hidden1_input, weights1) + biases1 loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits, tf_train_labels)) # Optimizer. optimizer = tf.train.GradientDescentOptimizer(0.5).minimize(loss) # Predictions for the training, validation, and test data. train_prediction = tf.nn.softmax(logits) valid_prediction = tf.nn.softmax(tf.matmul(tf.nn.relu(tf.matmul(tf_valid_dataset, weights) + biases), weights1) + biases1) test_prediction = tf.nn.softmax(tf.matmul(tf.nn.relu(tf.matmul(tf_test_dataset, weights) + biases), weights1) + biases1) #another person's code from the forum, also works. basically is the same. # num_nodes= 1024 # batch_size = 128 # graph = tf.Graph() # with graph.as_default(): # # Input data. For the training data, we use a placeholder that will be fed # # at run time with a training minibatch. # tf_train_dataset = tf.placeholder(tf.float32, shape=(batch_size, flat_img_size)) # tf_train_labels = tf.placeholder(tf.float32, shape=(batch_size, num_labels)) # tf_valid_dataset = tf.constant(valid_dataset) # tf_test_dataset = tf.constant(test_dataset) # # Variables. 
# weights_1 = tf.Variable(tf.truncated_normal([flat_img_size, num_nodes])) # biases_1 = tf.Variable(tf.zeros([num_nodes])) # weights_2 = tf.Variable(tf.truncated_normal([num_nodes, num_labels])) # biases_2 = tf.Variable(tf.zeros([num_labels])) # # Training computation. # relu_layer=tf.nn.relu(tf.matmul(tf_train_dataset, weights_1) + biases_1) # logits = tf.matmul(relu_layer, weights_2) + biases_2 # loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits, tf_train_labels)) # # Optimizer. # optimizer = tf.train.GradientDescentOptimizer(0.5).minimize(loss) # # Predictions for the training, validation, and test data. # train_prediction = tf.nn.softmax(logits) # valid_prediction = tf.nn.softmax(tf.matmul(tf.nn.relu(tf.matmul(tf_valid_dataset, weights_1) + biases_1), weights_2) + biases_2) # test_prediction = tf.nn.softmax(tf.matmul(tf.nn.relu(tf.matmul(tf_test_dataset, weights_1) + biases_1), weights_2) + biases_2) #train the thing num_steps = 3001 with tf.Session(graph=graph) as session: tf.initialize_all_variables().run() start_time = time.clock() print("Initialized") for step in range(num_steps): # Generate a minibatch by pick an offset within the (randomized) training data. Note: we could use better randomization across epochs. offset = (step * batch_size) % (train_labels.shape[0] - batch_size) batch_data = train_dataset[offset:(offset + batch_size), :] batch_labels = train_labels [offset:(offset + batch_size), :] # Dictionary telling the session where to feed the minibatch. Keys are the placeholder nodes and the value are the numpy arrays. 
feed_dict = {tf_train_dataset : batch_data, tf_train_labels : batch_labels} # Run the thing _, l, predictions = session.run([optimizer, loss, train_prediction], feed_dict=feed_dict) if (step % 500 == 0): print("Minibatch loss at step %d: %f" % (step, l)) print("Minibatch accuracy: %.1f%%" % accuracy(predictions, batch_labels)) print("Validation accuracy: %.1f%%" % accuracy(valid_prediction.eval(), valid_labels)) print("Test accuracy: %.1f%%" % accuracy(test_prediction.eval(), test_labels)) end_time = time.clock() print("Whole thing took: ", end_time - start_time)
{ "repo_name": "vberthiaume/vblandr", "path": "udacity/Pycharm/ass3/ass2.py", "copies": "1", "size": "7995", "license": "apache-2.0", "hash": -5438227548183845000, "line_mean": 48.96875, "line_max": 142, "alpha_frac": 0.6823014384, "autogenerated": false, "ratio": 3.47911227154047, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.46614137099404696, "avg_score": null, "num_lines": null }
# Assignment 3 for OMS6250 # # Defines a Topology, which is a collection of Nodes. Students should not # modify this file. This is NOT a topology like the ones defined in Mininet projects. # # Copyright 2015 Sean Donovan from DistanceVector import * class Topology(object): def __init__(self, conf_file): ''' Initializes the topology. Called from outside of DistanceVector.py ''' self.topodict = {} self.nodes = [] self.topo_from_conf_file(conf_file) def topo_from_conf_file(self, conf_file): ''' This created all the nodes in the Topology from the configuration file passed into __init__(). Can throw an exception if there is a problem with the config file. ''' try: conf = __import__(conf_file) for key in conf.topo.keys(): new_node = DistanceVector(key, self, conf.topo[key]) self.nodes.append(new_node) self.topodict[key] = new_node except: print "error importing conf_file " + conf_file raise self.verify_topo() def verify_topo(self): ''' Once the topology is imported, we verify the topology to make sure it is actually valid. ''' print self.topodict for node in self.nodes: try: node.verify_neighbors() except: print "error with neighbors of " + node.name raise def run_topo(self): ''' This is where most of the action happens. First, we have to "prime the pump" and send to each neighbor that they are connected. Next, in a loop, we go through all of the nodes in the topology running their instances of Bellman-Ford, passing and receiving messages, until there are no further messages to service. Each loop, print out the distances after the loop instance. After the full loop, check to see if we're finished (all queues are empty). ''' #Prime the pump for node in self.nodes: node.send_initial_messages() done = False while done == False: for node in self.nodes: node.process_BF() node.log_distances() # Done with a round. Now, we call finish_round() which writes out # each entry in log_distances(). 
By default, this will will print # out alphabetical order, which you can turn off so the logfile # matches what is printed during log_distances(). finish_round() done = True for node in self.nodes: if len(node) != 0: done = False break
{ "repo_name": "yevheniyc/Projects", "path": "1c_GT_Computer_Networks/Project-3/Topology.py", "copies": "2", "size": "2803", "license": "mit", "hash": -2006720488810451200, "line_mean": 34.4810126582, "line_max": 86, "alpha_frac": 0.5718872636, "autogenerated": false, "ratio": 4.580065359477124, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.009373154851178343, "num_lines": 79 }
# Assignment 3 for OMS6250
#
# This defines a DistanceVector Node that can run the Bellman-Ford
# algorithm. The TODOs are all related to implementing BF. Students should
# modify this file as necessary, guided by the TODO comments and the
# assignment instructions. This is the only file that needs to be modified
# to complete the project.
#
# Copyright 2015 Dave Lillethun & Sean Donovan

from Node import *
from helpers import *


class DistanceVector(Node):

    #TODO: You need to have a structure that contains current distances

    def __init__(self, name, topolink, neighbors):
        ''' Constructor. This is run once when the DistanceVector object is
        created at the beginning of the simulation. Initializing data
        structures specific to a DV node is done here.'''
        super(DistanceVector, self).__init__(name, topolink, neighbors)
        #TODO: You may need to initialize your distance vector data structure

    def __str__(self):
        ''' Returns a string representation of the Distance Vector node,
        e.g. "A : links ( B C )". '''
        #TODO: (optional) You may want to modify this to print your distance info.
        retstr = self.name + " : links ( "
        for neighbor in self.links:
            retstr = retstr + neighbor + " "
        return retstr + ")"

    def send_initial_messages(self):
        ''' This is run once at the beginning of the simulation, after all
        DistanceVector objects are created and their links to each other are
        established, but before any of the rest of the simulation begins. You
        can have nodes send out their initial DV advertisements here. '''
        for neighbor in self.links:
            # TODO - Build message (currently a placeholder; students fill this in)
            msg = None
            # Send message to neighbor
            self.send_msg(msg, neighbor)

    def process_BF(self):
        ''' This is run continuously (repeatedly) during the simulation. DV
        messages from other nodes are received here, processed, and any new DV
        messages that need to be sent to other nodes as a result are sent. '''
        # TODO: The Bellman-Ford algorithm needs to be implemented here.
        #   1. Process queued messages
        #   2. Send neighbors updated distances

        # Process queue:
        for msg in self.messages:
            # TODO: Do something
            pass
        # Empty queue once all pending messages have been handled.
        self.messages = []

        # Send neighbors updated distances:
        pass

    def log_distances(self):
        ''' Prints distances in the following format (no whitespace either end):

        A:A0,B1,C2

        A is the node we're on, B is the neighbor, 1 is its distance.
        A0 shows that the distance to self is 0.
        Taken from topo1.py '''
        # TODO: The string in the format above (no newlines, no whitespace) must
        # be defined. Then log with write_entry, example below. You'll need to
        # make a loop over all the switches and call add_entry() (see helpers.py)
        # for each switch.
        switch = "A"
        logstring = "A0,B1,C2"
        add_entry(switch, logstring)
        pass
{ "repo_name": "yevheniyc/Python", "path": "1c_GT_Computer_Networks/Project-3/DistanceVector.py", "copies": "2", "size": "3114", "license": "mit", "hash": 1040994699810046700, "line_mean": 35.6352941176, "line_max": 82, "alpha_frac": 0.6409762364, "autogenerated": false, "ratio": 4.373595505617978, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6014571742017978, "avg_score": null, "num_lines": null }
# Assignment 3 for OMS6250
#
# This defines a Node that can run the Bellman-Ford algorithm. Students
# should not modify this file, but should instead modify the DistanceVector
# class that inherits from Node.
#
# Copyright 2015 Sean Donovan


class Node(object):
    """Base node for the Bellman-Ford simulation.

    Attributes:
        name:     this node's label.
        links:    list of neighbor names.
        topology: backlink to the owning Topology; peers are reached via
                  self.topology.topodict['A'].
        messages: queue of pending messages from neighbors awaiting
                  processing by process_BF(). Message format is caller's
                  choice (a tuple works).
    """

    def __init__(self, name, topolink, neighbors):
        self.name = name
        self.links = neighbors
        self.topology = topolink
        self.messages = []

    def __len__(self):
        """Number of queued, not-yet-processed messages."""
        return len(self.messages)

    def __str__(self):
        """Render as '<name> : links ( n1 n2 ... )'."""
        pieces = [self.name, ":", "links", "("]
        pieces.extend(self.links)
        pieces.append(")")
        return " ".join(pieces)

    def __repr__(self):
        return self.__str__()

    def verify_neighbors(self):
        """Raise if any neighbor lacks a backlink to this node."""
        for peer in self.links:
            if self.name not in self.topology.topodict[peer].links:
                raise Exception(peer + " does not have link to " + self.name)

    def send_msg(self, msg, dest):
        """Queue `msg` on neighbor `dest`, validating the link first."""
        if dest not in self.links:
            raise Exception("Neighbor " + dest + " not part of neighbors of " + self.name)
        self.topology.topodict[dest].queue_msg(msg)

    def queue_msg(self, msg):
        """Accept a message from a neighbor, to be handled on the next
        pass through process_BF()."""
        self.messages.append(msg)
{ "repo_name": "yevheniyc/Python", "path": "1c_GT_Computer_Networks/Project-3/Node.py", "copies": "2", "size": "2062", "license": "mit", "hash": 5351375641381893000, "line_mean": 33.3666666667, "line_max": 90, "alpha_frac": 0.611542192, "autogenerated": false, "ratio": 4.174089068825911, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5785631260825911, "avg_score": null, "num_lines": null }
# Assignment 3 output validator for OMS6250 (Fall 2015)+
#
# This output validator is designed to check the student's result log files for errors.
# Errors Detected:
#    No intermediate steps shown
#    Incorrect separator between steps
#    Improper node label format
#    Improper node link weight format
#    Distance vector to self not included in DVT
#
#
# Copyright 26 August 2015 Michael D. Brown

import sys
import re
from helpers import ROUND_SEP

# 1-based line counter shared by the per-line validators for error reporting.
line_number = 1


def validateStudentOutput(filename):
    ''' Top-level check of a student log file: first overall formatting,
        then each line individually. '''
    # Overall Formatting Checks
    intermediateStepCheck(filename)
    # Line By Line Checks
    with open(filename) as f:
        for line in f:
            if line != ROUND_SEP:
                # Strip the trailing newline before validating.
                line = line[0:len(line)-1]
                validateLine(line)
            global line_number
            line_number = line_number + 1


def intermediateStepCheck(filename):
    ''' Count round separators; fewer than 2 means intermediate steps are
        missing or the separator is malformed. '''
    totalSteps = 0
    with open(filename) as f:
        for line in f:
            if line == ROUND_SEP:
                totalSteps += 1
    if totalSteps < 2:
        print("Invalid Output: Intermediate steps were not present or step separator was not formatted properly in " + filename)


def validateLine(line):
    ''' Split a "NODE:entries" line on the colon and validate the node label,
        then hand the comma-separated entries to validateNeighbors(). '''
    node = neighbors = ""
    colonIndex = line.find(':')
    if colonIndex == 2:
        # Two-character node label.
        node = line[0:colonIndex]
        neighbors = line[colonIndex+1:].split(',')
    elif colonIndex == 1:
        # Single-character node label.
        node = line[0]
        neighbors = line[2:].split(',')
    else:
        # Colon missing or label longer than 2 characters.
        print "Invalid Output[L" + str(line_number) + "]: Node label should be no longer than 2 characters. (" + node + ")"
        return
    if not node.isalpha():
        print "Invalid Output[L" + str(line_number) + "]: Node labels should only contain alphabetic characters. (" + node + ")"
    validateNeighbors(neighbors, node)


def validateNeighbors(neighbors, node):
    ''' Validate each "LABELweight" entry: alphabetic label, numeric weight,
        and that the node lists a distance to itself. '''
    nodeInDVT = False
    for neighbor in neighbors:
        # First digit marks the label/weight boundary.
        m = re.search("\d", neighbor)
        if m:
            weightIndex = m.start()
            label = neighbor[:weightIndex]
            # unicode() gives access to isnumeric() (Python 2).
            weight = unicode(neighbor[weightIndex:], 'utf-8')
            if not label.isalpha():
                print "Invalid Output[L" + str(line_number) + "]: Node labels should only contain alphabetic characters. (" + label + ")"
            if not weight.isnumeric():
                print "Invalid Output[L" + str(line_number) + "]: Link weights should only contain numeric characters. (" + weight + ")"
            if label == node:
                nodeInDVT = True
        else:
            print "Invalid Output[L" + str(line_number) + "]: No link weight present for neighbor " + neighbor + " in DVT entry for node " + node
    if not nodeInDVT:
        print "Invalid Output[L" + str(line_number) + "]: Node does not have a link weight for itself."


# Script Start
# Step 1: check for argument
if len(sys.argv) != 2:
    print "Syntax:"
    print "    python output_validator.py <log_file>"
    exit()

# Step 2: Run validator
# NOTE(review): "Ouput" is a typo in this user-facing message; left as-is
# since graders may match on the exact string.
print "Ouput validation initiated on " + sys.argv[1] + ":"
validateStudentOutput(sys.argv[1])
print "Output validation complete."
{ "repo_name": "yevheniyc/Python", "path": "1c_GT_Computer_Networks/Project-3/output_validator.py", "copies": "2", "size": "3149", "license": "mit", "hash": -4081770638514540000, "line_mean": 31.4639175258, "line_max": 145, "alpha_frac": 0.6113051762, "autogenerated": false, "ratio": 4.0475578406169666, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5658863016816966, "avg_score": null, "num_lines": null }
# Assignment 5 # The goal of this assignment is to train a skip-gram model over Text8(http://mattmahoney.net/dc/textdata) data. # These are all the modules we'll be using later. Make sure you can import them before proceeding further. from __future__ import print_function import collections import math import numpy as np import os import random import tensorflow as tf import zipfile from matplotlib import pylab from six.moves import range from six.moves.urllib.request import urlretrieve from sklearn.manifold import TSNE # ====================================================================================================== # Download the data from the source website if necessary. url = 'http://mattmahoney.net/dc/' def maybe_download(filename, expected_bytes): """Download a file if not present, and make sure it's the right size.""" if not os.path.exists(filename): filename, _ = urlretrieve(url + filename, filename) statinfo = os.stat(filename) if statinfo.st_size == expected_bytes: print('Found and verified %s' % filename) else: print(statinfo.st_size) raise Exception( 'Failed to verify ' + filename + '. Can you get to it with a browser?') return filename filename = maybe_download('text8.zip', 31344016) # text8.zip is 30mb! # ====================================================================================================== # Read the data into a string. def read_data(filename): f = zipfile.ZipFile(filename) for name in f.namelist(): return tf.compat.as_str(f.read(name)).split() f.close() words = read_data(filename) print('Data size %d' % len(words)) # ====================================================================================================== # Build the dictionary and replace rare words with UNK token. 
# Keep only the 50,000 most frequent words; everything else maps to UNK.
vocabulary_size = 50000


def build_dataset(words):
    """Map the word list to integer ids, keeping the `vocabulary_size` most
    common words and folding the rest into id 0 ('UNK').

    Returns (data, count, dictionary, reverse_dictionary):
        data:               the corpus as a list of word ids.
        count:              [word, frequency] pairs, count[0] is ['UNK', n].
        dictionary:         word -> id.
        reverse_dictionary: id -> word.
    """
    count = [['UNK', -1]]
    count.extend(collections.Counter(words).most_common(vocabulary_size - 1))
    dictionary = dict()
    for word, _ in count:
        # Ids are assigned in frequency order: most frequent word gets id 1, etc.
        dictionary[word] = len(dictionary)
    data = list()
    unk_count = 0
    for word in words:
        if word in dictionary:
            index = dictionary[word]
        else:
            index = 0  # dictionary['UNK']
            unk_count = unk_count + 1
        data.append(index)
    count[0][1] = unk_count
    reverse_dictionary = dict(zip(dictionary.values(), dictionary.keys()))
    return data, count, dictionary, reverse_dictionary


data, count, dictionary, reverse_dictionary = build_dataset(words)
print('Most common words (+UNK)', count[:5])
print('Sample data', data[:10])
del words  # Hint to reduce memory.

# ======================================================================================================
# Function to generate a training batch for the skip-gram model.
# Position in `data`; persists across calls so consecutive batches advance
# through the corpus.
data_index = 0


def generate_batch(batch_size, num_skips, skip_window):
    """Produce one skip-gram training batch.

    Each center word is reused `num_skips` times, each time paired with a
    different context word sampled (without replacement) from the window of
    `skip_window` words on either side. Advances the global `data_index`.

    Returns (batch, labels): int32 arrays of shapes (batch_size,) and
    (batch_size, 1) holding center-word ids and context-word ids.
    """
    global data_index
    assert batch_size % num_skips == 0
    assert num_skips <= 2 * skip_window
    batch = np.ndarray(shape=(batch_size), dtype=np.int32)
    labels = np.ndarray(shape=(batch_size, 1), dtype=np.int32)
    span = 2 * skip_window + 1  # [ skip_window target skip_window ]
    # Sliding window over the corpus; maxlen makes append() drop the oldest.
    buffer = collections.deque(maxlen=span)
    for _ in range(span):
        buffer.append(data[data_index])
        data_index = (data_index + 1) % len(data)
    for i in range(batch_size // num_skips):
        target = skip_window  # target label at the center of the buffer
        targets_to_avoid = [skip_window]
        for j in range(num_skips):
            # Rejection-sample a context position not used yet for this center.
            while target in targets_to_avoid:
                target = random.randint(0, span - 1)
            targets_to_avoid.append(target)
            batch[i * num_skips + j] = buffer[skip_window]
            labels[i * num_skips + j, 0] = buffer[target]
        # Slide the window one word forward for the next center.
        buffer.append(data[data_index])
        data_index = (data_index + 1) % len(data)
    return batch, labels


print('data:', [reverse_dictionary[di] for di in data[:8]])
for num_skips, skip_window in [(2, 1), (4, 2)]:
    data_index = 0
    batch, labels = generate_batch(batch_size=8, num_skips=num_skips, skip_window=skip_window)
    print('\nwith num_skips = %d and skip_window = %d:' % (num_skips, skip_window))
    print('    batch:', [reverse_dictionary[bi] for bi in batch])
    print('    labels:', [reverse_dictionary[li] for li in labels.reshape(8)])

# ======================================================================================================
# Train a skip-gram model.

batch_size = 128
embedding_size = 128  # Dimension of the embedding vector.
skip_window = 1  # How many words to consider left and right.
num_skips = 2  # How many times to reuse an input to generate a label.
# We pick a random validation set to sample nearest neighbors. here we limit the
# validation samples to the words that have a low numeric ID, which by
# construction are also the most frequent.
valid_size = 16  # Random set of words to evaluate similarity on.
valid_window = 100  # Only pick dev samples in the head of the distribution.
valid_examples = np.array(random.sample(range(valid_window), valid_size))
num_sampled = 64  # Number of negative examples to sample.

graph = tf.Graph()

with graph.as_default(), tf.device('/cpu:0'):
    # Input data.
    train_dataset = tf.placeholder(tf.int32, shape=[batch_size])
    train_labels = tf.placeholder(tf.int32, shape=[batch_size, 1])
    valid_dataset = tf.constant(valid_examples, dtype=tf.int32)

    # Variables.
    embeddings = tf.Variable(tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))
    softmax_weights = tf.Variable(tf.truncated_normal([vocabulary_size, embedding_size],
                                                      stddev=1.0 / math.sqrt(embedding_size)))
    softmax_biases = tf.Variable(tf.zeros([vocabulary_size]))

    # Model.
    # Look up embeddings for inputs.
    embed = tf.nn.embedding_lookup(embeddings, train_dataset)
    # Compute the softmax loss, using a sample of the negative labels each time.
    loss = tf.reduce_mean(tf.nn.sampled_softmax_loss(softmax_weights, softmax_biases, embed,
                                                     train_labels, num_sampled, vocabulary_size))

    # Optimizer.
    optimizer = tf.train.AdagradOptimizer(1.0).minimize(loss)

    # Compute the similarity between minibatch examples and all embeddings.
    # We use the cosine distance:
    norm = tf.sqrt(tf.reduce_sum(tf.square(embeddings), 1, keep_dims=True))
    normalized_embeddings = embeddings / norm
    valid_embeddings = tf.nn.embedding_lookup(normalized_embeddings, valid_dataset)
    similarity = tf.matmul(valid_embeddings, tf.transpose(normalized_embeddings))

# ======================================================================================================

num_steps = 100001

with tf.Session(graph=graph) as session:
    # NOTE(review): tf.initialize_all_variables() is a deprecated API; left
    # as-is to match the TF version this script targets.
    tf.initialize_all_variables().run()
    print('Initialized')
    average_loss = 0
    for step in range(num_steps):
        batch_data, batch_labels = generate_batch(
            batch_size, num_skips, skip_window)
        feed_dict = {train_dataset: batch_data, train_labels: batch_labels}
        _, l = session.run([optimizer, loss], feed_dict=feed_dict)
        average_loss += l
        if step % 2000 == 0:
            if step > 0:
                average_loss = average_loss / 2000
            # The average loss is an estimate of the loss over the last 2000 batches.
            print('Average loss at step %d: %f' % (step, average_loss))
            average_loss = 0
        # note that this is expensive (~20% slowdown if computed every 500 steps)
        if step % 10000 == 0:
            sim = similarity.eval()
            for i in xrange(valid_size):
                valid_word = reverse_dictionary[valid_examples[i]]
                top_k = 8  # number of nearest neighbors
                # Skip index 0: the nearest neighbor of a word is itself.
                nearest = (-sim[i, :]).argsort()[1:top_k + 1]
                log = 'Nearest to %s:' % valid_word
                for k in xrange(top_k):
                    close_word = reverse_dictionary[nearest[k]]
                    log = '%s %s,' % (log, close_word)
                print(log)
    final_embeddings = normalized_embeddings.eval()

# ======================================================================================================
# Project the first `num_points` embeddings to 2D for visualization.
num_points = 400

tsne = TSNE(perplexity=30, n_components=2, init='pca', n_iter=5000)
two_d_embeddings = tsne.fit_transform(final_embeddings[1:num_points + 1, :])

# ======================================================================================================


def plot(embeddings, labels):
    """Scatter-plot 2D `embeddings`, annotating each point with its label."""
    assert embeddings.shape[0] >= len(labels), 'More labels than embeddings'
    pylab.figure(figsize=(15, 15))  # in inches
    for i, label in enumerate(labels):
        x, y = embeddings[i, :]
        pylab.scatter(x, y)
        pylab.annotate(label, xy=(x, y), xytext=(5, 2), textcoords='offset points',
                       ha='right', va='bottom')
    pylab.show()


words = [reverse_dictionary[i] for i in range(1, num_points + 1)]
plot(two_d_embeddings, words)

# ======================================================================================================
# Problem
#
# An alternative to Word2Vec is called CBOW (Continuous Bag of Words). In the CBOW model, instead of predicting a context word from a word vector, you predict a word from the sum of all the word vectors in its context. Implement and evaluate a CBOW model trained on the text8 dataset.
{ "repo_name": "vberthiaume/vblandr", "path": "udacity/Pycharm/ass3/ass5.py", "copies": "1", "size": "9408", "license": "apache-2.0", "hash": 5900785772235257000, "line_mean": 41, "line_max": 284, "alpha_frac": 0.5927933673, "autogenerated": false, "ratio": 3.900497512437811, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9984805966365642, "avg_score": 0.0016969826744337917, "num_lines": 224 }
# Assignment: 5(ThomasBradyITC110Assignment4.py)
# Name: Thomas Brady
# Date: 2/1/18
# Description: This program imports the graphics library and draws
# a "meh" face using circles, rectangles, and lines.

from graphics import *

def Meh():
    """Open a 500x500 window and draw a 'meh' face: head, two half-lidded
    eyes with eyebrows, a zig-zag mouth, and a 'meh.' caption."""
    # Build Graphic Window and set Coordinates
    win = GraphWin("Meh Face!!!!", 500,500)
    win.setBackground("white")
    win.setCoords(0,0,100,100)

    # Draw Head
    circHead = Circle(Point(50,50),37)
    circHead.setFill("white")
    circHead.setWidth(6)
    circHead.draw(win)

    # Draw Left Eye
    circEye1 = Circle(Point(35,60),10)
    circEye1.setWidth(6)
    circEye1.draw(win)

    # Draw Left Eyeball Retina
    circEyeBallOne = Circle(Point(35,60),4)
    circEyeBallOne.setFill("black")
    circEyeBallOne.draw(win)

    # Draw Rectangle to cut Left Retina in half
    # (white rectangle over the retina's top half creates a half-lidded look)
    circCutRetinaOne = Rectangle(Point(30.9,60),Point(39.1,64.1))
    circCutRetinaOne.setFill("white")
    circCutRetinaOne.setOutline("white")
    circCutRetinaOne.draw(win)

    # Draw Line for Left Eyelid
    lineEyeLid1 = Line(Point(25,60),Point(45,60))
    lineEyeLid1.setWidth(6)
    lineEyeLid1.draw(win)

    # Draw Right Eye
    circEye2 = Circle(Point(65,60),10)
    circEye2.setWidth(6)
    circEye2.draw(win)

    # Draw Right Eyeball Retina
    circEyeBallTwo = Circle(Point(65,60),4)
    circEyeBallTwo.setFill("black")
    circEyeBallTwo.draw(win)

    # Draw Rectangle to cut Right Retina in half
    circCutRetinaOne = Rectangle(Point(60.9,60),Point(69.1,64.1))
    circCutRetinaOne.setFill("white")
    circCutRetinaOne.setOutline("white")
    circCutRetinaOne.draw(win)

    # Draw Line for Right Eyelid
    lineEyeLid2 = Line(Point(55,60),Point(75,60))
    lineEyeLid2.setWidth(6)
    lineEyeLid2.draw(win)

    # Draw Left Eyebrow
    lineEyeBrowLeft = Line(Point(27,74),Point(43,74))
    lineEyeBrowLeft.setWidth(6)
    lineEyeBrowLeft.draw(win)

    # Draw Right Eyebrow
    lineEyeBrowRight = Line(Point(57,74),Point(73,74))
    lineEyeBrowRight.setWidth(6)
    lineEyeBrowRight.draw(win)

    # Draw Lines for Mouth (four segments forming a shallow zig-zag)
    lineMouth1 = Line(Point(50,30),Point(63,36))
    lineMouth1.setWidth(6)
    lineMouth1.draw(win)
    lineMouth2 = Line(Point(62.8,36),Point(71,32))
    lineMouth2.setWidth(6)
    lineMouth2.draw(win)
    lineMouth3 = Line(Point(50.3,30),Point(37,36))
    lineMouth3.setWidth(6)
    lineMouth3.draw(win)
    lineMouth4 = Line(Point(37.2,36),Point(29,32))
    lineMouth4.setWidth(6)
    lineMouth4.draw(win)

    # Draw meh. label
    lblMeh = Text(Point(51.5,5),"meh.")
    lblMeh.setTextColor("black")
    lblMeh.setStyle("bold")
    lblMeh.setFace("courier")
    lblMeh.setSize(35)
    lblMeh.draw(win)

Meh()
# Keep the window open until the user presses Enter in the console.
input('')
{ "repo_name": "SeattleCentral/ITC110", "path": "awesomeness/brady_face.py", "copies": "1", "size": "2924", "license": "mit", "hash": 5185509856506638000, "line_mean": 27.24, "line_max": 67, "alpha_frac": 0.626880985, "autogenerated": false, "ratio": 2.877952755905512, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8927838698591386, "avg_score": 0.015399008462825145, "num_lines": 100 }
# Assignment_6.py
#    Plays Tic Tac Toe with login function and repeatability
# <Chad Hobbs>

from graphics import *

def login():
    """Prompt for a user name at the console and authenticate it against
    userlist.txt ('name-password-wins' records separated by '=').

    Returns (key, userlist): key is the user's index into userlist, -3 if
    authentication failed three times; userlist is the parsed records
    (new users are appended with 0 wins).
    """
    # Initiates variables and gets name
    greeting = ""
    name = False
    key = -1
    print("Welcome to Chad's Tic-Tac-Toe game!")
    print("")
    user = input("Please enter your name: ")
    # Gets user and password list and puts it into a basic list
    users = open("userlist.txt","r")
    piece = users.read()
    userlist = piece.split("=")
    users.close()
    for l in range(len(userlist)): # Error handling loop, removes blank list entries
        if userlist[l] == '':
            userlist.remove('')
    # NOTE(review): the file was already closed above — this second close is
    # redundant (harmless, but could be removed).
    users.close() # closes file from read/write
    for i in range(len(userlist)): # Breaks down list into individual elements
        userlist[i] = userlist[i].split("-")
        if user == userlist[i][0]: # While breaking the list down into more lists, we also check to see if the user already exists
            name = True
            key = i
    if name == True: # If a user was found, password is requested and checked, otherwise a new entry will be made
        for j in range(3): # allow for 3 login attempts
            greeting = "Hello " + user + ", what is your password?: "
            pw = input(greeting)
            if userlist[key][1] == pw:
                print("Your password is correct. You have won",int(userlist[key][2]),"games!")
                return key,userlist
            else:
                print("Your name and password are incorrect. Please try again.")
        print("You have exceeded login attempts. Please contact the administrator.")
        key = -3
        return key,userlist
    else: # Gets a pw for a new user
        greeting = "Welcome " + user + ", please enter a password to save your high score: "
        pw = input(greeting)
        temp = [user,pw,0]
        userlist.append(temp)
        key = len(userlist) - 1
        return key,userlist

def logout(userlist): # This function stores the user back in the user text file
    """Serialize userlist back to userlist.txt in 'name-pw-wins=' format."""
    string = ""
    users = open("userlist.txt","w") # Get's the userlist file and opens it for overwrite
    for i in range(len(userlist)):
        string = string + userlist[i][0] + "-" + userlist[i][1] + "-" + str(userlist[i][2]) + "="
    users.write(string) # Writes the data into the file
    users.close() # Closes the file from writing

def create_board():
    """Open the game window, draw the 3x3 grid, and return (win, board)
    where board is a 3x3 list of '' cells."""
    board = [['','',''],['','',''],['','','']]
    win = GraphWin("Tic Tac Toe",300,300)
    win.setCoords(30,30,0,0)
    win.setBackground("white")
    Line(Point(10,0),Point(10,30)).draw(win)
    Line(Point(20,0),Point(20,30)).draw(win)
    Line(Point(0,10),Point(30,10)).draw(win)
    Line(Point(0,20),Point(30,20)).draw(win)
    return win,board

def get_column(board,i):
    """Concatenate column i's three cells into one string (e.g. 'XOX')."""
    return board[0][i] + board[1][i] + board[2][i]

def check_winner(board):
    """Return 'X' or 'O' if that player has three in a row/column/diagonal,
    otherwise None."""
    row1 = "".join(board[0])
    if row1 == 'XXX':
        return 'X'
    if row1 == 'OOO':
        return 'O'
    row2 = "".join(board[1])
    if row2 == 'XXX':
        return 'X'
    if row2 == 'OOO':
        return 'O'
    row3 = "".join(board[2])
    if row3 == 'XXX':
        return 'X'
    if row3 == 'OOO':
        return 'O'
    col = get_column(board,0)
    if col == 'XXX':
        return 'X'
    if col == 'OOO':
        return 'O'
    col = get_column(board,1)
    if col == 'XXX':
        return 'X'
    if col == 'OOO':
        return 'O'
    col = get_column(board,2)
    if col == 'XXX':
        return 'X'
    if col == 'OOO':
        return 'O'
    diag = board[0][0] + board[1][1] + board[2][2]
    if diag == 'XXX':
        return 'X'
    if diag == 'OOO':
        return 'O'
    diag = board[2][0] + board[1][1] + board[0][2]
    if diag == 'XXX':
        return 'X'
    if diag == 'OOO':
        return 'O'
    return None

def take_turn(win,board,who):
    """Wait for a click, map it to a grid cell, and mark it for `who`.
    NOTE(review): does not reject an already-occupied cell — a click on a
    used square silently overwrites it."""
    # Get Move
    p = win.getMouse()
    col = int(p.getX() // 10)
    row = int(p.getY() // 10)
    Text(Point(col*10 + 5, row*10 + 5),who).draw(win)
    board[row][col] = who

def main():
    """Game driver: log a user in, play best-of-3 sets (X's wins are credited
    to the logged-in user), persist scores, and loop until the user quits."""
    log = False
    while log == False: # Multiple 3 game set loop
        key,userlist = login() # Logs in user
        if key == -3:
            log = True
            break
        xwins = 0
        owins = 0
        for i in range(3):
            print("Playing best of 3, you are currently on game",i+1)
            win,board = create_board() # Creates game board
            for turn in range(9):
                if turn % 2 == 0: # Even -> X
                    who = 'X'
                else:
                    who = 'O'
                take_turn(win,board,who)
                if check_winner(board) != None:
                    print("Player",check_winner(board),"has won this game!")
                    if who == 'X':
                        userlist[key][2] = int(userlist[key][2]) + 1
                        xwins = xwins + 1
                    else:
                        owins = owins + 1
                    win.close() # Exit the game
                    break
            if xwins == 2 or owins == 2: # Check to see if either side has won best of 3
                win.close()
                break
        print(userlist[key][0],"has a high score of",userlist[key][2])
        print("")
        repeat = input("Would you like to play another best of 3?(yes or no): ")
        if repeat.lower() != "yes":
            print("Thank you for playing!")
            log = True
        logout(userlist) # logs out current user if new data is to be written to file

main()
{ "repo_name": "itsallvoodoo/csci-school", "path": "CSCI220/Week 09 - MAR12-16/Assignment_6.py", "copies": "1", "size": "5848", "license": "apache-2.0", "hash": -5589457472817332000, "line_mean": 29.2727272727, "line_max": 130, "alpha_frac": 0.4969220246, "autogenerated": false, "ratio": 3.7083069118579584, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4705228936457958, "avg_score": null, "num_lines": null }
# Assignment_7_CDH.py
#    Plays tic-tac-toe with 2 players and a database
# <Chad Hobbs>

from graphics import *

def create_board():
    """Open the game window, draw the 3x3 grid, and return (win, board)
    where board is a 3x3 list of '' cells."""
    board = [['','',''],['','',''],['','','']]
    wwin = GraphWin("Tic Tac Toe",300,300)
    wwin.setCoords(30,30,0,0)
    Line(Point(10,0),Point(10,30)).draw(wwin)
    Line(Point(20,0),Point(20,30)).draw(wwin)
    Line(Point(0,10),Point(30,10)).draw(wwin)
    Line(Point(0,20),Point(30,20)).draw(wwin)
    return wwin,board

def get_column(board,i):
    """Concatenate column i's three cells into one string (e.g. 'XOX')."""
    return board[0][i] + board[1][i] + board[2][i]

def get_row(board,i):
    """Concatenate row i's three cells into one string (e.g. 'XOX')."""
    return board[i][0] + board[i][1] + board[i][2]

def check_winner(board):
    """Return 'X' or 'O' if that player has three in a row/column/diagonal,
    otherwise None."""
    for i in range(3):
        row = get_row(board,i)
        if row == 'XXX':
            return 'X'
        if row == 'OOO':
            return 'O'
    for i in range(3):
        col = get_column(board,i)
        if col == 'XXX':
            return 'X'
        if col == 'OOO':
            return 'O'
    diag = board[0][0] + board[1][1] + board[2][2]
    if diag == 'XXX':
        return 'X'
    if diag == 'OOO':
        return 'O'
    diag = board[2][0] + board[1][1] + board[0][2]
    if diag == 'XXX':
        return 'X'
    if diag == 'OOO':
        return 'O'
    return None

def take_turn(win,board,who):
    """Wait for a click, map it to a grid cell, and mark it for `who`."""
    # Get Move
    p = win.getMouse()
    col = int(p.getX() // 10)
    row = int(p.getY() // 10)
    Text(Point(col*10 + 5, row*10 + 5),who).draw(win)
    board[row][col] = who

def run_game():
    """Play one full game (up to 9 turns) and return the winner ('X'/'O')
    or None on a draw. Closes the game window before returning."""
    win,board = create_board()
    turn = 9
    while turn > 0:
        turn = turn - 1
        if turn % 2 == 0: # Even -> X
            who = 'X'
        else:
            who = 'O'
        take_turn(win,board,who)
        if check_winner(board) != None:
            win.close()
            return check_winner(board) # Exit the game
    win.close()
    return check_winner(board)

def get_data(): # Gets data out of database
    """Parse db.txt (header line, then 'user,password,wins,games' rows) into
    four parallel lists: users, passwords, scores, games."""
    users = []
    passwords = []
    scores = []
    games = []
    infile = open("db.txt","r")
    first_line = infile.readline()  # skip the header row
    for line in infile:
        if line != '':
            u_p_s = line.strip().split(',')
            if len(u_p_s) == 4:
                users.append(u_p_s[0])
                passwords.append(u_p_s[1])
                scores.append(int(u_p_s[2]))
                games.append(int(u_p_s[3]))
    infile.close()
    return users,passwords,scores,games

def get_user(key):
    """Return the user name stored at index `key` in the database."""
    users,passwords,scores,games = get_data()
    user = users[key]
    return user

def get_password(key):
    """Return the password stored at index `key` in the database."""
    users,passwords,scores,games = get_data()
    password = passwords[key]
    return password

def get_score(key):
    """Return the lifetime win count stored at index `key`."""
    users,passwords,scores,games = get_data()
    score = scores[key]
    return score

def get_game(key):
    """Return the lifetime game count stored at index `key`."""
    users,passwords,scores,games = get_data()
    game = games[key]
    return game

def update_password(key,password):
    """Overwrite the password at index `key` and persist the database."""
    users,passwords,scores,games = get_data()
    passwords[key] = password
    store_data(users,passwords,scores,games)
    return

def update_score(key,score):
    """Overwrite the win count at index `key` and persist the database."""
    users,passwords,scores,games = get_data()
    scores[key] = score
    store_data(users,passwords,scores,games)
    return

def update_game(key,game):
    """Overwrite the game count at index `key` and persist the database."""
    users,passwords,scores,games = get_data()
    games[key] = game
    store_data(users,passwords,scores,games)
    return

def store_data(users,passwords,scores,games):
    """Rewrite db.txt from the four parallel lists, header first."""
    outfile = open("db.txt","w")
    print("user,password,wins,games",file=outfile)
    i = 0
    while i < len(users):
        print(users[i],passwords[i],scores[i],games[i],sep=',',file=outfile)
        i = i + 1
    outfile.close()
    return

def valid_user(u,p):
    """Check credentials (u, p) against the database.
    Returns (True, index) on a match, else (False, list length)."""
    users,passwords,scores,games = get_data()
    i = 0
    while i < len(users):
        if users[i] == u and passwords[i] == p:
            return True,i
        i = i+1
    return False,i

def button_clicked(p1,p2,p):
    """Return True if click `p` lies inside the rectangle spanned by the
    (order-independent) corners p1 and p2."""
    big_x = max([p1.getX(),p2.getX()])
    small_x = min([p1.getX(),p2.getX()])
    big_y = max([p1.getY(),p2.getY()])
    small_y = min([p1.getY(),p2.getY()])
    x = p.getX()
    y = p.getY()
    if y <= big_y and y >= small_y and x <= big_x and x >= small_x:
        return True
    return False

def draw_button(win,pt1,pt2,text):
    """Draw a rectangular button with centered label `text` in `win`."""
    button = Rectangle(pt1,pt2)
    button.draw(win)
    Text(Point((pt1.getX() + pt2.getX())/2,(pt1.getY() + pt2.getY())/2),text).draw(win)
    return

def login(player):
    """Show a login dialog for player number `player`.
    Returns (True, key) on successful authentication, or (None, key) if the
    user clicks Back (key is then the last value set — 99 if no login was
    attempted)."""
    key = 99
    win = GraphWin("Login",300,300)
    win.setCoords(0,0,100,100)
    player_text = "Player " + str(player)
    Text(Point(50,80),player_text).draw(win)
    Text(Point(20,60),"User: ").draw(win)
    Text(Point(20,40),"Password: ").draw(win)
    user_entry = Entry(Point(50,60),10)
    user_entry.draw(win)
    password_entry = Entry(Point(50,40),10)
    password_entry.draw(win)
    message = Text(Point(50,90),"")
    message.draw(win)
    # Create a login button and a quit button
    login_p1 = Point(15,15)
    login_p2 = Point(35,25)
    draw_button(win,login_p1,login_p2,"Login")
    quit_p1 = Point(65,15)
    quit_p2 = Point(85,25)
    draw_button(win,quit_p1,quit_p2,"Back")
    while True: # Maximum number of clicks
        p = win.getMouse()
        if button_clicked(login_p1,login_p2,p):
            user = user_entry.getText()
            password = password_entry.getText()
            check_users,key = valid_user(user,password)
            if check_users:
                win.close()
                return True,key
            else:
                message.setText("Invalid user and/or password")
        elif button_clicked(quit_p1,quit_p2,p):
            win.close()
            return None,key
    #win.close()
    #return None,key

def show_winner(best,key):
    """Display the set winner and the lifetime stats for the user at `key`;
    wait for a click before closing."""
    # Display the results
    win = GraphWin("Final Results",300,300)
    win.setCoords(0,0,100,100)
    Text(Point(50,80),"The Winner of this best of 3 is "+str(best)).draw(win)
    user = get_user(key)
    score = get_score(key)
    games = get_game(key)
    Text(Point(50,60),"Statistics for "+user).draw(win)
    Text(Point(50,40),"Lifetime wins: "+str(score)).draw(win)
    Text(Point(50,20),"Lifetime games: "+str(games)).draw(win)
    win.getMouse()
    win.close()
    return

def new_player():
    """Dialog to register a new user: collects a name and a double-entered
    password, appends the record (0 wins, 0 games) to the database."""
    # Draw prompts and entry boxes
    win = GraphWin("Create New Player",300,300)
    win.setCoords(0,0,100,100)
    Text(Point(50,90),"Please enter a name and password").draw(win)
    Text(Point(20,70),"User: ").draw(win)
    Text(Point(20,50),"Password: ").draw(win)
    Text(Point(20,30),"Verify PW: ").draw(win)
    user = Entry(Point(50,70),10)
    user.draw(win)
    pw1 = Entry(Point(50,50),10)
    pw1.draw(win)
    pw2 = Entry(Point(50,30),10)
    pw2.draw(win)
    submit_p1 = Point(15,10)
    submit_p2 = Point(35,20)
    draw_button(win,submit_p1,submit_p2,"Submit")
    back_p1 = Point(65,10)
    back_p2 = Point(85,20)
    draw_button(win,back_p1,back_p2,"Back")
    # Get mouseclick and either submit or go back
    clicked = False
    while not clicked:
        p = win.getMouse()
        if button_clicked(submit_p1,submit_p2,p):
            password_entry = pw1.getText()
            password_entry2 = pw2.getText()
            user_entry = user.getText()
            if password_entry == password_entry2:
                users,passwords,scores,games = get_data()
                users.append(user_entry)
                passwords.append(password_entry)
                scores.append(0)
                games.append(0)
                store_data(users,passwords,scores,games)
                clicked = True
            else:
                Text(Point(50,40),"Password Mismatch!").draw(win)
        if button_clicked(back_p1,back_p2,p):
            clicked = True
    win.close()
    return

def change_pw():
    """Dialog to change the current user's password: requires a successful
    login first, then a double-entered new password."""
    valid,key = login(1)
    win = GraphWin("Update Password",300,300)
    win.setCoords(0,0,100,100)
    if valid:
        Text(Point(50,90),"Please enter new password").draw(win)
        Text(Point(20,50),"Password: ").draw(win)
        Text(Point(20,30),"Again: ").draw(win)
        pw1 = Entry(Point(50,50),10)
        pw1.draw(win)
        pw2 = Entry(Point(50,30),10)
        pw2.draw(win)
        submit_p1 = Point(15,10)
        submit_p2 = Point(35,20)
        draw_button(win,submit_p1,submit_p2,"Submit")
        back_p1 = Point(65,10)
        back_p2 = Point(85,20)
        draw_button(win,back_p1,back_p2,"Back")
        # Get mouseclick and either submit or go back
        clicked = False
        while not clicked:
            p = win.getMouse()
            if button_clicked(submit_p1,submit_p2,p):
                password_entry = pw1.getText()
                password_entry2 = pw2.getText()
                if password_entry == password_entry2:
                    update_password(key,password_entry)
                    clicked = True
                else:
                    Text(Point(50,40),"Password Mismatch!").draw(win)
            if button_clicked(back_p1,back_p2,p):
                clicked = True
    else:
        # NOTE(review): both messages are drawn at the same Point(50,50), so
        # they overlap on screen — presumably one was meant to be lower.
        Text(Point(50,50),"Your login was incorrect!").draw(win)
        Text(Point(50,50),"Click on screen to close").draw(win)
    win.getMouse()
    win.close()
    return

def welcome():
    """Main menu. Returns (mode, key, key2): mode 0 = stay in menu (account
    action taken), 1 = one player (key logged in), 2 = two players (key and
    key2 logged in), 3 = quit."""
    # Initial user interaction screen.
    win = GraphWin("Tic Tac Toe",300,300)
    win.setCoords(0,0,100,100)
    Text(Point(50,95),"Welcome to Chad's Tic-tac-toe game!").draw(win)
    # Draw all of our welcome screen buttons
    c_p1 = Point(35,77)
    c_p2 = Point(65,87)
    draw_button(win,c_p1,c_p2,"Create New")
    pw_p1 = Point(35,58)
    pw_p2 = Point(65,68)
    draw_button(win,pw_p1,pw_p2,"Update PW")
    onep_p1 = Point(35,39)
    onep_p2 = Point(65,49)
    draw_button(win,onep_p1,onep_p2,"1 Player")
    twop_p1 = Point(35,20)
    twop_p2 = Point(65,30)
    draw_button(win,twop_p1,twop_p2,"2 Player")
    q_p1 = Point(35,1)
    q_p2 = Point(65,11)
    draw_button(win,q_p1,q_p2,"Quit")
    # Check see which route to take
    clicked = False
    while not clicked:
        click = win.getMouse()
        if button_clicked(c_p1,c_p2,click):
            new_player()
            win.close()
            return 0,0,0
        if button_clicked(onep_p1,onep_p2,click):
            # NOTE(review): login() may return (None, key) if the user clicks
            # Back, but the first return value is discarded here — the game
            # proceeds as if login succeeded.
            play,key = login(1)
            win.close()
            return 1,key,0
        if button_clicked(pw_p1,pw_p2,click):
            change_pw()
            win.close()
            return 0,0,0
        if button_clicked(twop_p1,twop_p2,click):
            play,key = login(1)
            play,key2 = login(2)
            win.close()
            return 2,key,key2
        if button_clicked(q_p1,q_p2,click):
            win.close()
            return 3,0,0

def main():
    """Top-level driver: loop the menu until Quit, running best-of-3 sets in
    1- or 2-player mode and updating win/game counts in the database."""
    play = 0
    while play != 3:
        play,key,key2 = welcome()
        if play == 1: # 1 Player Version
            wins_X = 0
            wins_O = 0
            while wins_X < 2 and wins_O < 2:
                winner = run_game()
                if winner == 'X':
                    wins_X = wins_X + 1
                    score = get_score(key) + 1
                    update_score(key,score)
                elif winner == 'O':
                    wins_O = wins_O + 1
                game = get_game(key)
                game = game + 1
                update_game(key,game)
            if wins_X > wins_O:
                best = 'X'
            else:
                best = 'O'
            show_winner(best,key)
        if play == 2: # 2 Player Version
            wins_X = 0
            wins_O = 0
            while wins_X < 2 and wins_O < 2:
                winner = run_game()
                if winner == 'X':
                    wins_X = wins_X + 1
                    score = get_score(key) + 1
                    print(score)
                    update_score(key,score)
                elif winner == 'O':
                    wins_O = wins_O + 1
                    score = get_score(key2) + 1
                    update_score(key2,score)
                game = get_game(key)
                game = game + 1
                update_game(key,game)
                game = get_game(key2)
                game = game + 1
                update_game(key2,game)
            if wins_X > wins_O:
                best = 'X'
            else:
                best = 'O'
show_winner(best,key) show_winner(best,key2) # If 3 becomes true exit game main()
{ "repo_name": "itsallvoodoo/csci-school", "path": "CSCI220/Week 09 - MAR12-16/Assignment_7_CDH.py", "copies": "2", "size": "12857", "license": "apache-2.0", "hash": 7585787189706302000, "line_mean": 28.0397196262, "line_max": 87, "alpha_frac": 0.5078167535, "autogenerated": false, "ratio": 3.3017462763225476, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4809563029822548, "avg_score": null, "num_lines": null }
from graphics import *

# The ten vertices of the five-pointed star shape, as (dx, dy) offsets from
# the star's anchor (top) point.  All five stars in the original scene are
# this exact polygon translated to different anchors (verified against each
# hand-written coordinate list).
_STAR_OFFSETS = ((0, 0), (8, 14), (28, 14), (14, 28), (20, 42),
                 (0, 30), (-20, 42), (-14, 28), (-28, 14), (-8, 14))


def _draw(win, shape, fill=None, outline=None):
    """Apply optional fill/outline colors to a graphics shape, draw it in
    win, and return it.  Omitted colors keep the library defaults, matching
    the original code paths that never called setFill/setOutline."""
    if fill is not None:
        shape.setFill(fill)
    if outline is not None:
        shape.setOutline(outline)
    shape.draw(win)
    return shape


def _draw_star(win, x, y):
    """Draw one gold, yellow-outlined five-pointed star anchored at (x, y)."""
    points = [Point(x + dx, y + dy) for dx, dy in _STAR_OFFSETS]
    return _draw(win, Polygon(*points), fill="gold", outline="yellow")


def creature():
    """Draw the caterpillar scene: a segmented body with head, antennae,
    legs, three balloons on strings, hexagon/triangle tiles, five stars,
    and a title/subtitle.  Blocks until the user clicks, then closes.

    Refactored from a long run of copy-pasted shape statements into
    data-driven loops; the drawing order (and therefore the z-order of
    overlapping shapes) is preserved exactly.
    """
    win = GraphWin("Creature", 700, 700)  # "title", and dimensions

    # Body segments (tail first) and finally the head: center, fill, outline.
    for (cx, cy), fill, outline in (((448, 420), "green", "red"),
                                    ((406, 462), "blue", "green"),
                                    ((364, 420), "red", "blue"),
                                    ((322, 462), "green", "red"),
                                    ((280, 420), "yellow", "blue")):
        _draw(win, Circle(Point(cx, cy), 42), fill=fill, outline=outline)

    # (The original file also contained commented-out eye and mouth shapes;
    # they were disabled there and are intentionally not drawn here either.)

    # Antennae: a 2px purple stalk topped by a small red ball.
    for x in (266, 294):
        _draw(win, Rectangle(Point(x, 380), Point(x + 2, 360)), fill="purple")
        _draw(win, Circle(Point(x + 1, 356), 6), fill="red")

    # Legs with feet: (x of leg, y where the foot starts, y of the sole).
    for x, knee, sole in ((315, 520, 522), (320, 519, 521),
                          (401, 520, 522), (406, 519, 521)):
        _draw(win, Rectangle(Point(x, 504), Point(x + 2, knee)), fill="black")
        _draw(win, Rectangle(Point(x - 3, knee), Point(x + 2, sole)),
              fill="black")

    # Balloons: each is a string plus an oval.  The string is a zero-width
    # Rectangle (both corners share the same x), which renders as a vertical
    # line — kept exactly as the original drew it.
    for (sx, sy1, sy2), string_color, (b1, b2), balloon_color in (
            ((405, 250, 412), "yellow", ((355, 145), (455, 255)), "red"),
            ((360, 150, 378), "blue", ((315, 150), (405, 20)), "yellow"),
            ((321, 200, 418), "red", ((276, 200), (366, 90)), "blue")):
        _draw(win, Rectangle(Point(sx, sy1), Point(sx, sy2)),
              outline=string_color)
        _draw(win, Oval(Point(b1[0], b1[1]), Point(b2[0], b2[1])),
              fill=balloon_color)

    # Ground decoration: two hexagons with two triangles filling the gap.
    _draw(win, Polygon(Point(308, 550), Point(350, 550), Point(371, 592),
                       Point(350, 634), Point(308, 634), Point(287, 592)),
          fill="yellow", outline="green")
    _draw(win, Polygon(Point(392, 550), Point(434, 550), Point(455, 592),
                       Point(434, 634), Point(392, 634), Point(371, 592)),
          fill="green", outline="yellow")
    _draw(win, Polygon(Point(350, 550), Point(371, 592), Point(392, 550)),
          fill="blue", outline="red")
    _draw(win, Polygon(Point(371, 592), Point(350, 634), Point(392, 634)),
          fill="red", outline="blue")

    # Five identical stars scattered around the sky.
    for x, y in ((130, 120), (50, 50), (530, 120), (650, 50), (230, 20)):
        _draw_star(win, x, y)

    # Title and subtitle text.
    title = Text(Point(150, 250), "Practicing.")
    title.setSize(36)  # sets font size to 36 points
    title.setTextColor("red")
    title.draw(win)
    subtitle = Text(Point(150, 300), "Pre-butterfly afloat.")
    subtitle.setSize(20)  # sets font size to 20 points
    subtitle.setTextColor("blue")
    subtitle.draw(win)

    win.getMouse()  # pauses for user to click mouse in window
    win.close()


creature()
{ "repo_name": "TopGirlCoder/code", "path": "python/caterpillar/creature.py", "copies": "1", "size": "9420", "license": "mit", "hash": 2500584505064649000, "line_mean": 46.5808080808, "line_max": 205, "alpha_frac": 0.7158174098, "autogenerated": false, "ratio": 2.851952770208901, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.40677701800089006, "avg_score": null, "num_lines": null }
#Assignment cw-1 by Andrew Malfavon
#Problem 3.6 pg130
#2/10/2016
import math


def trapezint1(f, a, b):
    """Approximate the integral of f over [a, b] with a single trapezoid."""
    return ((b-a)/2.0)*(f(a) + f(b))

# print() calls (instead of Python 2 print statements) work identically
# under both Python 2 and Python 3 for a single argument.
print(trapezint1(math.sin, 0, math.pi))
print(trapezint1(math.cos, 0, math.pi))
print(trapezint1(math.sin, 0, math.pi/2))


def trapezint2(f, a, b):
    """Approximate the integral of f over [a, b] with two trapezoids
    (one extra sample at the midpoint)."""
    return (float((b-a))/4.0)*(f(a) + f(b) + 2*f((a+b)/2.0))

print(trapezint2(math.sin, 0, math.pi))
print(trapezint2(math.cos, 0, math.pi))
print(trapezint2(math.sin, 0, math.pi/2))


def trapezint3(f, a, b, n):
    """Composite trapezoid rule: integral of f over [a, b] using n panels
    of width h = (b-a)/n.

    BUG FIX: the original loop ran ``for i in range(1, n-2)``, which
    skipped the first panel (i=0) and the last two (i=n-2, n-1), so part
    of [a, b] was never integrated.  Summing i = 0 .. n-1 covers the whole
    interval.  (The local accumulator is also renamed so it no longer
    shadows the builtin ``sum``.)
    """
    h = (b-a)/float(n)
    total = 0.0
    for i in range(n):
        total += (h/2.0)*(f(a + i*h) + f(a + (i+1)*h))
    return total

print(trapezint3(math.sin, 0, math.pi, 1000))
print(trapezint3(math.cos, 0, math.pi, 1000))
print(trapezint3(math.sin, 0, math.pi/2, 1000))


#Unit Tests
def func(x):
    """Test integrand: x squared."""
    return x**2

def test_trapezint():
    assert trapezint1(func, 2, 3) == 13.0/2.0
    assert trapezint2(func, 2, 3) == 51.0/8.0
    #used round function solely for testing.
    assert round(trapezint3(func, 2, 5, 100000)) == 39.0
{ "repo_name": "chapman-phys227-2016s/cw-1-classwork-team", "path": "trapezint.py", "copies": "1", "size": "1030", "license": "mit", "hash": -4303743580097700400, "line_mean": 25.4102564103, "line_max": 60, "alpha_frac": 0.6203883495, "autogenerated": false, "ratio": 2.172995780590717, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8199840446099471, "avg_score": 0.018708736798249137, "num_lines": 39 }
# Assignment for Amy Winarske, Module 5 #Coding assignments can be saved as .py files when appropriate or .txt files otherwise, for example, #you may want to past the code from multiple .py files into a single .txt file. #When you've completed the assignment, attach the file(s) to the drop box for this module, which you'll find near the end of the module. Please be sure to include your last name and the course number in the file name of the document, like so: "your name X442.3 Assignment 5." #1.Using the keys method for dictionaries and the sort method for lists, #write a for loop that prints the keys and corresponding values of a dictionary in the alphabetical order of the keys. mydico={'Winarske':'Amy', 'Brignetti':'Chris', 'Bradley':'Steve', 'Hanna':'Julia', 'Simmons':'Amanda', 'Caruthers':'Bruce'} for key in sorted(mydico.keys()): print(key, mydico[key]) #2.As an alternative to the range function, some programmers like to increment a counter inside a while loop and #stop the while loop when the counter is no longer less than the length of the array. #Rewrite the following code using a while loop instead of a for loop. a = [7,12,9,14,15,18,12] b = [9,14,8,3,15,17,15] big = [] #for i in range(len(a)): # big.append(max(a[i],b[i])) i=0 while True: if i < len(a): big.append(max(a[i],b[i])) print(big) i = i+1 print(i) else: break else: print('We are done') #3.Write a loop that reads each line of a file and counts the number of lines that are read #until the total length of the lines is 1,000 or more characters. #Use a break statement to make sure that you don't continue reading the file once at least 1,000 characters are read. 
# I am assuming you really meant this counts the number of characters read and not the number of lines fileobject=open("myfile.txt",'r') linecount=0 totalchar=0 while True: line=fileobject.readline() print(line) if line == "": print(totalchar) break elif totalchar > 1000 : print(totalchar) break else: totalchar=totalchar+len(line) fileobject.close() #4.Modify the program written in question 3 so that it doesn't count characters on any line that begins with a pound sign (#). fileobject=open("myfile.txt",'r') linecount=0 totalchar=0 while True: line=fileobject.readline() print(line) if line == "": print(totalchar) break elif totalchar > 1000 : print(totalchar) break elif line[0] == '#' : continue else: totalchar=totalchar+len(line) fileobject.close()
{ "repo_name": "awinarske/Demo", "path": "UCBPy/Module5.py", "copies": "1", "size": "2628", "license": "mit", "hash": 7648457715759854000, "line_mean": 33.1298701299, "line_max": 275, "alpha_frac": 0.6837899543, "autogenerated": false, "ratio": 3.4761904761904763, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.951198317464874, "avg_score": 0.029599451168347064, "num_lines": 77 }
"""AssignmentGroups API Tests for Version 1.0.

This is a testing template for the generated AssignmentGroupsAPI Class.
"""
import unittest
import requests
# NOTE(review): 'secrets' here is a local configuration module expected to
# provide 'instance_address' and 'access_token'; it shadows the stdlib
# 'secrets' module — confirm this is intentional.
import secrets

from py3canvas.apis.assignment_groups import AssignmentGroupsAPI
from py3canvas.apis.assignment_groups import Gradingrules
from py3canvas.apis.assignment_groups import Assignmentgroup


class TestAssignmentGroupsAPI(unittest.TestCase):
    """Tests for the AssignmentGroupsAPI."""

    def setUp(self):
        # Build one live API client per test from the locally-configured
        # Canvas instance address and access token.
        self.client = AssignmentGroupsAPI(secrets.instance_address, secrets.access_token)

    def test_list_assignment_groups(self):
        """Integration test for the AssignmentGroupsAPI.list_assignment_groups method."""
        course_id = None # Change me!!
        r = self.client.list_assignment_groups(course_id, exclude_assignment_submission_types=None, grading_period_id=None, include=None, override_assignment_dates=None, scope_assignments_to_student=None)

    def test_get_assignment_group(self):
        """Integration test for the AssignmentGroupsAPI.get_assignment_group method."""
        course_id = None # Change me!!
        assignment_group_id = None # Change me!!
        r = self.client.get_assignment_group(course_id, assignment_group_id, grading_period_id=None, include=None, override_assignment_dates=None)

    def test_create_assignment_group(self):
        """Integration test for the AssignmentGroupsAPI.create_assignment_group method."""
        # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_edit_assignment_group(self):
        """Integration test for the AssignmentGroupsAPI.edit_assignment_group method."""
        # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration.
        pass

    def test_destroy_assignment_group(self):
        """Integration test for the AssignmentGroupsAPI.destroy_assignment_group method."""
        # NOTE(review): unlike the POST/PUT templates above, this DELETE is
        # actually executed against the live instance (with placeholder None
        # ids) — confirm it should not be stubbed out like the others.
        course_id = None # Change me!!
        assignment_group_id = None # Change me!!
        r = self.client.destroy_assignment_group(course_id, assignment_group_id, move_assignments_to=None)
{ "repo_name": "tylerclair/py3canvas", "path": "py3canvas/tests/assignment_groups.py", "copies": "1", "size": "2244", "license": "mit", "hash": 6365111869879541000, "line_mean": 44.75, "line_max": 204, "alpha_frac": 0.7179144385, "autogenerated": false, "ratio": 4.186567164179104, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5404481602679104, "avg_score": null, "num_lines": null }
"""AssignmentGroups API Version 1.0.

This API client was generated using a template. Make sure this code is
valid before using it.

Change relative to the generated original: the model setters now call
``Logger.warning`` instead of the deprecated ``Logger.warn`` alias.
"""
import logging
from datetime import date, datetime

from .base import BaseCanvasAPI
from .base import BaseModel


class AssignmentGroupsAPI(BaseCanvasAPI):
    """AssignmentGroups API Version 1.0."""

    def __init__(self, *args, **kwargs):
        """Init method for AssignmentGroupsAPI."""
        super(AssignmentGroupsAPI, self).__init__(*args, **kwargs)
        self.logger = logging.getLogger("py3canvas.AssignmentGroupsAPI")

    def list_assignment_groups(self, course_id, exclude_assignment_submission_types=None, grading_period_id=None, include=None, override_assignment_dates=None, scope_assignments_to_student=None):
        """
        List assignment groups.

        Returns the list of assignment groups for the current context.
        The returned groups are sorted by their position field.
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - course_id
        """ID"""
        path["course_id"] = course_id

        # OPTIONAL - include
        """Associations to include with the group. "discussion_topic", "all_dates"
        "assignment_visibility" & "submission" are only valid are only valid if "assignments" is also included.
        The "assignment_visibility" option additionally requires that the Differentiated Assignments course feature be turned on."""
        if include is not None:
            self._validate_enum(include, ["assignments", "discussion_topic", "all_dates", "assignment_visibility", "overrides", "submission"])
            params["include"] = include

        # OPTIONAL - exclude_assignment_submission_types
        """If "assignments" are included, those with the specified submission types
        will be excluded from the assignment groups."""
        if exclude_assignment_submission_types is not None:
            self._validate_enum(exclude_assignment_submission_types, ["online_quiz", "discussion_topic", "wiki_page", "external_tool"])
            params["exclude_assignment_submission_types"] = exclude_assignment_submission_types

        # OPTIONAL - override_assignment_dates
        """Apply assignment overrides for each assignment, defaults to true."""
        if override_assignment_dates is not None:
            params["override_assignment_dates"] = override_assignment_dates

        # OPTIONAL - grading_period_id
        """The id of the grading period in which assignment groups are being requested
        (Requires grading periods to exist.)"""
        if grading_period_id is not None:
            params["grading_period_id"] = grading_period_id

        # OPTIONAL - scope_assignments_to_student
        """If true, all assignments returned will apply to the current user in the
        specified grading period. If assignments apply to other students in the
        specified grading period, but not the current user, they will not be
        returned. (Requires the grading_period_id argument and grading periods to
        exist. In addition, the current user must be a student.)"""
        if scope_assignments_to_student is not None:
            params["scope_assignments_to_student"] = scope_assignments_to_student

        self.logger.debug("GET /api/v1/courses/{course_id}/assignment_groups with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/courses/{course_id}/assignment_groups".format(**path), data=data, params=params, all_pages=True)

    def get_assignment_group(self, course_id, assignment_group_id, grading_period_id=None, include=None, override_assignment_dates=None):
        """
        Get an Assignment Group.

        Returns the assignment group with the given id.
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - course_id
        """ID"""
        path["course_id"] = course_id

        # REQUIRED - PATH - assignment_group_id
        """ID"""
        path["assignment_group_id"] = assignment_group_id

        # OPTIONAL - include
        """Associations to include with the group. "discussion_topic" and
        "assignment_visibility" and "submission" are only valid if "assignments" is also included.
        The "assignment_visibility" option additionally requires that the Differentiated Assignments course feature be turned on."""
        if include is not None:
            self._validate_enum(include, ["assignments", "discussion_topic", "assignment_visibility", "submission"])
            params["include"] = include

        # OPTIONAL - override_assignment_dates
        """Apply assignment overrides for each assignment, defaults to true."""
        if override_assignment_dates is not None:
            params["override_assignment_dates"] = override_assignment_dates

        # OPTIONAL - grading_period_id
        """The id of the grading period in which assignment groups are being requested
        (Requires grading periods to exist on the account)"""
        if grading_period_id is not None:
            params["grading_period_id"] = grading_period_id

        self.logger.debug("GET /api/v1/courses/{course_id}/assignment_groups/{assignment_group_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/courses/{course_id}/assignment_groups/{assignment_group_id}".format(**path), data=data, params=params, single_item=True)

    def create_assignment_group(self, course_id, group_weight=None, integration_data=None, name=None, position=None, rules=None, sis_source_id=None):
        """
        Create an Assignment Group.

        Create a new assignment group for this course.
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - course_id
        """ID"""
        path["course_id"] = course_id

        # OPTIONAL - name
        """The assignment group's name"""
        if name is not None:
            data["name"] = name

        # OPTIONAL - position
        """The position of this assignment group in relation to the other assignment groups"""
        if position is not None:
            data["position"] = position

        # OPTIONAL - group_weight
        """The percent of the total grade that this assignment group represents"""
        if group_weight is not None:
            data["group_weight"] = group_weight

        # OPTIONAL - sis_source_id
        """The sis source id of the Assignment Group"""
        if sis_source_id is not None:
            data["sis_source_id"] = sis_source_id

        # OPTIONAL - integration_data
        """The integration data of the Assignment Group"""
        if integration_data is not None:
            data["integration_data"] = integration_data

        # OPTIONAL - rules
        """The grading rules that are applied within this assignment group
        See the Assignment Group object definition for format"""
        if rules is not None:
            data["rules"] = rules

        self.logger.debug("POST /api/v1/courses/{course_id}/assignment_groups with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("POST", "/api/v1/courses/{course_id}/assignment_groups".format(**path), data=data, params=params, single_item=True)

    def edit_assignment_group(self, course_id, assignment_group_id):
        """
        Edit an Assignment Group.

        Modify an existing Assignment Group.
        Accepts the same parameters as Assignment Group creation
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - course_id
        """ID"""
        path["course_id"] = course_id

        # REQUIRED - PATH - assignment_group_id
        """ID"""
        path["assignment_group_id"] = assignment_group_id

        self.logger.debug("PUT /api/v1/courses/{course_id}/assignment_groups/{assignment_group_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("PUT", "/api/v1/courses/{course_id}/assignment_groups/{assignment_group_id}".format(**path), data=data, params=params, single_item=True)

    def destroy_assignment_group(self, course_id, assignment_group_id, move_assignments_to=None):
        """
        Destroy an Assignment Group.

        Deletes the assignment group with the given id.
        """
        path = {}
        data = {}
        params = {}

        # REQUIRED - PATH - course_id
        """ID"""
        path["course_id"] = course_id

        # REQUIRED - PATH - assignment_group_id
        """ID"""
        path["assignment_group_id"] = assignment_group_id

        # OPTIONAL - move_assignments_to
        """The ID of an active Assignment Group to which the assignments that are
        currently assigned to the destroyed Assignment Group will be assigned.
        NOTE: If this argument is not provided, any assignments in this Assignment
        Group will be deleted."""
        if move_assignments_to is not None:
            params["move_assignments_to"] = move_assignments_to

        self.logger.debug("DELETE /api/v1/courses/{course_id}/assignment_groups/{assignment_group_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("DELETE", "/api/v1/courses/{course_id}/assignment_groups/{assignment_group_id}".format(**path), data=data, params=params, single_item=True)


class Gradingrules(BaseModel):
    """Gradingrules Model."""

    def __init__(self, never_drop=None, drop_highest=None, drop_lowest=None):
        """Init method for Gradingrules class."""
        self._never_drop = never_drop
        self._drop_highest = drop_highest
        self._drop_lowest = drop_lowest

        self.logger = logging.getLogger('py3canvas.Gradingrules')

    @property
    def never_drop(self):
        """Assignment IDs that should never be dropped."""
        return self._never_drop

    @never_drop.setter
    def never_drop(self, value):
        """Setter for never_drop property."""
        # FIX: Logger.warn is a deprecated alias of Logger.warning; the
        # supported name is used here (and in every setter below).
        self.logger.warning("Setting values on never_drop will NOT update the remote Canvas instance.")
        self._never_drop = value

    @property
    def drop_highest(self):
        """Number of highest scores to be dropped for each user."""
        return self._drop_highest

    @drop_highest.setter
    def drop_highest(self, value):
        """Setter for drop_highest property."""
        self.logger.warning("Setting values on drop_highest will NOT update the remote Canvas instance.")
        self._drop_highest = value

    @property
    def drop_lowest(self):
        """Number of lowest scores to be dropped for each user."""
        return self._drop_lowest

    @drop_lowest.setter
    def drop_lowest(self, value):
        """Setter for drop_lowest property."""
        self.logger.warning("Setting values on drop_lowest will NOT update the remote Canvas instance.")
        self._drop_lowest = value


class Assignmentgroup(BaseModel):
    """Assignmentgroup Model."""

    def __init__(self, group_weight=None, name=None, rules=None, assignments=None, sis_source_id=None, integration_data=None, position=None, id=None):
        """Init method for Assignmentgroup class."""
        self._group_weight = group_weight
        self._name = name
        self._rules = rules
        self._assignments = assignments
        self._sis_source_id = sis_source_id
        self._integration_data = integration_data
        self._position = position
        self._id = id

        self.logger = logging.getLogger('py3canvas.Assignmentgroup')

    @property
    def group_weight(self):
        """the weight of the Assignment Group."""
        return self._group_weight

    @group_weight.setter
    def group_weight(self, value):
        """Setter for group_weight property."""
        self.logger.warning("Setting values on group_weight will NOT update the remote Canvas instance.")
        self._group_weight = value

    @property
    def name(self):
        """the name of the Assignment Group."""
        return self._name

    @name.setter
    def name(self, value):
        """Setter for name property."""
        self.logger.warning("Setting values on name will NOT update the remote Canvas instance.")
        self._name = value

    @property
    def rules(self):
        """the grading rules that this Assignment Group has."""
        return self._rules

    @rules.setter
    def rules(self, value):
        """Setter for rules property."""
        self.logger.warning("Setting values on rules will NOT update the remote Canvas instance.")
        self._rules = value

    @property
    def assignments(self):
        """the assignments in this Assignment Group (see the Assignment API for a detailed list of fields)."""
        return self._assignments

    @assignments.setter
    def assignments(self, value):
        """Setter for assignments property."""
        self.logger.warning("Setting values on assignments will NOT update the remote Canvas instance.")
        self._assignments = value

    @property
    def sis_source_id(self):
        """the sis source id of the Assignment Group."""
        return self._sis_source_id

    @sis_source_id.setter
    def sis_source_id(self, value):
        """Setter for sis_source_id property."""
        self.logger.warning("Setting values on sis_source_id will NOT update the remote Canvas instance.")
        self._sis_source_id = value

    @property
    def integration_data(self):
        """the integration data of the Assignment Group."""
        return self._integration_data

    @integration_data.setter
    def integration_data(self, value):
        """Setter for integration_data property."""
        self.logger.warning("Setting values on integration_data will NOT update the remote Canvas instance.")
        self._integration_data = value

    @property
    def position(self):
        """the position of the Assignment Group."""
        return self._position

    @position.setter
    def position(self, value):
        """Setter for position property."""
        self.logger.warning("Setting values on position will NOT update the remote Canvas instance.")
        self._position = value

    @property
    def id(self):
        """the id of the Assignment Group."""
        return self._id

    @id.setter
    def id(self, value):
        """Setter for id property."""
        self.logger.warning("Setting values on id will NOT update the remote Canvas instance.")
        self._id = value
{ "repo_name": "tylerclair/py3canvas", "path": "py3canvas/apis/assignment_groups.py", "copies": "1", "size": "14670", "license": "mit", "hash": 5859957307424242000, "line_mean": 39.75, "line_max": 195, "alpha_frac": 0.6438991138, "autogenerated": false, "ratio": 4.367371241440905, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5511270355240905, "avg_score": null, "num_lines": null }
"""AssignmentGroups API Version 1.0. This API client was generated using a template. Make sure this code is valid before using it. """ import logging from datetime import date, datetime from base import BaseCanvasAPI from base import BaseModel class AssignmentGroupsAPI(BaseCanvasAPI): """AssignmentGroups API Version 1.0.""" def __init__(self, *args, **kwargs): """Init method for AssignmentGroupsAPI.""" super(AssignmentGroupsAPI, self).__init__(*args, **kwargs) self.logger = logging.getLogger("pycanvas.AssignmentGroupsAPI") def list_assignment_groups(self, course_id, exclude_assignment_submission_types=None, grading_period_id=None, include=None, override_assignment_dates=None, scope_assignments_to_student=None): """ List assignment groups. Returns the list of assignment groups for the current context. The returned groups are sorted by their position field. """ path = {} data = {} params = {} # REQUIRED - PATH - course_id """ID""" path["course_id"] = course_id # OPTIONAL - include """Associations to include with the group. "discussion_topic", "all_dates" "assignment_visibility" & "submission" are only valid are only valid if "assignments" is also included. 
The "assignment_visibility" option additionally requires that the Differentiated Assignments course feature be turned on.""" if include is not None: self._validate_enum(include, ["assignments", "discussion_topic", "all_dates", "assignment_visibility", "overrides", "submission"]) params["include"] = include # OPTIONAL - exclude_assignment_submission_types """If "assignments" are included, those with the specified submission types will be excluded from the assignment groups.""" if exclude_assignment_submission_types is not None: self._validate_enum(exclude_assignment_submission_types, ["online_quiz", "discussion_topic", "wiki_page", "external_tool"]) params["exclude_assignment_submission_types"] = exclude_assignment_submission_types # OPTIONAL - override_assignment_dates """Apply assignment overrides for each assignment, defaults to true.""" if override_assignment_dates is not None: params["override_assignment_dates"] = override_assignment_dates # OPTIONAL - grading_period_id """The id of the grading period in which assignment groups are being requested (Requires the Multiple Grading Periods feature turned on.)""" if grading_period_id is not None: params["grading_period_id"] = grading_period_id # OPTIONAL - scope_assignments_to_student """If true, all assignments returned will apply to the current user in the specified grading period. If assignments apply to other students in the specified grading period, but not the current user, they will not be returned. (Requires the grading_period_id argument and the Multiple Grading Periods feature turned on. 
In addition, the current user must be a student.)""" if scope_assignments_to_student is not None: params["scope_assignments_to_student"] = scope_assignments_to_student self.logger.debug("GET /api/v1/courses/{course_id}/assignment_groups with query params: {params} and form data: {data}".format(params=params, data=data, **path)) return self.generic_request("GET", "/api/v1/courses/{course_id}/assignment_groups".format(**path), data=data, params=params, all_pages=True) def get_assignment_group(self, course_id, assignment_group_id, grading_period_id=None, include=None, override_assignment_dates=None): """ Get an Assignment Group. Returns the assignment group with the given id. """ path = {} data = {} params = {} # REQUIRED - PATH - course_id """ID""" path["course_id"] = course_id # REQUIRED - PATH - assignment_group_id """ID""" path["assignment_group_id"] = assignment_group_id # OPTIONAL - include """Associations to include with the group. "discussion_topic" and "assignment_visibility" and "submission" are only valid if "assignments" is also included. 
The "assignment_visibility" option additionally requires that the Differentiated Assignments course feature be turned on.""" if include is not None: self._validate_enum(include, ["assignments", "discussion_topic", "assignment_visibility", "submission"]) params["include"] = include # OPTIONAL - override_assignment_dates """Apply assignment overrides for each assignment, defaults to true.""" if override_assignment_dates is not None: params["override_assignment_dates"] = override_assignment_dates # OPTIONAL - grading_period_id """The id of the grading period in which assignment groups are being requested (Requires the Multiple Grading Periods account feature turned on)""" if grading_period_id is not None: params["grading_period_id"] = grading_period_id self.logger.debug("GET /api/v1/courses/{course_id}/assignment_groups/{assignment_group_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path)) return self.generic_request("GET", "/api/v1/courses/{course_id}/assignment_groups/{assignment_group_id}".format(**path), data=data, params=params, single_item=True) def create_assignment_group(self, course_id, group_weight=None, integration_data=None, name=None, position=None, rules=None, sis_source_id=None): """ Create an Assignment Group. Create a new assignment group for this course. 
""" path = {} data = {} params = {} # REQUIRED - PATH - course_id """ID""" path["course_id"] = course_id # OPTIONAL - name """The assignment group's name""" if name is not None: data["name"] = name # OPTIONAL - position """The position of this assignment group in relation to the other assignment groups""" if position is not None: data["position"] = position # OPTIONAL - group_weight """The percent of the total grade that this assignment group represents""" if group_weight is not None: data["group_weight"] = group_weight # OPTIONAL - sis_source_id """The sis source id of the Assignment Group""" if sis_source_id is not None: data["sis_source_id"] = sis_source_id # OPTIONAL - integration_data """The integration data of the Assignment Group""" if integration_data is not None: data["integration_data"] = integration_data # OPTIONAL - rules """The grading rules that are applied within this assignment group See the Assignment Group object definition for format""" if rules is not None: data["rules"] = rules self.logger.debug("POST /api/v1/courses/{course_id}/assignment_groups with query params: {params} and form data: {data}".format(params=params, data=data, **path)) return self.generic_request("POST", "/api/v1/courses/{course_id}/assignment_groups".format(**path), data=data, params=params, single_item=True) def edit_assignment_group(self, course_id, assignment_group_id): """ Edit an Assignment Group. Modify an existing Assignment Group. 
Accepts the same parameters as Assignment Group creation """ path = {} data = {} params = {} # REQUIRED - PATH - course_id """ID""" path["course_id"] = course_id # REQUIRED - PATH - assignment_group_id """ID""" path["assignment_group_id"] = assignment_group_id self.logger.debug("PUT /api/v1/courses/{course_id}/assignment_groups/{assignment_group_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path)) return self.generic_request("PUT", "/api/v1/courses/{course_id}/assignment_groups/{assignment_group_id}".format(**path), data=data, params=params, single_item=True) def destroy_assignment_group(self, course_id, assignment_group_id, move_assignments_to=None): """ Destroy an Assignment Group. Deletes the assignment group with the given id. """ path = {} data = {} params = {} # REQUIRED - PATH - course_id """ID""" path["course_id"] = course_id # REQUIRED - PATH - assignment_group_id """ID""" path["assignment_group_id"] = assignment_group_id # OPTIONAL - move_assignments_to """The ID of an active Assignment Group to which the assignments that are currently assigned to the destroyed Assignment Group will be assigned. 
NOTE: If this argument is not provided, any assignments in this Assignment Group will be deleted.""" if move_assignments_to is not None: params["move_assignments_to"] = move_assignments_to self.logger.debug("DELETE /api/v1/courses/{course_id}/assignment_groups/{assignment_group_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path)) return self.generic_request("DELETE", "/api/v1/courses/{course_id}/assignment_groups/{assignment_group_id}".format(**path), data=data, params=params, single_item=True) class Gradingrules(BaseModel): """Gradingrules Model.""" def __init__(self, never_drop=None, drop_highest=None, drop_lowest=None): """Init method for Gradingrules class.""" self._never_drop = never_drop self._drop_highest = drop_highest self._drop_lowest = drop_lowest self.logger = logging.getLogger('pycanvas.Gradingrules') @property def never_drop(self): """Assignment IDs that should never be dropped.""" return self._never_drop @never_drop.setter def never_drop(self, value): """Setter for never_drop property.""" self.logger.warn("Setting values on never_drop will NOT update the remote Canvas instance.") self._never_drop = value @property def drop_highest(self): """Number of highest scores to be dropped for each user.""" return self._drop_highest @drop_highest.setter def drop_highest(self, value): """Setter for drop_highest property.""" self.logger.warn("Setting values on drop_highest will NOT update the remote Canvas instance.") self._drop_highest = value @property def drop_lowest(self): """Number of lowest scores to be dropped for each user.""" return self._drop_lowest @drop_lowest.setter def drop_lowest(self, value): """Setter for drop_lowest property.""" self.logger.warn("Setting values on drop_lowest will NOT update the remote Canvas instance.") self._drop_lowest = value class Assignmentgroup(BaseModel): """Assignmentgroup Model.""" def __init__(self, group_weight=None, name=None, rules=None, assignments=None, 
sis_source_id=None, integration_data=None, position=None, id=None): """Init method for Assignmentgroup class.""" self._group_weight = group_weight self._name = name self._rules = rules self._assignments = assignments self._sis_source_id = sis_source_id self._integration_data = integration_data self._position = position self._id = id self.logger = logging.getLogger('pycanvas.Assignmentgroup') @property def group_weight(self): """the weight of the Assignment Group.""" return self._group_weight @group_weight.setter def group_weight(self, value): """Setter for group_weight property.""" self.logger.warn("Setting values on group_weight will NOT update the remote Canvas instance.") self._group_weight = value @property def name(self): """the name of the Assignment Group.""" return self._name @name.setter def name(self, value): """Setter for name property.""" self.logger.warn("Setting values on name will NOT update the remote Canvas instance.") self._name = value @property def rules(self): """the grading rules that this Assignment Group has.""" return self._rules @rules.setter def rules(self, value): """Setter for rules property.""" self.logger.warn("Setting values on rules will NOT update the remote Canvas instance.") self._rules = value @property def assignments(self): """the assignments in this Assignment Group (see the Assignment API for a detailed list of fields).""" return self._assignments @assignments.setter def assignments(self, value): """Setter for assignments property.""" self.logger.warn("Setting values on assignments will NOT update the remote Canvas instance.") self._assignments = value @property def sis_source_id(self): """the sis source id of the Assignment Group.""" return self._sis_source_id @sis_source_id.setter def sis_source_id(self, value): """Setter for sis_source_id property.""" self.logger.warn("Setting values on sis_source_id will NOT update the remote Canvas instance.") self._sis_source_id = value @property def integration_data(self): """the 
integration data of the Assignment Group.""" return self._integration_data @integration_data.setter def integration_data(self, value): """Setter for integration_data property.""" self.logger.warn("Setting values on integration_data will NOT update the remote Canvas instance.") self._integration_data = value @property def position(self): """the position of the Assignment Group.""" return self._position @position.setter def position(self, value): """Setter for position property.""" self.logger.warn("Setting values on position will NOT update the remote Canvas instance.") self._position = value @property def id(self): """the id of the Assignment Group.""" return self._id @id.setter def id(self, value): """Setter for id property.""" self.logger.warn("Setting values on id will NOT update the remote Canvas instance.") self._id = value
{ "repo_name": "PGower/PyCanvas", "path": "pycanvas/apis/assignment_groups.py", "copies": "1", "size": "15084", "license": "mit", "hash": 4354846805164811000, "line_mean": 39.9, "line_max": 195, "alpha_frac": 0.6294749403, "autogenerated": false, "ratio": 4.473309608540926, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5602784548840926, "avg_score": null, "num_lines": null }
ASSIGNMENT_LEFT = "<-" DOG = "dog" CALL = "call" END = "end" BOX = "box" SAY = "say" RETURN = "return" EXIT = "exit" IF = "if" ELSE = "else" COMMA = "," WHILE = "while" PLUS = "+" MINUS = "-" DIVIDE = "/" MOD = "%" MULTIPLY = "*" LEFT_PARAN = "(" RIGHT_PARAN = ")" LT = "<" GT = ">" LE = "<=" GE = ">=" EQ = "=" NE = "!=" AND = "&&" OR = "||" NOT = "!" PERIOD = "." IDENTIFIER = "Variable" NUMBER = "Number" STRING = "String" from Lexer import Lexer as lexer from symbols import * from Node import Node class Parser: def __init__(self, sourceText, outputFile, verbose=False): self.token = "" self.ast = "" self.indent = 0 self.lexer = lexer(sourceText, verbose) self.out = outputFile self.verbose = verbose # Set of executing instructions self.executing = set() def error(self, token=None): token = self.token print("PROBLEM with following token: " + token.type + " at line " + str(token.lineIndex + 1) + ", col " + str(token.colIndex + 1)) quit() def parse(self): self.token = self.getToken() self.program() if self.verbose: print("Successful Parse") def getToken(self): if self.token: # print the current token, before we get the next one if self.verbose: print((" " * self.indent) + " (" + self.token.show(align=False) + ")") # get next token return self.lexer.get() def found(self, token_type, node=None): if self.token.type == token_type: self.consume(token_type, node) return True return False def consume(self, token_type, node=None): if self.token.type == token_type: if node: node.add(self.token) # Do the translating if token_type == DOG: # Write functions self.out.write('def ') elif token_type == AND: self.out.write('and ') elif token_type == OR: self.out.write('or ') elif token_type == CALL: pass elif token_type == BOX: pass elif token_type == ASSIGNMENT_LEFT: self.out.write('= ') elif token_type == EQ: self.out.write('== ') elif token_type == SAY: self.out.write('print ') elif token_type == PERIOD: self.out.write('\n') elif token_type == END: self.out.write('\n\n') self.indent 
-= 1 elif token_type == EXIT: quit() elif token_type == COMMA: if "assignment" in self.executing: self.out.write('\n') else: self.out.write(', ') elif token_type == ELSE: self.indent -= 1 self.out.write(self.indent*' ' + 'else:\n') self.indent += 1 else: # write directly the token self.out.write(str(self.token.cargo) + ' ') self.token = self.getToken() else: print("consume problem: wrong token") self.error(self.token) def program(self): """ program: statement (statement)* EOF """ self.executing.add("program") node = Node() self.block(node) while not self.found(EOF, node): self.block(node) self.executing.remove("program") def block(self, node=None): """ block: DOG function | statement | IF if_cond | WHILE while_cond """ # print any indent self.out.write(self.indent*' ') if self.found(DOG, node): self.function(node) elif self.found(IF, node): self.if_cond(node) elif self.found(WHILE, node): self.while_cond(node) else: self.statement(node) def function(self, node=None): """ function: IDENTIFIER "("IDENTIFIER (COMMA IDENTIFIER)* ")" (block)* END """ self.executing.add("function") if self.found(IDENTIFIER): # Define a function pass else: print("dog problem: Missing dog name") self.error(self.token) if self.found(LEFT_PARAN, node): if self.found(IDENTIFIER, node): while self.found(COMMA, node): if self.found(IDENTIFIER, node): pass else: print("dog problem: Missing identifier after ','") self.error(self.token) if self.found(RIGHT_PARAN, node): # Increase indent and add colon self.indent += 1 self.out.write(':\n') else: print("dog problem: Missing ')'") self.error(self.token) else: if self.found(RIGHT_PARAN, node): # Increase indent and add colon self.indent += 1 self.out.write(':\n') else: print("dog problem: Missing ')'") self.error(self.token) else: print("dog problem: Missing '('") self.error(self.token) # end function when find "end" # if we find eof first, error while not self.found(END, node): if self.found(EOF, node): print("dog problem: Unexpected end of file: 
Missing end statement") self.error(self.token) self.block() self.executing.remove("function") def function_call(self, node=None): self.executing.add("function_call") if not self.found(IDENTIFIER, node): print("dog call problem: Missing dog name") self.error(self.token) if not self.found(LEFT_PARAN, node): print("dog call problem: Missing '('") self.error(self.token) self.expression(node) while self.found(COMMA, node): self.expression(node) if not self.found(RIGHT_PARAN, node): print("dog call problem: Missing ')'") self.error(self.token) self.executing.remove("function_call") def if_cond(self, node=None): """ ifcond: condition block (ELSE block)? END """ self.executing.add("if_cond") self.condition() # print the colon and increase indent self.out.write(':\n') self.indent += 1 while (not self.found(END, node)) and (not self.found(ELSE, node)): if self.found(EOF): print("If condition error: Unexpected end of file. Probably missing 'end'") self.error(self.token) self.block(node) if self.found(ELSE, node): while not self.found(END): if self.found(EOF): print("If condition error: Unexpected end of file. Probably missing 'end'") self.error(self.token) self.block(node) break self.executing.remove("if_cond") def while_cond(self, node=None): """ ifcond: condition block END """ self.executing.add("while_cond") self.condition(node) # print the colon and increase indent self.out.write(':\n') self.indent += 1 while not self.found(END): if self.found(EOF): print("While condition error: Unexpected end of file. 
Probably missing 'end'") self.error(self.token) self.block(node) self.executing.remove("while_cond") def factor(self, node=None): """ factor: STRING | IDENTIFIER | NUMBER | "(" expression ")" | LIST """ if self.found(STRING, node): pass elif self.found(IDENTIFIER, node): pass elif self.found(NUMBER, node): pass elif self.found(LEFT_PARAN, node): # Error if string if self.about(STRING, node): print("factor problem: cannot perform arithmetics on strings") self.error(self.token) self.expression(node) if self.found(RIGHT_PARAN, node): pass else: print("factor problem: missing ')'") self.error(self.token) # elif self.found(LIST_BEGIN): # self.found(NUMBER) # while self.found(COMMA): # self.found(NUMBER) # if not self.found(LIST_END): # print("factor problem: lists should end with []") else: print("factor problem: wrong token") self.error(self.token) def term(self, node=None): """ term: factor ((MULTIPLY | DIVIDE | MOD) factor)* """ self.factor(node) while self.found(MULTIPLY, node) or self.found(DIVIDE, node) or self.found(MOD, node): if self.found(STRING, node): print("term problem: cannot perform arithmetics on strings") self.error(self.token) self.factor(node) def expression(self, node=None): """ expression: (term ((PLUS | MINUS) term)* | statement) """ if(self.found(LEFT_PARAN)): if(self.found(CALL)): self.function_call() return self.term(node) while self.found(PLUS, node) or self.found(MINUS, node): if self.found(STRING, node): print("expression problem: cannot perform arithmetics on strings") self.error(self.token) self.term(node) if not self.found(RIGHT_PARAN): print("expression problem: missing ')'") self.error(self.token) else: if(self.found(CALL)): self.function_call() return self.term(node) while self.found(PLUS, node) or self.found(MINUS, node): if self.found(STRING, node): print("expression problem: cannot perform arithmetics on strings") self.error(self.token) self.term(node) def condition(self, node=None): """ condition: "(" condition ")" | 
(simpleCondition | "!" condition) (("&&" | "||") condition | "!" condition))* """ if self.found(LEFT_PARAN, node): self.condition(node) if not self.found(RIGHT_PARAN, node): print("condition error: Missing ')'") self.error(self.token) elif self.found(NOT, node): self.condition(node) else: self.simpleCondition(node) while self.found(AND, node) or self.found(OR, node): self.condition(node) def simpleCondition(self, node=None): """ condition: expression ("<" | ">" | "<=" | ">=" | "=" | "!=") expression """ self.expression(node) if (self.found(GE, node) or self.found(LE, node) or self.found(LT, node) or self.found(GT, node) or self.found(EQ, node) or self.found(NE, node)): self.expression(node) elif self.token.type != RIGHT_PARAN: print("condition error: wrong token") self.error(self.token) else: pass def statement(self, node=None): """ statement: (EXIT | SAY expression | assignmentStatement | RETURN expression) PERIOD """ if self.found(EXIT, node): pass elif self.found(CALL, node): self.function_call(node) elif self.found(SAY, node): self.expression(node) elif self.found(RETURN, node): self.expression() else: self.assignmentStatement(node) if not self.found(PERIOD, node): print("statement problem: Missing '.'") self.error(self.token) def assignmentStatement(self, node=None): """ assignmentStatement: BOX assignment (COMMA assignment)* """ self.executing.add("assignment") if self.found(BOX, node): pass else: # print("'box' keyword missing") # self.error(self.token) pass self.assignment(node) while self.found(COMMA, node): self.assignment(node) self.executing.remove("assignment") def assignment(self, node=None): """ assignment: IDENTIFIER <- expression """ if self.found(IDENTIFIER, node): if self.found(ASSIGNMENT_LEFT, node): self.expression(node) else: print("assignment sign '<-' missing") self.error(self.token) else: print("identifier missing") self.error(self.token)
{ "repo_name": "dragosthealex/hackerLite", "path": "python/Parser.py", "copies": "1", "size": "11364", "license": "mit", "hash": -145961674322657900, "line_mean": 25.7388235294, "line_max": 134, "alpha_frac": 0.5869412179, "autogenerated": false, "ratio": 3.63764404609475, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9656705983752571, "avg_score": 0.013575856048435591, "num_lines": 425 }
# Grading configuration for assignment P5 of an autograder.
# Each top-level constant is consumed by the grader driver; the data here is
# declarative only — no logic lives in this module.

# Assignment identifier and number of test rounds run by the grader.
ASSIGNMENT_NAME = 'P5'
ASSIGNMENT_TEST_NUM = 8

# Regex-based output/script checks — unused for this assignment (empty).
OUTPUT_RESULT_REG_EXP = []
SCRIPT_REG_EXP = []
SCRIPT_EXISTENCE_REG_EXP = []

# Order in which student functions are exercised.
FUNCTION_ORDER = ['most_repeated_letters', 'has_equal_letters', 'is_palindrome', 'is_trick_round', 'total_points']

# Functional tests: for each student function, a list of cases with
# positional 'input_args' and the expected 'return_val'.
TEST_FUNC = {'most_repeated_letters': [
                 {'input_args': ['a'], 'return_val': 1},
                 {'input_args': ['aabbcc'], 'return_val': 2},
                 {'input_args': ['azaca'], 'return_val': 3},
                 {'input_args': ['tttzzkzbza'], 'return_val': 4},
                 {'input_args': ['qwhertyuiopasdfghjklzoxcvbnom'], 'return_val': 3}],
             'has_equal_letters': [
                 {'input_args': ['khaleesi'], 'return_val': True},
                 {'input_args': ['brood'], 'return_val': False},
                 {'input_args': ['aeiou'], 'return_val': False},
                 {'input_args': ['bdfzh'], 'return_val': False},
                 {'input_args': ['abcduo'], 'return_val': True},
                 {'input_args': ['zaxecivobu'], 'return_val': True}],
             'is_palindrome': [
                 {'input_args': ['a'], 'return_val': True},
                 {'input_args': ['level'], 'return_val': True},
                 {'input_args': ['palindrome'], 'return_val': False},
                 {'input_args': ['beliilec'], 'return_val': False},
                 {'input_args': ['levvel'], 'return_val': True},
                 {'input_args': ['op'], 'return_val': False}],
             'is_trick_round': [
                 {'input_args': ['angry', 'imbue'], 'return_val': True},
                 {'input_args': ['ayewrngry', 'imywerewrwerdewdescbue'], 'return_val': True},
                 {'input_args': ['hotcat', 'mouse'], 'return_val': False},
                 {'input_args': ['y', 'y'], 'return_val': False},
                 {'input_args': ['youtube', 'findyo'], 'return_val': False},
                 {'input_args': ['itshard', 'toyfind'], 'return_val': True}],
             'total_points': [
                 {'input_args': ['dog'], 'return_val': 3},
                 {'input_args': ['auffer'], 'return_val': 24},
                 {'input_args': ['evlevelve'], 'return_val': 15},
                 {'input_args': ['glgjssa'], 'return_val': 14},
                 {'input_args': ['oliveisbutifulufitubsievilo'], 'return_val': 45},
                 {'input_args': ['qyhfbzzbfhyq'], 'return_val': 120}]
             }

# Source-level checks per function: 'script_pat' is a regex that must match
# the function's source; 'script_pat_count' is a regex whose match count is
# tallied (e.g. counting 'for ' loops). The last pattern in each list matches
# a triple-quoted docstring.
TEST_SCRIPT = {'total_points': [{'script_pat': 'most_repeated_letters\('},
                                {'script_pat': 'has_equal_letters\('},
                                {'script_pat': 'is_palindrome\('},
                                {'script_pat_count': 'for '},
                                {'script_pat': '"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}],
               'most_repeated_letters': [{'script_pat_count': 'for '},
                                         {'script_pat': '"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}],
               'has_equal_letters': [{'script_pat_count': 'for '},
                                     {'script_pat': '"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}],
               'is_palindrome': [{'script_pat_count': 'for '},
                                 {'script_pat': '"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}],
               'is_trick_round': [{'script_pat_count': 'for '},
                                  {'script_pat': '"""[\s\S]*?"""|\'\'\'[\s\S]*?\'\'\''}]
               }

# Order in which grading rules are applied/reported.
GRADING_RULES_ORDER = ['most_repeated_letters', 'has_equal_letters', 'is_palindrome', 'total_points_use_3_func', 'total_points', 'is_trick_round', 'docstring', 'atleast_2_for']

# Grading rules. 'rules' selects the aggregation:
#   'and'      — all listed tests must pass to earn 'points';
#   'groupadd' — each (points, tests) group is earned independently;
#   'sum'      — total match count must reach 'lowerBound'.
# Each test entry references TEST_FUNC / TEST_SCRIPT by function name and
# index, plus the message shown on failure.
GRADING_RULES = {'most_repeated_letters': {'rules': 'and', 'order': 0, 'points': 1,
                     'test': [{'type': 'func', 'func_name': 'most_repeated_letters',
                               'index': 0, 'check': 'return_val', 'error': 'test 1 of most_repeated_letters'},
                              {'type': 'func', 'func_name': 'most_repeated_letters',
                               'index': 1, 'check': 'return_val', 'error': 'test 2 of most_repeated_letters'},
                              {'type': 'func', 'func_name': 'most_repeated_letters',
                               'index': 2, 'check': 'return_val', 'error': 'test 3 of most_repeated_letters'},
                              {'type': 'func', 'func_name': 'most_repeated_letters',
                               'index': 3, 'check': 'return_val', 'error': 'test 4 of most_repeated_letters'}]},
                 'has_equal_letters': {'rules': 'and', 'order': 1, 'points': 1,
                     'test': [{'type': 'func', 'func_name': 'has_equal_letters',
                               'index': 0, 'check': 'return_val', 'error': 'test 1 of has_equal_letters'},
                              {'type': 'func', 'func_name': 'has_equal_letters',
                               'index': 1, 'check': 'return_val', 'error': 'test 2 of has_equal_letters'},
                              {'type': 'func', 'func_name': 'has_equal_letters',
                               'index': 2, 'check': 'return_val', 'error': 'test 3 of has_equal_letters'},
                              {'type': 'func', 'func_name': 'has_equal_letters',
                               'index': 3, 'check': 'return_val', 'error': 'test 4 of has_equal_letters'},
                              {'type': 'func', 'func_name': 'has_equal_letters',
                               'index': 4, 'check': 'return_val', 'error': 'test 5 of has_equal_letters'}]},
                 'is_palindrome': {'rules': 'and', 'order': 2, 'points': 1,
                     'test': [{'type': 'func', 'func_name': 'is_palindrome',
                               'index': 0, 'check': 'return_val', 'error': 'test 1 of is_palindrome'},
                              {'type': 'func', 'func_name': 'is_palindrome',
                               'index': 1, 'check': 'return_val', 'error': 'test 2 of is_palindrome'},
                              {'type': 'func', 'func_name': 'is_palindrome',
                               'index': 2, 'check': 'return_val', 'error': 'test 3 of is_palindrome'},
                              {'type': 'func', 'func_name': 'is_palindrome',
                               'index': 3, 'check': 'return_val', 'error': 'test 4 of is_palindrome'},
                              {'type': 'func', 'func_name': 'is_palindrome',
                               'index': 4, 'check': 'return_val', 'error': 'test 5 of is_palindrome'}]},
                 'total_points_use_3_func': {'rules': 'and', 'order': 3, 'points': 1,
                     'test': [{'type': 'script', 'func_name': 'total_points',
                               'index': 0, 'check': 'script_pat', 'error': 'most_repeated_letters in total_points'},
                              {'type': 'script', 'func_name': 'total_points',
                               'index': 1, 'check': 'script_pat', 'error': 'has_equal_letters in total_points'},
                              {'type': 'script', 'func_name': 'total_points',
                               'index': 2, 'check': 'script_pat', 'error': 'is_palindrome in total_points'}]},
                 'total_points': {'rules': 'groupadd', 'order': 4, 'points': 3,
                     'groups': [(1, [{'type': 'func', 'func_name': 'total_points',
                                      'index': 0, 'check': 'return_val', 'error': 'test 1-1 of total_points'},
                                     {'type': 'func', 'func_name': 'total_points',
                                      'index': 1, 'check': 'return_val', 'error': 'test 1-2 of total_points'}]),
                                (1, [{'type': 'func', 'func_name': 'total_points',
                                      'index': 2, 'check': 'return_val', 'error': 'test 2-1 of total_points'},
                                     {'type': 'func', 'func_name': 'total_points',
                                      'index': 3, 'check': 'return_val', 'error': 'test 2-2 of total_points'}]),
                                (1, [{'type': 'func', 'func_name': 'total_points',
                                      'index': 4, 'check': 'return_val', 'error': 'test 3-1 of total_points'},
                                     {'type': 'func', 'func_name': 'total_points',
                                      'index': 5, 'check': 'return_val', 'error': 'test 3-2 of total_points'}])]},
                 'is_trick_round': {'rules': 'and', 'order': 5, 'points': 1,
                     'test': [{'type': 'func', 'func_name': 'is_trick_round',
                               'index': 0, 'check': 'return_val', 'error': 'test 1 of is_trick_round'},
                              {'type': 'func', 'func_name': 'is_trick_round',
                               'index': 1, 'check': 'return_val', 'error': 'test 2 of is_trick_round'},
                              {'type': 'func', 'func_name': 'is_trick_round',
                               'index': 2, 'check': 'return_val', 'error': 'test 3 of is_trick_round'},
                              {'type': 'func', 'func_name': 'is_trick_round',
                               'index': 3, 'check': 'return_val', 'error': 'test 4 of is_trick_round'},
                              {'type': 'func', 'func_name': 'is_trick_round',
                               'index': 4, 'check': 'return_val', 'error': 'test 5 of is_trick_round'}]},
                 'docstring': {'rules': 'and', 'order': 6, 'points': 1,
                     'test': [{'type': 'script', 'func_name': 'total_points',
                               'index': 4, 'check': 'script_pat', 'error': 'docstring in total_points'},
                              {'type': 'script', 'func_name': 'most_repeated_letters',
                               'index': 1, 'check': 'script_pat', 'error': 'docstring in most_repeated_letters'},
                              {'type': 'script', 'func_name': 'has_equal_letters',
                               'index': 1, 'check': 'script_pat', 'error': 'docstring in has_equal_letters'},
                              {'type': 'script', 'func_name': 'is_palindrome',
                               'index': 1, 'check': 'script_pat', 'error': 'docstring in is_palindrome'},
                              {'type': 'script', 'func_name': 'is_trick_round',
                               'index': 1, 'check': 'script_pat', 'error': 'docstring in is_trick_round'}]},
                 'atleast_2_for': {'rules': 'sum', 'order': 7, 'lowerBound': 2, 'points': 1,
                     'test': [{'type': 'script', 'func_name': 'total_points',
                               'index': 3, 'check': 'script_pat_count', 'error': 'for in total_points'},
                              {'type': 'script', 'func_name': 'most_repeated_letters',
                               'index': 0, 'check': 'script_pat_count', 'error': 'for in most_repeated_letters'},
                              {'type': 'script', 'func_name': 'has_equal_letters',
                               'index': 0, 'check': 'script_pat_count', 'error': 'for in has_equal_letters'},
                              {'type': 'script', 'func_name': 'is_palindrome',
                               'index': 0, 'check': 'script_pat_count', 'error': 'for in is_palindrome'},
                              {'type': 'script', 'func_name': 'is_trick_round',
                               'index': 0, 'check': 'script_pat_count', 'error': 'for in is_trick_round'}]}
                 }

# Master switch: enable the TEST_SCRIPT source-level checks.
SCRIPT_TEST = True
{ "repo_name": "shenghaozou/PythonGrader", "path": "P5/gradeSettings.py", "copies": "1", "size": "11063", "license": "apache-2.0", "hash": -3032532808754354000, "line_mean": 94.3706896552, "line_max": 178, "alpha_frac": 0.4384886559, "autogenerated": false, "ratio": 4.100444773906598, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5038933429806598, "avg_score": null, "num_lines": null }
"""Assignment.""" from . import HDLObject from .const import HDLIntegerConstant from .expr import HDLExpression from .signal import HDLSignal, HDLSignalSlice from .concat import HDLConcatenation from .stmt import HDLStatement from .port import HDLModulePort from .ifelse import HDLIfExp from .macro import HDLMacroValue class HDLLazyValue(HDLObject): """Lazy evaluated value.""" def __init__(self, fn, *args, **kwargs): """Initialize.""" self._args = kwargs.pop("fnargs", []) self._kwargs = kwargs.pop("fnkwargs", {}) self._fn = fn def evaluate(self, signals=None, symbols=None): """Evaluate.""" if signals is None: signals = {} if symbols is None: symbols = {} if not callable(self._fn): if self._fn not in symbols or not callable(symbols[self._fn]): raise RuntimeError( "unresolved lazy function: '{}'".format(self._fn) ) else: self._fn = symbols[self._fn] resolved_args = [] for arg in self._args: if isinstance(arg, str) and arg in signals: resolved_args.append(signals[arg]) elif isinstance(arg, HDLObject): resolved_args.append(arg) else: raise RuntimeError( "unresolved argument in lazy eval: '{}'".format(arg) ) resolved_kwargs = {} for name, kwarg in self._kwargs.items(): if isinstance(kwarg, str) and arg in signals: resolved_kwargs[name] = signals[kwarg] elif isinstance(arg, HDLObject): resolved_args[name] = kwarg else: raise RuntimeError( "unresolved argument in lazy eval: '{}'".format(kwarg) ) return self._fn(*resolved_args, **resolved_kwargs) class HDLAssignment(HDLStatement): """Signal assignment.""" def __init__(self, signal, value, assign_type="block", **kwargs): """Initialize.""" if isinstance(signal, HDLModulePort): if signal.direction in ("out", "inout"): signal = signal.signal else: raise ValueError("cannot assign to input port") if not isinstance(signal, (HDLSignal, HDLSignalSlice)): raise TypeError("only HDLSignal, HDLSignalSlice can be assigned") self.assign_type = assign_type self.signal = signal if isinstance(signal, HDLSignal): sig_type = signal.sig_type 
elif isinstance(signal, HDLSignalSlice): sig_type = signal.signal.sig_type if sig_type in ("comb", "const"): stmt_type = "par" elif sig_type in ("reg", "var"): stmt_type = "seq" elif sig_type == "other": stmt_type = "null" if isinstance( value, ( HDLIntegerConstant, HDLSignal, HDLExpression, HDLSignalSlice, HDLConcatenation, HDLIfExp, HDLMacroValue, HDLLazyValue, ), ): self.value = value elif isinstance(value, int): self.value = HDLIntegerConstant(value, **kwargs) else: raise TypeError( "only integer, HDLIntegerConstant, " "HDLSignal, HDLExpression, HDLConcatenation, " "HDLIfExp, HDLMacroValue " "allowed, got: {}".format(value.__class__.__name__) ) super().__init__(stmt_type=stmt_type) def get_assignment_type(self): """Get assignment type.""" if isinstance(self.signal, HDLSignal): sig_type = self.signal.sig_type elif isinstance(self.signal, HDLSignalSlice): sig_type = self.signal.signal.sig_type if sig_type in ("comb", "const"): return "parallel" else: return "series" def dumps(self): """Get representation.""" ret_str = self.signal.dumps(decl=False) if self.signal.sig_type in ("comb", "const"): ret_str += " = " else: ret_str += " <= " ret_str += self.value.dumps() + ";" return ret_str def is_legal(self): """Determine legality.""" # always return True for now. return True
{ "repo_name": "brunosmmm/hdltools", "path": "hdltools/abshdl/assign.py", "copies": "1", "size": "4511", "license": "mit", "hash": -8126923710740754000, "line_mean": 30.5454545455, "line_max": 77, "alpha_frac": 0.530924407, "autogenerated": false, "ratio": 4.325023969319271, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5355948376319271, "avg_score": null, "num_lines": null }
"""Assignments API Tests for Version 1.0. This is a testing template for the generated AssignmentsAPI Class. """ import unittest import requests import secrets from py3canvas.apis.assignments import AssignmentsAPI from py3canvas.apis.assignments import Turnitinsettings from py3canvas.apis.assignments import Assignmentoverride from py3canvas.apis.assignments import Externaltooltagattributes from py3canvas.apis.assignments import Assignment from py3canvas.apis.assignments import Needsgradingcount from py3canvas.apis.assignments import Rubriccriteria from py3canvas.apis.assignments import Assignmentdate from py3canvas.apis.assignments import Rubricrating from py3canvas.apis.assignments import Lockinfo class TestAssignmentsAPI(unittest.TestCase): """Tests for the AssignmentsAPI.""" def setUp(self): self.client = AssignmentsAPI(secrets.instance_address, secrets.access_token) def test_delete_assignment(self): """Integration test for the AssignmentsAPI.delete_assignment method.""" course_id = None # Change me!! id = None # Change me!! r = self.client.delete_assignment(id, course_id) def test_list_assignments(self): """Integration test for the AssignmentsAPI.list_assignments method.""" course_id = None # Change me!! r = self.client.list_assignments(course_id, assignment_ids=None, bucket=None, include=None, needs_grading_count_by_section=None, override_assignment_dates=None, search_term=None) def test_list_assignments_for_user(self): """Integration test for the AssignmentsAPI.list_assignments_for_user method.""" user_id = None # Change me!! course_id = None # Change me!! r = self.client.list_assignments_for_user(user_id, course_id) def test_get_single_assignment(self): """Integration test for the AssignmentsAPI.get_single_assignment method.""" course_id = None # Change me!! id = None # Change me!! 
r = self.client.get_single_assignment(id, course_id, all_dates=None, include=None, needs_grading_count_by_section=None, override_assignment_dates=None) def test_create_assignment(self): """Integration test for the AssignmentsAPI.create_assignment method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass def test_edit_assignment(self): """Integration test for the AssignmentsAPI.edit_assignment method.""" # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration. pass def test_list_assignment_overrides(self): """Integration test for the AssignmentsAPI.list_assignment_overrides method.""" course_id = None # Change me!! assignment_id = None # Change me!! r = self.client.list_assignment_overrides(course_id, assignment_id) def test_get_single_assignment_override(self): """Integration test for the AssignmentsAPI.get_single_assignment_override method.""" course_id = None # Change me!! assignment_id = None # Change me!! id = None # Change me!! r = self.client.get_single_assignment_override(id, course_id, assignment_id) def test_redirect_to_assignment_override_for_group(self): """Integration test for the AssignmentsAPI.redirect_to_assignment_override_for_group method.""" group_id = None # Change me!! assignment_id = None # Change me!! r = self.client.redirect_to_assignment_override_for_group(group_id, assignment_id) def test_redirect_to_assignment_override_for_section(self): """Integration test for the AssignmentsAPI.redirect_to_assignment_override_for_section method.""" course_section_id = None # Change me!! assignment_id = None # Change me!! 
r = self.client.redirect_to_assignment_override_for_section(assignment_id, course_section_id) def test_create_assignment_override(self): """Integration test for the AssignmentsAPI.create_assignment_override method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass def test_update_assignment_override(self): """Integration test for the AssignmentsAPI.update_assignment_override method.""" # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration. pass def test_delete_assignment_override(self): """Integration test for the AssignmentsAPI.delete_assignment_override method.""" course_id = None # Change me!! assignment_id = None # Change me!! id = None # Change me!! r = self.client.delete_assignment_override(id, course_id, assignment_id) def test_batch_retrieve_overrides_in_course(self): """Integration test for the AssignmentsAPI.batch_retrieve_overrides_in_course method.""" course_id = None # Change me!! assignment_overrides[id] = None # Change me!! assignment_overrides[assignment_id] = None # Change me!! r = self.client.batch_retrieve_overrides_in_course(course_id, assignment_overrides_id, assignment_overrides_assignment_id) def test_batch_create_overrides_in_course(self): """Integration test for the AssignmentsAPI.batch_create_overrides_in_course method.""" # This method utilises the POST request method and will make changes to the Canvas instance. This needs consideration. pass def test_batch_update_overrides_in_course(self): """Integration test for the AssignmentsAPI.batch_update_overrides_in_course method.""" # This method utilises the PUT request method and will make changes to the Canvas instance. This needs consideration. pass
{ "repo_name": "tylerclair/py3canvas", "path": "py3canvas/tests/assignments.py", "copies": "1", "size": "6048", "license": "mit", "hash": 1387610063421564400, "line_mean": 45.6220472441, "line_max": 186, "alpha_frac": 0.6937830688, "autogenerated": false, "ratio": 4.182572614107884, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.005907953849993135, "num_lines": 127 }
#Assignments are 3 length #NOT are 4 length #AND, OR, LSHIFT and RSHIFT are 5 length data = [] with open("Day_7_1.input") as f: for line in f: line = line.split(" ") data.append(line) store={} index = 0 #First evaluate the assignments and then need to do multiple passes to evaluate values while True: index = 0 loop = False for line in data: if len(line) == 3: if line[0].isdigit(): store[line[2].strip()] = int(line[0]) data.pop(index) loop = True break index+=1 if loop == False: break while len(data)>1: index = 0 loop = False #print len(data) for line in data: #NOT if len(line) == 4: if line[1] in store: store[line[3].strip()] = ~store[line[1]] data.pop(index) break index+=1 else: if line[0] in store and (line[1] == "LSHIFT" or line[1]=="RSHIFT"): if line[1] == "LSHIFT": store[line[4].strip()] = store[line[0]]<<int(line[2]) data.pop(index) break else : store[line[4].strip()] = store[line[0]]>>int(line[2]) data.pop(index) break elif line[0] in store and line[2] in store: if line[1] == "AND": store[line[4].strip()] = store[line[0]] & store[line[2]] data.pop(index) break else: store[line[4].strip()] = store[line[0]] | store[line[2]] data.pop(index) break elif (line[2] in store and line[0].isdigit()) or (line[0] in store and line[2].isdigit()): if line[1] == "OR": if line[0] in store: store[line[4].strip()] = store[line[0]] | int(line[2]) data.pop(index) break else: store[line[4].strip()] = store[line[2]] | int(line[0]) data.pop(index) break else: if line[0] in store: store[line[4].strip()] = store[line[0]] & int(line[2]) data.pop(index) break else: store[line[4].strip()] = store[line[2]] & int(line[0]) data.pop(index) break index+=1 for line in data: store[line[2].strip()] = store[line[0]] data.pop(index) for key,value in store.items(): print key,value
{ "repo_name": "ujjwalgulecha/AdventOfCode", "path": "2015/Day_07/Part_1.py", "copies": "1", "size": "2044", "license": "mit", "hash": 7142472214036676000, "line_mean": 23.0588235294, "line_max": 93, "alpha_frac": 0.5772994129, "autogenerated": false, "ratio": 2.6788990825688073, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8406839329956652, "avg_score": 0.06987183310243117, "num_lines": 85 }
#Assignments are 3 length #NOT are 4 length #AND, OR, LSHIFT and RSHIFT are 5 length data = [] with open("Day_7_2.input") as f: for line in f: line = line.split(" ") data.append(line) store={} index = 0 #First evaluate the assignments and then need to do multiple passes to evaluate values while True: index = 0 loop = False for line in data: if len(line) == 3: if line[0].isdigit(): store[line[2].strip()] = int(line[0]) data.pop(index) loop = True break index+=1 if loop == False: break while len(data)>1: index = 0 loop = False #print len(data) for line in data: #NOT if len(line) == 4: if line[1] in store: store[line[3].strip()] = ~store[line[1]] data.pop(index) break index+=1 else: if line[0] in store and (line[1] == "LSHIFT" or line[1]=="RSHIFT"): if line[1] == "LSHIFT": store[line[4].strip()] = store[line[0]]<<int(line[2]) data.pop(index) break else : store[line[4].strip()] = store[line[0]]>>int(line[2]) data.pop(index) break elif line[0] in store and line[2] in store: if line[1] == "AND": store[line[4].strip()] = store[line[0]] & store[line[2]] data.pop(index) break else: store[line[4].strip()] = store[line[0]] | store[line[2]] data.pop(index) break elif (line[2] in store and line[0].isdigit()) or (line[0] in store and line[2].isdigit()): if line[1] == "OR": if line[0] in store: store[line[4].strip()] = store[line[0]] | int(line[2]) data.pop(index) break else: store[line[4].strip()] = store[line[2]] | int(line[0]) data.pop(index) break else: if line[0] in store: store[line[4].strip()] = store[line[0]] & int(line[2]) data.pop(index) break else: store[line[4].strip()] = store[line[2]] & int(line[0]) data.pop(index) break index+=1 for line in data: store[line[2].strip()] = store[line[0]] data.pop(index) for key,value in store.items(): print key,value
{ "repo_name": "ujjwalgulecha/AdventOfCode", "path": "2015/Day_07/Part_2.py", "copies": "1", "size": "2044", "license": "mit", "hash": 3158296940581670400, "line_mean": 23.0588235294, "line_max": 93, "alpha_frac": 0.5772994129, "autogenerated": false, "ratio": 2.6788990825688073, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8406839329956652, "avg_score": 0.06987183310243117, "num_lines": 85 }
# Diagonal-sudoku solver (AIND project): constraint propagation
# (eliminate / only-choice / naked-twins) plus depth-first search.
# `assignments` records each single-value assignment for visualization.
assignments = []

def cross(A, B):
    "Cross product of elements in A and elements in B."
    return [s+t for s in A for t in B]

rows = 'ABCDEFGHI'
cols = '123456789'

## List of all the boxes
boxes = cross(rows, cols)
## Units for Horizontal Contraints
row_units = [cross(r, cols) for r in rows]
## Units for Vertical Contraints
column_units = [cross(rows, c) for c in cols]
## Units for Square Contraints
square_units = [cross(rs, cs) for rs in ('ABC','DEF','GHI') for cs in ('123','456','789')]
## Units for Diagonal Constraints
diag_units = [[s+t for s,t in zip(rows, cols)],[s+t for s,t in zip(rows, cols[::-1])]]
## List of all the units
unitlist = row_units + column_units + square_units + diag_units
## Dict for all the boxes and corresponding list of units
units = dict((s, [u for u in unitlist if s in u]) for s in boxes)
## Dict for all the peers for given box
peers = dict((s, set(sum(units[s],[]))-set([s])) for s in boxes)

def assign_value(values, box, value):
    """
    Please use this function to update your values dictionary!
    Assigns a value to a given box. If it updates the board record it.
    """
    values[box] = value
    if len(value) == 1:
        # Snapshot the whole board so the pygame visualizer can replay it.
        assignments.append(values.copy())
    return values

def naked_twins(values):
    """Eliminate values using the naked twins strategy.
    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}

    Returns:
        the values dictionary with the naked twins eliminated from peers.
    """
    # Find all instances of naked twins
    # Each qualifying pair appears twice, once per ordering: (u, v) and (v, u).
    possible_twins = []
    for unit in unitlist:
        possible_twins += [((u, v), unit) for u in unit for v in unit if u != v and len(values[u]) == 2 and values[u] == values[v]]

    # Group the units each twin pair was found in, keyed by the (ordered) pair.
    possible_naked_twins = {}
    for twins, unit in possible_twins:
        if twins not in possible_naked_twins.keys():
            possible_naked_twins[twins] = []
        possible_naked_twins[twins] += [unit]

    # NOTE(review): the condition below checks the SIZE OF THE WHOLE DICT,
    # not the current entry. Because every pair is recorded in both
    # orderings, the dict has >= 2 entries whenever any twin exists, so
    # the filter is effectively always true — presumably it was meant to
    # test len(possible_naked_twin_value); confirm intent before changing.
    naked_twins = {}
    for possible_naked_twin_key, possible_naked_twin_value in possible_naked_twins.items():
        if len(possible_naked_twins) > 1:
            naked_twins[possible_naked_twin_key] = possible_naked_twin_value

    # Eliminate the naked twins as possibilities for their peers in same unit
    for naked_twin_key, naked_twin_value in naked_twins.items():
        for unit in naked_twin_value:
            for box in unit:
                if box not in naked_twin_key:
                    for value in values[naked_twin_key[0]]:
                        values = assign_value(values, box, values[box].replace(value,''))

    return values

def grid_values(grid):
    """
    Convert grid into a dict of {square: char} with '123456789' for empties.
    Args:
        grid(string) - A grid in string form.
    Returns:
        A grid in dictionary form
            Keys: The boxes, e.g., 'A1'
            Values: The value in each box, e.g., '8'. If the box has no value, then the value will be '123456789'.
    """
    chars = []
    digits = '123456789'
    for ch in grid:
        if ch in digits:
            chars.append(ch)
        if ch == '.':
            chars.append(digits)
    assert len(chars) == 81, "Input grid must be a string of length 81 (9x9)"
    return dict(zip(boxes, chars))

def display(values):
    """
    Display the values as a 2-D grid.
    Args:
        values(dict): The sudoku in dictionary form
    """
    width = 1 + max(len(values[s]) for s in boxes)
    line = '+'.join(['-' * (width * 3)] * 3)
    for r in rows:
        print(''.join(values[r + c].center(width)+('|' if c in '36' else '')
                      for c in cols))
        if r in 'CF':
            print(line)
    # NOTE(review): bare `print` below is a no-op expression in Python 3
    # (it only references the builtin) — likely a Python 2 leftover.
    print

def eliminate(values):
    # Write a function that will take as an input, the sudoku in dictionary form,
    # run through all the boxes, applying the eliminate technique,
    # and return the resulting sudoku in dictionary form.
    #print(values)
    solved_values = [box for box in values.keys() if len(values[box]) == 1]
    for solved_val in solved_values:
        digit = values[solved_val]
        # Remove the solved digit from every peer's candidates.
        for peer in peers[solved_val]:
            values = assign_value(values, peer, values[peer].replace(digit,''))
    return values

def only_choice(values):
    """Finalize all values that are the only choice for a unit.

    Go through all the units, and whenever there is a unit with a value
    that only fits in one box, assign the value to this box.

    Input: Sudoku in dictionary form.
    Output: Resulting Sudoku in dictionary form after filling in only choices.
    """
    for unit in unitlist:
        for digit in '123456789':
            dplaces = [box for box in unit if digit in values[box]]
            if len(dplaces) == 1:
                values = assign_value(values, dplaces[0], digit)
    return values

def reduce_puzzle(values):
    # Apply the three strategies until the board stops changing; False on
    # contradiction (a box left with zero candidates).
    stalled = False
    while not stalled:
        # Check how many boxes have a determined value
        solved_values_before = len([box for box in values.keys() if len(values[box]) == 1])
        values = eliminate(values)
        values = naked_twins(values)
        values = only_choice(values)
        solved_values_after = len([box for box in values.keys() if len(values[box]) == 1])
        stalled = solved_values_before == solved_values_after
        if len([box for box in values.keys() if len(values[box]) == 0]):
            return False
    return values

def search(values):
    "Using depth-first search and propagation, create a search tree and solve the sudoku."
    # First, reduce the puzzle using the previous function
    values = reduce_puzzle(values)
    if values is False:
        return False
    if all(len(values[s]) == 1 for s in boxes):
        return values
    # Choose one of the unfilled squares with the fewest possibilities
    n,s = min((len(values[s]), s) for s in boxes if len(values[s]) > 1)
    # Now use recurrence to solve each one of the resulting sudokus, and
    for value in values[s]:
        new_sudoku = values.copy()
        new_sudoku = assign_value(new_sudoku, s, value)
        attempt = search(new_sudoku)
        if attempt:
            return attempt

def solve(grid):
    """
    Find the solution to a Sudoku grid.
    Args:
        grid(string): a string representing a sudoku grid.
            Example: '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    Returns:
        The dictionary representation of the final sudoku grid. False if no solution exists.
    """
    return search(grid_values(grid))

if __name__ == '__main__':
    diag_sudoku_grid = '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    display(solve(diag_sudoku_grid))

    try:
        from visualize import visualize_assignments
        visualize_assignments(assignments)

    except SystemExit:
        pass
    except:
        print('We could not visualize your board due to a pygame issue. Not a problem! It is not a requirement.')
{ "repo_name": "danielkong/AI_Project", "path": "AIND_Sudoku/solution.py", "copies": "1", "size": "6975", "license": "mit", "hash": -7594114815023191000, "line_mean": 34.2272727273, "line_max": 131, "alpha_frac": 0.6143369176, "autogenerated": false, "ratio": 3.480538922155689, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.956382171641337, "avg_score": 0.006210824668463781, "num_lines": 198 }
# Diagonal-sudoku solver (AIND project): constraint propagation
# (eliminate / naked-twins / only-choice) plus depth-first search.
# `assignments` records each single-value assignment for visualization.
assignments = []
rows = 'ABCDEFGHI'
cols = '123456789'


def assign_value(values, box, value):
    """
    Please use this function to update your values dictionary!
    Assigns a value to a given box. If it updates the board record it.
    """
    # Don't waste memory appending actions that don't actually change any values
    if values[box] == value:
        return values

    values[box] = value
    if len(value) == 1:
        assignments.append(values.copy())
    return values


def naked_twins(values):
    """Eliminate values using the naked twins strategy.
    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}

    Returns:
        the values dictionary with the naked twins eliminated from peers.
    """
    for unit in unitlist:
        # Find the values of the twins in a given unit
        # (a 2-candidate value occurring in exactly two boxes of the unit).
        unit_values = [values[box] for box in unit]
        naked_values = [n for n in unit_values if unit_values.count(
            n) == 2 and len(n) == 2]

        # Eliminate the values found in peers
        for val in naked_values:
            for ch in val:
                for box in unit:
                    if values[box] != val:
                        values = assign_value(
                            values, box, values[box].replace(ch, ''))

    # return the updated values
    return values


def cross(a, b):
    "Cross product of elements in a and elements in b."
    # utils.py
    return [s + t for s in a for t in b]


boxes = cross(rows, cols)

row_units = [cross(r, cols) for r in rows]
column_units = [cross(rows, c) for c in cols]
square_units = [cross(rs, cs) for rs in ('ABC', 'DEF', 'GHI')
                for cs in ('123', '456', '789')]

# Adding defenition for diagonal units
diagonal_units = [[rows[int(i) - 1] + i for i in cols],
                  [rows[len(cols) - int(i)] + i for i in cols]]

# Including the diagonal units in the unit list
unitlist = row_units + column_units + square_units + diagonal_units

units = dict((s, [u for u in unitlist if s in u]) for s in boxes)
peers = dict((s, set(sum(units[s], [])) - set([s])) for s in boxes)


def grid_values(grid):
    """
    Convert grid into a dict of {square: char} with '123456789' for empties.
    Input: A grid in string form.
    Output: A grid in dictionary form
            Keys: The boxes, e.g., 'A1'
            Values: The value in each box, e.g., '8'. If the box has no value,
            then the value will be '123456789'.
    """
    # utils.py
    chars = []
    digits = '123456789'
    for c in grid:
        if c in digits:
            chars.append(c)
        if c == '.':
            chars.append(digits)
    assert len(chars) == 81
    return dict(zip(boxes, chars))


def display(values):
    """
    Display the values as a 2-D grid.
    Input: The sudoku in dictionary form
    Output: None
    """
    # utils.py
    width = 1 + max(len(values[s]) for s in boxes)
    line = '+'.join(['-' * (width * 3)] * 3)
    for r in rows:
        print(''.join(values[r + c].center(width) + ('|' if c in '36' else '')
                      for c in cols))
        if r in 'CF':
            print(line)
    return


def eliminate(values):
    """
    Go through all the boxes, and whenever there is a box with a value,
    eliminate this value from the values of all its peers.
    Input: A sudoku in dictionary form.
    Output: The resulting sudoku in dictionary form.
    """
    # utils.py
    # NOTE(review): writes values[peer] directly instead of assign_value,
    # so these eliminations are not recorded in `assignments` (and thus
    # not shown by the visualizer) — confirm this is intentional.
    solved_values = [box for box in values.keys() if len(values[box]) == 1]
    for box in solved_values:
        digit = values[box]
        for peer in peers[box]:
            values[peer] = values[peer].replace(digit, '')
    return values


def only_choice(values):
    """
    Go through all the units, and whenever there is a unit with a value
    that only fits in one box, assign the value to this box.
    Input: A sudoku in dictionary form.
    Output: The resulting sudoku in dictionary form.
    """
    # utils.py
    # NOTE(review): also bypasses assign_value (see eliminate above).
    for unit in unitlist:
        for digit in '123456789':
            dplaces = [box for box in unit if digit in values[box]]
            if len(dplaces) == 1:
                values[dplaces[0]] = digit
    return values


def reduce_puzzle(values):
    """
    Iterate eliminate() and only_choice(). If at some point, there is a box
    with no available values, return False.
    If the sudoku is solved, return the sudoku.
    If after an iteration of both functions, the sudoku remains the same,
    return the sudoku.
    Input: A sudoku in dictionary form.
    Output: The resulting sudoku in dictionary form.
    """
    # utils.py
    solved_values = [box for box in values.keys() if len(values[box]) == 1]
    stalled = False
    while not stalled:
        solved_values_before = len(
            [box for box in values.keys() if len(values[box]) == 1])
        values = eliminate(values)
        # Use the Naked Twins implementation as part of the iteration
        values = naked_twins(values)
        values = only_choice(values)
        solved_values_after = len(
            [box for box in values.keys() if len(values[box]) == 1])
        stalled = solved_values_before == solved_values_after
        # Contradiction: a box stripped of all candidates.
        if len([box for box in values.keys() if len(values[box]) == 0]):
            return False
    return values


def search(values):
    "Using depth-first search and propagation, try all possible values."
    # utils.py
    # First, reduce the puzzle using the previous function
    values = reduce_puzzle(values)
    if values is False:
        return False  # Failed earlier
    if all(len(values[s]) == 1 for s in boxes):
        return values  # Solved!
    # Choose one of the unfilled squares with the fewest possibilities
    n, s = min((len(values[s]), s) for s in boxes if len(values[s]) > 1)
    # Now use recurrence to solve each one of the resulting sudokus, and
    for value in values[s]:
        new_sudoku = values.copy()
        new_sudoku[s] = value
        attempt = search(new_sudoku)
        if attempt:
            return attempt


def solve(grid):
    """
    Find the solution to a Sudoku grid.
    Args:
        grid(string): a string representing a sudoku grid.
            Example: '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    Returns:
        The dictionary representation of the final sudoku grid. False if no solution exists.
    """
    values = grid_values(grid)
    values = search(values)
    return values


if __name__ == '__main__':
    diag_sudoku_grid = '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    display(solve(diag_sudoku_grid))

    try:
        from visualize import visualize_assignments
        visualize_assignments(assignments)

    except SystemExit:
        pass
    except:
        print('We could not visualize your board due to a pygame issue. Not a problem! It is not a requirement.')
{ "repo_name": "broundy/udacity", "path": "nanodegrees/artificial_intelligence/project_1/solution.py", "copies": "1", "size": "6869", "license": "unlicense", "hash": -782947461972684200, "line_mean": 32.5073170732, "line_max": 128, "alpha_frac": 0.5952831562, "autogenerated": false, "ratio": 3.6576144834930777, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4752897639693078, "avg_score": null, "num_lines": null }
# Diagonal-sudoku solver (AIND project): constraint propagation
# (eliminate / only-choice / naked-twins) plus depth-first search.
# `assignments` records each single-value assignment for visualization.
# NOTE: boxes/unitlist/units/peers are defined at the bottom of the
# module, after the functions that reference them (resolved at call time).
assignments = []
rows = 'ABCDEFGHI'
cols = '123456789'

def assign_value(values, box, value):
    """
    Please use this function to update your values dictionary!
    Assigns a value to a given box. If it updates the board record it.
    """
    # Don't waste memory appending actions that don't actually change any values
    if values[box] == value:
        return values

    values[box] = value
    if len(value) == 1:
        assignments.append(values.copy())
    return values

def naked_twins(values):
    """Eliminate values using the naked twins strategy.
    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}

    Returns:
        the values dictionary with the naked twins eliminated from peers.
    """
    # Find all instances of naked twins
    # (each pair is appended in both orderings, [u, v] and [v, u]).
    twins_list = []
    for box in boxes:
        if len(values[box]) == 2:
            for peer in peers[box]:
                if values[peer] == values[box]:
                    twins_list.append([box,peer])

    # Eliminate the naked twins as possibilities for their peers
    if twins_list:
        for twins in twins_list:
            # intersect list of twins' peers for common units
            twins_peers = set(peers[twins[0]]).intersection(set(peers[twins[1]]))
            for peer in twins_peers:
                for v in values[twins[0]]:
                    values = assign_value(values, peer, values[peer].replace(v,''))
    return values

def cross(A, B):
    "Cross product of elements in A and elements in B."
    return [s+t for s in A for t in B]

def diag(A, B):
    "Diagonals of A elements with elements in B."
    return [A[r]+B[c] for r in range(len(A)) for c in range(len(B)) if r == c]

def grid_values(grid):
    """
    Convert grid into a dict of {square: char} with '123456789' for empties.
    Args:
        grid(string) - A grid in string form.
    Returns:
        A grid in dictionary form
            Keys: The boxes, e.g., 'A1'
            Values: The value in each box, e.g., '8'. If the box has no value, then the value will be '123456789'.
    """
    return dict((boxes[i], grid[i] if (grid[i] != '.') else '123456789') for i in range(len(boxes)))

def display(values):
    """
    Display the values as a 2-D grid.
    Args:
        values(dict): The sudoku in dictionary form
    """
    width = 1+max(len(values[s]) for s in boxes)
    line = '+'.join(['-'*(width*3)]*3)
    for r in rows:
        print(''.join(values[r+c].center(width)+('|' if c in '36' else '') for c in cols))
        if r in 'CF': print(line)
    return

def eliminate(values):
    # Remove each solved box's digit from all of its peers' candidates.
    for box,value in values.items():
        if len(value) == 1:
            for peer in peers[box]:
                values = assign_value(values, peer, values[peer].replace(value,''))
    return values

def only_choice(values):
    # If a candidate of a box appears nowhere else in one of its units,
    # it is the only choice for that box: assign it.
    for box,v in values.items():
        if len(v) > 1:
            for unit in units[box]:
                # pval: all candidates present in the rest of this unit.
                pval = str().join(values[key] for key in unit if key != box)
                d = [val for val in v if val not in pval]
                if len(d) == 1:
                    values = assign_value(values, box, d[0])
    return values

def reduce_puzzle(values):
    # Apply the three strategies until the board stops changing; False on
    # contradiction (a box left with zero candidates).
    stalled = False
    while not stalled:
        # Check how many boxes have a determined value
        solved_values_before = len([box for box in values.keys() if len(values[box]) == 1])
        # Use the Eliminate Strategy
        values = eliminate(values)
        # Use the Only Choice Strategy
        values = only_choice(values)
        # Use the Naked Twins Strategy
        values = naked_twins(values)
        # Check how many boxes have a determined value, to compare
        solved_values_after = len([box for box in values.keys() if len(values[box]) == 1])
        # If no new values were added, stop the loop.
        stalled = solved_values_before == solved_values_after
        # Sanity check, return False if there is a box with zero available values:
        if len([box for box in values.keys() if len(values[box]) == 0]):
            return False
    return values

def search(values):
    "Using depth-first search and propagation, try all possible values."
    # First, reduce the puzzle using the previous function
    values = reduce_puzzle(values)
    if not values:
        return False
    # Return solution if all box have unique value
    if all(len(v) == 1 for v in values.values()):
        return values
    # Choose one of the unfilled squares with the fewest possibilities
    _,box = min((len(v),k) for k,v in values.items() if len(v) > 1)
    # Now use recursion to solve each one of the resulting sudokus, and if one returns a value (not False), return that answer!
    for val in values[box]:
        new_values = values.copy()
        new_values[box] = val
        res = search(new_values)
        if res:
            return res

def solve(grid):
    """
    Find the solution to a Sudoku grid.
    Args:
        grid(string): a string representing a sudoku grid.
            Example: '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    Returns:
        The dictionary representation of the final sudoku grid. False if no solution exists.
    """
    return search(grid_values(grid))

# Board topology (built after the helper functions they depend on).
boxes = cross(rows, cols)

row_units = [cross(r, cols) for r in rows]
column_units = [cross(rows, c) for c in cols]
square_units = [cross(rs, cs) for rs in ('ABC','DEF','GHI') for cs in ('123','456','789')]
diag_units = [diag(rows, cols)] + [diag(rows, cols[::-1])]
unitlist = row_units + column_units + square_units + diag_units

units = dict((s, [u for u in unitlist if s in u]) for s in boxes)
peers = dict((s, set(sum(units[s],[]))-set([s])) for s in boxes)

if __name__ == '__main__':
    diag_sudoku_grid = '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    display(solve(diag_sudoku_grid))

    try:
        from visualize import visualize_assignments
        visualize_assignments(assignments)

    except SystemExit:
        pass
    except:
        print('We could not visualize your board due to a pygame issue. Not a problem! It is not a requirement.')
{ "repo_name": "edno/udacity-sandbox", "path": "ud889/AIND_Sudoku/solution.py", "copies": "1", "size": "6141", "license": "unlicense", "hash": 4764239822282066000, "line_mean": 35.5535714286, "line_max": 127, "alpha_frac": 0.5919231396, "autogenerated": false, "ratio": 3.5662020905923346, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4658125230192335, "avg_score": null, "num_lines": null }
"""Assignment URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf import settings from django.conf.urls import url, patterns from django.contrib import admin from django.conf.urls import include, url from rest_framework import routers, serializers, viewsets from AlbumCreator.models import Photo # Serializers define the API representation. class PhotoSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Photo fields = ('url_name', 'owner', 'like_total', 'stored_date') class PhotoViewSet(viewsets.ModelViewSet): queryset = Photo.objects.all() serializer_class = PhotoSerializer # Routers provide an easy way of automatically determining the URL conf. router = routers.DefaultRouter() router.register(r'photos', PhotoViewSet) urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^api', include(router.urls)), url(r'^api-auth/', include( 'rest_framework.urls', namespace='rest_framework')), url(r'', include('AlbumCreator.urls', namespace="AlbumCreator")), ]
{ "repo_name": "onurhunce/AutoAlbumCreator", "path": "Assignment/Assignment/urls.py", "copies": "1", "size": "1635", "license": "bsd-2-clause", "hash": -4794932525894363000, "line_mean": 33.7872340426, "line_max": 79, "alpha_frac": 0.719266055, "autogenerated": false, "ratio": 3.8111888111888113, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0019745325596389426, "num_lines": 47 }
"""Assign one or more reviewers to one or more Gerrit CL""" import os import sys from libpycr.exceptions import PyCRError from libpycr.gerrit.changes import fetch_change_list_or_fail from libpycr.gerrit.client import Gerrit from libpycr.meta import GitClBuiltin from libpycr.utils.output import Formatter, NEW_LINE, Token from libpycr.utils.system import fail, warn class Assign(GitClBuiltin): """Implement the ASSIGN command""" @property def description(self): return 'add/delete reviewer to/from change(s)' @staticmethod def display_help(): """Display the help for this command and exit We have to forge our own help output because we do not use the argparse module to parse the command-line arguments. """ buf = [('usage: %s assign [-h] CL [CL ...] ' '[+/-REVIEWER [+/-REVIEWER ...]]')] buf.append('') buf.append('Add or delete reviewer(s) to one or more changes') buf.append('') buf.append('positional arguments:') buf.append((' CL ' 'Gerrit Code Review CL / CL range / Change-Id')) buf.append(' +REVIEWER add REVIEWER to the change') buf.append(' -REVIEWER delete REVIEWER from the change') buf.append('') buf.append('optional arguments:') buf.append(' -h, --help show this help message and exit') print os.linesep.join(buf) % os.path.basename(sys.argv[0]) sys.exit() @staticmethod def parse_command_line(arguments): """Parse the SHOW command command-line arguments Returns a tuple containing three lists: - the list of ChangeInfo - the list of reviewers to add - the list of reviewers to delete :param arguments: a list of command-line arguments to parse :type arguments: list[str] :rtype: tuple[ChangeInfo, list[str], list[str]] """ changes, to_add, to_del = [], [], [] for argument in arguments: if argument in ('-h', '--help'): # Manually handle the --help flag Assign.display_help() if argument[0] == '+': to_add.append(argument[1:]) elif argument[0] == '-': to_del.append(argument[1:]) else: changes.append(argument) if not to_add and not to_del: fail('please specify reviewer(s) to add 
or delete') return fetch_change_list_or_fail(changes), to_add, to_del @staticmethod def tokenize(idx, change, added, deleted): """Token generator for the output Yields a stream of tokens: tuple of (Token, string). :param idx: index of the change in the list of changes to fetch :type idx: int :param change: the ChangeInfo corresponding to the change :type change: ChangeInfo :param added: the list of reviewers added :type added: list[ReviewerInfo] :param deleted: the list of reviewers deleted :type deleted: list[ReviewerInfo] :yield: tuple[Token, str] """ if idx: yield NEW_LINE for token in change.tokenize(): yield token yield NEW_LINE yield NEW_LINE if not added and not deleted: yield Token.Text, ( '# nothing to do (reviewers list already up-to-date)') return yield Token.Text, '# Reviewers updated:' prefix = (Token.Text, '# ') for reviewer in added: yield NEW_LINE yield prefix yield Token.Review.OK, '+' yield Token.Whitespace, ' ' for token in reviewer.tokenize(): yield token for reviewer in deleted: yield NEW_LINE yield prefix yield Token.Review.KO, '-' yield Token.Whitespace, ' ' for token in reviewer.tokenize(): yield token def run(self, arguments, *args, **kwargs): changes, to_add, to_del = self.parse_command_line(arguments) assert changes, 'unexpected empty list' for idx, change in enumerate(changes): added = [] deleted = [] # Add reviewers for account_id in to_add: try: reviewers = Gerrit.add_reviewer(change.uuid, account_id) if reviewers: added.extend(reviewers) except PyCRError as why: warn('{}: cannot assign reviewer {}'.format( change.change_id[:9], account_id), why) # Delete reviewers for account_id in to_del: try: review = Gerrit.delete_reviewer(change.uuid, account_id) if review: deleted.append(review.reviewer) except PyCRError as why: warn('{}: cannot delete reviewer {}'.format( change.change_id[:9], account_id), why) print Formatter.format(self.tokenize(idx, change, added, deleted))
{ "repo_name": "JcDelay/pycr", "path": "libpycr/builtin/changes/assign.py", "copies": "1", "size": "5261", "license": "apache-2.0", "hash": -3019482366854994000, "line_mean": 31.0792682927, "line_max": 79, "alpha_frac": 0.5533168599, "autogenerated": false, "ratio": 4.435919055649241, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 164 }
"""Assign Params to Attributes by Joel Hedlund <joel.hedlund at gmail.com>. Changed:Fabio Zadrozny (binded to Ctrl+1 too) """ __version__ = "1.0.1" __copyright__ = '''Available under the same conditions as PyDev. See PyDev license for details. http://pydev.sourceforge.net ''' import re from org.eclipse.jface.dialogs import MessageDialog #@UnresolvedImport from org.python.pydev.core.docutils import PySelection #@UnresolvedImport from org.python.pydev.core.docutils import ParsingUtils #@UnresolvedImport True, False = 1, 0 #@ReservedAssignment #======================================================================================================================= # ScriptUnapplicableError #======================================================================================================================= class ScriptUnapplicableError(Exception): """Raised when the script is unapplicable to the current line.""" def __init__(self, msg): self.msg = msg def __str__(self): return self.msg #======================================================================================================================= # AssignToAttribsOfSelf #======================================================================================================================= class AssignToAttribsOfSelf: """Assign method parameter values to attributes with same name. Pydev script for generating code that assigns the values of method parameters to attributes of self with the same name. This script must be executed at the method def line, which must contain both the def keyword and the opening paranthesis of the parameter list. Otherwise the script will not make any changes to your code. 
Ex: def moo(self, cow, sheep=1, *pargs, **kwargs): '''Docstring for method.''' Executing this script at the method def line will generate four lines of code while preserving the docstring, like so: def moo(self, cow, sheep=1, *pargs, **kwargs): '''Docstring for method.''' self.cow = cow self.sheep = sheep self.pargs = pargs self.kwargs = kwargs """ def __init__(self, editor=None): self.editor = editor def isScriptApplicable(self, ps, showError=True): '''Raise ScriptUnapplicableError if the script is unapplicable. @param ps: The current ps as a PySelection. ''' _rDef = re.compile(r'^\s+def\s') try: sCurrentLine = ps.getCursorLineContents() if not _rDef.match(sCurrentLine): msg = "The current line is not the first line of a method def statement." raise ScriptUnapplicableError(msg) oParamInfo = ps.getInsideParentesisToks(True) if not oParamInfo: msg = "The parameter list does not start on the first line of the method def statement." raise ScriptUnapplicableError(msg) lsParams = list(oParamInfo.o1) if not lsParams or lsParams[0] != 'self': msg = "The parameter list does not start with self." raise ScriptUnapplicableError(msg) # Workaround for bug in PySelection.getInsideParentesisToks() # in pydev < 1.0.6. In earlier versions, this can happen # with legal def lines such as "def moo(self, ):" if '' in lsParams: lsParams.remove('') if not len(lsParams) > 1: msg = "The method has no parameters other than self." raise ScriptUnapplicableError(msg) return True except ScriptUnapplicableError, e: if showError: sTitle = "Script Unapplicable" sHeader = "Script: Assign Method Parameters to Attributes of self" sBody = "The script cannot be run due to the following error:" sDialogText = ps.getEndLineDelim().join([sHeader, '', sBody, str(e)]) oShell = self.editor.getSite().getShell() MessageDialog.openInformation(oShell, sTitle, sDialogText) return False def _assignmentLines(self, endLineDelimiter, params, indent): '''Assemble the python code lines for the assignments. 
@param params: The method parameters as a list of str, must start with 'self'. @param indent: The indentation of the assignment lines as a str. ''' sTempl = indent + "self.%(name)s = %(name)s" ls = [sTempl % {'name':s.split('*')[-1]} for s in params[1:]] return endLineDelimiter.join(ls) def run(self): #gotten here (and not in the class resolution as before) because we want it to be resolved #when we execute it, and not when setting it ps = self.editor.createPySelection() oDocument = ps.getDoc() if not self.isScriptApplicable(ps): return None oParamInfo = ps.getInsideParentesisToks(True) lsParams = list(oParamInfo.o1) # Determine insert point: iClosingParOffset = oParamInfo.o2 iClosingParLine = ps.getLineOfOffset(iClosingParOffset) iInsertAfterLine = iClosingParLine currentIndent = ps.getIndentationFromLine() sIndent = currentIndent + self.editor.getIndentPrefs().getIndentationString() parsingUtils = ParsingUtils.create(oDocument) # Is there a docstring? In that case we need to skip past it. sDocstrFirstLine = ps.getLine(iClosingParLine + 1) sDocstrStart = sDocstrFirstLine.strip()[:2] if sDocstrStart and (sDocstrStart[0] in ['"', "'"] or sDocstrStart in ['r"', "r'"]): iDocstrLine = iClosingParLine + 1 iDocstrLineOffset = ps.getLineOffset(iDocstrLine) li = [sDocstrFirstLine.find(s) for s in ['"', "'"]] iDocstrStartCol = min([i for i in li if i >= 0]) iDocstrStart = iDocstrLineOffset + iDocstrStartCol iDocstrEnd = parsingUtils.eatLiterals(None, iDocstrStart) iInsertAfterLine = ps.getLineOfOffset(iDocstrEnd) sIndent = PySelection.getIndentationFromLine(sDocstrFirstLine) # Workaround for bug in PySelection.addLine() in # pydev < v1.0.6. Inserting at the last line in the file # would raise an exception if the line wasn't newline # terminated. 
iDocLength = oDocument.getLength() iLastLine = ps.getLineOfOffset(iDocLength) sLastChar = str(parsingUtils.charAt(iDocLength - 1)) endLineDelimiter = ps.getEndLineDelim() if iInsertAfterLine == iLastLine and not endLineDelimiter.endswith(sLastChar): oDocument.replace(iDocLength, 0, endLineDelimiter) line = ps.getLine(iInsertAfterLine + 1) if line.strip() == 'pass': ps.deleteLine(iInsertAfterLine + 1) # Assemble assignment lines and insert them into the document: sAssignments = self._assignmentLines(endLineDelimiter, lsParams, sIndent) ps.addLine(sAssignments, iInsertAfterLine) # Leave cursor at the last char of the new lines. iNewOffset = ps.getLineOffset(iInsertAfterLine + 1) + len(sAssignments) self.editor.setSelection(iNewOffset, 0)
{ "repo_name": "smkr/pyclipse", "path": "plugins/org.python.pydev.jython/jysrc/assign_params_to_attributes_action.py", "copies": "1", "size": "7638", "license": "epl-1.0", "hash": -6436391000690919000, "line_mean": 40.5108695652, "line_max": 120, "alpha_frac": 0.5754124116, "autogenerated": false, "ratio": 4.327478753541077, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.025639899596847378, "num_lines": 184 }
"""Assign Params to Attributes by Joel Hedlund <joel.hedlund at gmail.com>. PyDev script for generating python code that assigns method parameter values to attributes of self with the same name. Activates with 'a' by default. Edit global constants ACTIVATION_STRING and WAIT_FOR_ENTER if this does not suit your needs. See docs on the class AssignToAttribsOfSelf for more details. Contact the author for bug reports/feature requests. Changed:Fabio Zadrozny (binded to Ctrl+1 too) """ __version__ = "1.0.1" __copyright__ = """Available under the same conditions as PyDev. See PyDev license for details. http://pydev.sourceforge.net """ # Change this if the default does not suit your needs ACTIVATION_STRING = 'a' WAIT_FOR_ENTER = False # For earlier Python versions True, False = 1,0 # Set to True to force Jython script interpreter restart on save events. # Useful for Jython PyDev script development, not useful otherwise. DEBUG = False # This is a magic trick that tells the PyDev Extensions editor about the # namespace provided for pydev scripts: if False: from org.python.pydev.editor import PyEdit #@UnresolvedImport cmd = 'command string' editor = PyEdit assert cmd is not None assert editor is not None if DEBUG and cmd == 'onSave': from org.python.pydev.jython import JythonPlugin #@UnresolvedImport editor.pyEditScripting.interpreter = JythonPlugin.newPythonInterpreter() from org.eclipse.jface.action import Action #@UnresolvedImport #======================================================================================================================= # AssignToAttribsOfSelfAction #======================================================================================================================= class AssignToAttribsOfSelfAction(Action): def __init__(self, assign_to_attribs_helper): Action.__init__(self) self.assign_to_attribs_helper = assign_to_attribs_helper def run(self): self.assign_to_attribs_helper.run() 
#======================================================================================================================= # Actually bind the actions #======================================================================================================================= if cmd == 'onCreateActions' or (DEBUG and cmd == 'onSave'): from org.python.pydev.editor.correctionassist import PythonCorrectionProcessor #@UnresolvedImport import assign_params_to_attributes_action as helper import assign_params_to_attributes_assist #---------------------------------------------------------------------------------------------- Bind it to Ctrl+2, a sDescription = 'Assign method params to attribs of self' assign_to_attribs_helper = helper.AssignToAttribsOfSelf(editor) editor.addOfflineActionListener( ACTIVATION_STRING, AssignToAttribsOfSelfAction(assign_to_attribs_helper), sDescription, WAIT_FOR_ENTER) #------------------------------------------------------------------------------------------------- Bind it to Ctrl+1 ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST = 'ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST' if not PythonCorrectionProcessor.hasAdditionalAssist(ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST): assist = assign_params_to_attributes_assist.AssistAssignParamsToAttributes() PythonCorrectionProcessor.addAdditionalAssist(ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST, assist)
{ "repo_name": "smkr/pyclipse", "path": "plugins/org.python.pydev.jython/jysrc/pyedit_assign_params_to_attributes.py", "copies": "1", "size": "3468", "license": "epl-1.0", "hash": -4582785159950173700, "line_mean": 40.7831325301, "line_max": 120, "alpha_frac": 0.6023644752, "autogenerated": false, "ratio": 4.48062015503876, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.028112299698510302, "num_lines": 83 }
""" Assign pin directions to all tile pins. Tile pins are defined by one of two methods: - Pins that are part of a direct connection (e.g. edge_with_mux) are assigned based on the direction relationship between the two tiles, e.g. facing each other. - Pins that connect to a routing track face a routing track. Tile pins may end up with multiple edges if the routing tracks are formed differently throughout the grid. No connection database modifications are made in prjxray_assign_tile_pin_direction. """ import argparse from collections import namedtuple import prjxray.db import prjxray.tile import simplejson as json from lib.rr_graph import tracks from lib.connection_database import ( NodeClassification, yield_logical_wire_info_from_node, get_track_model, node_to_site_pins, get_pin_name_of_wire ) from prjxray_constant_site_pins import yield_ties_to_wire from lib import progressbar_utils import datetime from prjxray_db_cache import DatabaseCache now = datetime.datetime.now DirectConnection = namedtuple( 'DirectConnection', 'from_pin to_pin switch_name x_offset y_offset' ) def handle_direction_connections(conn, direct_connections, edge_assignments): # Edges with mux should have one source tile and one destination_tile. # The pin from the source_tile should face the destination_tile. # # It is expected that all edges_with_mux will lies in a line (e.g. X only or # Y only). c = conn.cursor() for src_wire_pkey, dest_wire_pkey, pip_in_tile_pkey, switch_pkey in \ progressbar_utils.progressbar( c.execute(""" SELECT src_wire_pkey, dest_wire_pkey, pip_in_tile_pkey, switch_pkey FROM edge_with_mux;""" )): c2 = conn.cursor() # Get the node that is attached to the source. c2.execute( """ SELECT node_pkey FROM wire WHERE pkey = ?""", (src_wire_pkey, ) ) (src_node_pkey, ) = c2.fetchone() # Find the wire connected to the source. 
src_wire = list(node_to_site_pins(conn, src_node_pkey)) assert len(src_wire) == 1 source_wire_pkey, src_tile_pkey, src_wire_in_tile_pkey = src_wire[0] c2.execute( """ SELECT tile_type_pkey, grid_x, grid_y FROM tile WHERE pkey = ?""", (src_tile_pkey, ) ) src_tile_type_pkey, source_loc_grid_x, source_loc_grid_y = c2.fetchone( ) c2.execute( """ SELECT name FROM tile_type WHERE pkey = ?""", (src_tile_type_pkey, ) ) (source_tile_type, ) = c2.fetchone() source_wire = get_pin_name_of_wire(conn, source_wire_pkey) # Get the node that is attached to the sink. c2.execute( """ SELECT node_pkey FROM wire WHERE pkey = ?""", (dest_wire_pkey, ) ) (dest_node_pkey, ) = c2.fetchone() # Find the wire connected to the sink. dest_wire = list(node_to_site_pins(conn, dest_node_pkey)) assert len(dest_wire) == 1 destination_wire_pkey, dest_tile_pkey, dest_wire_in_tile_pkey = dest_wire[ 0] c2.execute( """ SELECT tile_type_pkey, grid_x, grid_y FROM tile WHERE pkey = ?;""", (dest_tile_pkey, ) ) dest_tile_type_pkey, destination_loc_grid_x, destination_loc_grid_y = c2.fetchone( ) c2.execute( """ SELECT name FROM tile_type WHERE pkey = ?""", (dest_tile_type_pkey, ) ) (destination_tile_type, ) = c2.fetchone() destination_wire = get_pin_name_of_wire(conn, destination_wire_pkey) c2.execute( "SELECT name FROM switch WHERE pkey = ?" 
"", (switch_pkey, ) ) switch_name = c2.fetchone()[0] direct_connections.add( DirectConnection( from_pin='{}.{}'.format(source_tile_type, source_wire), to_pin='{}.{}'.format(destination_tile_type, destination_wire), switch_name=switch_name, x_offset=destination_loc_grid_x - source_loc_grid_x, y_offset=destination_loc_grid_y - source_loc_grid_y, ) ) if destination_loc_grid_x == source_loc_grid_x: if destination_loc_grid_y > source_loc_grid_y: source_dir = tracks.Direction.TOP destination_dir = tracks.Direction.BOTTOM else: source_dir = tracks.Direction.BOTTOM destination_dir = tracks.Direction.TOP else: if destination_loc_grid_x > source_loc_grid_x: source_dir = tracks.Direction.RIGHT destination_dir = tracks.Direction.LEFT else: source_dir = tracks.Direction.LEFT destination_dir = tracks.Direction.RIGHT edge_assignments[(source_tile_type, source_wire)].append((source_dir, )) edge_assignments[(destination_tile_type, destination_wire)].append((destination_dir, )) def handle_edges_to_channels( conn, null_tile_wires, edge_assignments, channel_wires_to_tracks ): c = conn.cursor() c.execute( """ SELECT vcc_track_pkey, gnd_track_pkey FROM constant_sources; """ ) vcc_track_pkey, gnd_track_pkey = c.fetchone() const_tracks = { 0: gnd_track_pkey, 1: vcc_track_pkey, } for node_pkey, classification in progressbar_utils.progressbar(c.execute( """ SELECT pkey, classification FROM node WHERE classification != ?; """, (NodeClassification.CHANNEL.value, ))): reason = NodeClassification(classification) if reason == NodeClassification.NULL: for (tile_type, wire) in yield_logical_wire_info_from_node(conn, node_pkey): null_tile_wires.add((tile_type, wire)) if reason != NodeClassification.EDGES_TO_CHANNEL: continue c2 = conn.cursor() for wire_pkey, phy_tile_pkey, tile_pkey, wire_in_tile_pkey in c2.execute( """ SELECT pkey, phy_tile_pkey, tile_pkey, wire_in_tile_pkey FROM wire WHERE node_pkey = ?; """, (node_pkey, )): c3 = conn.cursor() c3.execute( """ SELECT grid_x, grid_y FROM tile WHERE 
pkey = ?;""", (tile_pkey, ) ) (grid_x, grid_y) = c3.fetchone() c3.execute( """ SELECT name FROM tile_type WHERE pkey = ( SELECT tile_type_pkey FROM tile WHERE pkey = ? ); """, (tile_pkey, ) ) (tile_type, ) = c3.fetchone() wire = get_pin_name_of_wire(conn, wire_pkey) if wire is None: # This node has no site pin, don't need to assign pin direction. continue for other_phy_tile_pkey, other_wire_in_tile_pkey, pip_pkey, pip in c3.execute( """ WITH wires_from_node(wire_in_tile_pkey, phy_tile_pkey) AS ( SELECT wire_in_tile_pkey, phy_tile_pkey FROM wire WHERE node_pkey = ? AND phy_tile_pkey IS NOT NULL ), other_wires(other_phy_tile_pkey, pip_pkey, other_wire_in_tile_pkey) AS ( SELECT wires_from_node.phy_tile_pkey, undirected_pips.pip_in_tile_pkey, undirected_pips.other_wire_in_tile_pkey FROM undirected_pips INNER JOIN wires_from_node ON undirected_pips.wire_in_tile_pkey = wires_from_node.wire_in_tile_pkey) SELECT other_wires.other_phy_tile_pkey, other_wires.other_wire_in_tile_pkey, pip_in_tile.pkey, pip_in_tile.name FROM other_wires INNER JOIN pip_in_tile ON pip_in_tile.pkey == other_wires.pip_pkey WHERE pip_in_tile.is_directional = 1 AND pip_in_tile.is_pseudo = 0; """, (node_pkey, )): # Need to walk from the wire_in_tile table, to the wire table, # to the node table and get track_pkey. # other_wire_in_tile_pkey -> wire pkey -> node_pkey -> track_pkey c4 = conn.cursor() c4.execute( """ SELECT track_pkey, classification FROM node WHERE pkey = ( SELECT node_pkey FROM wire WHERE phy_tile_pkey = ? AND wire_in_tile_pkey = ? );""", (other_phy_tile_pkey, other_wire_in_tile_pkey) ) result = c4.fetchone() assert result is not None, ( wire_pkey, pip_pkey, tile_pkey, wire_in_tile_pkey, other_wire_in_tile_pkey ) (track_pkey, classification) = result # Some pips do connect to a track at all, e.g. null node if track_pkey is None: # TODO: Handle weird connections. 
# other_node_class = NodeClassification(classification) # assert other_node_class == NodeClassification.NULL, ( # node_pkey, pip_pkey, pip, other_node_class) continue tracks_model = channel_wires_to_tracks[track_pkey] available_pins = set( tracks_model.get_tracks_for_wire_at_coord( (grid_x, grid_y) ).keys() ) edge_assignments[(tile_type, wire)].append(available_pins) for constant in yield_ties_to_wire(wire): tracks_model = channel_wires_to_tracks[ const_tracks[constant]] available_pins = set( tracks_model.get_tracks_for_wire_at_coord( (grid_x, grid_y) ).keys() ) edge_assignments[(tile_type, wire)].append(available_pins) def initialize_edge_assignments(db, conn): """ Create initial edge_assignments map. """ c = conn.cursor() c2 = conn.cursor() c.execute( """ SELECT name, pkey FROM tile_type WHERE pkey IN ( SELECT DISTINCT tile_type_pkey FROM tile );""" ) tiles = dict(c) edge_assignments = {} wires_in_tile_types = set() # First find out which tile types were split during VPR grid formation. # These tile types should not get edge assignments directly, instead # their sites will get edge assignements. sites_as_tiles = set() split_tile_types = set() for site_pkey, tile_type_pkey in c.execute(""" SELECT site_pkey, tile_type_pkey FROM site_as_tile; """): c2.execute( "SELECT name FROM tile_type WHERE pkey = ?", (tile_type_pkey, ) ) split_tile_types.add(c2.fetchone()[0]) c2.execute( """ SELECT name FROM site_type WHERE pkey = ( SELECT site_type_pkey FROM site WHERE pkey = ? 
);""", (site_pkey, ) ) site_type_name = c2.fetchone()[0] sites_as_tiles.add(site_type_name) # Initialize edge assignments for split tiles for site_type in sites_as_tiles: del tiles[site_type] site_obj = db.get_site_type(site_type) for site_pin in site_obj.get_site_pins(): key = (site_type, site_pin) assert key not in edge_assignments, key edge_assignments[key] = [] for tile_type in db.get_tile_types(): if tile_type not in tiles: continue del tiles[tile_type] # Skip tile types that are split tiles if tile_type in split_tile_types: continue (tile_type_pkey, ) = c.execute( """ SELECT pkey FROM tile_type WHERE name = ? """, (tile_type, ) ).fetchone() for (wire, ) in c.execute(""" SELECT name FROM wire_in_tile WHERE tile_type_pkey = ?""", (tile_type_pkey, )): wires_in_tile_types.add((tile_type, wire)) type_obj = db.get_tile_type(tile_type) for site in type_obj.get_sites(): for site_pin in site.site_pins: if site_pin.wire is None: continue # Skip if this wire is not in the database c.execute( """ SELECT pkey FROM wire_in_tile WHERE name = ? """, (site_pin.wire, ) ) if not c.fetchone(): continue key = (tile_type, site_pin.wire) assert key not in edge_assignments, key edge_assignments[key] = [] for tile_type, tile_pkey in tiles.items(): assert tile_type not in split_tile_types for (wire, ) in c.execute(""" SELECT name FROM wire_in_tile WHERE pkey in ( SELECT DISTINCT wire_in_tile_pkey FROM wire WHERE tile_pkey IN ( SELECT pkey FROM tile WHERE tile_type_pkey = ?) );""", (tile_pkey, )): wires_in_tile_types.add((tile_type, wire)) for (wire, ) in c.execute(""" SELECT DISTINCT name FROM wire_in_tile WHERE pkey in ( SELECT DISTINCT wire_in_tile_pkey FROM wire WHERE tile_pkey IN ( SELECT pkey FROM tile WHERE tile_type_pkey = ?) 
) AND site_pin_pkey IS NOT NULL""", (tile_pkey, )): key = (tile_type, wire) assert key not in edge_assignments, key edge_assignments[key] = [] return edge_assignments, wires_in_tile_types def main(): parser = argparse.ArgumentParser() parser.add_argument( '--db_root', help='Project X-Ray Database', required=True ) parser.add_argument('--part', help='FPGA part', required=True) parser.add_argument( '--connection_database', help='Database of fabric connectivity', required=True ) parser.add_argument( '--pin_assignments', help=""" Output JSON assigning pins to tile types and direction connections""", required=True ) args = parser.parse_args() db = prjxray.db.Database(args.db_root, args.part) edge_assignments = {} with DatabaseCache(args.connection_database, read_only=True) as conn: c = conn.cursor() edge_assignments, wires_in_tile_types = initialize_edge_assignments( db, conn ) direct_connections = set() print('{} Processing direct connections.'.format(now())) handle_direction_connections( conn, direct_connections, edge_assignments ) wires_not_in_channels = {} c = conn.cursor() print('{} Processing non-channel nodes.'.format(now())) for node_pkey, classification in progressbar_utils.progressbar( c.execute(""" SELECT pkey, classification FROM node WHERE classification != ?; """, (NodeClassification.CHANNEL.value, ))): reason = NodeClassification(classification) for (tile_type, wire) in yield_logical_wire_info_from_node(conn, node_pkey): key = (tile_type, wire) # Sometimes nodes in particular tile instances are disconnected, # disregard classification changes if this is the case. if reason != NodeClassification.NULL: if key not in wires_not_in_channels: wires_not_in_channels[key] = reason else: other_reason = wires_not_in_channels[key] assert reason == other_reason, ( tile_type, wire, reason, other_reason ) if key in wires_in_tile_types: wires_in_tile_types.remove(key) # List of nodes that are channels. channel_nodes = [] # Map of (tile, wire) to track. 
This will be used to find channels for pips # that come from EDGES_TO_CHANNEL. channel_wires_to_tracks = {} # Generate track models and verify that wires are either in a channel # or not in a channel. print('{} Creating models from tracks.'.format(now())) for node_pkey, track_pkey in progressbar_utils.progressbar(c.execute( """ SELECT pkey, track_pkey FROM node WHERE classification = ?; """, (NodeClassification.CHANNEL.value, ))): assert track_pkey is not None tracks_model, _ = get_track_model(conn, track_pkey) channel_nodes.append(tracks_model) channel_wires_to_tracks[track_pkey] = tracks_model for (tile_type, wire) in yield_logical_wire_info_from_node(conn, node_pkey): key = (tile_type, wire) # Make sure all wires in channels always are in channels assert key not in wires_not_in_channels if key in wires_in_tile_types: wires_in_tile_types.remove(key) # Make sure all wires appear to have been assigned. if len(wires_in_tile_types) > 0: for tile_type, wire in sorted(wires_in_tile_types): print(tile_type, wire) assert len(wires_in_tile_types) == 0 # Verify that all tracks are sane. for node in channel_nodes: node.verify_tracks() null_tile_wires = set() # Verify that all nodes that are classified as edges to channels have at # least one site, and at least one live connection to a channel. # # If no live connections from the node are present, this node should've # been marked as NULL during channel formation. print('{} Handling edges to channels.'.format(now())) handle_edges_to_channels( conn, null_tile_wires, edge_assignments, channel_wires_to_tracks ) print('{} Processing edge assignments.'.format(now())) final_edge_assignments = {} for key, available_pins in progressbar_utils.progressbar( edge_assignments.items()): (tile_type, wire) = key available_pins = [pins for pins in available_pins if len(pins) > 0] if len(available_pins) == 0: if (tile_type, wire) not in null_tile_wires: # TODO: Figure out what is going on with these wires. 
Appear to # tile internal connections sometimes? print((tile_type, wire)) final_edge_assignments[key] = [tracks.Direction.RIGHT] continue pins = set(available_pins[0]) for p in available_pins[1:]: pins &= set(p) if len(pins) > 0: final_edge_assignments[key] = [list(pins)[0]] else: # More than 2 pins are required, final the minimal number of pins pins = set() for p in available_pins: pins |= set(p) while len(pins) > 2: pins = list(pins) prev_len = len(pins) for idx in range(len(pins)): pins_subset = list(pins) del pins_subset[idx] pins_subset = set(pins_subset) bad_subset = False for p in available_pins: if len(pins_subset & set(p)) == 0: bad_subset = True break if not bad_subset: pins = list(pins_subset) break # Failed to remove any pins, stop. if len(pins) == prev_len: break final_edge_assignments[key] = pins for key, available_pins in edge_assignments.items(): (tile_type, wire) = key pins = set(final_edge_assignments[key]) for required_pins in available_pins: if len(required_pins) == 0: continue assert len(pins & set(required_pins)) > 0, ( tile_type, wire, pins, required_pins, available_pins ) pin_directions = {} for key, pins in progressbar_utils.progressbar( final_edge_assignments.items()): (tile_type, wire) = key if tile_type not in pin_directions: pin_directions[tile_type] = {} pin_directions[tile_type][wire] = [pin._name_ for pin in pins] with open(args.pin_assignments, 'w') as f: json.dump( { 'pin_directions': pin_directions, 'direct_connections': [d._asdict() for d in direct_connections], }, f, indent=2 ) print( '{} Flushing database back to file "{}"'.format( now(), args.connection_database ) ) if __name__ == '__main__': main()
{ "repo_name": "SymbiFlow/symbiflow-arch-defs", "path": "xc/common/utils/prjxray_assign_tile_pin_direction.py", "copies": "1", "size": "21177", "license": "isc", "hash": 6079953087874920000, "line_mean": 31.6805555556, "line_max": 90, "alpha_frac": 0.5473390943, "autogenerated": false, "ratio": 4.008517887563884, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.00034867483153462327, "num_lines": 648 }
"""Assigns participants with the specified IDs to the organization. Usage: ./rdr_client/run_client.sh --project all-of-us-rdr-prod --account $USER@pmi-ops.org \ pairing_assigner.py file.csv --pairing [site|organization|awardee] \ [--dry_run] [--override_site] Where site = google_group, organization = external_id, awardee = name. The CSV contains lines with P12345678,NEW_ORGANIZATION (no headers necessary). e.g.: Example awardees: P11111111,AZ_TUCSON P22222222,AZ_TUCSON P99999999,PITT P00000000,PITT Example sites: P11111111,hpo-site-monroeville P22222222,hpo-site-phoenix P99999999,hpo-site-tucson P00000000,hpo-site-pitt """ import csv import logging import sys from rdr_service.main_util import configure_logging, get_parser from rdr_service.rdr_client.client import Client, HttpException, client_log def main(client): num_no_change = 0 num_updates = 0 num_errors = 0 pairing_list = ["site", "organization", "awardee"] pairing_key = client.args.pairing if client.args.pairing not in pairing_list: sys.exit("Pairing must be one of site|organization|awardee") with open(client.args.file) as csvfile: reader = csv.reader(csvfile) for line in reader: try: participant_id, new_pairing = [v.strip() for v in line] except ValueError as e: logging.error("Skipping invalid line %d (parsed as %r): %s.", reader.line_num, line, e) num_errors += 1 continue if not (new_pairing and participant_id): logging.warning( "Skipping invalid line %d: missing new_pairing (%r) or participant (%r).", reader.line_num, new_pairing, participant_id, ) num_errors += 1 continue if not participant_id.startswith("P"): logging.error( "Malformed participant ID from line %d: %r does not start with P.", reader.line_num, participant_id ) num_errors += 1 continue try: participant = client.request_json("Participant/%s" % participant_id) except HttpException as e: logging.error("Skipping %s: %s", participant_id, e) num_errors += 1 continue old_pairing = _get_old_pairing(participant, pairing_key) if new_pairing == 
old_pairing: num_no_change += 1 logging.info("%s unchanged (already %s)", participant_id, old_pairing) continue if not client.args.override_site: if participant.get("site") and participant["site"] != "UNSET": logging.info( "Skipping participant %s already paired with site %s" % (participant_id, participant["site"]) ) continue if client.args.no_awardee_change: if participant.get("awardee") and participant["awardee"] != "UNSET": if not new_pairing.startswith(participant["awardee"]): logging.info( "Skipping participant %s where pairing %s does not begin with old awardee %s" % (participant_id, new_pairing, participant["awardee"]) ) continue logging.info("%s %s => %s", participant_id, old_pairing, new_pairing) if new_pairing == "UNSET": for i in pairing_list: participant[i] = "UNSET" participant["providerLink"] = [] else: for i in pairing_list: del participant[i] participant[pairing_key] = new_pairing if client.args.dry_run: logging.info("Dry run, would update participant[%r] to %r.", pairing_key, new_pairing) else: client.request_json( "Participant/%s" % participant_id, "PUT", participant, headers={"If-Match": client.last_etag} ) num_updates += 1 logging.info( "%s %d participants, %d unchanged, %d errors.", "Would update" if client.args.dry_run else "Updated", num_updates, num_no_change, num_errors, ) def _get_old_pairing(participant, pairing_key): old_pairing = participant[pairing_key] if not old_pairing: return "UNSET" return old_pairing if __name__ == "__main__": configure_logging() client_log.setLevel(logging.WARN) # Suppress the log of HTTP requests. 
arg_parser = get_parser() arg_parser.add_argument("file", help="The name of file containing the list of HPOs and participant IDs") arg_parser.add_argument("--dry_run", action="store_true") arg_parser.add_argument( "--pairing", help="set level of pairing as one of" "[site|organization|awardee]", required=True ) arg_parser.add_argument( "--override_site", help="Update pairings on participants that have a site pairing already", action="store_true" ) arg_parser.add_argument( "--no_awardee_change", help="Do not re-pair participants if the awardee is changing; " + "just log that it happened", action="store_true", ) main(Client(parser=arg_parser))
{ "repo_name": "all-of-us/raw-data-repository", "path": "rdr_service/rdr_client/pairing_assigner.py", "copies": "1", "size": "5499", "license": "bsd-3-clause", "hash": -593524076951328000, "line_mean": 36.924137931, "line_max": 119, "alpha_frac": 0.5713766139, "autogenerated": false, "ratio": 3.8725352112676057, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4943911825167606, "avg_score": null, "num_lines": null }
"""Assigns secondary IP to NIC""" from baseCmd import * from baseResponse import * class addIpToNicCmd (baseCmd): typeInfo = {} def __init__(self): self.isAsync = "true" """the ID of the nic to which you want to assign private IP""" """Required""" self.nicid = None self.typeInfo['nicid'] = 'uuid' """Secondary IP Address""" self.ipaddress = None self.typeInfo['ipaddress'] = 'string' self.required = ["nicid", ] class addIpToNicResponse (baseResponse): typeInfo = {} def __init__(self): """the ID of the secondary private IP addr""" self.id = None self.typeInfo['id'] = 'string' """Secondary IP address""" self.ipaddress = None self.typeInfo['ipaddress'] = 'string' """the ID of the network""" self.networkid = None self.typeInfo['networkid'] = 'string' """the ID of the nic""" self.nicid = None self.typeInfo['nicid'] = 'string' """the ID of the vm""" self.virtualmachineid = None self.typeInfo['virtualmachineid'] = 'string'
{ "repo_name": "MissionCriticalCloud/marvin", "path": "marvin/cloudstackAPI/addIpToNic.py", "copies": "1", "size": "1143", "license": "apache-2.0", "hash": 1970093816412129300, "line_mean": 27.575, "line_max": 70, "alpha_frac": 0.5634295713, "autogenerated": false, "ratio": 3.901023890784983, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.49644534620849834, "avg_score": null, "num_lines": null }
"""Assigns subnets to a given ACL.""" # :license: MIT, see LICENSE for more details.ß import click import SoftLayer from SoftLayer.CLI import environment from SoftLayer.CLI import exceptions @click.command() @click.argument('host_id') @click.option('--subnet-id', '-s', multiple=True, required=True, help='The id of one SoftLayer_Hardware to authorize') @environment.pass_env def cli(env, host_id, subnet_id): """Assigns subnets to a given host""" block_manager = SoftLayer.BlockStorageManager(env.client) subnet_id_list = list(subnet_id) click.echo('Test to spit out %s' % subnet_id_list[0]) click.echo('\n Test to spit out 2 %s' % type(subnet_id)) #print out the subnet input to find out what's happening here :O #Add a check in case the subnet_id_list is empty result = block_manager.assign_subnets_to_acl(host_id, subnet_id_list) env.fout(result) # If no exception was raised, the command succeeded click.echo('Desired subnets added to host with id: %s' % host_id)
{ "repo_name": "kyubifire/softlayer-python", "path": "SoftLayer/CLI/block/acl/subnet_add.py", "copies": "1", "size": "1037", "license": "mit", "hash": -3065124898964429000, "line_mean": 32.4193548387, "line_max": 73, "alpha_frac": 0.6978764479, "autogenerated": false, "ratio": 3.4304635761589406, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9622959269518377, "avg_score": 0.00107615090811259, "num_lines": 31 }
"""Assigns virtual machine or a list of virtual machines to a load balancer rule.""" from baseCmd import * from baseResponse import * class assignToLoadBalancerRuleCmd (baseCmd): typeInfo = {} def __init__(self): self.isAsync = "true" """the ID of the load balancer rule""" """Required""" self.id = None self.typeInfo['id'] = 'uuid' """the list of IDs of the virtual machine that are being assigned to the load balancer rule(i.e. virtualMachineIds=1,2,3)""" self.virtualmachineids = [] self.typeInfo['virtualmachineids'] = 'list' """VM ID and IP map, vmidipmap[0].vmid=1 vmidipmap[0].ip=10.1.1.75""" self.vmidipmap = [] self.typeInfo['vmidipmap'] = 'map' self.required = ["id", ] class assignToLoadBalancerRuleResponse (baseResponse): typeInfo = {} def __init__(self): """any text associated with the success or failure""" self.displaytext = None self.typeInfo['displaytext'] = 'string' """true if operation is executed successfully""" self.success = None self.typeInfo['success'] = 'boolean'
{ "repo_name": "MissionCriticalCloud/marvin", "path": "marvin/cloudstackAPI/assignToLoadBalancerRule.py", "copies": "1", "size": "1160", "license": "apache-2.0", "hash": 2957108299332438000, "line_mean": 33.1176470588, "line_max": 132, "alpha_frac": 0.6172413793, "autogenerated": false, "ratio": 3.8410596026490067, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9808406349173295, "avg_score": 0.02997892655514218, "num_lines": 34 }
#!/usr/bin/env python3
'''assign_uniq_master - assign a unique master to each peptide across
all samples using a maximum parsimony approach
=====================================================================

:Author: Tom Smith, Manasa Ramakrishna
:Release: $Id$
:Date: |today|
:Tags: Python Proteomics Peptide-assignment

Purpose
-------
This script takes the xlsx output from a set of input files (*.txt/*.xlsx) and
annotates the table with unique protein information for downstream analyses.

The following columns are added:

- master_protein(s): The master protein(s) for the peptide. See below
  for how this is derived
- master_uniprot_id(s): The uniprot id(s) for the master protein(s)
- protein_description(s): Description(s) for the master protein(s)
- protein_length(s): The length(s) of the master protein(s)
- crap_protein: Is the protein in the cRAP database of common
  proteomics proteins, e.g keratin

If a log file is requested (--log), basic statistics are collected and
written to the log file

Fasta description format
------------------------
The source of the protein (SwissProt or TrEMBL) is derived from the
protein fasta description, with SwissProt proteins starting 'sp' and
TrEMBL 'tr'. Furthermore, the description column is derived from the
fasta description too. For this reason the fasta databases must be
correctly formatted as in the examples below. This is the standard
format for fasta files from uniprot.

format:
Three-level identifier followed by protein description:
>[sp|tr]|[Uniprot id]|[Protein name] [Description]

examples:
>sp|P27361|MK03_HUMAN Mitogen-activated protein kinase 3 OS=Homo sapiens GN=MAPK3 PE=1 SV=4
>tr|F8W1T5|F8W1T5_HUMAN GTPase RhebL1 (Fragment) OS=Homo sapiens GN=RHEBL1 PE=4 SV=1

Deriving master proteins
----------------------------
Matching peptides to their source proteins (protein inference) is a
common task in proteomics and there are many possible
approaches. Ultimately, the aim is usually to identify the most likely
source protein since taking all possible sources makes downstream
analyses very complex. Here we use the parsimonious approach to
identify a minimal set of proteins which explains all peptides
observed. In essense, the approach is as follows:
- start with list of all peptides
- sort proteins by the number of peptides observed
- take the protein(s) with the most peptides and remove these from the peptides list
- continue through the sorted proteins, removing peptides, until the
  peptides list is empty

Additionally, we prioritise matches to SwissProt proteins over TrEMBL
proteins. SwissProt proteins have been manually curated and should not
contain any redundant proteins, truncated sequences, miss-annotations
etc. On the other hand, the set of TrEMBL proteins will ceratainly
contain proteins which are redundant with respect to the SwissProt
proteins as well as truncated and just plain wrong(!) proteins. It is
useful to include the TrEMBL proteins to catch peptides which are from
a protein or isoform which has not been curated into SwissProt
yet. However, where a SwissProt match is found, we believe it is
preferable to ignore any TrEMBL match. Here, for all peptides with
matched to both SwissProt and TrEMBL proteins, we remove all the
TrEMBL matches.

In some instances, it is not possible to assign a single protein to a
peptide. In these cases, the proteins names, uniprot ids, descriptions
and lengths are ';' separated in the outfile.

In addition to the conditions above, In some cases we are looking for
master proteins that are consistent across a set of samples. This is
to ensure that for a given set of peptides, the same master protein is
assigned to all samples.

Usage
-----
By default, the outfile will be created in the same directory with the
suffix annotated.xlsx. You can change the outfile name by specifying
the option --outfile

python assign_uniq_master.py --infile=RNP.xlsx --fasta-db=Human_201701.fasta
--fasta-crap-db=cRAP_FullIdentifiers.fasta --outfile=master_prot_annotated.txt
--logfile=master_prot_annot.log

Command line options
--------------------
'''

import argparse
import collections
import copy
import errno
import glob
import os
import re
import sys

import pandas as pd

import proteomics.fasta as fasta


def writeSectionHeader(logfile, section_header):
    """Write a visually-blocked section header to *logfile*.

    Returns the blocker string so the caller can close the section with it.
    """
    #underliner = "".join(("-",)*len(section_header))
    section_blocker = ("======================================="
                       "=======================================")
    underliner1 = ("----------------------------------------"
                   "----------------------------------------")
    logfile.write("\n%s\n%s\n" % (section_blocker, section_header))
    logfile.write("%s\n" % underliner1)
    return section_blocker

####################### ------------------------- ##############################


def main(argv=sys.argv):
    """Annotate each infile's peptides with their master protein assignments."""

    parser = argparse.ArgumentParser(argv, usage=__doc__)

    optional = parser.add_argument_group('optional arguments')
    required = parser.add_argument_group('required arguments')

    required.add_argument('-i', '--infile', dest="infile", required=True,
                          nargs='+',
                          help=("Provide a single file or folder with "
                                "multiple files for processing"))
    required.add_argument('-f', '--fasta-db', dest="fasta_db", required=True,
                          help=("Input a fasta file for all proteins in "
                                "the species of interest"))
    required.add_argument('-fc', '--fasta-crap-db', dest="fasta_crap_db",
                          required=True,
                          help=("Input a fasta file for all proteins that "
                                "are common contaminants in a mass-spec "
                                "experiment"))
    optional.add_argument('-o', '--outfile', dest="outfile", default=None,
                          help=("Enter a file name for your output. Else "
                                "it will be the same as your input with "
                                "the suffix 'annotated'"))
    optional.add_argument('-l', '--logfile', dest="logfile",
                          default=os.devnull,
                          help=("Enter a file name for logging program "
                                "output. Else, nothing will be printed"))

    args = vars(parser.parse_args())

    # creating an output file if no name is provided
    #if args['outfile'] is None:
    #    args['outfile'] = args.pop('infile')[1].replace(".txt", "_annotated.txt")

    # create a log file (os.devnull by default)
    logfile = open(args['logfile'], 'w')
    logfile.write("\nLogfile for assign_uniq_master.py\n")
    section_blocker = writeSectionHeader(logfile, "Script arguments:")

    # Outputs each input parameter and user defined value
    for key, value in args.items():
        logfile.write("%s: %s\n" % (key, value))
    logfile.write("%s\n\n" % section_blocker)

    #------------------------------------------------------------
    # (1.0) Build dictionaries to map from protein id to protein
    # sequence and description using the fasta database
    # protein2seq : key = protein ID, value = sequence
    # crap_proteins : set with IDs of contaminant proteins
    #------------------------------------------------------------
    crap_proteins = set()
    protein2description = {
        entry.title.split(" ")[0]: " ".join(entry.title.split(" ")[1:])
        for entry in fasta.FastaIterator(open(args['fasta_db']))}
    protein2seq = {
        entry.title.split(" ")[0]: entry.sequence
        for entry in fasta.FastaIterator(open(args['fasta_db']))}
    for entry in fasta.FastaIterator(open(args['fasta_crap_db'])):
        protein2seq[entry.title.split(" ")[0]] = entry.sequence
        protein2description[entry.title.split(" ")[0]] = entry.title.split(" ")[0]
        crap_proteins.add(entry.title.split(" ")[0])

    #------------------------------------------------
    # read each infile into a dataframe and parse rows
    #------------------------------------------------
    for f in args['infile']:
        # os.path.splitext handles paths containing '.' safely,
        # unlike splitting the whole path on '.'
        ext = os.path.splitext(f)[1].lower()
        if ext == ".xlsx":
            rnp_df = pd.read_excel(f)
        elif ext in (".txt", ".text"):
            rnp_df = pd.read_table(f, sep='\t', comment=None)
        else:
            raise ValueError("File type must one of .xlsx, .txt, .text")

        # Per-row annotation columns built up in the loop below.
        sp_ids_col = []
        tr_ids_col = []
        crap_col = []

        # Loop through each peptide and find all SwissProt and TrEMBL
        # proteins whose sequence contains it
        for ix, row in rnp_df.iterrows():
            peptide = row['Sequence']

            # For each peptide, find all the fasta entries in which it is a
            # perfect substring match
            matches = [k for k in protein2seq.keys() if peptide in protein2seq[k]]

            # High-confidence SwissProt matches ("sp|<id>|<name>")
            sp_id = [p.split("|")[1] for p in matches if "sp|" in p]
            # Lower-confidence TrEMBL matches ("tr|<id>|<name>")
            tr_id = [p.split("|")[1] for p in matches if "tr|" in p]

            # (1.5.1) does peptide match a cRAP protein?  cRAP entries were
            # registered with their full fasta identifier above, so compare
            # against the full identifiers, not the bare uniprot ids.
            crap = 1 if any(p in crap_proteins for p in matches) else 0

            # ';'-joined strings per row; the parsimony step below splits
            # these back out with .split(";")
            sp_ids_col.append(";".join(sp_id))
            tr_ids_col.append(";".join(tr_id))
            crap_col.append(crap)

        rnp_df['new_protein_sp_ids'] = sp_ids_col
        rnp_df['new_protein_tr_ids'] = tr_ids_col
        rnp_df['crap_protein'] = crap_col

        # Now that we have matched each peptide to a protein using the
        # FASTA and cRAP fasta files We want to use the maximum
        # parsimony approach so that a set of peptides arising from
        # the same protein map back uniquely to that protein. We have
        # found that some lower quality peptides map to isoforms of
        # lower quality stored in trEMBL so we only choose those that
        # are in SwissProt database. We need to go through the
        # parsimonious approach with Swissprot entries as top
        # priorities. If this does not return a protein, then we go
        # to trEMBL and repeat the exercise

        #(1) Get the mappings between peptide and proteins
        pep2pro = collections.defaultdict(lambda: collections.defaultdict(set))
        pep2allpro = collections.defaultdict(set)
        pro2pep = collections.defaultdict(set)
        initial_proteins = set()

        # (1.1) extract the initial mappings between proteins and peptides
        for row_ix, row_values in rnp_df[['new_protein_sp_ids', 'Sequence']].iterrows():
            proteins = row_values['new_protein_sp_ids'].split(";")
            peptide = row_values['Sequence']
            if peptide in pep2pro:
                # A repeated peptide must always carry the same protein set.
                assert pep2allpro[peptide] == proteins, (
                    "The same peptide is observed more than once with different proteins!")
            pep2allpro[peptide] = proteins
            for protein in proteins:
                initial_proteins.add(protein)
                pro2pep[protein].add(peptide)
                pep2pro[peptide][1].add(protein)

        section_blocker = writeSectionHeader(logfile, "Initial file stats")
        logfile.write("# initial peptides: %i\n" % len(pep2pro))
        logfile.write("# initial proteins: %i\n" % len(pro2pep))
        logfile.write("%s\n\n" % section_blocker)

        # (1.3) Use a parsimonious approach to identifty the minimum number
        # of proteins required to cover all the peptides:
        # Start from the protein(s) with the most peptides and mark these as covered.
        # Continue with remaining proteins in order of peptides per protein
        # until all peptides are covered
        retained_proteins = []
        peptides = copy.deepcopy(set(pep2pro.keys()))
        peptide_counts = {}
        tmppro2pep = copy.deepcopy(pro2pep)
        new_top_level_proteins = copy.deepcopy(initial_proteins)
        new_pep2pro = collections.defaultdict(set)

        peptide_count = max(map(len, tmppro2pep.values()))

        print("max peptide count is ", peptide_count)

        section_blocker = writeSectionHeader(
            logfile, ("Parsimonious method to identify minimal set of proteins"
                      " to account for all peptides"))

        while True:
            # (1.3.1) If all peptides covered or the maximum peptides per
            # protein = 0, break.
            if len(peptides) == 0 or peptide_count == 0:
                logfile.write("All peptides are now accounted for\n")
                break

            peptide_count -= 1

            top_proteins = set()
            top_score = 0

            #(1.3.2) Find the proteins with the highest number of peptide matches
            # top_score starts off at 0; keep the running maximum and the
            # proteins that achieve it.
            for protein in new_top_level_proteins:
                if len(tmppro2pep[protein]) == top_score:
                    top_proteins.add(protein)
                elif len(tmppro2pep[protein]) > top_score:
                    top_score = len(tmppro2pep[protein])
                    top_proteins = set((protein,))

            logfile.write("%i remaining protein(s) with %i peptides\n" % (
                len(top_proteins), top_score))

            # NOTE(review): the remainder of the parsimony step (peptide
            # removal and output writing) is intentionally disabled below via
            # the triple-quoted string; preserved verbatim from the original.
            '''
            # (1.3.3) Remove the top proteins and the associated peptides
            for top_protein in top_proteins:

                new_top_level_proteins.remove(top_protein)

                retained_proteins.append(top_protein)

                for peptide in pro2pep[top_protein]:
                    new_pep2pro[peptide].add(top_protein)
                    if peptide in peptides:
                        peptides.remove(peptide)
                    for protein in pep2pro[peptide][1]:
                        if protein == top_protein:
                            continue
                        if peptide in tmppro2pep[protein]:
                            tmppro2pep[protein].remove(peptide)

        logfile.write("\n%i proteins retained\n" % len(retained_proteins))
        #logfile.write("\n".join([",".join(map(str, (x, len(tmppro2pep[x]), len(pro2pep[x]))))
        #                         for x in new_top_level_proteins]))
        logfile.write("%i SwissProt proteins retained\n" % len(
            [x for x in retained_proteins if x.split("|")[0] == 'sp']))
        logfile.write("\nNote: If not all SwissProt proteins were retained, this means\n"
                      "these proteins only included peptides which were observed\n"
                      "in other proteins which had a greater number of peptides\n")
        logfile.write("%s\n\n" % section_blocker)

        section_blocker = writeSectionHeader(logfile, "proteins per peptide:")
        counts = collections.Counter([len(x) for x in new_pep2pro.values()])
        sum_counts = sum(counts.values())
        for k, v in counts.items():
            logfile.write("%i peptide(s) (%.2f %%) have %i master protein(s)\n" % (
                v, (100 * v)/sum_counts, k))
        logfile.write("%s\n\n" % section_blocker)

        # Check all the peptides are covered
        assert set(pep2pro.keys()).difference(set(new_pep2pro.keys())) == set()

        # add the top protein and uniprot id annotations
        rnp_df['master_protein(s)'] = [";".join(new_pep2pro[protein]) for protein in rnp_df['new_protein_sp_ids']]

        rnp_df.to_tsv(args['outfile'], index=False, sep="\t")
        os.chmod(args['outfile'], 0o666)
        '''


if __name__ == "__main__":
    sys.exit(main(sys.argv))
{ "repo_name": "TomSmithCGAT/CamProt", "path": "camprot/scripts/assign_uniq_master.py", "copies": "1", "size": "17343", "license": "mit", "hash": -1992384570623952100, "line_mean": 38.8689655172, "line_max": 157, "alpha_frac": 0.578619616, "autogenerated": false, "ratio": 3.8798657718120806, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.995848538781208, "avg_score": 0, "num_lines": 435 }
"""Assign User role to all existing users. Revision ID: 5b33357784a Revises: 1afc3824d35b Create Date: 2013-09-20 15:42:01.558543 """ # revision identifiers, used by Alembic. revision = '5b33357784a' down_revision = '1afc3824d35b' import sqlalchemy as sa from alembic import op from datetime import datetime from sqlalchemy.sql import table, column, select person_table = table('people', column('id', sa.Integer), ) role_table = table('roles', column('id', sa.Integer), column('name', sa.String), ) user_roles_table = table('user_roles', column('id', sa.Integer), column('role_id', sa.Integer), column('person_id', sa.Integer), column('context_id', sa.Integer), column('modified_by_id', sa.Integer), column('created_at', sa.DateTime), column('updated_at', sa.DateTime), ) def upgrade(): users = select([person_table.c.id]) object_editor = select([role_table.c.id])\ .where(role_table.c.name == 'ObjectEditor')\ .limit(1) program_creator = select([role_table.c.id])\ .where(role_table.c.name == 'ProgramCreator')\ .limit(1) #FIXME this could be done better in a more recent version of sqlalchemy #once 0.8.3 is released #op.execute(user_roles_table.insert()\ #.from_select(['user_id'], users)\ #.from_select(['role_id'], role)\ #.values(context_id=None,)) #FIXME workaround until we can do the proper static generation of the sql #statement connection = op.get_bind() users = connection.execute(users).fetchall() object_editor = connection.execute(object_editor).fetchone() program_creator = connection.execute(program_creator).fetchone() current_datetime = datetime.now() for user in users: op.execute(user_roles_table.insert().values( person_id=user['id'], role_id=object_editor['id'], context_id=None, created_at=current_datetime, updated_at=current_datetime, )) op.execute(user_roles_table.insert().values( person_id=user['id'], role_id=program_creator['id'], context_id=None, created_at=current_datetime, updated_at=current_datetime, )) def downgrade(): '''Intentionally does nothing as we can't 
distinguish between migration added assignments and not. ''' pass
{ "repo_name": "vladan-m/ggrc-core", "path": "src/ggrc_basic_permissions/migrations/versions/20130920154201_5b33357784a_assign_user_role_to_.py", "copies": "2", "size": "2286", "license": "apache-2.0", "hash": -254989722260366850, "line_mean": 27.9367088608, "line_max": 75, "alpha_frac": 0.6675415573, "autogenerated": false, "ratio": 3.4170403587443947, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5084581916044395, "avg_score": null, "num_lines": null }
#Assign variables
# Squares Game: a pygame arcade toy.  A single flat script — all state lives
# in module-level globals that the helpers mutate via `global` declarations.
import pygame,random
from pygame.locals import *
clock = pygame.time.Clock()
pygame.init()
# Window dimensions.
width=680
height=420
# One-frame key latches set by KEYDOWN and cleared at the end of each frame.
up=False
down=False
left=False
right=False
score=0
g=False          # "game" mode (food spawns, score counts, no lives)
running=True
pieceSize=50     # side length of the player square, pixels
# Start the piece centred in the window.
y=(height/2)-(pieceSize/2)
x=(width/2)-(pieceSize/2)
# Velocity (pixels per frame) applied by movePiece().
dir_y=0
dir_x=0
qwerty=0         # NOTE(review): appears unused
strobeRegulation=0
# Colour constants (RGB tuples; green carries a 4th alpha-like value).
blue=0,0,225
red=255,0,0
black=0,0,0
green=0,225,0,100
white=255,255,255
yellow=255,255,0
bgcolor=False
bgCode=5         # index into the palette used by getBgColor()
colorCode=1      # index into the palette used by getPieceColor()
strobe=False
pygame.display.set_caption('Squares Game')
go=False         # "W" mode: full game with lives and wall penalties
bounce=False
portals=False    # NOTE(review): toggled by K_p but never read elsewhere
eaten=False
lives=5
redFlash=6       # frames remaining of the red "hit a wall" flash when < 6
event = pygame.event.poll()   # NOTE(review): result unused; likely leftover
onX=False        # NOTE(review): appears unused
onY=False        # NOTE(review): appears unused
mouseControl=False
mouseClick=False
speedLimit=True
winner=False
bounceWithoutFood=0   # wall bounces since last food; at 100 the piece teleports

def getBgColor():
    """Map bgCode to a background colour, recursing to wrap out-of-range codes.

    Returns red while redFlash < 6 (wall-hit flash overrides the palette).
    """
    global bgCode
    global redFlash
    if redFlash<6:
        return red
    if bgCode==1:
        bgcolor=white
    elif bgCode==2:
        bgcolor=red
    elif bgCode==3:
        bgcolor=green
    elif bgCode==4:
        bgcolor=blue
    elif bgCode==5:
        bgcolor=black
    elif bgCode==6:
        bgcolor=yellow
    else:
        # Wrap the code back to 1 and resolve again.
        bgCode=1
        return getBgColor()
    return bgcolor

def getPieceColor():
    """Map colorCode to the piece colour, recursing to wrap out-of-range codes."""
    global colorCode
    if colorCode==1:
        pieceColor=white
    elif colorCode==2:
        pieceColor=red
    elif colorCode==3:
        pieceColor=green
    elif colorCode==4:
        pieceColor=blue
    elif colorCode==5:
        pieceColor=black
    elif colorCode==6:
        pieceColor=yellow
    else:
        colorCode=1
        return getPieceColor()
    return pieceColor

def isOut():
    """Clamp the piece position so it stays fully inside the window."""
    global y
    global x
    global width
    global height
    global pieceSize
    if y<1:
        y=0
    if x<0:
        x=0
    if (y+pieceSize)>height:
        y=height-pieceSize
    if x+pieceSize>width:
        x=width-pieceSize

def tooBig():
    """Clamp pieceSize to the range [10, min(width, height)]."""
    global pieceSize
    global height
    global width
    if pieceSize<10:
        pieceSize=10
    elif pieceSize>width or pieceSize>height:
        if height>width:
            pieceSize=width
        else:
            pieceSize=height

def movePiece():
    """Advance the piece by the current velocity."""
    global x
    global y
    global dir_x
    global dir_y
    x += dir_x
    y += dir_y

def getRandomLocation():
    """Return a random {'x', 'y'} position for a 10px food square.

    y starts at 31 so the food never spawns under the HUD text row.
    """
    return {'x': random.randint(0, width - 11), 'y': random.randint(31, height - 11)}
food=getRandomLocation()

# ---------------- main loop: one iteration per frame, capped at 31 fps ------
while running:
    xcol=False
    ycol=False
    # Anti-stall: after 100 bounces without eating, teleport onto the food.
    if bounceWithoutFood==100:
        x=food['x']
        y=food['y']
    screen = pygame.display.set_mode((width,height))
    isOut()
    tooBig()
    pieceColor=getPieceColor()
    bgcolor=getBgColor()
    #Fill and print
    screen.fill(bgcolor)
    if not winner:
        pygame.draw.rect(screen,pieceColor,(x,y,(pieceSize),(pieceSize)))
    piece=(x,y,pieceSize,pieceSize)
    # Speed limiter: zero the velocity when either component exceeds 50.
    if speedLimit:
        if dir_x>50 or dir_y>50:
            dir_y=0
            dir_x=0
    # Lose/win screens.
    if score<0:
        screen.fill((red))
        font=pygame.font.Font(None,100)
        text = font.render('GAME OVER', True, (black), (red))
        textRect = text.get_rect()
        textRect.centerx = screen.get_rect().centerx
        textRect.centery = screen.get_rect().centery
        screen.blit(text, textRect)
        pygame.display.update()
        break
    elif score>999:
        winner=True
    if winner:
        screen.fill(bgcolor)
        font=pygame.font.Font(None,100)
        text = font.render('YOU WIN!!!', True, (pieceColor), (bgcolor))
        textRect = text.get_rect()
        textRect.centerx = screen.get_rect().centerx
        textRect.centery = screen.get_rect().centery
        screen.blit(text, textRect)
        pygame.display.update()
        strobe=True
    if go or g and pieceSize>(height-50):
        pieceSize=10
    # Food handling: respawn on eat, grow the piece, score, award lives.
    if winner:
        food=False
    elif eaten:
        food=getRandomLocation()
        pieceSize+=10
        score+=1
        bounceWithoutFood=0
        if not score%25:
            lives +=1
        if pieceSize==height or pieceSize==width:
            pieceSize=10
        if y<1:
            y=1
        if x<0:
            x=1
        if (y+pieceSize)>height:
            y=height-pieceSize-1
        if x+pieceSize>width:
            x=width-pieceSize-1
        eaten=False
    if food:
        if go or g and not winner:
            pygame.draw.rect(screen,pieceColor,(food['x'],food['y'],(10),(10)))
            # Axis-by-axis overlap test between piece and the 10px food square.
            for px in range (x, (x+pieceSize)):
                if px>=food['x'] and px<=food['x']+10:
                    xcol=True
            for px in range (y, (y+pieceSize)):
                if px>=food['y'] and px<=food['y']+10:
                    ycol=True
            if xcol and ycol and g or go:
                eaten=True
    # HUD: position, fps and active-mode flags on the top row.
    font=pygame.font.Font(None,30)
    # NOTE(review): bare `print` — Python 2 blank line per frame; a no-op
    # expression on Python 3.  Likely debug leftover; confirm target version.
    print
    fps=(str(clock.get_fps())[0:5])
    if not winner:
        text = font.render('%i, %i %s %s %s %s %s%s %s %s %s %s %s' % (x,y,fps,'fps','W' if go else '', 'B' if bounce else '','G' if g and not go else '','C' if mouseClick else '','M' if mouseControl or mouseClick else '','Score:'if g or go else '',score if g or go else '', 'Lives' if go else '',lives if go else ''), True, pieceColor, bgcolor)
        textRect = text.get_rect()
        screen.blit(text, textRect)
    # Keep piece and background colours distinct.
    if colorCode==bgCode:
        colorCode+=1
        bgColor=getBgColor()
    movePiece()
    # On wall related actions
    if x <= 0 or x >= (width-pieceSize):
        onWall=True
        if go:
            # "W" mode: hitting a wall costs a life and flashes red.
            lives-=1
            dir_x*=-1
            redFlash=0
            bgColor=red
            if lives==0:
                screen.fill((red))
                font=pygame.font.Font(None,100)
                text = font.render('GAME OVER', True, (black), (red))
                textRect = text.get_rect()
                textRect.centerx = screen.get_rect().centerx
                textRect.centery = screen.get_rect().centery
                screen.blit(text, textRect)
                pygame.display.update()
                break
        elif bounce and dir_x:
            dir_x*=-1
            bgCode+=1
            bounceWithoutFood+=1
        elif bounce and not dir_x and x==0:
            dir_x=1
        elif bounce and not dir_x and x+pieceSize==width:
            dir_x=-1
        else:
            dir_x=0
    if y <= 0 or y >= (height-pieceSize):
        onWall=True
        dir_yb=dir_y
        if go:
            lives-=1
            dir_y*=-1
            redFlash=0
            if lives==0:
                screen.fill((red))
                font=pygame.font.Font(None,100)
                text = font.render('GAME OVER', True, (black), (red))
                textRect = text.get_rect()
                textRect.centerx = screen.get_rect().centerx
                textRect.centery = screen.get_rect().centery
                screen.blit(text, textRect)
                pygame.display.update()
                break
        elif bounce and dir_y:
            dir_y*=-1
            colorCode+=1
            bounceWithoutFood+=1
        elif bounce and not dir_y and y==0:
            dir_y=1
        elif bounce and not dir_y and y+pieceSize==height:
            dir_y=-1
    #Get actions
    # Event handling: arrows steer, letter keys toggle modes, mouse steers
    # when M (motion) or C (click) control is active.
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
        elif event.type == pygame.KEYDOWN:
            if event.key == pygame.K_UP:
                up=True
            elif event.key == pygame.K_DOWN:
                down=True
            elif event.key == pygame.K_LEFT:
                left=True
            elif event.key == pygame.K_RIGHT:
                right=True
            elif event.key== pygame.K_s:
                # Full stop.
                dir_x=0
                dir_y=0
            elif event.key== pygame.K_SPACE:
                if not strobe:
                    strobe=True
                else:
                    strobe=False
            elif event.key==pygame.K_c:
                colorCode+=1
                bgCode+=1
            elif event.key==pygame.K_ESCAPE:
                running=False
            elif event.key==pygame.K_x:
                colorCode+=1
            elif event.key==pygame.K_z:
                bgCode+=1
            elif event.key==pygame.K_EQUALS and not go:
                pieceSize+=10
            elif event.key==pygame.K_MINUS and not go:
                pieceSize-=10
            elif event.key==pygame.K_w:
                # Toggle full game mode (resets size and score on entry).
                if not go:
                    go=True
                    pieceSize=10
                    score=0
                else:
                    go=False
            elif event.key==pygame.K_b:
                if bounce:
                    bounce=False
                else:
                    bounce=True
            elif event.key==pygame.K_p:
                if portals:
                    portals=False
                else:
                    portals=True
            elif event.key==pygame.K_g:
                if g:
                    g=0
                    pieceSize=10
                else:
                    g=True
                    pieceSize=10
            elif event.key==pygame.K_m:
                # Cycle mouse control: motion -> click -> off.
                if mouseControl:
                    mouseControl=False
                    mouseClick=True
                elif mouseClick:
                    mouseClick=False
                else:
                    mouseControl=True
            elif event.key==pygame.K_l:
                if speedLimit:
                    speedLimit=False
                else:
                    speedLimit=True
        elif event.type == pygame.MOUSEMOTION:
            click_x=event.pos[0]
            click_y=event.pos[1]
            if mouseControl:
                # Accelerate towards the pointer, one unit per axis per event.
                if click_x<x:
                    dir_x-=1
                elif click_x>(x+pieceSize):
                    dir_x+=1
                if click_y<y:
                    dir_y-=1
                elif click_y>(y+pieceSize):
                    dir_y+=1
        elif event.type==pygame.MOUSEBUTTONUP:
            click_x=event.pos[0]
            click_y=event.pos[1]
            if mouseClick:
                if click_x<x:
                    dir_x-=1
                elif click_x>(x+pieceSize):
                    dir_x+=1
                if click_y<y:
                    dir_y-=1
                elif click_y>(y+pieceSize):
                    dir_y+=1
    # Apply and clear this frame's arrow-key latches.
    if down:
        dir_y+=2
    if up:
        dir_y-=2
    if left:
        dir_x-=2
    if right:
        dir_x+=2
    up=False
    down=False
    left=False
    right=False
    strobeRegulation+=1
    redFlash+=1
    # Strobe: shift both palettes every 40th frame while enabled.
    if strobe and not strobeRegulation%40:
        bgCode+=1
        colorCode+=2
    pygame.display.update()
    pygame.display.flip()
    clock.tick(31)
{ "repo_name": "nocturnalbadgr/SquaresGame", "path": "Squares.py", "copies": "1", "size": "11456", "license": "apache-2.0", "hash": -7086733680725893000, "line_mean": 28.4734042553, "line_max": 349, "alpha_frac": 0.4653456704, "autogenerated": false, "ratio": 3.938123066345823, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9724295701205157, "avg_score": 0.03583460710813338, "num_lines": 376 }
"""Assimp-based analyzer.""" from __future__ import absolute_import import os import logging import subprocess import pyassimp from damn_at import ( mimetypes, MetaDataType, MetaDataValue, FileId, FileDescription, AssetDescription, AssetId ) from damn_at.pluginmanager import IAnalyzer from six.moves import map from io import open LOG = logging.getLogger(__name__) def get_assimp_types(): """Extract all possible formats and store their mime types""" # TODO: not exactly reliable, a lot of unknown mimetypes # for those extensions :/ try: pro = subprocess.Popen( ['assimp', 'listext'], stdout=subprocess.PIPE, stderr=subprocess.PIPE ) out, err = pro.communicate() if pro.returncode != 0: LOG.debug("'assimp listext' failed with error code %d! " % pro.returncode, out, err ) return [] except OSError as oserror: LOG.debug("'assimp listext' failed! %s", oserror) return [] extensions = out.split(';') mimes = [] for ext in extensions: mime = mimetypes.guess_type('file.' + ext, False)[0] LOG.info('Mimetype Info:\n\tExtension: %s\n\tMime: %s', ext, mime) mimes.append(mime) return mimes class AssimpAnalyzer(IAnalyzer): """Assimp-based analyzer.""" handled_types = ['application/wavefront-obj', 'application/fbx'] def __init__(self): IAnalyzer.__init__(self) def activate(self): pass def analyze(self, an_uri): fileid = FileId(filename=os.path.abspath(an_uri)) file_descr = FileDescription(file=fileid) file_descr.assets = [] assimp_mimetype = 'application/assimp' scene = None try: scene = pyassimp.load(an_uri) textures = {} materials = {} from damn_at.analyzers.mesh.metadata import ( MetaDataAssimpTexture, MetaDataAssimpMesh ) for i, texture in enumerate(scene.textures): name = texture.name if texture.name else 'texture-'+str(i) asset_descr = AssetDescription(asset=AssetId( subname=name, mimetype=assimp_mimetype + ".texture", file=fileid )) asset_descr.metadata = MetaDataAssimpTexture.extract(texture) file_descr.assets.append(asset_descr) textures[i] = asset_descr for i, material in 
enumerate(scene.materials): properties = {} for key, value in material.properties.items(): properties[key] = value name = properties.get('name', 'material-'+str(i)) asset_descr = AssetDescription(asset=AssetId( subname=name, mimetype=assimp_mimetype + ".material", file=fileid )) asset_descr.metadata = {} for key, value in properties.items(): if key == 'name' or key == 'file': continue asset_descr.metadata[key] = MetaDataValue( type=MetaDataType.STRING, string_value=str(value) ) file_descr.assets.append(asset_descr) materials[i] = asset_descr for i, mesh in enumerate(scene.meshes): name = mesh.name if mesh.name else 'mesh-' + str(i) asset_descr = AssetDescription(asset=AssetId( subname=name, mimetype=assimp_mimetype + ".mesh", file=fileid )) asset_descr.metadata = MetaDataAssimpMesh.extract(mesh) asset_descr.dependencies = [] # Dependencies if mesh.materialindex is not None: if mesh.materialindex in materials: asset_descr.dependencies.append( materials[mesh.materialindex].asset ) file_descr.assets.append(asset_descr) finally: pyassimp.release(scene) ''' obj = Loader(an_uri) from damn_at.analyzers.mesh.metadata import ( MetaDataWaveFrontDefault, MetaDataWaveFrontGroup ) d_asset_descr = AssetDescription(asset=AssetId( subname='default', mimetype="application/wavefront-obj", file=fileid )) d_asset_descr.metadata = MetaDataWaveFrontDefault.extract(obj) file_descr.assets.append(d_asset_descr) for name, group in obj.groups.items(): if name != 'default': asset_descr = AssetDescription(asset=AssetId( subname=name, mimetype="application/wavefront-obj.group", file=fileid )) asset_descr.metadata = MetaDataWaveFrontGroup.extract(group) asset_descr.dependencies = [d_asset_descr.asset] file_descr.assets.append(asset_descr)''' return file_descr class Loader(object): def __init__(self, path): vertices = [] normals = [] texcoords = [] default = {'faces': []} current = default self.groups = {'default': default} for line in open(path, "r"): if line.startswith('#'): continue values = 
line.split() if not values: continue if values[0] == 'g': current = {'faces': []} group_name = values[1] LOG.info("Group:\n%s\n%s", group_name, values) self.groups[group_name] = current elif values[0] == 'v': vertices.append(tuple(map(float, values[1:4]))) elif values[0] == 'vn': normals.append(tuple(map(float, values[1:4]))) elif values[0] == 'vt': texcoords.append(tuple(map(float, values[1:3]))) elif values[0] == 's': current['smooth'] = bool(values[2:3]) elif values[0] == 'f': faces = current['faces'] face = [] for v in values[1:]: w = [int(x) if x else None for x in v.split('/')] w = [x-1 if x is not None and x > 0 else x for x in w] face.append(tuple(w)) faces.append(tuple(face)) else: LOG.info('Loader value not known: %s - %s' % (values[0], line)) # save result self.vertices = vertices self.normals = normals self.texcoords = texcoords
{ "repo_name": "peragro/peragro-at", "path": "src/damn_at/analyzers/mesh/analyzer_assimp.py", "copies": "1", "size": "7008", "license": "bsd-3-clause", "hash": 8537190452146612000, "line_mean": 32.6923076923, "line_max": 79, "alpha_frac": 0.508847032, "autogenerated": false, "ratio": 4.44106463878327, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0, "num_lines": 208 }
# Assisting definitions for the pong game
# By Anders Busch 2014
import pygame

# NOTE: the original module-level ``global`` statements were removed; at
# module scope every assignment is already global, so they were no-ops
# (and they referenced names that were never defined).

# Colors
RED = pygame.Color(255, 0, 0)
GREEN = pygame.Color(0, 255, 0)
BLUE = pygame.Color(0, 0, 255)
WHITE = pygame.Color(255, 255, 255)
BLACK = pygame.Color(0, 0, 0)

# Axis identifiers used by the ``move`` methods below.
HORIZONTAL = 0
VERTICAL = 1

# Player identifiers
LEFT_PLAYER = 0
RIGHT_PLAYER = 1

# Screen / menu states
START_SCREEN = 0
MAIN_MENU = 1

# Game modes
GAME_MODE_TIME = 1
GAME_MODE_GOAL = 2
GAME_MODE_INF = 3

FUN_COUNTER = 4
MENU_SPACING = 20

# Audio settings
MUSIC_LOOP = 1
MUSIC_VOLUME = 1
SOUND_EFFECT_VOLUME = 1

# Ball speed limits (per-axis velocity components)
MAXIMUM_BALL_SPEED = 9
MINIMUM_BALL_SPEED = -MAXIMUM_BALL_SPEED

# Gameplay defaults
DEFAULT_BALL_SPEED = 5
DEFAULT_BALL_RADIUS = 10
DEFAULT_BAT_SPEED = 5
DEFAULT_FPS_RATE = 60
DEFAULT_BAT_LENGTH = 80
DEFAULT_BAT_WIDTH = 15
DEFAULT_CENTERLINE_WIDTH = 10

# Score counter panel
COUNTER_PANEL_HEIGHT = 100
COUNTER_UPPER_LIMIT = 999
COUNTER_LOWER_LIMIT = 0


class Ball:
    """The pong ball: a position, a size and a counter used for
    randomized bounces."""

    def __init__(self, position, size):
        self.x = position[0]
        self.y = position[1]
        self.randCounter = 0
        self.size = size

    # direction 0: x-axis 1: y-axis
    def move(self, direction, length):
        """Translate the ball ``length`` pixels along the given axis
        (HORIZONTAL or VERTICAL)."""
        if (direction == 0):
            self.x = self.x + length
        elif (direction == 1):
            self.y = self.y + length

    def getPosition(self):
        """Return the ball position as an (x, y) tuple."""
        return (self.x, self.y)

    def setPosition(self, position):
        """Set the ball position from an (x, y) tuple."""
        self.x = position[0]
        self.y = position[1]

    def getRandomCounter(self):
        return self.randCounter

    def incrementRandomCounter(self):
        self.randCounter += 1

    def resetRandomCounter(self):
        self.randCounter = 0


class Bat:
    """A player's paddle: position, id and rectangle dimensions."""

    def __init__(self, position, idNumb):
        self.x = position[0]
        self.y = position[1]
        self.id = idNumb
        self.width = DEFAULT_BAT_WIDTH
        self.length = DEFAULT_BAT_LENGTH

    def move(self, direction, length):
        """Translate the bat ``length`` pixels along the given axis
        (HORIZONTAL or VERTICAL)."""
        if (direction == 0):
            self.x = self.x + length
        elif (direction == 1):
            self.y = self.y + length

    def getParameters(self):
        """Return (x, y, width, length), e.g. for pygame.draw.rect."""
        return (self.x, self.y, self.width, self.length)

    def getAreal(self):
        """Return the area covered by the bat."""
        # BUGFIX: the original returned bare ``width * length``, which
        # raised NameError; the dimensions are instance attributes.
        return self.width * self.length

    def setWidthLength(self, width, length):
        self.width = width
        self.length = length

    def setPosition(self, position):
        """Set the bat position from an (x, y) tuple."""
        self.x = position[0]
        self.y = position[1]
{ "repo_name": "Ezbob/bluPong", "path": "ass_def.py", "copies": "1", "size": "2503", "license": "mit", "hash": 3433467456054023000, "line_mean": 26.8222222222, "line_max": 140, "alpha_frac": 0.7279264882, "autogenerated": false, "ratio": 2.6942949407965555, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.39222214289965557, "avg_score": null, "num_lines": null }
"""assist setup script""" import imp import os.path as pt from setuptools import setup def get_version(): "Get version & version_info without importing assist.__init__ " path = pt.join(pt.dirname(__file__), 'assist', '__version__.py') mod = imp.load_source('assist_version', path) return mod.VERSION, mod.VERSION_INFO VERSION, VERSION_INFO = get_version() DESCRIPTION = "A stochastic simulation toolkit" LONG_DESCRIPTION = """ Assist is a library for stochastic simulation of birth-death processes, though much of its development has been motivated by the simulation of gene regulatory networks. """ DEV_STATUS_MAP = { 'alpha': '3 - Alpha', 'beta': '4 - Beta', 'rc': '4 - Beta', 'final': '5 - Production/Stable' } if VERSION_INFO[3] == 'alpha' and VERSION_INFO[4] == 0: DEVSTATUS = '2 - Pre-Alpha' else: DEVSTATUS = DEV_STATUS_MAP[VERSION_INFO[3]] setup(name='assist', version=VERSION, description=DESCRIPTION, long_description=LONG_DESCRIPTION, keywords='stochastic simulation', author='Manuel Lopez, Chinmaya Gupta', author_email='jmlopez.rod@gmail.com', url='http://assist.readthedocs.org', license='BSD License', packages=[ 'assist', 'assist.command', ], platforms=['Darwin', 'Linux'], scripts=['bin/assist'], install_requires=[ 'python-dateutil>=2.0', ], package_data={'': ['*.h']}, include_package_data=True, classifiers=[ 'Development Status :: %s' % DEVSTATUS, 'License :: OSI Approved :: BSD License', 'Operating System :: MacOS :: MacOS X', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python :: 3', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Mathematics', ], )
{ "repo_name": "jmlopez-rod/assist", "path": "setup.py", "copies": "1", "size": "1910", "license": "bsd-2-clause", "hash": 867598012022083700, "line_mean": 28.84375, "line_max": 68, "alpha_frac": 0.6083769634, "autogenerated": false, "ratio": 3.687258687258687, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4795635650658687, "avg_score": null, "num_lines": null }
"""associate figure with labnumber Revision ID: 3609361b4b95 Revises: 22c2aac814b7 Create Date: 2014-01-05 12:21:23.261708 """ # revision identifiers, used by Alembic. from sqlalchemy.orm import sessionmaker revision = '3609361b4b95' down_revision = '22c2aac814b7' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('proc_FigureSamplesTable',sa.Column('lab_id', sa.Integer, sa.ForeignKey('gen_LabTable.id'))) sql = """ update proc_FigureSamplesTable as f join gen_sampletable as s on f.sample_id=s.id join gen_labtable as l on s.id=l.sample_id set lab_id=l.id """ # engine = create_engine('mysql://alexander:@localhost/alembic_content_migration_example') Session = sessionmaker(bind=op.get_bind()) session = Session() session.execute(sql) session.commit() session.close() op.rename_table('proc_FigureSamplesTable','proc_FigureLabTable') def downgrade(): op.drop_constraint('proc_figurelabtable_ibfk_1', 'proc_FigureLabTable', 'foreignkey') op.drop_column('proc_FigureLabTable', 'lab_id') op.rename_table('proc_FigureLabTable','proc_FigureSamplesTable')
{ "repo_name": "USGSDenverPychron/pychron", "path": "migration/versions/3609361b4b95_associate_figure_with_labnumber.py", "copies": "1", "size": "1282", "license": "apache-2.0", "hash": 8857746002253123000, "line_mean": 28.1363636364, "line_max": 94, "alpha_frac": 0.647425897, "autogenerated": false, "ratio": 3.53168044077135, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.467910633777135, "avg_score": null, "num_lines": null }
"""Associate file types with editable types Revision ID: 6444c893a21f Revises: 56a26a721717 Create Date: 2020-03-27 15:01:29.736555 """ import sqlalchemy as sa from alembic import op from indico.core.db.sqlalchemy import PyIntEnum from indico.modules.events.editing.models.editable import EditableType # revision identifiers, used by Alembic. revision = '6444c893a21f' down_revision = '56a26a721717' branch_labels = None depends_on = None def upgrade(): op.add_column('file_types', sa.Column('type', PyIntEnum(EditableType), nullable=False, server_default='1'), schema='event_editing') op.alter_column('file_types', 'type', server_default=None, schema='event_editing') op.drop_index('ix_uq_file_types_event_id_name_lower', 'file_types', schema='event_editing') op.create_index('ix_uq_file_types_event_id_type_name_lower', 'file_types', ['event_id', 'type', sa.text('lower(name)')], unique=True, schema='event_editing') def downgrade(): op.drop_index('ix_uq_file_types_event_id_type_name_lower', 'file_types', schema='event_editing') op.execute('DELETE FROM event_editing.file_types WHERE type != 1') op.drop_column('file_types', 'type', schema='event_editing') op.create_index('ix_uq_file_types_event_id_name_lower', 'file_types', ['event_id', sa.text('lower(name)')], unique=True, schema='event_editing')
{ "repo_name": "indico/indico", "path": "indico/migrations/versions/20200327_1501_6444c893a21f_associate_file_types_with_editable_types.py", "copies": "5", "size": "1429", "license": "mit", "hash": 8246042401571069000, "line_mean": 38.6944444444, "line_max": 111, "alpha_frac": 0.6822953114, "autogenerated": false, "ratio": 3.1826280623608016, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.6364923373760801, "avg_score": null, "num_lines": null }
"""Associate groups with files. Revision ID: a3fe8c8a344 Revises: 525162a280bd Create Date: 2013-11-05 13:55:04.498181 """ # revision identifiers, used by Alembic. revision = 'a3fe8c8a344' down_revision = '525162a280bd' from alembic import op from collections import defaultdict from sqlalchemy.sql import table, column import sqlalchemy as sa submission = table('submission', column('group_id', sa.Integer), column('id', sa.Integer)) subtofile = table('submissiontofile', column('file_id', sa.Integer), column('submission_id')) usertofile = table('user_to_file', column('user_id', sa.Integer), column('file_id', sa.Integer)) usertogroup = table('user_to_group', column('group_id', sa.Integer), column('user_id', sa.Integer)) def upgrade(): conn = op.get_bind() group_files = defaultdict(set) group_users = defaultdict(list) sub_to_group = {} to_add = set() user_files = defaultdict(set) # Fetch mapping of users to files for (user_id, file_id) in conn.execute(usertofile.select()): user_files[user_id].add(file_id) # Fetch mapping of groups to users for (group_id, user_id) in conn.execute(usertogroup.select()): group_users[group_id].append(user_id) # Fetch mapping of submissions to groups for (group_id, sub_id) in conn.execute(submission.select()): sub_to_group[sub_id] = group_id # Build mapping of groups to files for (file_id, sub_id) in conn.execute(subtofile.select()): group_files[sub_to_group[sub_id]].add(file_id) # Build set of user to file associations to add for group_id, files in group_files.items(): for user_id in group_users[group_id]: for file_id in files - user_files[user_id]: to_add.add((user_id, file_id)) if to_add: op.bulk_insert(usertofile, [{'user_id': x[0], 'file_id': x[1]} for x in to_add]) def downgrade(): pass
{ "repo_name": "ucsb-cs/submit", "path": "submit/migrations/versions/a3fe8c8a344_associate_groups_wit.py", "copies": "1", "size": "2089", "license": "bsd-2-clause", "hash": 4587324464094041000, "line_mean": 27.6164383562, "line_max": 76, "alpha_frac": 0.6103398755, "autogenerated": false, "ratio": 3.4245901639344263, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.4534930039434426, "avg_score": null, "num_lines": null }
"""Associate submissions to users through project groups. Revision ID: a364e6e9c14 Revises: 34ac45196731 Create Date: 2013-09-18 16:24:20.626636 """ # revision identifiers, used by Alembic. revision = 'a364e6e9c14' down_revision = '34ac45196731' from alembic import op from sqlalchemy.sql import table, column import sqlalchemy as sa submission = table('submission', column('id', sa.Integer), column('created_at', sa.DateTime(timezone=True)), column('user_id', sa.Integer), column('group_id', sa.Integer), column('project_id', sa.Integer)) group = table('group', column('id', sa.Integer), column('created_at', sa.DateTime(timezone=True)), column('project_id', sa.Integer)) usertogroup = table('user_to_group', column('created_at', sa.DateTime(timezone=True)), column('group_id', sa.Integer), column('project_id', sa.Integer), column('user_id', sa.Integer)) project_view = table('projectview') def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table('group', sa.Column('id', sa.Integer(), nullable=False), sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), sa.Column('project_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['project_id'], [u'project.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table(u'user_to_group', sa.Column('created_at', sa.DateTime(timezone=True), nullable=False), sa.Column('group_id', sa.Integer(), nullable=False), sa.Column('project_id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['group_id'], [u'group.id'], ), sa.ForeignKeyConstraint(['project_id'], [u'project.id'], ), sa.ForeignKeyConstraint(['user_id'], [u'user.id'], ), sa.PrimaryKeyConstraint('project_id', 'user_id') ) op.add_column(u'submission', sa.Column('group_id', sa.Integer())) # Migrate all the data conn = op.get_bind() groups = [] group_mapping = {} subs = [] user_to_group = [] for (key, created_at, user_id, _, project_id) \ in conn.execute(submission.select()): 
group_key = (user_id, project_id) sub = (key, created_at, user_id, project_id) if group_key not in group_mapping: group_id = len(groups) groups.append({'id': group_id, 'created_at': created_at, 'project_id': project_id}) group_mapping[group_key] = group_id user_to_group.append({'created_at': created_at, 'group_id': group_id, 'project_id': project_id, 'user_id': user_id}) else: group_id = group_mapping[group_key] subs.append({'s_id': key, 'group_id': group_id}) # Create all the groups and associations op.bulk_insert(group, groups) op.bulk_insert(usertogroup, user_to_group) # Update submission to point to group conn.execute(submission.update() .where(submission.c.id == sa.bindparam('s_id')) .values(group_id=sa.bindparam('group_id')), subs) # Make the group_id not nullable op.alter_column(u'submission', u'group_id', existing_type=sa.INTEGER(), nullable=False) # Rename the user mapping column op.alter_column(u'submission', u'user_id', new_column_name=u'created_by_id') # Update the foreign keys op.drop_constraint(u'submission_user_id_fkey', u'submission') op.create_foreign_key(u'submission_created_by_id_fkey', u'submission', u'user', [u'created_by_id'], [u'id']) op.create_foreign_key(u'submission_group_id_fkey', u'submission', u'group', [u'group_id'], [u'id']) # Clear the project view table (should be emptied every once in a while) op.execute(project_view.delete()) # Associate with groups not users op.alter_column(u'projectview', u'user_id', new_column_name=u'group_id') op.drop_constraint(u'projectview_user_id_fkey', u'projectview') op.create_foreign_key(u'projectview_group_id_fkey', u'projectview', u'group', [u'group_id'], [u'id']) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! 
### # Clear the project view table (should be emptied every once in a while) op.execute(project_view.delete()) # Associate projectview with users not groups op.alter_column(u'projectview', u'group_id', new_column_name=u'user_id') op.drop_constraint(u'projectview_group_id_fkey', u'projectview') op.create_foreign_key(u'projectview_user_id_fkey', u'projectview', u'user', [u'user_id'], [u'id']) # Update submission objects and group groups op.alter_column(u'submission', u'created_by_id', new_column_name=u'user_id') # Update the foreign keys op.drop_constraint(u'submission_group_id_fkey', u'submission') op.drop_constraint(u'submission_created_by_id_fkey', u'submission') op.create_foreign_key(u'submission_user_id_fkey', u'submission', u'user', [u'user_id'], [u'id']) op.drop_column(u'submission', 'group_id') op.drop_table(u'user_to_group') op.drop_table('group') ### end Alembic commands ###
{ "repo_name": "ucsb-cs/submit", "path": "submit/migrations/versions/a364e6e9c14_associate_submission.py", "copies": "1", "size": "5651", "license": "bsd-2-clause", "hash": -426090743251291970, "line_mean": 37.4421768707, "line_max": 79, "alpha_frac": 0.5940541497, "autogenerated": false, "ratio": 3.512119328775637, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9589005912086139, "avg_score": 0.00343351327789959, "num_lines": 147 }
"""Associate test results to users. Revision ID: 428e5aef5534 Revises: 534e20be9964 Create Date: 2015-11-03 00:51:34.096598 """ # revision identifiers, used by Alembic. revision = '428e5aef5534' down_revision = '534e20be9964' MYSQL_CHARSET = 'utf8' from alembic import op import sqlalchemy as sa def upgrade(): """Upgrade DB.""" conn = op.get_bind() res = conn.execute("select openid,format,pubkey from pubkeys") results = res.fetchall() # Get public key to user mappings. pubkeys = {} for result in results: pubkeys[result[1] + " " + result[2]] = result[0] res = conn.execute("select test_id,value from meta where " "meta_key='public_key'") results = res.fetchall() for result in results: test_id = result[0] if result[1] in pubkeys: openid = pubkeys[result[1]] conn.execute(sa.text("update meta set meta_key='user', " "value=:value where " "test_id=:testid and meta_key='public_key'" ), value=openid, testid=test_id) def downgrade(): """Downgrade DB.""" pass
{ "repo_name": "openstack/refstack", "path": "refstack/db/migrations/alembic/versions/428e5aef5534_associate_test_result.py", "copies": "2", "size": "1215", "license": "apache-2.0", "hash": -5209827904758360000, "line_mean": 25.4130434783, "line_max": 76, "alpha_frac": 0.5687242798, "autogenerated": false, "ratio": 3.605341246290801, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5174065526090802, "avg_score": null, "num_lines": null }
"""Association analysis functions for rdf_dm""" from . import helpers import os import re import subprocess from rdflib import Graph, namespace, URIRef, RDF from slugify import slugify _pwd = os.path.dirname(os.path.realpath(__file__)) _itemset_separator = '|' _predicate_object_separator = '-->' _namespace_prefixes = dict(((str(uri), prefix + ':') for prefix, uri in namespace.NamespaceManager(Graph()).namespaces())) _namespace_prefixes.update({'http://ldf.fi/schema/narc-menehtyneet1939-45/': 'narcs:', 'http://ldf.fi/narc-menehtyneet1939-45/': 'narc:', 'http://xmlns.com/foaf/0.1/': 'foaf:', }) def freq_items_by_class(graph, cl, ns_prefixes=_namespace_prefixes, minsup1=50, minsup2=25, minconf=90, minlift=200): """ Get frequent items and rules by class. Uses FIM package's implementation of fpgrowth algorithm. Currently stores generated information also to files in 'itemsets' directory, which should exist when run. :param ns_prefixes: Namespace prefixes to use for shortening URIs :param minsup1: Minimum support for frequent itemsets :param minsup2: Minimum support for frequent rules :param minconf: Minimum confidence for frequent rules :param minlift: Minimum lift for frequent rules :param graph: An RDF graph :param cl: class resource :type graph: rdflib.Graph """ po_items = [] # Predicate + object pairs (list of lists) if ns_prefixes: pattern = re.compile(r'\b(' + '|'.join(ns_prefixes.keys()) + r')\b') instances = helpers.get_class_instances(graph, cl) for i in instances: pos = [] for (p, o) in graph.predicate_objects(i): if (p, o) == (URIRef(RDF.type), URIRef(cl)): continue if ns_prefixes: p = pattern.sub(lambda x: ns_prefixes[x.group()], p) o = pattern.sub(lambda x: ns_prefixes[x.group()], o) pos.append("{p}{sep}{o}".format(p=str(p), o=str(o), sep=_predicate_object_separator)) po_items.append(pos) basket_file = '{pwd}/itemsets/rdf.{slug}.basket'.format(pwd=_pwd, slug=slugify(cl)) # Write itemsets file with open(basket_file, encoding='UTF-8', mode='w+') as f: for po 
in po_items: if any(_itemset_separator in item for item in po): raise Exception('Separator symbol | found in items') f.write(_itemset_separator.join(po) + "\n") # Generate frequent itemsets return_code = subprocess.call("fpgrowth -ts -f\"|\" -s{sup} -v\" %s\" {file} {file}.freq_itemsets". format(sup=minsup1, file=basket_file), shell=True) if return_code: print(return_code) raise Exception('Error while running fpgrowth.') # Generate frequent association rules return_code = subprocess.\ call("fpgrowth -tr -f\"|\" -m2 -v\" %s,%c,%e\" -s{sup} -c{conf} -el -d{lift} {file} {file}.freq_rules". format(sup=minsup2, conf=minconf, lift=minlift, file=basket_file), shell=True) if return_code: print(return_code) # Parse frequent itemsets with open("{file}.freq_itemsets".format(file=basket_file), encoding='UTF-8', mode='r') as f: freq_itemsets = [] for row in f.readlines(): pred_objs = [po.split(sep=_predicate_object_separator) for po in row.split()[:-1]] support = float(row.split()[-1]) freq_itemsets += [(pred_objs, support)] # Parse frequent rules with open("{file}.freq_rules".format(file=basket_file), encoding='UTF-8', mode='r') as f: freq_rules = [] for row in f.readlines(): row_parts = row.split() supp, conf, lift = (float(part) for part in row_parts[-1].split(',')) consequents_string, antecedents_string = ' '.join(row_parts[:-1]).split(' <- ') ante = [po.split(_predicate_object_separator) for po in antecedents_string.split()] cons = [po.split(_predicate_object_separator) for po in consequents_string.split()] # freq_rules += [(ante, cons, supp, conf, lift)] freq_rules += [{'antecedents': ante, 'consequents': cons, 'support': supp, 'confidence': conf, 'lift': lift, }] return freq_itemsets, freq_rules
{ "repo_name": "SemanticComputing/rdf_dm", "path": "rdf_dm/association.py", "copies": "1", "size": "4509", "license": "mit", "hash": -4007834412678563000, "line_mean": 40.3669724771, "line_max": 117, "alpha_frac": 0.5874916833, "autogenerated": false, "ratio": 3.662875710804224, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9742243738333214, "avg_score": 0.0016247311542020376, "num_lines": 109 }
"""Association mining -- apriori algo""" __author__ = 'thor' from numpy import * # Modified from: # Everaldo Aguiar & Reid Johnson (https://github.com/cse40647/cse40647/blob/sp.14/10%20-%20Apriori.ipynb) # # Itself Modified from: # Marcel Caraciolo (https://gist.github.com/marcelcaraciolo/1423287) # # Functions to compute and extract association rules from a given frequent # itemset generated by the Apriori algorithm. import pandas as pd from statsmodels.stats.proportion import samplesize_confint_proportion def choose_sample_size(min_confidence, alpha=0.05, half_length=None): if half_length is None: t = 0.20 * min_confidence if min_confidence < 0.5 else 0.20 * (1 - min_confidence) half_length = max(0.01, t) # choose half length to be a proportion (0.2) of min_confidence return samplesize_confint_proportion( proportion=min_confidence, half_length=half_length, alpha=alpha, method='normal') def association_rules(dataset, min_confidence=0.2, min_support=None, output='dataframe', verbose=False): assert min_confidence > 0 and min_confidence <= 1, "min_confidence must be between 0 and 1" if min_support is None: # if no min_support is given, choose it to be the sample size you need to get 95% conf in proportion estimate min_support = choose_sample_size(min_confidence, alpha=0.05, half_length=None) if min_support > 1: min_support /= float(len(dataset)) F, support_data = apriori(dataset, min_support=min_support, verbose=False) H = generate_rules(F, support_data, min_confidence=min_confidence, verbose=verbose) if output == 'triple': return H elif output == 'dataframe': def set_to_string(s): return str(", ".join(s)) support_df = pd.DataFrame({'condition': list(map(set_to_string, list(support_data.keys()))), 'condition_frequency': list(support_data.values())}) support_df['condition_count'] = len(dataset) * support_df['condition_frequency'] d = pd.DataFrame([{'condition': set_to_string(condition), 'effect': set_to_string(effect), 'effect_frequency': support} for condition, 
effect, support in H]) d = pd.merge(d, support_df, how='inner', on='condition') d['condition_and_effect_count'] = d['effect_frequency'] * d['condition_count'] d = d[['condition', 'effect', 'effect_frequency', 'condition_count', 'condition_and_effect_count', 'condition_frequency']] return d.sort('effect_frequency', ascending=False).reset_index(drop=True) def apriori(dataset, min_support=0.5, verbose=False): """Implements the Apriori algorithm. The Apriori algorithm will iteratively generate new candidate k-itemsets using the frequent (k-1)-itemsets found in the previous iteration. Parameters ---------- dataset : list The dataset (a list of transactions) from which to generate candidate itemsets. min_support : float The minimum support threshold. Defaults to 0.5. Returns ------- F : list The list of frequent itemsets. support_data : dict The support data for all candidate itemsets. References ---------- .. [1] R. Agrawal, R. Srikant, "Fast Algorithms for Mining Association Rules", 1994. """ C1 = create_candidates(dataset) D = list(map(set, dataset)) F1, support_data = support_prune(D, C1, min_support, verbose=False) # prune candidate 1-itemsets F = [F1] # list of frequent itemsets; initialized to frequent 1-itemsets k = 2 # the itemset cardinality while (len(F[k - 2]) > 0): Ck = apriori_gen(F[k-2], k) # generate candidate itemsets Fk, supK = support_prune(D, Ck, min_support) # prune candidate itemsets support_data.update(supK) # update the support counts to reflect pruning F.append(Fk) # add the pruned candidate itemsets to the list of frequent itemsets k += 1 if verbose: # Print a list of all the frequent itemsets. for kset in F: for item in kset: print(("" \ + "{" \ + "".join(str(i) + ", " for i in iter(item)).rstrip(', ') \ + "}" \ + ": sup = " + str(round(support_data[item], 3)))) return F, support_data def create_candidates(dataset, verbose=False): """Creates a list of candidate 1-itemsets from a list of transactions. 
Parameters ---------- dataset : list The dataset (a list of transactions) from which to generate candidate itemsets. Returns ------- The list of candidate itemsets (c1) passed as a frozenset (a set that is immutable and hashable). """ c1 = [] # list of all items in the database of transactions for transaction in dataset: for item in transaction: if not [item] in c1: c1.append([item]) c1.sort() if verbose: # Print a list of all the candidate items. print(("" \ + "{" \ + "".join(str(i[0]) + ", " for i in iter(c1)).rstrip(', ') \ + "}")) # Map c1 to a frozenset because it will be the key of a dictionary. return list(map(frozenset, c1)) def support_prune(dataset, candidates, min_support, verbose=False): """Returns all candidate itemsets that meet a minimum support threshold. By the apriori principle, if an itemset is frequent, then all of its subsets must also be frequent. As a result, we can perform support-based pruning to systematically control the exponential growth of candidate itemsets. Thus, itemsets that do not meet the minimum support level are pruned from the input list of itemsets (dataset). Parameters ---------- dataset : list The dataset (a list of transactions) from which to generate candidate itemsets. candidates : frozenset The list of candidate itemsets. min_support : float The minimum support threshold. Returns ------- retlist : list The list of frequent itemsets. support_data : dict The support data for all candidate itemsets. """ sscnt = {} # set for support counts for tid in dataset: for can in candidates: if can.issubset(tid): sscnt.setdefault(can, 0) sscnt[can] += 1 num_items = float(len(dataset)) # total number of transactions in the dataset retlist = [] # array for unpruned itemsets support_data = {} # set for support data for corresponding itemsets for key in sscnt: # Calculate the support of itemset key. 
support = sscnt[key] / num_items if support >= min_support: retlist.insert(0, key) support_data[key] = support # Print a list of the pruned itemsets. if verbose: for kset in retlist: for item in kset: print(("{" + str(item) + "}")) print("") for key in sscnt: print(("" \ + "{" \ + "".join([str(i) + ", " for i in iter(key)]).rstrip(', ') \ + "}" \ + ": sup = " + str(support_data[key]))) return retlist, support_data def apriori_gen(freq_sets, k): """Generates candidate itemsets (via the F_k-1 x F_k-1 method). This operation generates new candidate k-itemsets based on the frequent (k-1)-itemsets found in the previous iteration. The candidate generation procedure merges a pair of frequent (k-1)-itemsets only if their first k-2 items are identical. Parameters ---------- freq_sets : list The list of frequent (k-1)-itemsets. k : integer The cardinality of the current itemsets being evaluated. Returns ------- retlist : list The list of merged frequent itemsets. """ retList = [] # list of merged frequent itemsets lenLk = len(freq_sets) # number of frequent itemsets for i in range(lenLk): for j in range(i+1, lenLk): a=list(freq_sets[i]) b=list(freq_sets[j]) a.sort() b.sort() F1 = a[:k-2] # first k-2 items of freq_sets[i] F2 = b[:k-2] # first k-2 items of freq_sets[j] if F1 == F2: # if the first k-2 items are identical # Merge the frequent itemsets. retList.append(freq_sets[i] | freq_sets[j]) return retList def rules_from_conseq(freq_set, H, support_data, rules, min_confidence=0.5, verbose=False): """Generates a set of candidate rules. Parameters ---------- freq_set : frozenset The complete list of frequent itemsets. H : list A list of frequent itemsets (of a particular length). support_data : dict The support data for all candidate itemsets. rules : list A potentially incomplete set of candidate rules above the minimum confidence threshold. min_confidence : float The minimum confidence threshold. Defaults to 0.5. 
""" m = len(H[0]) if m == 1: Hmp1 = calc_confidence(freq_set, H, support_data, rules, min_confidence, verbose) if (len(freq_set) > (m+1)): Hmp1 = apriori_gen(H, m+1) # generate candidate itemsets Hmp1 = calc_confidence(freq_set, Hmp1, support_data, rules, min_confidence, verbose) if len(Hmp1) > 1: # If there are candidate rules above the minimum confidence # threshold, recurse on the list of these candidate rules. rules_from_conseq(freq_set, Hmp1, support_data, rules, min_confidence, verbose) def calc_confidence(freq_set, H, support_data, rules, min_confidence=0.5, verbose=False): """Evaluates the generated rules. One measurement for quantifying the goodness of association rules is confidence. The confidence for a rule 'P implies H' (P -> H) is defined as the support for P and H divided by the support for P (support (P|H) / support(P)), where the | symbol denotes the set union (thus P|H means all the items in set P or in set H). To calculate the confidence, we iterate through the frequent itemsets and associated support data. For each frequent itemset, we divide the support of the itemset by the support of the antecedent (left-hand-side of the rule). Parameters ---------- freq_set : frozenset The complete list of frequent itemsets. H : list A list of frequent itemsets (of a particular length). min_support : float The minimum support threshold. rules : list A potentially incomplete set of candidate rules above the minimum confidence threshold. min_confidence : float The minimum confidence threshold. Defaults to 0.5. Returns ------- pruned_H : list The list of candidate rules above the minimum confidence threshold. 
""" pruned_H = [] # list of candidate rules above the minimum confidence threshold for conseq in H: # iterate over the frequent itemsets conf = support_data[freq_set] / support_data[freq_set - conseq] if conf >= min_confidence: rules.append((freq_set - conseq, conseq, conf)) pruned_H.append(conseq) if verbose: print(("" \ + "{" \ + "".join([str(i) + ", " for i in iter(freq_set-conseq)]).rstrip(', ') \ + "}" \ + " ---> " \ + "{" \ + "".join([str(i) + ", " for i in iter(conseq)]).rstrip(', ') \ + "}" \ + ": conf = " + str(round(conf, 3)) \ + ", sup = " + str(round(support_data[freq_set], 3)))) return pruned_H def generate_rules(F, support_data, min_confidence=0.5, verbose=True): """Generates a set of candidate rules from a list of frequent itemsets. For each frequent itemset, we calculate the confidence of using a particular item as the rule consequent (right-hand-side of the rule). By testing and merging the remaining rules, we recursively create a list of pruned rules. Parameters ---------- F : list A list of frequent itemsets. support_data : dict The corresponding support data for the frequent itemsets (L). min_confidence : float The minimum confidence threshold. Defaults to 0.5. Returns ------- rules : list The list of candidate rules above the minimum confidence threshold. """ rules = [] for i in range(1, len(F)): for freq_set in F[i]: H1 = [frozenset([itemset]) for itemset in freq_set] if (i > 1): rules_from_conseq(freq_set, H1, support_data, rules, min_confidence, verbose) else: calc_confidence(freq_set, H1, support_data, rules, min_confidence, verbose) return rules
{ "repo_name": "thorwhalen/ut", "path": "ml/association/apriori.py", "copies": "1", "size": "13195", "license": "mit", "hash": -8540431141282688000, "line_mean": 34.1866666667, "line_max": 117, "alpha_frac": 0.6046987495, "autogenerated": false, "ratio": 3.9494163424124515, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.007795337174135474, "num_lines": 375 }
"""Association mining""" __author__ = 'thor' # Modified from Reid Johnson (http://nbviewer.ipython.org/github/cse40647/cse40647/blob/sp.14/11%20-%20FP-Growth.ipynb) # ... Itself Modified from: # Eric Naeseth <eric@naeseth.com> # (https://github.com/enaeseth/python-fp-growth/blob/master/fp_growth.py) # # A Python implementation of the FP-growth algorithm. from collections import defaultdict, namedtuple def print_rules(rules_tuples): for h in rules_tuples: print(("{} --> {} (sup = {})".format(", ".join(h[0]), ", ".join(h[1]), h[2]))) def fpgrowth(dataset, min_support=0.5, include_support=False, verbose=False): """Implements the FP-growth algorithm. The `dataset` parameter can be any iterable of iterables of items. `min_support` should be an integer specifying the minimum number of occurrences of an itemset for it to be accepted. Each item must be hashable (i.e., it must be valid as a member of a dictionary or a set). If `include_support` is true, yield (itemset, support) pairs instead of just the itemsets. Parameters ---------- dataset : list The dataset (a list of transactions) from which to generate candidate itemsets. min_support : float The minimum support threshold. Defaults to 0.5. include_support : bool Include support in output (default=False). References ---------- .. [1] J. Han, J. Pei, Y. Yin, "Mining Frequent Patterns without Candidate Generation," 2000. """ F = [] support_data = {} for k, v in find_frequent_itemsets(dataset, min_support=min_support, include_support=include_support, verbose=verbose): F.append(frozenset(k)) support_data[frozenset(k)] = v # Create one array with subarrays that hold all transactions of equal length. 
def bucket_list(nested_list, sort=True): bucket = defaultdict(list) for sublist in nested_list: bucket[len(sublist)].append(sublist) return [v for k,v in sorted(bucket.items())] if sort else list(bucket.values()) F = bucket_list(F) return F, support_data def find_frequent_itemsets(dataset, min_support, include_support=False, verbose=False): """ Find frequent itemsets in the given transactions using FP-growth. This function returns a generator instead of an eagerly-populated list of items. The `dataset` parameter can be any iterable of iterables of items. `min_support` should be an integer specifying the minimum number of occurrences of an itemset for it to be accepted. Each item must be hashable (i.e., it must be valid as a member of a dictionary or a set). If `include_support` is true, yield (itemset, support) pairs instead of just the itemsets. Parameters ---------- dataset : list The dataset (a list of transactions) from which to generate candidate itemsets. min_support : float The minimum support threshold. Defaults to 0.5. include_support : bool Include support in output (default=False). """ items = defaultdict(lambda: 0) # mapping from items to their supports processed_transactions = [] # Load the passed-in transactions and count the support that individual # items have. for transaction in dataset: processed = [] for item in transaction: items[item] += 1 processed.append(item) processed_transactions.append(processed) # Remove infrequent items from the item support dictionary. items = dict((item, support) for item, support in items.items() if support >= min_support) # Build our FP-tree. Before any transactions can be added to the tree, they # must be stripped of infrequent items and their surviving items must be # sorted in decreasing order of frequency. 
def clean_transaction(transaction): transaction = [v for v in transaction if v in items] transaction.sort(key=lambda v: items[v], reverse=True) return transaction master = FPTree() for transaction in map(clean_transaction, processed_transactions): master.add(transaction) support_data = {} def find_with_suffix(tree, suffix): for item, nodes in list(tree.items()): support = float(sum(n.count for n in nodes)) / len(dataset) if support >= min_support and item not in suffix: # New winner! found_set = [item] + suffix support_data[frozenset(found_set)] = support yield (found_set, support) if include_support else found_set # Build a conditional tree and recursively search for frequent # itemsets within it. cond_tree = conditional_tree_from_paths(tree.prefix_paths(item), min_support) for s in find_with_suffix(cond_tree, found_set): yield s # pass along the good news to our caller if verbose: # Print a list of all the frequent itemsets. for itemset, support in find_with_suffix(master, []): print(("" \ + "{" \ + "".join(str(i) + ", " for i in iter(itemset)).rstrip(', ') \ + "}" \ + ": sup = " + str(round(support_data[frozenset(itemset)], 3)))) # Search for frequent itemsets, and yield the results we find. for itemset in find_with_suffix(master, []): yield itemset class FPTree(object): """ An FP tree. This object may only store transaction items that are hashable (i.e., all items must be valid as dictionary keys or set members). """ Route = namedtuple('Route', 'head tail') def __init__(self): # The root node of the tree. self._root = FPNode(self, None, None) # A dictionary mapping items to the head and tail of a path of # "neighbors" that will hit every node containing that item. self._routes = {} @property def root(self): """The root node of the tree.""" return self._root def add(self, transaction): """ Adds a transaction to the tree. 
""" point = self._root for item in transaction: next_point = point.search(item) if next_point: # There is already a node in this tree for the current # transaction item; reuse it. next_point.increment() else: # Create a new point and add it as a child of the point we're # currently looking at. next_point = FPNode(self, item) point.add(next_point) # Update the route of nodes that contain this item to include # our new node. self._update_route(next_point) point = next_point def _update_route(self, point): """Add the given node to the route through all nodes for its item.""" assert self is point.tree try: route = self._routes[point.item] route[1].neighbor = point # route[1] is the tail self._routes[point.item] = self.Route(route[0], point) except KeyError: # First node for this item; start a new route. self._routes[point.item] = self.Route(point, point) def items(self): """ Generate one 2-tuples for each item represented in the tree. The first element of the tuple is the item itself, and the second element is a generator that will yield the nodes in the tree that belong to the item. """ for item in self._routes.keys(): yield (item, self.nodes(item)) def nodes(self, item): """ Generates the sequence of nodes that contain the given item. 
""" try: node = self._routes[item][0] except KeyError: return while node: yield node node = node.neighbor def prefix_paths(self, item): """Generates the prefix paths that end with the given item.""" def collect_path(node): path = [] while node and not node.root: path.append(node) node = node.parent path.reverse() return path return (collect_path(node) for node in self.nodes(item)) def inspect(self): print("Tree:") self.root.inspect(1) print("") print("Routes:") for item, nodes in list(self.items()): print((" %r" % item)) for node in nodes: print((" %r" % node)) def _removed(self, node): """Called when `node` is removed from the tree; performs cleanup.""" head, tail = self._routes[node.item] if node is head: if node is tail or not node.neighbor: # It was the sole node. del self._routes[node.item] else: self._routes[node.item] = self.Route(node.neighbor, tail) else: for n in self.nodes(node.item): if n.neighbor is node: n.neighbor = node.neighbor # skip over if node is tail: self._routes[node.item] = self.Route(head, n) break def conditional_tree_from_paths(paths, min_support): """Builds a conditional FP-tree from the given prefix paths.""" tree = FPTree() condition_item = None items = set() # Import the nodes in the paths into the new tree. Only the counts of the # leaf notes matter; the remaining counts will be reconstructed from the # leaf counts. for path in paths: if condition_item is None: condition_item = path[-1].item point = tree.root for node in path: next_point = point.search(node.item) if not next_point: # Add a new node to the tree. items.add(node.item) count = node.count if node.item == condition_item else 0 next_point = FPNode(tree, node.item, count) point.add(next_point) tree._update_route(next_point) point = next_point assert condition_item is not None # Calculate the counts of the non-leaf nodes. 
for path in tree.prefix_paths(condition_item): count = path[-1].count for node in reversed(path[:-1]): node._count += count # Eliminate the nodes for any items that are no longer frequent. for item in items: support = sum(n.count for n in tree.nodes(item)) if support < min_support: # Doesn't make the cut anymore for node in tree.nodes(item): if node.parent is not None: node.parent.remove(node) # Finally, remove the nodes corresponding to the item for which this # conditional tree was generated. for node in tree.nodes(condition_item): if node.parent is not None: # the node might already be an orphan node.parent.remove(node) return tree class FPNode(object): """A node in an FP tree.""" def __init__(self, tree, item, count=1): self._tree = tree self._item = item self._count = count self._parent = None self._children = {} self._neighbor = None def add(self, child): """Adds the given FPNode `child` as a child of this node.""" if not isinstance(child, FPNode): raise TypeError("Can only add other FPNodes as children") if not child.item in self._children: self._children[child.item] = child child.parent = self def search(self, item): """ Checks to see if this node contains a child node for the given item. If so, that node is returned; otherwise, `None` is returned. """ try: return self._children[item] except KeyError: return None def remove(self, child): try: if self._children[child.item] is child: del self._children[child.item] child.parent = None self._tree._removed(child) for sub_child in child.children: try: # Merger case: we already have a child for that item, so # add the sub-child's count to our child's count. self._children[sub_child.item]._count += sub_child.count sub_child.parent = None # it's an orphan now except KeyError: # Turns out we don't actually have a child, so just add # the sub-child as our own child. 
self.add(sub_child) child._children = {} else: raise ValueError("that node is not a child of this node") except KeyError: raise ValueError("that node is not a child of this node") def __contains__(self, item): return item in self._children @property def tree(self): """The tree in which this node appears.""" return self._tree @property def item(self): """The item contained in this node.""" return self._item @property def count(self): """The count associated with this node's item.""" return self._count def increment(self): """Increments the count associated with this node's item.""" if self._count is None: raise ValueError("Root nodes have no associated count.") self._count += 1 @property def root(self): """True if this node is the root of a tree; false if otherwise.""" return self._item is None and self._count is None @property def leaf(self): """True if this node is a leaf in the tree; false if otherwise.""" return len(self._children) == 0 def parent(): doc = "The node's parent." def fget(self): return self._parent def fset(self, value): if value is not None and not isinstance(value, FPNode): raise TypeError("A node must have an FPNode as a parent.") if value and value.tree is not self.tree: raise ValueError("Cannot have a parent from another tree.") self._parent = value return locals() parent = property(**parent()) def neighbor(): doc = """ The node's neighbor; the one with the same value that is "to the right" of it in the tree. 
""" def fget(self): return self._neighbor def fset(self, value): if value is not None and not isinstance(value, FPNode): raise TypeError("A node must have an FPNode as a neighbor.") if value and value.tree is not self.tree: raise ValueError("Cannot have a neighbor from another tree.") self._neighbor = value return locals() neighbor = property(**neighbor()) @property def children(self): """The nodes that are children of this node.""" return tuple(self._children.values()) def inspect(self, depth=0): print(((' ' * depth) + repr(self))) for child in self.children: child.inspect(depth + 1) def __repr__(self): if self.root: return "<%s (root)>" % type(self).__name__ return "<%s %r (%r)>" % (type(self).__name__, self.item, self.count) def rules_from_conseq(freq_set, H, support_data, rules, min_confidence=0.5, verbose=False): """Generates a set of candidate rules. Parameters ---------- freq_set : frozenset The complete list of frequent itemsets. H : list A list of frequent itemsets (of a particular length). support_data : dict The support data for all candidate itemsets. rules : list A potentially incomplete set of candidate rules above the minimum confidence threshold. min_confidence : float The minimum confidence threshold. Defaults to 0.5. """ m = len(H[0]) if m == 1: Hmp1 = calc_confidence(freq_set, H, support_data, rules, min_confidence, verbose) if (len(freq_set) > (m+1)): Hmp1 = apriori_gen(H, m+1) # generate candidate itemsets Hmp1 = calc_confidence(freq_set, Hmp1, support_data, rules, min_confidence, verbose) if len(Hmp1) > 1: # If there are candidate rules above the minimum confidence # threshold, recurse on the list of these candidate rules. rules_from_conseq(freq_set, Hmp1, support_data, rules, min_confidence, verbose) def apriori_gen(freq_sets, k): """Generates candidate itemsets (via the F_k-1 x F_k-1 method). This operation generates new candidate k-itemsets based on the frequent (k-1)-itemsets found in the previous iteration. 
The candidate generation procedure merges a pair of frequent (k-1)-itemsets only if their first k-2 items are identical. Parameters ---------- freq_sets : list The list of frequent (k-1)-itemsets. k : integer The cardinality of the current itemsets being evaluated. Returns ------- retlist : list The list of merged frequent itemsets. """ retList = [] # list of merged frequent itemsets lenLk = len(freq_sets) # number of frequent itemsets for i in range(lenLk): for j in range(i+1, lenLk): a=list(freq_sets[i]) b=list(freq_sets[j]) a.sort() b.sort() F1 = a[:k-2] # first k-2 items of freq_sets[i] F2 = b[:k-2] # first k-2 items of freq_sets[j] if F1 == F2: # if the first k-2 items are identical # Merge the frequent itemsets. retList.append(freq_sets[i] | freq_sets[j]) return retList def calc_confidence(freq_set, H, support_data, rules, min_confidence=0.5, verbose=False): """Evaluates the generated rules. One measurement for quantifying the goodness of association rules is confidence. The confidence for a rule 'P implies H' (P -> H) is defined as the support for P and H divided by the support for P (support (P|H) / support(P)), where the | symbol denotes the set union (thus P|H means all the items in set P or in set H). To calculate the confidence, we iterate through the frequent itemsets and associated support data. For each frequent itemset, we divide the support of the itemset by the support of the antecedent (left-hand-side of the rule). Parameters ---------- freq_set : frozenset The complete list of frequent itemsets. H : list A list of frequent itemsets (of a particular length). min_support : float The minimum support threshold. rules : list A potentially incomplete set of candidate rules above the minimum confidence threshold. min_confidence : float The minimum confidence threshold. Defaults to 0.5. Returns ------- pruned_H : list The list of candidate rules above the minimum confidence threshold. 
""" pruned_H = [] # list of candidate rules above the minimum confidence threshold for conseq in H: # iterate over the frequent itemsets conf = support_data[freq_set] / support_data[freq_set - conseq] if conf >= min_confidence: rules.append((freq_set - conseq, conseq, conf)) pruned_H.append(conseq) if verbose: print(("" \ + "{" \ + "".join([str(i) + ", " for i in iter(freq_set-conseq)]).rstrip(', ') \ + "}" \ + " ---> " \ + "{" \ + "".join([str(i) + ", " for i in iter(conseq)]).rstrip(', ') \ + "}" \ + ": conf = " + str(round(conf, 3)) \ + ", sup = " + str(round(support_data[freq_set], 3)))) return pruned_H def generate_rules(F, support_data, min_confidence=0.5, verbose=True): """Generates a set of candidate rules from a list of frequent itemsets. For each frequent itemset, we calculate the confidence of using a particular item as the rule consequent (right-hand-side of the rule). By testing and merging the remaining rules, we recursively create a list of pruned rules. Parameters ---------- F : list A list of frequent itemsets. support_data : dict The corresponding support data for the frequent itemsets (L). min_confidence : float The minimum confidence threshold. Defaults to 0.5. Returns ------- rules : list The list of candidate rules above the minimum confidence threshold. """ rules = [] for i in range(1, len(F)): for freq_set in F[i]: H1 = [frozenset([item]) for item in freq_set] if i > 1: rules_from_conseq(freq_set, H1, support_data, rules, min_confidence, verbose) else: calc_confidence(freq_set, H1, support_data, rules, min_confidence, verbose) return rules
{ "repo_name": "thorwhalen/ut", "path": "ml/association/fp_tree.py", "copies": "1", "size": "21503", "license": "mit", "hash": -9147807926118305000, "line_mean": 33.0792393027, "line_max": 123, "alpha_frac": 0.5851741617, "autogenerated": false, "ratio": 4.2312081857536405, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5316382347453641, "avg_score": null, "num_lines": null }
# Association Rules Mining - hw3 # Advanced Database Systems # Pedro Ferro Freitas - pff2108 # Roberto Jose de Amorim - rja2139 import csv from collections import defaultdict from sys import argv # data_file = 'INTEGRATED-DATASET.csv' # min_sup = 0.4 # min_conf = 0.7 data_file = argv[1] min_sup = float(argv[2]) min_conf = float(argv[3]) class MiningAlgorithm(object): # Object responsible for the association rules mining def __init__(self): self.elements = set() self.max_elements = 0 self.n_transactions = 0 self.transactions = defaultdict(set) self.counts = defaultdict(int) self.l_itemsets = defaultdict(list) self.c_itemsets = defaultdict(list) self.sorted_support_list = [] self.sorted_confidence_list = [] def read_dataset(self, name): # P1: Read and process dataset file. try: with open(name, 'rb') as csvfile: data = csv.reader(csvfile, delimiter=',') t_id = 0 for row in data: t_id += 1 # # Compute number of elements of the transaction with the largest number of elements n_elements = len(row) if n_elements > self.max_elements: self.max_elements = n_elements # Process each line of csv file for key in row: self.elements.add(key) self.transactions[t_id].add(key) self.counts[(key,)] += 1 self.n_transactions = t_id except IOError: print "File not found or unreadable" exit(1) def association_rules_mining(self): # P2: Algorithm core, calling the most important functions. 
# Compute L_1: for entry in sorted(self.elements): support = float(self.counts[(entry,)]) / self.n_transactions if support >= min_sup: self.l_itemsets[1].append([entry]) # Compute L_k for k>1: # # Largest itemset has, at most, number of elements equal to transaction with the largest number of elements for i in range(2, self.max_elements + 1): self.candidate_generation(i) self.support_update(i) self.compute_large_itemsets(i) # Interrupt the loop if L_k is empty if len(self.l_itemsets[i]) == 0: break def candidate_generation(self, iteration): # P2-a: Generate new candidates from the large itemsets from last iteration. # Join step: for element1 in self.l_itemsets[iteration - 1]: for element2 in self.l_itemsets[iteration - 1]: if element1[0:iteration - 2] == element2[0:iteration - 2]: if element1[iteration - 2] < element2[iteration - 2]: self.c_itemsets[iteration].append(element1 + element2[iteration - 2:]) # Prune step: for entry in self.c_itemsets[iteration]: for bkpoint in range(len(entry)): subset = entry[:bkpoint] + entry[bkpoint + 1:] if subset not in self.l_itemsets[iteration - 1]: bkpoint2 = self.c_itemsets[iteration].index(entry) temp = self.c_itemsets[iteration][:bkpoint2] + self.c_itemsets[iteration][bkpoint2 + 1:] self.c_itemsets[iteration] = temp break def support_update(self, iteration): # P2-b: Calculate the support for the candidates in the k-th iteration. for t_id in self.transactions.keys(): set1 = self.transactions[t_id] for entry in self.c_itemsets[iteration]: set2 = set(entry) if len(set1.intersection(set2)) == iteration: self.counts[tuple(sorted(set1.intersection(set2)))] += 1 def compute_large_itemsets(self, iteration): # P2-c: Determine the itemsets for the k-th iteration, with k elements. 
for entry in self.c_itemsets[iteration]: support = float(self.counts[tuple(entry)]) / self.n_transactions # Compare support for each candidate with threshold if support >= min_sup: self.l_itemsets[iteration].append(entry) def print_sorted_results(self): # P3: Print support and confidence in the desired format. # Priting Support: print '\n==Frequent itemsets (min_sup=%.0f%%)' % (100 * min_sup) support_list = [] for iteration in self.l_itemsets.keys(): for key in self.l_itemsets[iteration]: count = self.counts[tuple(key)] support_list.append((iteration, key, count)) self.sorted_support_list = sorted(support_list, key=lambda entry: entry[2], reverse=True) for iteration, key, count in self.sorted_support_list: support = int(100. * count / self.n_transactions) print '[%s], %.0f%%' % (','.join(key), support) # Printing Confidence: print '\n==High-confidence association rules (min_conf=%.0f%%)' % (100 * min_conf) confidence_list = [] for iteration in self.l_itemsets.keys(): if iteration == 1: continue for key2 in self.l_itemsets[iteration]: count2 = self.counts[tuple(key2)] sup = float(count2) / self.n_transactions for key in key2: temp = set(key2) temp.remove(key) temp2 = sorted(temp) count = self.counts[tuple(temp2)] conf = float(count2) / count confidence_list.append((key, temp2, sup, conf)) self.sorted_confidence_list = sorted(confidence_list, key=lambda entry: entry[3], reverse=True) for key, temp2, sup, conf in self.sorted_confidence_list: if conf >= min_conf and sup > min_sup: conf = round(100. * conf, 1) sup = round(100. * sup, 0) result = ','.join(list(temp2)) print '[%s] => [%s] (Conf: %.1f%%, Supp: %.0f%%)' % (result, key, conf, sup) ARMalgo = MiningAlgorithm() ARMalgo.read_dataset(data_file) ARMalgo.association_rules_mining() ARMalgo.print_sorted_results()
{ "repo_name": "rjamorim/advdb-hw3", "path": "assoc.py", "copies": "1", "size": "6391", "license": "bsd-3-clause", "hash": 327617201536174200, "line_mean": 42.1824324324, "line_max": 117, "alpha_frac": 0.5629791895, "autogenerated": false, "ratio": 3.937769562538509, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5000748752038509, "avg_score": null, "num_lines": null }
"""Association testing for DIMSE-N services""" from io import BytesIO import queue import time import pytest from pydicom.dataset import Dataset from pydicom.tag import Tag from pydicom.uid import UID, ImplicitVRLittleEndian, ExplicitVRLittleEndian from pynetdicom import AE, debug_logger, evt from pynetdicom.dimse_primitives import ( N_EVENT_REPORT, N_GET, N_SET, N_ACTION, N_CREATE, N_DELETE ) from pynetdicom.dsutils import encode, decode from pynetdicom.sop_class import ( DisplaySystem, Verification, PrintJob, ModalityPerformedProcedureStepNotification, ModalityPerformedProcedureStepRetrieve, ModalityPerformedProcedureStep, ProceduralEventLogging, BasicFilmSession, BasicGrayscalePrintManagementMeta, BasicColorPrintManagementMeta, Printer, ) from pynetdicom.service_class import ServiceClass #debug_logger() class DummyDIMSE: def __init__(self): self.status = None def send_msg(self, req, context_id): self.req = req self.context_id = context_id def get_msg(self, block=False): return None, None class TestAssociationSendNEventReport: """Run tests on Assocation send_n_event_report.""" def setup(self): self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_must_be_associated(self): """Test can't send without association.""" def handle(event): return 0x0000, Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStepNotification) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStepNotification) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert not assoc.is_established with pytest.raises(RuntimeError): assoc.send_n_event_report(None, None, None, None) scp.shutdown() def test_no_abstract_syntax_match(self): """Test SCU when no accepted abstract syntax""" def handle(event): return 0x0000, 
Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStepNotification) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStepNotification) assoc = ae.associate('localhost', 11112) assert assoc.is_established msg = ( r"No presentation context for 'Verification SOP Class' has been " r"accepted by the peer for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc.send_n_event_report( None, None, Verification, None ) assoc.release() assert assoc.is_released scp.shutdown() def test_rq_bad_dataset_raises(self): """Test sending bad dataset raises exception.""" def handle(event): return 0x0000, Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStepNotification) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context( ModalityPerformedProcedureStepNotification, ExplicitVRLittleEndian ) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PerimeterValue = b'\x00\x01' msg = r"Unable to encode the supplied 'Event Information' dataset" with pytest.raises(ValueError, match=msg): assoc.send_n_event_report( ds, 1, ModalityPerformedProcedureStepNotification, '1.2.3' ) assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_none(self): """Test no response from peer""" def handle(event): time.sleep(5) return 0x0000, Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 0.2 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStepNotification) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStepNotification) assoc = ae.associate('localhost', 
11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_event_report( ds, 1, ModalityPerformedProcedureStepNotification, '1.2.840.10008.5.1.1.40.1' ) assert status == Dataset() assert ds is None assert assoc.is_aborted scp.shutdown() def test_rsp_invalid(self): """Test invalid DIMSE message received from peer""" def handle(event): return 0x0000, Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStepNotification) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStepNotification) assoc = ae.associate('localhost', 11112) assert assoc.is_established class DummyResponse(): is_valid_response = False class DummyDIMSE(): msg_queue = queue.Queue() gotten = False def send_msg(*args, **kwargs): return def get_msg(self, *args, **kwargs): if not self.gotten: self.gotten = True return None, DummyResponse() return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_event_report( ds, 1, ModalityPerformedProcedureStepNotification, '1.2.840.10008.5.1.1.40.1' ) assert status == Dataset() assert ds is None assert assoc.is_aborted scp.shutdown() def test_rsp_failure(self): """Test receiving a failure response from the peer""" def handle(event): return 0x0112, None self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStepNotification) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStepNotification) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = 
assoc.send_n_event_report( ds, 1, ModalityPerformedProcedureStepNotification, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0112 assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_warning(self): """Test receiving a warning response from the peer""" def handle(event): return 0x0116, event.event_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStepNotification) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStepNotification) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_event_report( ds, 1, ModalityPerformedProcedureStepNotification, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0116 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_success(self): """Test receiving a success response from the peer""" def handle(event): return 0x0000, event.event_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStepNotification) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStepNotification) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_event_report( ds, 1, ModalityPerformedProcedureStepNotification, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0000 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_unknown_status(self): """Test unknown status value returned by peer""" def handle(event): return 0xFFF0, event.event_information self.ae = 
ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStepNotification) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStepNotification) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_event_report( ds, 1, ModalityPerformedProcedureStepNotification, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0xFFF0 assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_bad_dataset(self): """Test handler returns bad dataset""" def handle(event): def test(): pass return 0x0000, test self.ae = ae = AE() ae.add_requested_context(ModalityPerformedProcedureStepNotification) ae.add_supported_context(ModalityPerformedProcedureStepNotification) handlers = [(evt.EVT_N_EVENT_REPORT, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established # Event Information ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_event_report( ds, 1, ModalityPerformedProcedureStepNotification, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0110 assert ds is None assoc.release() scp.shutdown() def test_decode_failure(self): """Test being unable to decode received dataset""" def handle(event): def test(): pass return 0x0000, test self.ae = ae = AE() ae.add_requested_context( ModalityPerformedProcedureStepNotification, ExplicitVRLittleEndian ) ae.add_supported_context(ModalityPerformedProcedureStepNotification) handlers = [(evt.EVT_N_EVENT_REPORT, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) class DummyReply(): def getvalue(self): def test(): 
pass return test class DummyMessage(): is_valid_response = True EventReply = DummyReply() Status = 0x0000 STATUS_OPTIONAL_KEYWORDS = [] class DummyDIMSE(): msg_queue = queue.Queue() gotten = False def send_msg(*args, **kwargs): return def get_msg(self, *args, **kwargs): if not self.gotten: self.gotten = True return 1, DummyMessage() return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() assert assoc.is_established # Event Information ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_event_report( ds, 1, ModalityPerformedProcedureStepNotification, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0110 assert ds is None assoc.release() scp.shutdown() def test_extra_status(self): """Test extra status elements are available.""" def handle(event): status = Dataset() status.Status = 0xFFF0 status.ErrorComment = 'Some comment' status.ErrorID = 12 status.AffectedSOPClassUID = '1.2.3' status.AffectedSOPInstanceUID = '1.2.3.4' return status, event.event_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStepNotification) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStepNotification) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_event_report( ds, 1, ModalityPerformedProcedureStepNotification, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0xFFF0 assert status.ErrorComment == 'Some comment' assert status.ErrorID == 12 assert status.AffectedSOPClassUID == '1.2.3' assert status.AffectedSOPInstanceUID == '1.2.3.4' assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_meta_uid(self): """Test using a Meta SOP Class""" self.ae = ae = AE() ae.acse_timeout = 5 
ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) ae.add_supported_context(Printer) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(BasicGrayscalePrintManagementMeta) ae.add_requested_context(Printer) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.dimse = DummyDIMSE() ds = Dataset() ds.PatientName = 'Test^test' # Receives None, None from DummyDIMSE, aborts status, ds = assoc.send_n_event_report( ds, 1, Printer, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicGrayscalePrintManagementMeta ) assert assoc.is_aborted scp.shutdown() assert assoc.dimse.req.AffectedSOPClassUID == Printer assert assoc.dimse.context_id == 1 assert assoc._accepted_cx[1].abstract_syntax == BasicGrayscalePrintManagementMeta def test_meta_uid_good(self): """Test sending a request using a Meta SOP Class.""" handler_data = [] def handle(event): handler_data.append(event) return 0x0000, event.event_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context(BasicGrayscalePrintManagementMeta) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_event_report( ds, 1, BasicFilmSession, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicGrayscalePrintManagementMeta ) assert status.Status == 0x0000 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released scp.shutdown() req = handler_data[0].request cx = handler_data[0].context assert req.AffectedSOPClassUID == BasicFilmSession assert cx.abstract_syntax == BasicGrayscalePrintManagementMeta def test_meta_uid_bad(self): """Test sending a request using a Meta SOP Class.""" def handle(event): return 0x0000, event.event_information self.ae = ae = 
AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)] ) ae.add_requested_context(BasicGrayscalePrintManagementMeta) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' msg = ( r"No presentation context for 'Basic Color Print Management " r"Meta SOP Class' has been " r"accepted by the peer " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc.send_n_event_report( ds, 1, BasicFilmSession, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicColorPrintManagementMeta ) assoc.release() assert assoc.is_released scp.shutdown() class TestAssociationSendNGet: """Run tests on Assocation send_n_get.""" def setup(self): """Run prior to each test""" self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_must_be_associated(self): """Test can't send without association.""" def handle(event): ds = Dataset() ds.PatientName = 'Test^test' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(DisplaySystem) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) ae.add_requested_context(DisplaySystem) assoc = ae.associate('localhost', 11112) assoc.release() assert assoc.is_released assert not assoc.is_established with pytest.raises(RuntimeError): assoc.send_n_get(None, None, None) scp.shutdown() def test_no_abstract_syntax_match(self): """Test SCU when no accepted abstract syntax""" def handle(event): ds = Dataset() ds.PatientName = 'Test^test' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(DisplaySystem) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) 
ae.add_requested_context(DisplaySystem) assoc = ae.associate('localhost', 11112) assert assoc.is_established msg = ( r"No presentation context for 'Verification SOP Class' has been " r"accepted by the peer " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc.send_n_get(None, Verification, None) assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_none(self): """Test no response from peer""" def handle(event): ds = Dataset() ds.PatientName = 'Test^test' time.sleep(5) return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 0.1 ae.network_timeout = 5 ae.add_supported_context(DisplaySystem) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) ae.add_requested_context(DisplaySystem) assoc = ae.associate('localhost', 11112) assert assoc.is_established status, ds = assoc.send_n_get( [(0x7fe0,0x0010)], DisplaySystem, '1.2.840.10008.5.1.1.40.1' ) assert status == Dataset() assert ds is None assert assoc.is_aborted scp.shutdown() def test_rsp_invalid(self): """Test invalid DIMSE message received from peer""" def handle(event): ds = Dataset() ds.PatientName = 'Test^test' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(DisplaySystem) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) ae.add_requested_context(DisplaySystem) assoc = ae.associate('localhost', 11112) assert assoc.is_established class DummyResponse(): is_valid_response = False class DummyDIMSE(): msg_queue = queue.Queue() gotten = False def send_msg(*args, **kwargs): return def get_msg(self, *args, **kwargs): if not self.gotten: self.gotten = True return None, DummyResponse() return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() assert assoc.is_established status, ds = assoc.send_n_get([(0x7fe0,0x0010)], DisplaySystem, 
'1.2.840.10008.5.1.1.40.1') assert status == Dataset() assert ds is None assert assoc.is_aborted scp.shutdown() def test_rsp_failure(self): """Test receiving a failure response from the peer""" def handle(event): return 0x0112, None self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(DisplaySystem) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) ae.add_requested_context(DisplaySystem) assoc = ae.associate('localhost', 11112) assert assoc.is_established status, ds = assoc.send_n_get([(0x7fe0, 0x0010)], DisplaySystem, '1.2.840.10008.5.1.1.40.1') assert status.Status == 0x0112 assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_warning(self): """Test receiving a warning response from the peer""" def handle(event): ds = Dataset() ds.PatientName = 'Test' ds.SOPClassUID = DisplaySystem ds.SOPInstanceUID = '1.2.3.4' return 0x0116, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(DisplaySystem) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) ae.add_requested_context(DisplaySystem) assoc = ae.associate('localhost', 11112) assert assoc.is_established status, ds = assoc.send_n_get([(0x7fe0,0x0010)], DisplaySystem, '1.2.840.10008.5.1.1.40.1') assert status.Status == 0x0116 assert ds is not None assert isinstance(ds, Dataset) assert ds.PatientName == 'Test' assert ds.SOPClassUID == DisplaySystem assert ds.SOPInstanceUID == '1.2.3.4' assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_success(self): """Test receiving a success response from the peer""" def handle(event): ds = Dataset() ds.PatientName = 'Test' ds.SOPClassUID = DisplaySystem ds.SOPInstanceUID = '1.2.3.4' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(DisplaySystem) scp = 
ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) ae.add_requested_context(DisplaySystem) assoc = ae.associate('localhost', 11112) assert assoc.is_established status, ds = assoc.send_n_get([(0x7fe0,0x0010)], DisplaySystem, '1.2.840.10008.5.1.1.40.1') assert status.Status == 0x0000 assert ds is not None assert isinstance(ds, Dataset) assert ds.PatientName == 'Test' assert ds.SOPClassUID == DisplaySystem assert ds.SOPInstanceUID == '1.2.3.4' assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_unknown_status(self): """Test unknown status value returned by peer""" def handle(event): ds = Dataset() ds.PatientName = 'Test' ds.SOPClassUID = DisplaySystem ds.SOPInstanceUID = '1.2.3.4' return 0xFFF0, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(DisplaySystem) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) ae.add_requested_context(DisplaySystem) assoc = ae.associate('localhost', 11112) assert assoc.is_established status, ds = assoc.send_n_get([(0x7fe0,0x0010)], DisplaySystem, '1.2.840.10008.5.1.1.40.1') assert status.Status == 0xFFF0 assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_bad_dataset(self): """Test handler returns bad dataset""" def handle(event): def test(): pass return 0x0000, test self.ae = ae = AE() ae.add_requested_context(ModalityPerformedProcedureStepRetrieve) ae.add_supported_context(ModalityPerformedProcedureStepRetrieve) handlers = [(evt.EVT_N_GET, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established # Event Information attrs = [0x00100010, 0x00100020] status, ds = assoc.send_n_get( attrs, ModalityPerformedProcedureStepRetrieve, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0110 assert ds is None assoc.release() 
scp.shutdown() def test_decode_failure(self): """Test bad dataset received from peer""" def handle(event): ds = Dataset() ds.PatientName = 'Test^test' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(DisplaySystem) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) ae.add_requested_context(DisplaySystem) assoc = ae.associate('localhost', 11112) assert assoc.is_established class DummyReply(): def getvalue(self): def test(): pass return test class DummyMessage(): is_valid_response = True AttributeList = DummyReply() Status = 0x0000 STATUS_OPTIONAL_KEYWORDS = [] class DummyDIMSE(): msg_queue = queue.Queue() gotten = False def send_msg(*args, **kwargs): return def get_msg(self, *args, **kwargs): if not self.gotten: self.gotten = True return 1, DummyMessage() return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() assert assoc.is_established status, ds = assoc.send_n_get( [(0x7fe0,0x0010)], DisplaySystem, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0110 assert ds is None scp.shutdown() def test_extra_status(self): """Test extra status elements are available.""" def handle(event): ds = Dataset() ds.Status = 0xFFF0 ds.ErrorComment = 'Some comment' ds.ErrorID = 12 ds.AttributeIdentifierList = [0x00100020] return ds, None self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(DisplaySystem) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) ae.add_requested_context(DisplaySystem) assoc = ae.associate('localhost', 11112) assert assoc.is_established status, ds = assoc.send_n_get([(0x7fe0,0x0010)], DisplaySystem, '1.2.840.10008.5.1.1.40.1') assert status.Status == 0xFFF0 assert status.ErrorComment == 'Some comment' assert status.ErrorID == 12 assert status.AttributeIdentifierList == 
[0x00100020] assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_meta_uid(self): """Test using a Meta SOP Class""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) ae.add_supported_context(Printer) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(BasicGrayscalePrintManagementMeta) ae.add_requested_context(Printer) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.dimse = DummyDIMSE() ds = Dataset() ds.PatientName = 'Test^test' # Receives None, None from DummyDIMSE, aborts status, ds = assoc.send_n_get( [(0x00100010)], Printer, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicGrayscalePrintManagementMeta ) assert assoc.is_aborted scp.shutdown() assert assoc.dimse.req.RequestedSOPClassUID == Printer assert assoc.dimse.context_id == 1 assert assoc._accepted_cx[1].abstract_syntax == BasicGrayscalePrintManagementMeta def test_meta_uid_good(self): """Test sending a request using a Meta SOP Class.""" handler_data = [] def handle(event): handler_data.append(event) ds = Dataset() ds.PatientName = 'Test' ds.SOPClassUID = DisplaySystem ds.SOPInstanceUID = '1.2.3.4' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) ae.add_requested_context(BasicGrayscalePrintManagementMeta) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_get( [(0x7fe0,0x0010)], DisplaySystem, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicGrayscalePrintManagementMeta ) assert status.Status == 0x0000 assert ds is not None assert isinstance(ds, Dataset) assert ds.PatientName == 'Test' assert ds.SOPClassUID == DisplaySystem assert ds.SOPInstanceUID == '1.2.3.4' 
assoc.release() assert assoc.is_released scp.shutdown() req = handler_data[0].request cx = handler_data[0].context assert req.RequestedSOPClassUID == DisplaySystem assert cx.abstract_syntax == BasicGrayscalePrintManagementMeta def test_meta_uid_bad(self): """Test sending a request using a Meta SOP Class.""" def handle(event): ds = Dataset() ds.PatientName = 'Test' ds.SOPClassUID = DisplaySystem ds.SOPInstanceUID = '1.2.3.4' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)] ) ae.add_requested_context(BasicGrayscalePrintManagementMeta) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' msg = ( r"No presentation context for 'Basic Color Print Management " r"Meta SOP Class' has been " r"accepted by the peer " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc.send_n_get( [(0x7fe0,0x0010)], DisplaySystem, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicColorPrintManagementMeta ) assoc.release() assert assoc.is_released scp.shutdown() class TestAssociationSendNSet: """Run tests on Assocation send_n_set.""" def setup(self): self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_must_be_associated(self): """Test can't send without association.""" def handle(event): ds = Dataset() ds.PatientName = 'Test^test' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assoc.release() assert assoc.is_released assert not assoc.is_established with 
pytest.raises(RuntimeError): assoc.send_n_set(None, None, None) scp.shutdown() def test_no_abstract_syntax_match(self): """Test SCU when no accepted abstract syntax""" def handle(event): ds = Dataset() ds.PatientName = 'Test^test' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established msg = ( r"No presentation context for 'Verification SOP Class' has been " r"accepted by the peer " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc.send_n_set(None, Verification, None) assoc.release() assert assoc.is_released scp.shutdown() def test_rq_bad_dataset_raises(self): """Test sending bad dataset raises exception.""" def handle(event): ds = Dataset() ds.PatientName = 'Test^test' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context( ModalityPerformedProcedureStep, ExplicitVRLittleEndian ) assoc = ae.associate('localhost', 11112) assert assoc.is_established mod_list = Dataset() mod_list.PerimeterValue = b'\x00\x01' msg = r"Failed to encode the supplied 'Modification List' dataset" with pytest.raises(ValueError, match=msg): assoc.send_n_set( mod_list, ModalityPerformedProcedureStep, '1.2.3' ) assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_none(self): """Test no response from peer""" def handle(event): ds = Dataset() ds.PatientName = 'Test^test' time.sleep(5) return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 0.2 ae.network_timeout = 5 
ae.add_supported_context(ModalityPerformedProcedureStep) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established mod_list = Dataset() mod_list.PatientName = 'Test^test' status, ds = assoc.send_n_set( mod_list, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status == Dataset() assert ds is None assert assoc.is_aborted scp.shutdown() def test_rsp_invalid(self): """Test invalid DIMSE message received from peer""" def handle(event): ds = Dataset() ds.PatientName = 'Test^test' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 0.4 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established class DummyResponse(): is_valid_response = False class DummyDIMSE(): msg_queue = queue.Queue() gotten = False def send_msg(*args, **kwargs): return def get_msg(self, *args, **kwargs): if not self.gotten: self.gotten = True return None, DummyResponse() return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() mod_list = Dataset() mod_list.PatientName = 'Test^test' status, ds = assoc.send_n_set( mod_list, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status == Dataset() assert ds is None assert assoc.is_aborted scp.shutdown() def test_rsp_failure(self): """Test receiving a failure response from the peer""" def handle(event): return 0x0112, None self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, 
handle)] ) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_set( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0112 assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_warning(self): """Test receiving a warning response from the peer""" def handle(event): return 0x0116, event.modification_list self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_set( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0116 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released def test_rsp_success(self): """Test receiving a success response from the peer""" def handle(event): return 0x0000, event.modification_list self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_set( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0000 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released def test_rsp_unknown_status(self): """Test unknown status value returned by peer""" def handle(event): return 0xFFF0, 
event.modification_list self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_set( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0xFFF0 assert ds is None assoc.release() assert assoc.is_released def test_rsp_bad_dataset(self): """Test handler returns bad dataset""" def handle(event): def test(): pass return 0x0000, test self.ae = ae = AE() ae.add_requested_context(ModalityPerformedProcedureStep) ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_SET, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established # Event Information ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_set( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0110 assert ds is None assoc.release() scp.shutdown() def test_decode_failure(self): """Test bad dataset received from peer""" def handle(event): ds = Dataset() ds.PatientName = 'Test^test' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 0.4 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context( ModalityPerformedProcedureStep, ExplicitVRLittleEndian ) assoc = ae.associate('localhost', 11112) assert assoc.is_established class DummyReply(): def getvalue(self): def test(): pass return test class DummyMessage(): is_valid_response = True AttributeList 
= DummyReply() Status = 0x0000 STATUS_OPTIONAL_KEYWORDS = [] class DummyDIMSE(): msg_queue = queue.Queue() gotten = False def send_msg(*args, **kwargs): return def get_msg(self, *args, **kwargs): if not self.gotten: self.gotten = True return 1, DummyMessage() return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() assert assoc.is_established mod_list = Dataset() mod_list.PatientName = 'Test^test' status, ds = assoc.send_n_set( mod_list, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0110 assert ds is None scp.shutdown() def test_extra_status(self): """Test extra status elements are available.""" def handle(event): status = Dataset() status.Status = 0xFFF0 status.ErrorComment = 'Some comment' status.ErrorID = 12 status.AttributeIdentifierList = [0x00100010] return status, event.modification_list self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_set( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0xFFF0 assert status.ErrorComment == 'Some comment' assert status.ErrorID == 12 assert status.AttributeIdentifierList == Tag(0x00100010) assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_meta_uid(self): """Test using a Meta SOP Class""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) ae.add_supported_context(Printer) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(BasicGrayscalePrintManagementMeta) 
ae.add_requested_context(Printer) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.dimse = DummyDIMSE() ds = Dataset() ds.PatientName = 'Test^test' # Receives None, None from DummyDIMSE, aborts status, ds = assoc.send_n_set( ds, Printer, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicGrayscalePrintManagementMeta ) assert assoc.is_aborted scp.shutdown() assert assoc.dimse.req.RequestedSOPClassUID == Printer assert assoc.dimse.context_id == 1 assert assoc._accepted_cx[1].abstract_syntax == BasicGrayscalePrintManagementMeta def test_meta_uid_good(self): """Test sending a request using a Meta SOP Class.""" handler_data = [] def handle(event): handler_data.append(event) return 0x0000, event.modification_list self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context(BasicGrayscalePrintManagementMeta) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_set( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicGrayscalePrintManagementMeta ) assert status.Status == 0x0000 assert ds is not None assert isinstance(ds, Dataset) assert status.Status == 0x0000 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released scp.shutdown() req = handler_data[0].request cx = handler_data[0].context assert req.RequestedSOPClassUID == ModalityPerformedProcedureStep assert cx.abstract_syntax == BasicGrayscalePrintManagementMeta def test_meta_uid_bad(self): """Test sending a request using a Meta SOP Class.""" def handle(event): ds = Dataset() ds.PatientName = 'Test' ds.SOPClassUID = DisplaySystem ds.SOPInstanceUID = '1.2.3.4' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 
ae.add_supported_context(BasicGrayscalePrintManagementMeta) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)] ) ae.add_requested_context(BasicGrayscalePrintManagementMeta) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' msg = ( r"No presentation context for 'Basic Color Print Management " r"Meta SOP Class' has been " r"accepted by the peer " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc.send_n_set( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicColorPrintManagementMeta ) assoc.release() assert assoc.is_released scp.shutdown() class TestAssociationSendNAction: """Run tests on Assocation send_n_action.""" def setup(self): self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_must_be_associated(self): """Test can't send without association.""" def handle(event): return 0x0000, event.action_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ProceduralEventLogging) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(ProceduralEventLogging) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert assoc.is_released assert not assoc.is_established with pytest.raises(RuntimeError): assoc.send_n_action(None, None, None, None) scp.shutdown() def test_no_abstract_syntax_match(self): """Test SCU when no accepted abstract syntax""" def handle(event): return 0x0000, event.action_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ProceduralEventLogging) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(ProceduralEventLogging) assoc = ae.associate('localhost', 11112) 
assert assoc.is_established msg = ( r"No presentation context for 'Verification SOP Class' has been " r"accepted by the peer for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc.send_n_action(None, 1, Verification, None) assoc.release() assert assoc.is_released scp.shutdown() def test_rq_bad_dataset_raises(self): """Test sending bad dataset raises exception.""" def handle(event): return 0x0000, event.action_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ProceduralEventLogging) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context( ProceduralEventLogging, ExplicitVRLittleEndian ) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PerimeterValue = b'\x00\x01' msg = r"Failed to encode the supplied 'Action Information' dataset" with pytest.raises(ValueError, match=msg): assoc.send_n_action(ds, 1, ProceduralEventLogging, '1.2.3') assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_none(self): """Test no response from peer""" def handle(event): time.sleep(5) return 0x0000, event.action_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 0.2 ae.network_timeout = 5 ae.add_supported_context(ProceduralEventLogging) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(ProceduralEventLogging) assoc = ae.associate('localhost', 11112) assert assoc.is_established class DummyDIMSE(): msg_queue = queue.Queue() def send_msg(*args, **kwargs): return def get_msg(*args, **kwargs): return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_action( ds, 1, ProceduralEventLogging, '1.2.840.10008.5.1.1.40.1' ) assert status == Dataset() assert ds is None assert 
assoc.is_aborted scp.shutdown() def test_rsp_invalid(self): """Test invalid DIMSE message received from peer""" def handle(event): return 0x0000, event.action_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ProceduralEventLogging) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(ProceduralEventLogging) assoc = ae.associate('localhost', 11112) assert assoc.is_established class DummyResponse(): is_valid_response = False class DummyDIMSE(): msg_queue = queue.Queue() gotten = False def send_msg(*args, **kwargs): return def get_msg(self, *args, **kwargs): if not self.gotten: self.gotten = True return None, DummyResponse() return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_action( ds, 1, ProceduralEventLogging, '1.2.840.10008.5.1.1.40.1' ) assert status == Dataset() assert ds is None assert assoc.is_aborted scp.shutdown() def test_rsp_failure(self): """Test receiving a failure response from the peer""" def handle(event): return 0x0112, None self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ProceduralEventLogging) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(ProceduralEventLogging) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_action( ds, 1, ProceduralEventLogging, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0112 assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_warning(self): """Test receiving a warning response from the peer""" def handle(event): return 0x0116, event.action_information self.ae = ae = AE() ae.acse_timeout = 
5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ProceduralEventLogging) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(ProceduralEventLogging) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_action( ds, 1, ProceduralEventLogging, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0116 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_success(self): """Test receiving a success response from the peer""" def handle(event): return 0x0000, event.action_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ProceduralEventLogging) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(ProceduralEventLogging) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_action( ds, 1, ProceduralEventLogging, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0000 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_unknown_status(self): """Test unknown status value returned by peer""" def handle(event): return 0xFFF0, event.action_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ProceduralEventLogging) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(ProceduralEventLogging) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_action( ds, 1, ProceduralEventLogging, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0xFFF0 assert ds is None 
assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_bad_dataset(self): """Test bad dataset received from peer""" def handle(event): return 0x0000, event.action_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PrintJob) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(PrintJob) assoc = ae.associate('localhost', 11112) assert assoc.is_established class DummyReply(): def getvalue(self): def test(): pass return test class DummyMessage(): is_valid_response = True is_valid_request = False msg_type = None ActionReply = DummyReply() Status = 0x0000 STATUS_OPTIONAL_KEYWORDS = [] class DummyDIMSE(): msg_queue = queue.Queue() def send_msg(*args, **kwargs): return def get_msg(*args, **kwargs): rsp = DummyMessage() return 1, rsp assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_action( ds, 1, PrintJob, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0110 assert ds is None scp.shutdown() def test_extra_status(self): """Test extra status elements are available.""" def handle(event): ds = Dataset() ds.Status = 0xFFF0 ds.ErrorComment = 'Some comment' ds.ErrorID = 12 return ds, event.action_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PrintJob) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(PrintJob) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_action( ds, 1, PrintJob, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0xFFF0 assert status.ErrorComment == 'Some comment' assert status.ErrorID == 12 assert ds is None assoc.release() assert assoc.is_released 
scp.shutdown() def test_meta_uid(self): """Test using a Meta SOP Class""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) ae.add_supported_context(Printer) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(BasicGrayscalePrintManagementMeta) ae.add_requested_context(Printer) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.dimse = DummyDIMSE() ds = Dataset() ds.PatientName = 'Test^test' # Receives None, None from DummyDIMSE, aborts status, ds = assoc.send_n_action( ds, 1, Printer, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicGrayscalePrintManagementMeta ) assert assoc.is_aborted scp.shutdown() assert assoc.dimse.req.RequestedSOPClassUID == Printer assert assoc.dimse.context_id == 1 assert assoc._accepted_cx[1].abstract_syntax == BasicGrayscalePrintManagementMeta def test_meta_uid_good(self): """Test sending a request using a Meta SOP Class.""" handler_data = [] def handle(event): handler_data.append(event) return 0x0000, event.action_information self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(BasicGrayscalePrintManagementMeta) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_action( ds, 1, ProceduralEventLogging, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicGrayscalePrintManagementMeta ) assert status.Status == 0x0000 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released scp.shutdown() req = handler_data[0].request cx = handler_data[0].context assert req.RequestedSOPClassUID == ProceduralEventLogging assert cx.abstract_syntax == BasicGrayscalePrintManagementMeta def test_meta_uid_bad(self): """Test sending a 
request using a Meta SOP Class.""" def handle(event): ds = Dataset() ds.PatientName = 'Test' ds.SOPClassUID = DisplaySystem ds.SOPInstanceUID = '1.2.3.4' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)] ) ae.add_requested_context(BasicGrayscalePrintManagementMeta) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' msg = ( r"No presentation context for 'Basic Color Print Management " r"Meta SOP Class' has been " r"accepted by the peer " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc.send_n_action( ds, 1, ProceduralEventLogging, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicColorPrintManagementMeta ) assoc.release() assert assoc.is_released scp.shutdown() class TestAssociationSendNCreate: """Run tests on Assocation send_n_create.""" def setup(self): self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_must_be_associated(self): """Test can't send without association.""" def handle(event): return 0x0000, Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert assoc.is_released assert not assoc.is_established with pytest.raises(RuntimeError): assoc.send_n_create(None, None, None) scp.shutdown() def test_no_abstract_syntax_match(self): """Test SCU when no accepted abstract syntax""" def handle(event): return 0x0000, Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 
ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established msg = ( r"No presentation context for 'Verification SOP Class' has been " r"accepted by the peer for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc.send_n_create(None, Verification, None) assoc.release() assert assoc.is_released scp.shutdown() def test_rq_bad_dataset_raises(self): """Test sending bad dataset raises exception.""" def handle(event): return 0x0000, Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.add_requested_context( ModalityPerformedProcedureStep, ExplicitVRLittleEndian ) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PerimeterValue = b'\x00\x01' msg = r"Failed to encode the supplied 'Attribute List' dataset" with pytest.raises(ValueError, match=msg): assoc.send_n_create( ds, ModalityPerformedProcedureStep, '1.2.3' ) assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_none(self): """Test no response from peer""" def handle(event): time.sleep(5) return 0x0000, Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 0.2 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_create( ds, 
ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status == Dataset() assert ds is None assert assoc.is_aborted scp.shutdown() def test_rsp_invalid(self): """Test invalid DIMSE message received from peer""" def handle(event): return 0x0000, Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established class DummyResponse(): is_valid_response = False class DummyDIMSE(): msg_queue = queue.Queue() gotten = False def send_msg(*args, **kwargs): return def get_msg(self, *args, **kwargs): if not self.gotten: self.gotten = True return None, DummyResponse() return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_create( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status == Dataset() assert ds is None assert assoc.is_aborted scp.shutdown() def test_rsp_failure(self): """Test receiving a failure response from the peer""" def handle(event): return 0x0112, Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_create( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0112 assert ds is None assoc.release() assert assoc.is_released 
scp.shutdown() def test_rsp_warning(self): """Test receiving a warning response from the peer""" def handle(event): return 0x0116, event.attribute_list self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_create( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0116 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_success(self): """Test receiving a success response from the peer""" def handle(event): return 0x0000, event.attribute_list self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_create( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0000 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_unknown_status(self): """Test unknown status value returned by peer""" def handle(event): return 0xFFF0, event.attribute_list self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) 
ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_create( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0xFFF0 assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_bad_dataset(self): """Test handler returns bad dataset""" def handle(event): def test(): pass return 0x0000, test self.ae = ae = AE() ae.add_requested_context(ModalityPerformedProcedureStep) ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established # Event Information ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_create( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0110 assert ds is None assoc.release() scp.shutdown() def test_decode_failure(self): """Test bad dataset received from peer""" def handle(event): return 0x0000, event.attribute_list self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established class DummyReply(): def getvalue(self): def test(): pass return test class DummyMessage(): is_valid_response = True is_valid_request = False AttributeList = DummyReply() Status = 0x0000 STATUS_OPTIONAL_KEYWORDS = [] class DummyDIMSE(): msg_queue = queue.Queue() gotten = False def send_msg(*args, **kwargs): return def get_msg(self, *args, **kwargs): if not self.gotten: self.gotten = True 
return 1, DummyMessage() return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_create( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0x0110 assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_extra_status(self): """Test extra status elements are available.""" def handle(event): status = Dataset() status.Status = 0xFFF0 status.ErrorComment = 'Some comment' status.ErrorID = 12 return status, event.attribute_list self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(ModalityPerformedProcedureStep) handlers = [(evt.EVT_N_CREATE, handle)] scp = ae.start_server(('', 11112), evt_handlers=handlers, block=False) ae.add_requested_context(ModalityPerformedProcedureStep) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_create( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1' ) assert status.Status == 0xFFF0 assert status.ErrorComment == 'Some comment' assert status.ErrorID == 12 assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_meta_uid(self): """Test using a Meta SOP Class""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) ae.add_supported_context(Printer) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(BasicGrayscalePrintManagementMeta) ae.add_requested_context(Printer) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.dimse = DummyDIMSE() ds = Dataset() ds.PatientName = 'Test^test' # Receives None, None from DummyDIMSE, aborts status, ds = assoc.send_n_create( ds, Printer, '1.2.840.10008.5.1.1.40.1', 
meta_uid=BasicGrayscalePrintManagementMeta ) assert assoc.is_aborted scp.shutdown() assert assoc.dimse.req.AffectedSOPClassUID == Printer assert assoc.dimse.context_id == 1 assert assoc._accepted_cx[1].abstract_syntax == BasicGrayscalePrintManagementMeta def test_meta_uid_good(self): """Test sending a request using a Meta SOP Class.""" handler_data = [] def handle(event): handler_data.append(event) return 0x0000, event.attribute_list self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_CREATE, handle)] ) ae.add_requested_context(BasicGrayscalePrintManagementMeta) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' status, ds = assoc.send_n_create( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicGrayscalePrintManagementMeta ) assert status.Status == 0x0000 assert ds.PatientName == 'Test^test' assoc.release() assert assoc.is_released scp.shutdown() req = handler_data[0].request cx = handler_data[0].context assert req.AffectedSOPClassUID == ModalityPerformedProcedureStep assert cx.abstract_syntax == BasicGrayscalePrintManagementMeta def test_meta_uid_bad(self): """Test sending a request using a Meta SOP Class.""" def handle(event): ds = Dataset() ds.PatientName = 'Test' ds.SOPClassUID = DisplaySystem ds.SOPInstanceUID = '1.2.3.4' return 0x0000, ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicGrayscalePrintManagementMeta) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_CREATE, handle)] ) ae.add_requested_context(BasicGrayscalePrintManagementMeta) assoc = ae.associate('localhost', 11112) assert assoc.is_established ds = Dataset() ds.PatientName = 'Test^test' msg = ( r"No presentation context for 'Basic Color Print Management 
" r"Meta SOP Class' has been " r"accepted by the peer " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc.send_n_create( ds, ModalityPerformedProcedureStep, '1.2.840.10008.5.1.1.40.1', meta_uid=BasicColorPrintManagementMeta ) assoc.release() assert assoc.is_released scp.shutdown() class TestAssociationSendNDelete: """Run tests on Assocation send_n_delete.""" def setup(self): self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_must_be_associated(self): """Test can't send without association.""" def handle(event): return 0x0000 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicFilmSession) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_DELETE, handle)] ) ae.add_requested_context(BasicFilmSession) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert assoc.is_released assert not assoc.is_established with pytest.raises(RuntimeError): assoc.send_n_delete(None, None) scp.shutdown() def test_no_abstract_syntax_match(self): """Test SCU when no accepted abstract syntax""" def handle(event): return 0x0000 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicFilmSession) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_DELETE, handle)] ) ae.add_requested_context(BasicFilmSession) assoc = ae.associate('localhost', 11112) assert assoc.is_established msg = ( r"No presentation context for 'Verification SOP Class' has been " r"accepted by the peer for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc.send_n_delete(Verification, None) assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_none(self): """Test no response from peer""" def handle(event): time.sleep(5) return 0x0000 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 0.2 ae.network_timeout = 5 
ae.add_supported_context(BasicFilmSession) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_DELETE, handle)] ) ae.add_requested_context(BasicFilmSession) assoc = ae.associate('localhost', 11112) assert assoc.is_established status = assoc.send_n_delete(BasicFilmSession, '1.2.840.10008.5.1.1.40.1') assert status == Dataset() assert assoc.is_aborted scp.shutdown() def test_rsp_invalid(self): """Test invalid DIMSE message received from peer""" def handle(event): return 0x0000 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicFilmSession) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_DELETE, handle)] ) ae.add_requested_context(BasicFilmSession) assoc = ae.associate('localhost', 11112) assert assoc.is_established class DummyResponse(): is_valid_response = False class DummyDIMSE(): msg_queue = queue.Queue() gotten = False def send_msg(*args, **kwargs): return def get_msg(self, *args, **kwargs): if not self.gotten: self.gotten = True return None, DummyResponse() return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() status = assoc.send_n_delete(BasicFilmSession, '1.2.840.10008.5.1.1.40.1') assert status == Dataset() assert assoc.is_aborted scp.shutdown() def test_rsp_failure(self): """Test receiving a failure response from the peer""" def handle(event): return 0x0112 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(BasicFilmSession) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_N_DELETE, handle)] ) ae.add_requested_context(BasicFilmSession) assoc = ae.associate('localhost', 11112) assert assoc.is_established status = assoc.send_n_delete(BasicFilmSession, '1.2.840.10008.5.1.1.40.1') assert status.Status == 0x0112 assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_success(self): """Test 
receiving a success response from the peer"""
        def handle(event):
            return 0x0000

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(BasicFilmSession)
        scp = ae.start_server(
            ('', 11112), block=False,
            evt_handlers=[(evt.EVT_N_DELETE, handle)]
        )

        ae.add_requested_context(BasicFilmSession)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_n_delete(BasicFilmSession, '1.2.840.10008.5.1.1.40.1')
        assert status.Status == 0x0000
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_rsp_unknown_status(self):
        """Test unknown status value returned by peer"""
        # Handler returns a status outside the service class' known codes;
        # the SCU should still pass it through unchanged.
        def handle(event):
            return 0xFFF0

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(BasicFilmSession)
        scp = ae.start_server(
            ('', 11112), block=False,
            evt_handlers=[(evt.EVT_N_DELETE, handle)]
        )

        ae.add_requested_context(BasicFilmSession)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_n_delete(BasicFilmSession, '1.2.840.10008.5.1.1.40.1')
        assert status.Status == 0xFFF0
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_extra_status(self):
        """Test extra status elements are available."""
        # Handler returns a full status Dataset; the optional elements
        # (ErrorComment, ErrorID) must survive into the SCU's status.
        def handle(event):
            ds = Dataset()
            ds.Status = 0xFFF0
            ds.ErrorComment = 'Some comment'
            ds.ErrorID = 12
            return ds

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(BasicFilmSession)
        scp = ae.start_server(
            ('', 11112), block=False,
            evt_handlers=[(evt.EVT_N_DELETE, handle)]
        )

        ae.add_requested_context(BasicFilmSession)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_n_delete(BasicFilmSession, '1.2.840.10008.5.1.1.40.1')
        assert status.Status == 0xFFF0
        assert status.ErrorComment == 'Some comment'
        assert status.ErrorID == 12
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

    def test_meta_uid(self):
        """Test using a Meta SOP Class"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(BasicGrayscalePrintManagementMeta)
        ae.add_supported_context(Printer)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(BasicGrayscalePrintManagementMeta)
        ae.add_requested_context(Printer)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        assoc.dimse = DummyDIMSE()
        ds = Dataset()
        ds.PatientName = 'Test^test'
        # Receives None, None from DummyDIMSE, aborts
        status = assoc.send_n_delete(
            Printer,
            '1.2.840.10008.5.1.1.40.1',
            meta_uid=BasicGrayscalePrintManagementMeta
        )
        assert assoc.is_aborted

        scp.shutdown()

        # The request carries the normal SOP Class UID, but the message is
        # sent over the context negotiated for the *meta* SOP Class.
        assert assoc.dimse.req.RequestedSOPClassUID == Printer
        assert assoc.dimse.context_id == 1
        assert assoc._accepted_cx[1].abstract_syntax == BasicGrayscalePrintManagementMeta

    def test_meta_uid_good(self):
        """Test sending a request using a Meta SOP Class."""
        handler_data = []

        def handle(event):
            handler_data.append(event)
            return 0x0000

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(BasicGrayscalePrintManagementMeta)
        scp = ae.start_server(
            ('', 11112), block=False,
            evt_handlers=[(evt.EVT_N_DELETE, handle)]
        )

        ae.add_requested_context(BasicGrayscalePrintManagementMeta)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_n_delete(
            BasicFilmSession,
            '1.2.840.10008.5.1.1.40.1',
            meta_uid=BasicGrayscalePrintManagementMeta
        )
        assert status.Status == 0x0000
        assoc.release()
        assert assoc.is_released
        scp.shutdown()

        # SCP side sees the requested SOP class in the message and the meta
        # SOP class as the presentation context's abstract syntax.
        req = handler_data[0].request
        cx = handler_data[0].context
        assert req.RequestedSOPClassUID == BasicFilmSession
        assert cx.abstract_syntax == BasicGrayscalePrintManagementMeta

    def test_meta_uid_bad(self):
        """Test sending a request using a Meta SOP Class."""
        def handle(event):
            ds = Dataset()
            ds.PatientName = 'Test'
            ds.SOPClassUID = DisplaySystem
            ds.SOPInstanceUID = '1.2.3.4'
            return 0x0000, ds

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(BasicGrayscalePrintManagementMeta)
        scp = ae.start_server(
            ('', 11112), block=False,
            evt_handlers=[(evt.EVT_N_DELETE, handle)]
        )

        ae.add_requested_context(BasicGrayscalePrintManagementMeta)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        ds = Dataset()
        ds.PatientName = 'Test^test'
        # No context was negotiated for the Color meta SOP Class, so
        # send_n_delete() should refuse with ValueError.
        msg = (
            r"No presentation context for 'Basic Color Print Management "
            r"Meta SOP Class' has been "
            r"accepted by the peer "
            r"for the SCU role"
        )
        with pytest.raises(ValueError, match=msg):
            assoc.send_n_delete(
                BasicFilmSession,
                '1.2.840.10008.5.1.1.40.1',
                meta_uid=BasicColorPrintManagementMeta
            )

        assoc.release()
        assert assoc.is_released
        scp.shutdown()
{ "repo_name": "scaramallion/pynetdicom", "path": "pynetdicom/tests/test_assoc_n.py", "copies": "1", "size": "98607", "license": "mit", "hash": 3944409440555189000, "line_mean": 30.1652970923, "line_max": 89, "alpha_frac": 0.5676777511, "autogenerated": false, "ratio": 3.851384603366793, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.49190623544667933, "avg_score": null, "num_lines": null }
"""Association testing""" try: import ctypes HAVE_CTYPES = True except ImportError: HAVE_CTYPES = False from datetime import datetime from io import BytesIO import logging import os from pathlib import Path import queue import socket import sys import time import threading import pytest from pydicom import dcmread from pydicom.dataset import Dataset, FileMetaDataset from pydicom.uid import ( UID, ImplicitVRLittleEndian, ExplicitVRLittleEndian, JPEGBaseline8Bit, JPEG2000, JPEG2000Lossless, DeflatedExplicitVRLittleEndian, ExplicitVRBigEndian ) import pynetdicom from pynetdicom import ( AE, VerificationPresentationContexts, build_context, evt, _config, debug_logger, build_role ) from pynetdicom.association import Association from pynetdicom.dimse_primitives import C_STORE, C_FIND, C_GET, C_MOVE from pynetdicom.dsutils import encode, decode from pynetdicom.events import Event from pynetdicom._globals import MODE_REQUESTOR, MODE_ACCEPTOR from pynetdicom.pdu_primitives import ( UserIdentityNegotiation, SOPClassExtendedNegotiation, SOPClassCommonExtendedNegotiation, SCP_SCU_RoleSelectionNegotiation, AsynchronousOperationsWindowNegotiation, A_ASSOCIATE ) from pynetdicom.sop_class import ( Verification, CTImageStorage, MRImageStorage, RTImageStorage, PatientRootQueryRetrieveInformationModelFind, PatientRootQueryRetrieveInformationModelGet, PatientRootQueryRetrieveInformationModelMove, PatientStudyOnlyQueryRetrieveInformationModelMove, StudyRootQueryRetrieveInformationModelMove, SecondaryCaptureImageStorage, UnifiedProcedureStepPull, UnifiedProcedureStepPush, UnifiedProcedureStepWatch ) from .hide_modules import hide_modules #debug_logger() ON_WINDOWS = sys.platform == "win32" TEST_DS_DIR = os.path.join(os.path.dirname(__file__), 'dicom_files') BIG_DATASET = dcmread(os.path.join(TEST_DS_DIR, 'RTImageStorage.dcm')) # 2.1 M DATASET_PATH = os.path.join(TEST_DS_DIR, 'CTImageStorage.dcm') BAD_DATASET_PATH = os.path.join(TEST_DS_DIR, 'CTImageStorage_bad_meta.dcm') DATASET = 
dcmread(DATASET_PATH) # JPEG2000Lossless COMP_DATASET = dcmread( os.path.join(TEST_DS_DIR, 'MRImageStorage_JPG2000_Lossless.dcm') ) # DeflatedExplicitVRLittleEndian DEFL_DATASET = dcmread( os.path.join(TEST_DS_DIR, 'SCImageStorage_Deflated.dcm') ) @pytest.fixture() def enable_unrestricted(): _config.UNRESTRICTED_STORAGE_SERVICE = True yield _config.UNRESTRICTED_STORAGE_SERVICE = False class DummyDIMSE: def __init__(self): self.status = None self.msg_queue = queue.Queue() def send_msg(self, rsp, context_id): self.status = rsp.Status self.rsp = rsp def get_msg(self, block=False): return None, None class TestAssociation: """Run tests on Associtation.""" def setup(self): """This function runs prior to all test methods""" self.ae = None def teardown(self): """This function runs after all test methods""" if self.ae: self.ae.shutdown() def test_bad_connection(self): """Test connect to non-AE""" # sometimes causes hangs in Travis ae = AE() ae.add_requested_context(Verification) ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 assoc = ae.associate('localhost', 22) assert not assoc.is_established def test_connection_refused(self): """Test connection refused""" ae = AE() ae.add_requested_context(Verification) ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 assoc = ae.associate('localhost', 11120) assert not assoc.is_established def test_req_no_presentation_context(self): """Test rejection due to no acceptable presentation contexts""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) assert not assoc.is_established assert assoc.is_aborted scp.shutdown() def test_peer_releases_assoc(self): """Test peer releases association""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) 
scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert assoc.is_established scp.active_associations[0].release() assert assoc.is_released assert not assoc.is_established scp.shutdown() def test_peer_aborts_assoc(self): """Test peer aborts association.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert assoc.is_established scp.active_associations[0].abort() assert assoc.is_aborted assert not assoc.is_established scp.shutdown() def test_peer_rejects_assoc(self): """Test peer rejects assoc""" self.ae = ae = AE() ae.require_calling_aet = ['HAHA NOPE'] ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) time.sleep(0.1) assert assoc.is_rejected assert not assoc.is_established scp.shutdown() def test_assoc_release(self): """Test Association release""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) # Simple release ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert assoc.is_released assert not assoc.is_established # Simple release, then release again assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert assoc.is_released assert not assoc.is_established assert assoc.is_released assoc.release() assert assoc.is_released # Simple release, then abort assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert assoc.is_released assert assoc.is_released assert not 
assoc.is_established assoc.abort() assert not assoc.is_aborted scp.shutdown() def test_assoc_abort(self): """Test Association abort""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) # Simple abort ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.abort() assert not assoc.is_established assert assoc.is_aborted # Simple abort, then release assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.abort() assert not assoc.is_established assert assoc.is_aborted assoc.release() assert assoc.is_aborted assert not assoc.is_released # Simple abort, then abort again assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.abort() assert assoc.is_aborted assert not assoc.is_established assoc.abort() scp.shutdown() def test_scp_removed_ui(self): """Test SCP removes UI negotiation""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ui = UserIdentityNegotiation() ui.user_identity_type = 0x01 ui.primary_field = b'pynetdicom' ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112, ext_neg=[ui]) assert assoc.is_established assoc.release() assert assoc.is_released scp.shutdown() def test_scp_removed_ext_neg(self): """Test SCP removes ex negotiation""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ext = SOPClassExtendedNegotiation() ext.sop_class_uid = '1.1.1.1' ext.service_class_application_information = b'\x01\x02' ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112, ext_neg=[ext]) assert assoc.is_established assoc.release() assert assoc.is_released scp.shutdown() def 
test_scp_removed_com_ext_neg(self): """Test SCP removes common ext negotiation""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ext = SOPClassCommonExtendedNegotiation() ext.related_general_sop_class_identification = ['1.2.1'] ext.sop_class_uid = '1.1.1.1' ext.service_class_uid = '1.1.3' ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112, ext_neg=[ext]) assert assoc.is_established assoc.release() assert assoc.is_released scp.shutdown() def test_scp_assoc_limit(self): """Test SCP limits associations""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.maximum_associations = 1 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ae = AE() ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc_2 = ae.associate('localhost', 11112) assert not assoc_2.is_established assoc.release() assert assoc.is_released scp.shutdown() def test_require_called_aet(self): """SCP requires matching called AET""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) ae.require_called_aet = True scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert not assoc.is_established assert assoc.is_rejected scp.shutdown() def test_require_calling_aet(self): """SCP requires matching called AET""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) ae.require_calling_aet = ['TESTSCP'] scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert not assoc.is_established assert assoc.is_rejected scp.shutdown() def 
test_dimse_timeout(self): """Test that the DIMSE timeout works""" def handle(event): time.sleep(0.2) return 0x0000 self.ae = ae = AE() ae.add_supported_context(Verification) ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.dimse_timeout = 0.1 scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_ECHO, handle)] ) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert assoc.dimse_timeout == 0.1 assert assoc.dimse.dimse_timeout == 0.1 assert assoc.is_established assoc.send_c_echo() assoc.release() assert not assoc.is_released assert assoc.is_aborted scp.shutdown() def test_multiple_association_release_cycles(self): """Test repeatedly associating and releasing""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) for ii in range(10): assoc = ae.associate('localhost', 11112) assert assoc.is_established assert not assoc.is_released assoc.send_c_echo() assoc.release() assert assoc.is_released assert not assoc.is_established scp.shutdown() def test_local(self): """Test Association.local.""" ae = AE() assoc = Association(ae, 'requestor') assoc.requestor.ae_title = ae.ae_title assert assoc.local['ae_title'] == 'PYNETDICOM' assoc = Association(ae, 'acceptor') assoc.acceptor.ae_title = ae.ae_title assert assoc.local['ae_title'] == 'PYNETDICOM' def test_remote(self): """Test Association.local.""" ae = AE() assoc = Association(ae, 'requestor') assert assoc.remote['ae_title'] == '' assoc = Association(ae, 'acceptor') assert assoc.remote['ae_title'] == '' def test_mode_raises(self): """Test exception is raised if invalid mode.""" msg = ( r"Invalid association `mode` value, must be either 'requestor' or " "'acceptor'" ) with pytest.raises(ValueError, match=msg): assoc = Association(None, 'nope') def test_setting_socket_override_raises(self): """Test that set_socket raises 
exception if socket set.""" ae = AE() assoc = Association(ae, MODE_REQUESTOR) assoc.dul.socket = 'abc' msg = r"The Association already has a socket set" with pytest.raises(RuntimeError, match=msg): assoc.set_socket('cba') assert assoc.dul.socket == 'abc' def test_invalid_context(self, caplog): """Test receiving an message with invalid context ID""" with caplog.at_level(logging.INFO, logger='pynetdicom'): ae = AE() ae.add_requested_context(Verification) ae.add_requested_context(CTImageStorage) ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112) assoc.dimse_timeout = 0.1 assert assoc.is_established assoc._accepted_cx[3] = assoc._rejected_cx[0] assoc._accepted_cx[3].result = 0x00 assoc._accepted_cx[3]._as_scu = True assoc._accepted_cx[3]._as_scp = True ds = Dataset() ds.SOPClassUID = CTImageStorage ds.SOPInstanceUID = '1.2.3.4' ds.file_meta = FileMetaDataset() ds.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian result = assoc.send_c_store(ds) time.sleep(0.1) assert assoc.is_aborted assert ( 'Received DIMSE message with invalid or rejected context ID' ) in caplog.text scp.shutdown() def test_get_events(self): """Test Association.get_events().""" ae = AE() ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert evt.EVT_C_STORE in assoc.get_events() assert evt.EVT_USER_ID in assoc.get_events() def test_requested_handler_abort(self): """Test the EVT_REQUESTED handler sending abort.""" def handle_req(event): event.assoc.acse.send_abort(0x00) time.sleep(0.1) self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) hh = [(evt.EVT_REQUESTED, handle_req)] scp = ae.start_server(('', 11112), block=False, evt_handlers=hh) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert not assoc.is_established assert assoc.is_aborted scp.shutdown() def 
test_requested_handler_reject(self): """Test the EVT_REQUESTED handler sending reject.""" def handle_req(event): event.assoc.acse.send_reject(0x02, 0x01, 0x01) # Give the requestor time to process the message before killing # the connection time.sleep(0.1) self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) hh = [(evt.EVT_REQUESTED, handle_req)] scp = ae.start_server(('', 11112), block=False, evt_handlers=hh) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert not assoc.is_established assert assoc.is_rejected scp.shutdown() def test_unknown_abort_source(self): """Test an unknown abort source handled correctly #561""" def handle_req(event): pdu = b"\x07\x00\x00\x00\x00\x04\x00\x00\x01\x00" event.assoc.dul.socket.send(pdu) # Give the requestor time to process the message before killing # the connection time.sleep(0.1) self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) hh = [(evt.EVT_REQUESTED, handle_req)] scp = ae.start_server(('', 11112), block=False, evt_handlers=hh) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert not assoc.is_established assert assoc.is_aborted scp.shutdown() class TestCStoreSCP: """Tests for Association._c_store_scp().""" # Used with C-GET (always) and C-MOVE (over the same association) def setup(self): self.ae = None def teardown(self): if self.ae: self.ae.shutdown() def test_no_context(self): """Test correct response if no valid presentation context.""" def handle(event): return 0x0000 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage) ae.add_supported_context(RTImageStorage) # Storage SCP scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle)] ) ae.add_requested_context(RTImageStorage) role = build_role(CTImageStorage, 
scu_role=False, scp_role=True) assoc = ae.associate('localhost', 11112, ext_neg=[role]) assert assoc.is_established req = C_STORE() req.MessageID = 1 req.AffectedSOPClassUID = DATASET.SOPClassUID req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID req.Priority = 1 req._context_id = 1 bytestream = encode(DATASET, True, True) req.DataSet = BytesIO(bytestream) assoc.dimse = DummyDIMSE() assoc._c_store_scp(req) assert assoc.dimse.status == 0x0122 assoc.release() assert assoc.is_released scp.shutdown() def test_handler_exception(self): """Test correct response if exception raised by handler.""" def handle(event): raise ValueError() return 0x0000 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True) # Storage SCP scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scu_role=False, scp_role=True) assoc = ae.associate( 'localhost', 11112, ext_neg=[role], evt_handlers=[(evt.EVT_C_STORE, handle)] ) assert assoc.is_established req = C_STORE() req.MessageID = 1 req.AffectedSOPClassUID = DATASET.SOPClassUID req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID req.Priority = 1 req._context_id = 1 bytestream = encode(DATASET, True, True) req.DataSet = BytesIO(bytestream) assoc.dimse = DummyDIMSE() assoc._c_store_scp(req) assert assoc.dimse.status == 0xC211 assoc.release() assert assoc.is_released scp.shutdown() def test_handler_status_ds_no_status(self): """Test handler with status dataset with no Status element.""" def handle(event): return Dataset() self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True) # Storage SCP scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scu_role=False, scp_role=True) assoc = ae.associate( 'localhost', 11112, 
ext_neg=[role], evt_handlers=[(evt.EVT_C_STORE, handle)] ) assert assoc.is_established req = C_STORE() req.MessageID = 1 req.AffectedSOPClassUID = DATASET.SOPClassUID req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID req.Priority = 1 req._context_id = 1 bytestream = encode(DATASET, True, True) req.DataSet = BytesIO(bytestream) assoc.dimse = DummyDIMSE() assoc._c_store_scp(req) assert assoc.dimse.status == 0xC001 assoc.release() assert assoc.is_released scp.shutdown() def test_handler_status_ds_unknown_elems(self): """Test handler with status dataset with an unknown element.""" def handle(event): ds = Dataset() ds.Status = 0x0000 ds.PatientName = 'ABCD' return ds self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True) # Storage SCP scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scu_role=False, scp_role=True) assoc = ae.associate( 'localhost', 11112, ext_neg=[role], evt_handlers=[(evt.EVT_C_STORE, handle)] ) assert assoc.is_established req = C_STORE() req.MessageID = 1 req.AffectedSOPClassUID = DATASET.SOPClassUID req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID req.Priority = 1 req._context_id = 1 bytestream = encode(DATASET, True, True) req.DataSet = BytesIO(bytestream) assoc.dimse = DummyDIMSE() assoc._c_store_scp(req) rsp = assoc.dimse.rsp assert rsp.Status == 0x0000 assert not hasattr(rsp, 'PatientName') assoc.release() assert assoc.is_released scp.shutdown() def test_handler_invalid_status(self): """Test handler with invalid status.""" def handle(event): return 'abcd' self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True) # Storage SCP scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scu_role=False, scp_role=True) 
assoc = ae.associate( 'localhost', 11112, ext_neg=[role], evt_handlers=[(evt.EVT_C_STORE, handle)] ) assert assoc.is_established req = C_STORE() req.MessageID = 1 req.AffectedSOPClassUID = DATASET.SOPClassUID req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID req.Priority = 1 req._context_id = 1 bytestream = encode(DATASET, True, True) req.DataSet = BytesIO(bytestream) assoc.dimse = DummyDIMSE() assoc._c_store_scp(req) assert assoc.dimse.status == 0xC002 assoc.release() assert assoc.is_released scp.shutdown() def test_handler_unknown_status(self): """Test handler with invalid status.""" def handle(event): return 0xDEFA self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True) # Storage SCP scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scu_role=False, scp_role=True) assoc = ae.associate( 'localhost', 11112, ext_neg=[role], evt_handlers=[(evt.EVT_C_STORE, handle)] ) assert assoc.is_established req = C_STORE() req.MessageID = 1 req.AffectedSOPClassUID = DATASET.SOPClassUID req.AffectedSOPInstanceUID = DATASET.SOPInstanceUID req.Priority = 1 req._context_id = 1 bytestream = encode(DATASET, True, True) req.DataSet = BytesIO(bytestream) assoc.dimse = DummyDIMSE() assoc._c_store_scp(req) assert assoc.dimse.status == 0xDEFA assoc.release() assert assoc.is_released scp.shutdown() class TestAssociationSendCEcho: """Run tests on Assocation evt.EVT_C_ECHO handler.""" def setup(self): """Run prior to each test""" self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_must_be_associated(self): """Test can't send without association.""" # Test raise if assoc not established self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) 
ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() assert assoc.is_released assert not assoc.is_established with pytest.raises(RuntimeError): assoc.send_c_echo() scp.shutdown() def test_no_abstract_syntax_match(self): """Test SCU when no accepted abstract syntax""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) assert assoc.is_established with pytest.raises(ValueError): assoc.send_c_echo() assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_none(self): """Test no response from peer""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) class DummyDIMSE(): msg_queue = queue.Queue() def send_msg(*args, **kwargs): return def get_msg(*args, **kwargs): return None, None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() if assoc.is_established: assoc.send_c_echo() assert assoc.is_aborted scp.shutdown() def test_rsp_invalid(self): """Test invalid response received from peer""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) class DummyResponse(): is_valid_response = False is_valid_request = False msg_type = None class DummyDIMSE(): msg_queue = queue.Queue() def send_msg(*args, **kwargs): return def get_msg(*args, **kwargs): return None, DummyResponse() assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) 
assoc.dimse = DummyDIMSE() if assoc.is_established: assoc.send_c_echo() assert assoc.is_aborted scp.shutdown() def test_rsp_success(self): """Test receiving a success response from the peer""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert assoc.is_established result = assoc.send_c_echo() assert result.Status == 0x0000 assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_failure(self): """Test receiving a failure response from the peer""" def handler(event): return 0x0210 self.ae = ae = AE() ae.add_supported_context(Verification) handlers = [(evt.EVT_C_ECHO, handler)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) ae.add_requested_context(Verification) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established result = assoc.send_c_echo() assert result.Status == 0x0210 assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_unknown_status(self): """Test unknown status value returned by peer""" def handler(event): return 0xFFF0 self.ae = ae = AE() ae.add_supported_context(Verification) handlers = [(evt.EVT_C_ECHO, handler)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) ae.add_requested_context(Verification) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established result = assoc.send_c_echo() assert result.Status == 0xFFF0 assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_multi_status(self): """Test receiving a status with extra elements""" def handler(event): ds = Dataset() ds.Status = 0x0122 ds.ErrorComment = 'Some comment' return ds self.ae = ae = AE() ae.add_supported_context(Verification) handlers = [(evt.EVT_C_ECHO, handler)] scp = ae.start_server(('', 
11112), block=False, evt_handlers=handlers) ae.add_requested_context(Verification) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established result = assoc.send_c_echo() assert result.Status == 0x0122 assert result.ErrorComment == 'Some comment' assoc.release() assert assoc.is_released scp.shutdown() def test_abort_during(self): """Test aborting the association during message exchange""" def handle(event): event.assoc.abort() return 0x0000 self.ae = ae = AE() ae.acse_timeout = 1 ae.dimse_timeout = 1 ae.network_timeout = 1 ae.add_supported_context(Verification) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_ECHO, handle)] ) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert assoc.is_established result = assoc.send_c_echo() assert result == Dataset() time.sleep(0.1) assert assoc.is_aborted scp.shutdown() def test_run_accept_scp_not_implemented(self): """Test association is aborted if non-implemented SCP requested.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) ae.add_supported_context('1.2.3.4') scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) ae.add_requested_context('1.2.3.4') ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established status = assoc.send_n_delete('1.2.3.4', '1.2.3') assert status == Dataset() time.sleep(0.1) assert assoc.is_aborted scp.shutdown() def test_rejected_contexts(self): """Test receiving a success response from the peer""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert 
len(assoc.rejected_contexts) == 1 cx = assoc.rejected_contexts[0] assert cx.abstract_syntax == CTImageStorage assoc.release() assert assoc.is_released scp.shutdown() def test_common_ext_neg_no_general_sop(self): """Test sending SOP Class Common Extended Negotiation.""" # With no Related General SOP Classes self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.acse_timeout = 5 ae.dimse_timeout = 5 item = SOPClassCommonExtendedNegotiation() item.sop_class_uid = '1.2.3' item.service_class_uid = '2.3.4' assoc = ae.associate('localhost', 11112, ext_neg=[item]) assert assoc.is_established result = assoc.send_c_echo() assert result.Status == 0x0000 assoc.release() assert assoc.is_released scp.shutdown() def test_changing_network_timeout(self): """Test changing timeout after associated.""" self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established ae.network_timeout = 1 assert assoc.dul.network_timeout == 1 assoc.release() assert assoc.is_released scp.shutdown() def test_network_times_out_requestor(self): """Regression test for #286.""" self.ae = ae = AE() ae.add_requested_context(Verification) ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert assoc.network_timeout == 60 assoc.network_timeout = 0.5 assert assoc.network_timeout == 0.5 time.sleep(1.0) assert assoc.is_aborted scp.shutdown() def test_network_times_out_acceptor(self): """Regression test for #286.""" self.ae = ae = AE() ae.add_requested_context(Verification) ae.add_supported_context(Verification) scp = ae.start_server(('', 11113), block=False) assoc = ae.associate('localhost', 11113) ae.network_timeout = 0.5 
assoc.network_timeout = 60 assert assoc.network_timeout == 60 assert assoc.is_established time.sleep(1.0) assert assoc.is_aborted scp.shutdown() class TestAssociationSendCStore: """Run tests on Assocation send_c_store.""" def setup(self): """Run prior to each test""" self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() _config.STORE_SEND_CHUNKED_DATASET = False def test_must_be_associated(self): """Test SCU can't send without association.""" # Test raise if assoc not established def handle_store(event): return 0x0000 handlers = [(evt.EVT_C_STORE, handle_store)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage) scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) assoc.release() assert assoc.is_released assert not assoc.is_established with pytest.raises(RuntimeError): assoc.send_c_store(DATASET) scp.shutdown() def test_no_abstract_syntax_match(self): """Test SCU when no accepted abstract syntax""" def handle_store(event): return 0x0000 handlers = [(evt.EVT_C_STORE, handle_store)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) ae.add_requested_context(Verification) assoc = ae.associate('localhost', 11112) assert assoc.is_established with pytest.raises(ValueError): assoc.send_c_store(DATASET) assoc.release() assert assoc.is_released scp.shutdown() def test_bad_priority(self): """Test bad priority raises exception""" def handle_store(event): return 0x0000 handlers = [(evt.EVT_C_STORE, handle_store)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage) scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) 
        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # 0x0003 is outside the valid DIMSE priority values
        with pytest.raises(ValueError):
            assoc.send_c_store(DATASET, priority=0x0003)
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_fail_encode_dataset(self):
        """Test failure if unable to encode dataset"""
        def handle_store(event):
            return 0x0000

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        ds = Dataset()
        ds.SOPClassUID = CTImageStorage
        ds.SOPInstanceUID = '1.2.3'
        # The extra element makes the dataset unencodable here — triggers
        # the encoding failure path
        ds.PerimeterValue = b'\x00\x01'
        ds.file_meta = FileMetaDataset()
        ds.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian
        msg = r"Failed to encode the supplied dataset"
        with pytest.raises(ValueError, match=msg):
            assoc.send_c_store(ds)

        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_encode_compressed_dataset(self):
        """Test sending a dataset with a compressed transfer syntax"""
        def handle_store(event):
            return 0x0000

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(MRImageStorage, JPEG2000Lossless)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(MRImageStorage, JPEG2000Lossless)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        result = assoc.send_c_store(COMP_DATASET)
        assert result.Status == 0x0000
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_rsp_none(self):
        """Test no response from peer"""
        def handle_store(event):
            return 0x0000

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)

        class DummyDIMSE():
            msg_queue = queue.Queue()
            def send_msg(*args, **kwargs):
                return

            def get_msg(*args, **kwargs):
                # (None, None) simulates no response from the peer
                return None, None

        # Pause the reactor before swapping in the dummy DIMSE provider
        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        assert assoc.is_established

        status = assoc.send_c_store(DATASET)
        # No response -> empty status dataset and an aborted association
        assert status == Dataset()
        assert assoc.is_aborted

        scp.shutdown()

    def test_rsp_invalid(self):
        """Test invalid DIMSE message received from peer"""
        def handle_store(event):
            return 0x0000

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)

        class DummyResponse():
            is_valid_response = False

        class DummyDIMSE():
            msg_queue = queue.Queue()
            def send_msg(*args, **kwargs):
                return

            def get_msg(*args, **kwargs):
                return DummyResponse(), None

        # Pause the reactor before swapping in the dummy DIMSE provider
        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        assert assoc.is_established

        status = assoc.send_c_store(DATASET)
        assert assoc.is_aborted
        assert status == Dataset()

        scp.shutdown()

    def test_rsp_failure(self):
        """Test receiving a failure response from the peer"""
        def handle_store(event):
            # 0xC000: failure status
            return 0xC000

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_c_store(DATASET)
        assert status.Status == 0xC000
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_rsp_warning(self):
        """Test receiving a warning response from the peer"""
        def handle_store(event):
            return 0xB000

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_c_store(DATASET)
        assert status.Status == 0xB000
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_rsp_success(self):
        """Test receiving a success response from the peer"""
        def handle_store(event):
            return 0x0000

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_c_store(DATASET)
        assert status.Status == 0x0000
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_rsp_unknown_status(self):
        """Test unknown status value returned by peer"""
        def handle_store(event):
            # 0xFFF0 is not a defined C-STORE status value
            return 0xFFF0

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(CTImageStorage)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_c_store(DATASET)
        assert status.Status == 0xFFF0
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_dataset_no_sop_class_raises(self):
        """Test sending a dataset without SOPClassUID raises."""
        def handle_store(event):
            return 0x0000

        handlers = [(evt.EVT_C_STORE,
handle_store)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage) scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) ds = Dataset() ds.SOPInstanceUID = '1.2.3.4' ds.file_meta = FileMetaDataset() ds.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian assert assoc.is_established assert 'SOPClassUID' not in ds msg = ( f"Unable to send the dataset as one or more required " f"element are missing: SOPClassUID" ) with pytest.raises(AttributeError, match=msg): assoc.send_c_store(ds) assoc.release() assert assoc.is_released scp.shutdown() def test_dataset_no_transfer_syntax_raises(self): """Test sending a dataset without TransferSyntaxUID raises.""" def handle_store(event): return 0x0000 handlers = [(evt.EVT_C_STORE, handle_store)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage) scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) ds = Dataset() ds.SOPInstanceUID = '1.2.3.4' ds.SOPClassUID = CTImageStorage assert not hasattr(ds, 'file_meta') msg = ( r"Unable to determine the presentation context to use with " r"`dataset` as it contains no '\(0002,0010\) Transfer Syntax " r"UID' file meta information element" ) with pytest.raises(AttributeError, match=msg): assoc.send_c_store(ds) ds.file_meta = FileMetaDataset() assert 'TransferSyntaxUID' not in ds.file_meta msg = ( r"Unable to determine the presentation context to use with " r"`dataset` as it contains no '\(0002,0010\) Transfer Syntax " r"UID' file meta information element" ) with pytest.raises(AttributeError, match=msg): assoc.send_c_store(ds) assoc.release() assert assoc.is_released scp.shutdown() def test_functional_common_ext_neg(self): """Test functioning of the SOP Class 
Common Extended negotiation.""" def handle_ext(event): return event.items def handle_store(event): return 0x0000 handlers = [ (evt.EVT_C_STORE, handle_store), (evt.EVT_SOP_COMMON, handle_ext) ] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage) ae.add_supported_context('1.2.3') scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) ae.add_requested_context(CTImageStorage) ae.add_requested_context('1.2.3') req = { '1.2.3' : ('1.2.840.10008.4.2', []), '1.2.3.1' : ('1.2.840.10008.4.2', ['1.1.1', '1.4.2']), '1.2.3.4' : ('1.2.111111', []), '1.2.3.5' : ('1.2.111111', ['1.2.4', '1.2.840.10008.1.1']), } ext_neg = [] for kk, vv in req.items(): item = SOPClassCommonExtendedNegotiation() item.sop_class_uid = kk item.service_class_uid = vv[0] item.related_general_sop_class_identification = vv[1] ext_neg.append(item) assoc = ae.associate('localhost', 11112, ext_neg=ext_neg) assert assoc.is_established ds = Dataset() ds.SOPClassUID = '1.2.3' ds.SOPInstanceUID = '1.2.3.4' ds.file_meta = FileMetaDataset() ds.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian status = assoc.send_c_store(ds) assert status.Status == 0x0000 assoc.release() scp.shutdown() def test_using_filepath(self): """Test using a file path to a dataset.""" recv = [] def handle_store(event): recv.append(event.dataset) return 0x0000 handlers = [(evt.EVT_C_STORE, handle_store)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage) scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert isinstance(DATASET_PATH, str) status = assoc.send_c_store(DATASET_PATH) assert status.Status == 0x0000 p = Path(DATASET_PATH).resolve() assert isinstance(p, Path) status = assoc.send_c_store(p) assert status.Status == 0x0000 assoc.release() 
        assert assoc.is_released
        scp.shutdown()

        assert 2 == len(recv)
        for ds in recv:
            assert "CompressedSamples^CT1" == ds.PatientName
            assert "DataSetTrailingPadding" in ds

    def test_using_filepath_chunks(self):
        """Test chunking send."""
        _config.STORE_SEND_CHUNKED_DATASET = True

        recv = []
        def handle_store(event):
            recv.append(event.dataset)
            return 0x0000

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        # str path
        assert isinstance(DATASET_PATH, str)
        status = assoc.send_c_store(DATASET_PATH)
        assert status.Status == 0x0000

        # pathlib.Path
        p = Path(DATASET_PATH).resolve()
        assert isinstance(p, Path)
        status = assoc.send_c_store(p)
        assert status.Status == 0x0000

        assoc.release()
        assert assoc.is_released

        # Repeat with no maximum PDU size limit
        ae.maximum_pdu_size = 0
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_c_store(p)
        assert status.Status == 0x0000
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

        assert 3 == len(recv)
        for ds in recv:
            assert not hasattr(ds, "file_meta")
            assert "CompressedSamples^CT1" == ds.PatientName
            assert 126 == len(ds.DataSetTrailingPadding)

    def test_using_filepath_chunks_missing(self):
        """Test chunked send raises if required file meta info is missing."""
        _config.STORE_SEND_CHUNKED_DATASET = True

        recv = []
        def handle_store(event):
            recv.append(event.dataset)
            return 0x0000

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assert isinstance(BAD_DATASET_PATH, str)
        msg = (
            r"one or more required file meta information elements are "
            r"missing: MediaStorageSOPClassUID"
        )
        with pytest.raises(AttributeError, match=msg):
            assoc.send_c_store(BAD_DATASET_PATH)
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    # Regression tests
    def test_no_send_mismatch(self):
        """Test sending a dataset with mismatched transfer syntax (206)."""
        def handle_store(event):
            return 0x0000

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(CTImageStorage, ImplicitVRLittleEndian)
        assoc = ae.associate('localhost', 11112)

        # Dataset transfer syntax doesn't match the accepted context
        ds = Dataset()
        ds.SOPClassUID = CTImageStorage
        ds.SOPInstanceUID = '1.2.3.4'
        ds.file_meta = FileMetaDataset()
        ds.file_meta.TransferSyntaxUID = JPEGBaseline8Bit

        assert assoc.is_established
        msg = (
            r"No presentation context for 'CT Image Storage' has been "
            r"accepted by the peer with 'JPEG Baseline \(Process 1\)' "
            r"transfer syntax for the SCU role"
        )
        with pytest.raises(ValueError, match=msg):
            assoc.send_c_store(ds)

        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_send_deflated(self):
        """Test sending a deflated encoded dataset (482)."""
        recv_ds = []
        def handle_store(event):
            recv_ds.append(event.dataset)
            return 0x0000

        handlers = [(evt.EVT_C_STORE, handle_store)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(
            SecondaryCaptureImageStorage, DeflatedExplicitVRLittleEndian
        )
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(
            SecondaryCaptureImageStorage, DeflatedExplicitVRLittleEndian
        )
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        status = assoc.send_c_store(DEFL_DATASET)

        assoc.release()
        assert assoc.is_released

        scp.shutdown()

        assert '^^^^' == \
            recv_ds[0].PatientName


class TestAssociationSendCFind:
    """Run tests on Association send_c_find."""
    def setup(self):
        """Run prior to each test"""
        # Minimal Patient level query identifier
        self.ds = Dataset()
        self.ds.PatientName = '*'
        self.ds.QueryRetrieveLevel = "PATIENT"

        self.ae = None

    def teardown(self):
        """Clear any active threads"""
        if self.ae:
            self.ae.shutdown()

    def test_must_be_associated(self):
        """Test can't send without association."""
        # Test raise if assoc not established
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assoc.release()
        assert assoc.is_released
        assert not assoc.is_established
        with pytest.raises(RuntimeError):
            next(assoc.send_c_find(
                self.ds, PatientRootQueryRetrieveInformationModelFind)
            )

        scp.shutdown()

    def test_no_abstract_syntax_match(self):
        """Test when no accepted abstract syntax"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(Verification)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(Verification)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        # Wrap in a function as send_c_find is a generator
        def test():
            next(assoc.send_c_find(
                self.ds, PatientRootQueryRetrieveInformationModelFind)
            )

        with pytest.raises(ValueError):
            test()
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_bad_query_model(self):
        """Test invalid query_model value"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        with pytest.raises(ValueError):
            next(assoc.send_c_find(self.ds,
                                   query_model='XXX'))
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_fail_encode_identifier(self):
        """Test a failure in encoding the Identifier dataset"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(
            PatientRootQueryRetrieveInformationModelFind,
            ExplicitVRLittleEndian
        )
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # Mutate the shared DATASET so it can't be encoded
        DATASET.PerimeterValue = b'\x00\x01'

        def test():
            next(assoc.send_c_find(
                DATASET, PatientRootQueryRetrieveInformationModelFind)
            )

        with pytest.raises(ValueError):
            test()
        assoc.release()
        assert assoc.is_released

        del DATASET.PerimeterValue # Fix up our changes
        scp.shutdown()

    def test_rsp_failure(self):
        """Test receiving a failure response from the peer"""
        def handle(event):
            # 0xA700: Out of Resources (failure)
            yield 0xA700, None

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(
                self.ds, PatientRootQueryRetrieveInformationModelFind):
            assert status.Status == 0xA700
            assert ds is None
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_rsp_pending(self):
        """Test receiving a pending response from the peer"""
        def handle(event):
            # 0xFF00: Pending, with a matching Identifier
            yield 0xFF00, self.ds

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        result = assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        )
        # First response: pending with a matching Identifier
        (status, ds) = next(result)
        assert status.Status == 0xFF00
        assert 'PatientName' in ds
        # Final response: success with no Identifier
        (status, ds) = next(result)
        assert status.Status == 0x0000
        assert ds is None

        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_rsp_success(self):
        """Test receiving a success response from the peer"""
        def handle(event):
            yield 0x0000, None

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        ):
            assert status.Status == 0x0000
            assert ds is None
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_rsp_empty(self):
        """Test receiving a success response from the peer"""
        # No matches
        def handle(event):
            pass

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        ):
            assert status.Status == 0x0000
            assert ds is None
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_rsp_cancel(self):
        """Test receiving a cancel response from the peer"""
        def handle(event):
            # 0xFE00: Matching terminated due to Cancel
            yield 0xFE00, None

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        ):
            assert status.Status == 0xFE00
            assert ds is None
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_rsp_invalid(self):
        """Test invalid DIMSE message response received from peer"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)

        class DummyResponse():
            is_valid_response = False

        class DummyDIMSE():
            def send_msg(*args, **kwargs):
                return

            def get_msg(*args, **kwargs):
                return DummyResponse(), None

        # Pause the reactor before swapping in the dummy DIMSE provider
        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        assert assoc.is_established
        for (_, _) in assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        ):
            pass

        assert assoc.is_aborted

        scp.shutdown()

    def test_rsp_unknown_status(self):
        """Test unknown status value returned by peer"""
        def handle(event):
            # 0xFFF0 is not a defined C-FIND status value
            yield 0xFFF0, None

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        ):
            assert status.Status == 0xFFF0
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_rsp_bad_dataset(self):
        """Test bad dataset returned by evt.EVT_C_FIND handler"""
        def handle(event):
            def test():
                pass
            # A callable can't be encoded as an Identifier dataset
            yield 0xFF00, test

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(
            PatientRootQueryRetrieveInformationModelFind,
            ExplicitVRLittleEndian
        )
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )

        model = PatientRootQueryRetrieveInformationModelFind
        ae.add_requested_context(model, ExplicitVRLittleEndian)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        for (status, ds) in assoc.send_c_find(self.ds, model):
            # Failure status range: Unable to Process
            assert status.Status in range(0xC000, 0xD000)
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_connection_timeout(self):
        """Test the connection timing out"""
        def handle(event):
            yield 0x0000

        hh = [(evt.EVT_C_FIND, handle)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=hh)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)

        class DummyMessage():
            is_valid_response = True
            Identifier = None
            Status = 0x0000
            STATUS_OPTIONAL_KEYWORDS = []

        class DummyDIMSE():
            def send_msg(*args, **kwargs):
                return

            def get_msg(*args, **kwargs):
                # (None, None) simulates the connection timing out
                return None, None

        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        assert assoc.is_established
        results = assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        )
        assert next(results) == (Dataset(), None)
        with pytest.raises(StopIteration):
            next(results)

        assert assoc.is_aborted

        scp.shutdown()

    def test_decode_failure(self):
        """Test failure to decode the response's Identifier dataset"""
        def handle(event):
            yield 0x0000

        hh = [(evt.EVT_C_FIND, handle)]

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False, evt_handlers=hh)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind,
                                 ExplicitVRLittleEndian)
        ae.add_requested_context(CTImageStorage)

        assoc = ae.associate('localhost', 11112)

        class DummyDIMSE():
            msg_queue = queue.Queue()
            def send_msg(*args, **kwargs):
                return

            def get_msg(*args, **kwargs):
                def dummy():
                    pass

                # A pending C-FIND response whose Identifier (a callable,
                # not encoded dataset bytes) can't be decoded
                rsp = C_FIND()
                rsp.Status = 0xFF00
                rsp.MessageIDBeingRespondedTo = 1
                rsp._dataset = dummy
                return 1, rsp

        assoc._reactor_checkpoint.clear()
        while not assoc._is_paused:
            time.sleep(0.01)
        assoc.dimse = DummyDIMSE()
        assert assoc.is_established
        results = assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        )
        status, ds = next(results)
        assert status.Status == 0xFF00
        assert ds is None

        scp.shutdown()

    def test_rsp_not_find(self, caplog):
        """Test receiving a non C-FIND message in response."""
        with caplog.at_level(logging.ERROR, logger='pynetdicom'):
            ae = AE()
            assoc = Association(ae, 'requestor')
            assoc._is_paused = True
            dimse = assoc.dimse
            # Queue an unexpected C-STORE message as the "response"
            dimse.msg_queue.put((3, C_STORE()))
            cx = build_context(PatientRootQueryRetrieveInformationModelFind)
            cx._as_scu = True
            cx._as_scp = False
            cx.context_id = 1
            assoc._accepted_cx = {1 : cx}
            identifier = Dataset()
            identifier.PatientID = '*'
            assoc.is_established = True
            results = assoc.send_c_find(
                identifier, PatientRootQueryRetrieveInformationModelFind
            )
            status, ds = next(results)
            assert status == Dataset()
            assert ds is None
            with pytest.raises(StopIteration):
                next(results)

            assert (
                'Received an unexpected C-STORE message from the peer'
            ) in caplog.text
            assert assoc.is_aborted

    def test_rsp_invalid_find(self, caplog):
        """Test receiving an invalid C-FIND message in response."""
        with caplog.at_level(logging.ERROR, logger='pynetdicom'):
            ae = AE()
            assoc = Association(ae, 'requestor')
            assoc._is_paused = True
            dimse = assoc.dimse
            # A default C_FIND message is missing required attributes
            dimse.msg_queue.put((3, C_FIND()))
            cx = build_context(PatientRootQueryRetrieveInformationModelFind)
            cx._as_scu = True
            cx._as_scp = False
            cx.context_id = 1
            assoc._accepted_cx = {1 : cx}
            identifier = Dataset()
            identifier.PatientID = '*'
            assoc.is_established = True
            results = assoc.send_c_find(
                identifier, PatientRootQueryRetrieveInformationModelFind
            )
            status, ds = next(results)
            assert status == Dataset()
            assert ds is None
            with pytest.raises(StopIteration):
                next(results)

            assert (
                'Received an invalid C-FIND response from the peer'
            ) in caplog.text
            assert assoc.is_aborted

    def test_query_uid_public(self):
        """Test using a public UID for the query model"""
        def handle(event):
            yield 0x0000, None

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(
            ('', 11112), block=False, evt_handlers=[(evt.EVT_C_FIND, handle)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        responses = assoc.send_c_find(
            self.ds, PatientRootQueryRetrieveInformationModelFind
        )
        for (status, ds) in responses:
            assert status.Status == 0x0000
            assert ds is None
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_query_uid_private(self, caplog):
        """Test using a private UID for the query model"""
        def handle(event):
            yield 0x0000, None

        with caplog.at_level(logging.ERROR, logger='pynetdicom'):
            self.ae = ae = AE()
            ae.acse_timeout = 5
            ae.dimse_timeout = 5
            ae.network_timeout = 5
            ae.add_supported_context('1.2.3.4')
            scp = ae.start_server(
                ('', 11112), block=False,
                evt_handlers=[(evt.EVT_C_FIND, handle)]
            )

            ae.add_requested_context('1.2.3.4')
            assoc = ae.associate('localhost', 11112)
            assert assoc.is_established
            responses = assoc.send_c_find(self.ds, '1.2.3.4')
            scp.shutdown()

            # No service class implements a private SOP Class
            msg = (
                "No supported service class available for the SOP Class "
                "UID '1.2.3.4'"
            )
            assert msg in caplog.text


class TestAssociationSendCCancel:
    """Run tests on Association send_c_cancel."""
    def setup(self):
        """Run prior to each test"""
        self.ae = None

    def teardown(self):
        """Clear any active threads"""
        if self.ae:
            self.ae.shutdown()

    def test_must_be_associated(self):
        """Test can't send without association."""
        # Test raise if assoc not established
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assoc.release()
        assert assoc.is_released
        assert not assoc.is_established
        with pytest.raises(RuntimeError):
            assoc.send_c_cancel(1, 1)

        scp.shutdown()

    def test_context_id(self):
        """Test using `context_id`"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelFind)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.send_c_cancel(1, 1)

        scp.shutdown()

    def test_query_model(self):
        """Test using `query_model`"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        model = PatientRootQueryRetrieveInformationModelFind
        ae.add_supported_context(model)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(model)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        assoc.send_c_cancel(1, query_model=model)

        scp.shutdown()

    def test_context_id_and_query_model(self):
        """Test using `query_model` and `context_id`"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        model = PatientRootQueryRetrieveInformationModelFind
        ae.add_supported_context(model)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(model)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # Supplying both is allowed
        assoc.send_c_cancel(1, context_id=1, query_model=model)

        scp.shutdown()

    def test_no_context_id_and_query_model_raises(self):
        """Test exception if unable to determine context ID"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        model = PatientRootQueryRetrieveInformationModelFind
        ae.add_supported_context(model)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(model)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        msg = (
            "'send_c_cancel' requires either the 'query_model' used for "
            "the service request or the corresponding 'context_id'"
        )
        with pytest.raises(ValueError, match=msg):
            assoc.send_c_cancel(1)

        scp.shutdown()


class TestAssociationSendCGet:
    """Run tests on Association send_c_get."""
    def setup(self):
        """Run prior to each test"""
        # Minimal Patient level query identifier
        self.ds = Dataset()
        self.ds.PatientName = '*'
        self.ds.QueryRetrieveLevel = "PATIENT"

        # A valid storable dataset yielded by the C-GET handlers
        self.good = Dataset()
        self.good.file_meta = FileMetaDataset()
        self.good.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian
        self.good.SOPClassUID = CTImageStorage
        self.good.SOPInstanceUID = '1.1.1'
        self.good.PatientName = 'Test'

        self.ae = None

    def teardown(self):
        """Clear any active threads"""
        if self.ae:
            self.ae.shutdown()

    def test_must_be_associated(self):
        """Test can't send without association."""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
        assoc = ae.associate('localhost', 11112)
        assoc.release()
        assert assoc.is_released
        assert not assoc.is_established
        with pytest.raises(RuntimeError):
            next(assoc.send_c_get(
                self.ds, PatientRootQueryRetrieveInformationModelGet)
            )

        scp.shutdown()

    def test_must_be_scp(self):
        """Test failure if not SCP for storage context."""
        store_pname = []
        def \
            handle_store(event):
            store_pname.append(event.dataset.PatientName)
            return 0x0000

        def handle_get(event):
            # Two pending store sub-operations
            yield 2
            yield 0xFF00, self.good
            yield 0xFF00, self.good

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
        ae.add_supported_context(
            CTImageStorage, scu_role=True, scp_role=True
        )
        scp = ae.start_server(
            ('', 11112),
            block=False,
            evt_handlers=[(evt.EVT_C_GET, handle_get)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
        #ae.add_requested_context(CTImageStorage)
        role = build_role(CTImageStorage, scu_role=True, scp_role=True)
        assoc = ae.associate(
            'localhost', 11112, ext_neg=[role],
            evt_handlers=[(evt.EVT_C_STORE, handle_store)]
        )
        assert assoc.is_established

        result = assoc.send_c_get(
            self.ds, PatientRootQueryRetrieveInformationModelGet
        )
        # Two pending responses...
        (status, ds) = next(result)
        assert status.Status == 0xff00
        assert ds is None
        (status, ds) = next(result)
        assert status.Status == 0xff00
        assert ds is None
        # ...then 0xA702: sub-operations failed for both instances
        (status, ds) = next(result)
        assert status.Status == 0xA702
        assert ds.FailedSOPInstanceUIDList == ['1.1.1', '1.1.1']
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_no_abstract_syntax_match(self):
        """Test when no accepted abstract syntax"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(CTImageStorage)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(CTImageStorage)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        with pytest.raises(ValueError):
            next(assoc.send_c_get(
                self.ds, PatientRootQueryRetrieveInformationModelGet)
            )
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_bad_query_model(self):
        """Test bad query model parameter"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        with pytest.raises(ValueError):
            next(assoc.send_c_get(self.ds, query_model='X'))
        assoc.release()
        assert assoc.is_released

        scp.shutdown()

    def test_fail_encode_identifier(self):
        """Test a failure in encoding the Identifier dataset"""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
        scp = ae.start_server(('', 11112), block=False)
        ae.add_requested_context(
            PatientRootQueryRetrieveInformationModelGet,
            ExplicitVRLittleEndian
        )
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established
        # Mutate the shared DATASET so it can't be encoded
        DATASET.PerimeterValue = b'\x00\x01'

        with pytest.raises(ValueError):
            next(assoc.send_c_get(
                DATASET, PatientRootQueryRetrieveInformationModelGet)
            )
        assoc.release()
        assert assoc.is_released
        del DATASET.PerimeterValue # Fix up our changes

        scp.shutdown()

    def test_rsp_failure(self):
        """Test receiving a failure response"""
        store_pname = []
        def handle_store(event):
            store_pname.append(event.dataset.PatientName)
            return 0x0000

        def handle_get(event):
            yield 1
            yield 0xA701, None

        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
        ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True)
        scp = ae.start_server(
            ('', 11112),
            block=False,
            evt_handlers=[(evt.EVT_C_GET, handle_get)]
        )

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
        ae.add_requested_context(CTImageStorage)
        role = build_role(CTImageStorage, scu_role=True, scp_role=True)
        assoc = ae.associate(
            'localhost', 11112, ext_neg=[role],
            evt_handlers=[(evt.EVT_C_STORE, handle_store)]
        )
        assert assoc.is_established
        for (status, ds) in assoc.send_c_get(self.ds,
                PatientRootQueryRetrieveInformationModelGet):
            assert status.Status \
== 0xA701 assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_success(self): """Test good send""" store_pname = [] def handle_get(event): yield 2 yield 0xFF00, self.good yield 0xFF00, self.good def handle_store(event): store_pname.append(event.dataset.PatientName) return 0x0000 scu_handler = [(evt.EVT_C_STORE, handle_store)] scp_handler = [(evt.EVT_C_GET, handle_get)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True) scp = ae.start_server(('', 11112), block=False, evt_handlers=scp_handler) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scp_role=True, scu_role=True) assoc = ae.associate( 'localhost', 11112, evt_handlers=scu_handler, ext_neg=[role] ) assert assoc.is_established result = assoc.send_c_get( self.ds, PatientRootQueryRetrieveInformationModelGet ) (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0x0000 assert ds is None assoc.release() assert assoc.is_released assert store_pname == ['Test', 'Test'] scp.shutdown() def test_rsp_pending_send_success(self): """Test receiving a pending response and sending success""" store_pname = [] def handle_get(event): yield 3 yield 0xFF00, self.good yield 0xFF00, self.good def handle_store(event): store_pname.append(event.dataset.PatientName) return 0x0000 scu_handler = [(evt.EVT_C_STORE, handle_store)] scp_handler = [(evt.EVT_C_GET, handle_get)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True) scp = 
ae.start_server(('', 11112), block=False, evt_handlers=scp_handler) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scp_role=True, scu_role=True) assoc = ae.associate( 'localhost', 11112, evt_handlers=scu_handler, ext_neg=[role] ) assert assoc.is_established result = assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet) (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0x0000 assert ds is None assoc.release() assert assoc.is_released assert store_pname == ['Test', 'Test'] scp.shutdown() def test_rsp_pending_send_failure(self): """Test receiving a pending response and sending a failure""" store_pname = [] def handle_store(event): store_pname.append(event.dataset.PatientName) return 0xA700 def handle_get(event): yield 3 yield 0xFF00, self.good yield 0xFF00, self.good yield 0x0000, None self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle_get)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scu_role=True, scp_role=True) assoc = ae.associate( 'localhost', 11112, ext_neg=[role], evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) assert assoc.is_established result = assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet) # We have 2 status, ds and 1 success (status, ds) = next(result) assert status.Status == 0xFF00 assert ds is None (status, ds) = next(result) assert status.Status == 0xFF00 assert ds is None (status, ds) = next(result) assert 
status.Status == 0xB000 assert 'FailedSOPInstanceUIDList' in ds with pytest.raises(StopIteration): next(result) assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_pending_send_warning(self): """Test receiving a pending response and sending a warning""" store_pname = [] def handle_store(event): store_pname.append(event.dataset.PatientName) return 0xB007 def handle_get(event): yield 3 yield 0xFF00, self.good yield 0xFF00, self.good yield 0xB000, None self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle_get)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scu_role=True, scp_role=True) assoc = ae.associate( 'localhost', 11112, ext_neg=[role], evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) assert assoc.is_established result = assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet) # We have 2 status, ds and 1 success (status, ds) = next(result) assert status.Status == 0xFF00 assert ds is None (status, ds) = next(result) assert status.Status == 0xFF00 assert ds is None (status, ds) = next(result) assert status.Status == 0xB000 assert 'FailedSOPInstanceUIDList' in ds with pytest.raises(StopIteration): next(result) assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_cancel(self): """Test receiving a cancel response""" store_pname = [] def handle_store(event): store_pname.append(event.dataset.PatientName) return 0x0000 def handle_get(event): yield 1 yield 0xFE00, None self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) ae.add_supported_context(CTImageStorage, 
scu_role=True, scp_role=True) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle_get)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scu_role=True, scp_role=True) assoc = ae.associate( 'localhost', 11112, ext_neg=[role], evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) assert assoc.is_established for (status, ds) in assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet): assert status.Status == 0xFE00 assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_warning(self): """Test receiving a warning response""" store_pname = [] def handle_store(event): store_pname.append(event.dataset.PatientName) return 0xB007 def handle_get(event): yield 3 yield 0xFF00, self.good yield 0xFF00, self.good yield 0xB000, None self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle_get)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scu_role=True, scp_role=True) assoc = ae.associate( 'localhost', 11112, ext_neg=[role], evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) assert assoc.is_established result = assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet) (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0xb000 assert 'FailedSOPInstanceUIDList' in ds with pytest.raises(StopIteration): next(result) assoc.release() assert assoc.is_released scp.shutdown() def test_rsp_unknown_status(self): """Test unknown 
status value returned by peer""" store_pname = [] def handle_store(event): store_pname.append(event.dataset.PatientName) return 0x0000 def handle_get(event): yield 1 yield 0xFFF0, None self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) ae.add_supported_context(CTImageStorage, scu_role=True, scp_role=True) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle_get)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) ae.add_requested_context(CTImageStorage) role = build_role(CTImageStorage, scu_role=True, scp_role=True) assoc = ae.associate( 'localhost', 11112, ext_neg=[role], evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) assert assoc.is_established for (status, ds) in assoc.send_c_get(self.ds, PatientRootQueryRetrieveInformationModelGet): assert status.Status == 0xFFF0 assoc.release() assert assoc.is_released scp.shutdown() def test_connection_timeout(self): """Test the connection timing out""" def handle(event): yield 2 yield 0xFF00, self.good yield 0xFF00, self.good hh = [(evt.EVT_C_GET, handle)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True) scp = ae.start_server(('', 11112), block=False, evt_handlers=hh) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) ae.add_requested_context(CTImageStorage) role = SCP_SCU_RoleSelectionNegotiation() role.sop_class_uid = CTImageStorage role.scu_role = False role.scp_role = True ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112, ext_neg=[role]) class DummyMessage(): is_valid_response = True DataSet = None Status = 0x0000 STATUS_OPTIONAL_KEYWORDS = [] class DummyDIMSE(): def send_msg(*args, **kwargs): return def get_msg(*args, **kwargs): return None, 
None assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() assert assoc.is_established results = assoc.send_c_get( self.ds, PatientRootQueryRetrieveInformationModelGet ) assert next(results) == (Dataset(), None) with pytest.raises(StopIteration): next(results) assert assoc.is_aborted scp.shutdown() def test_decode_failure(self): """Test the connection timing out""" def handle(event): yield 2 yield 0xFF00, self.good yield 0xFF00, self.good hh = [(evt.EVT_C_GET, handle)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True) scp = ae.start_server(('', 11112), block=False, evt_handlers=hh) ae.add_requested_context( PatientRootQueryRetrieveInformationModelGet, ExplicitVRLittleEndian ) ae.add_requested_context(CTImageStorage) role = SCP_SCU_RoleSelectionNegotiation() role.sop_class_uid = CTImageStorage role.scu_role = False role.scp_role = True assoc = ae.associate('localhost', 11112, ext_neg=[role]) class DummyMessage(): is_valid_response = True DataSet = None Status = 0x0000 STATUS_OPTIONAL_KEYWORDS = [] class DummyDIMSE(): msg_queue = queue.Queue() def send_msg(*args, **kwargs): return def get_msg(*args, **kwargs): def dummy(): pass rsp = C_GET() rsp.Status = 0xC000 rsp.MessageIDBeingRespondedTo = 1 rsp._dataset = dummy return 1, rsp assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() assert assoc.is_established results = assoc.send_c_get( self.ds, PatientRootQueryRetrieveInformationModelGet ) status, ds = next(results) assert status.Status == 0xC000 assert ds is None scp.shutdown() def test_rsp_not_get(self, caplog): """Test receiving a non C-GET/C-STORE message in response.""" with caplog.at_level(logging.ERROR, logger='pynetdicom'): ae = AE() assoc = Association(ae, 'requestor') 
assoc._is_paused = True dimse = assoc.dimse dimse.msg_queue.put((3, C_FIND())) cx = build_context(PatientRootQueryRetrieveInformationModelGet) cx._as_scu = True cx._as_scp = False cx.context_id = 1 assoc._accepted_cx = {1 : cx} identifier = Dataset() identifier.PatientID = '*' assoc.is_established = True results = assoc.send_c_get(identifier, PatientRootQueryRetrieveInformationModelGet) status, ds = next(results) assert status == Dataset() assert ds is None with pytest.raises(StopIteration): next(results) assert ( 'Received an unexpected C-FIND message from the peer' ) in caplog.text assert assoc.is_aborted def test_rsp_invalid_get(self, caplog): """Test receiving an invalid C-GET message in response.""" with caplog.at_level(logging.ERROR, logger='pynetdicom'): ae = AE() assoc = Association(ae, 'requestor') assoc._is_paused = True dimse = assoc.dimse dimse.msg_queue.put((3, C_GET())) cx = build_context(PatientRootQueryRetrieveInformationModelGet) cx._as_scu = True cx._as_scp = False cx.context_id = 1 assoc._accepted_cx = {1 : cx} identifier = Dataset() identifier.PatientID = '*' assoc.is_established = True results = assoc.send_c_get(identifier, PatientRootQueryRetrieveInformationModelGet) status, ds = next(results) assert status == Dataset() assert ds is None with pytest.raises(StopIteration): next(results) assert ( 'Received an invalid C-GET response from the peer' ) in caplog.text assert assoc.is_aborted def test_query_uid_public(self): """Test using a public UID for the query model""" def handle(event): yield 0 yield 0x0000, None self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) assoc = ae.associate('localhost', 11112) assert assoc.is_established responses = assoc.send_c_get( self.ds, 
PatientRootQueryRetrieveInformationModelGet ) for (status, ds) in responses: assert status.Status == 0x0000 assert ds is None assoc.release() assert assoc.is_released scp.shutdown() def test_query_uid_private(self, caplog): """Test using a private UID for the query model""" def handle(event): yield 0 yield 0x0000, None with caplog.at_level(logging.ERROR, logger='pynetdicom'): self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context('1.2.3.4') scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_GET, handle)] ) ae.add_requested_context('1.2.3.4') assoc = ae.associate('localhost', 11112) assert assoc.is_established responses = assoc.send_c_get(self.ds, '1.2.3.4') scp.shutdown() msg = ( "No supported service class available for the SOP Class " "UID '1.2.3.4'" ) assert msg in caplog.text def test_unrestricted_success(self, enable_unrestricted): """Test unrestricted storage""" store_pname = [] def handle_get(event): yield 3 self.good.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian self.good.PatientName = "Known^Public" yield 0xFF00, self.good self.good.SOPClassUID = "1.2.3.4" self.good.PatientName = "Private" yield 0xFF00, self.good self.good.SOPClassUID = "1.2.840.10008.1.1.1.1.1.1.1" self.good.PatientName = "Unknown^Public" yield 0xFF00, self.good def handle_store(event): store_pname.append(event.dataset.PatientName) return 0x0000 scu_handler = [(evt.EVT_C_STORE, handle_store)] scp_handler = [(evt.EVT_C_GET, handle_get)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) scp = ae.start_server(('', 11112), block=False, evt_handlers=scp_handler) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) ae.add_requested_context(CTImageStorage) ae.add_requested_context('1.2.3.4') ae.add_requested_context('1.2.840.10008.1.1.1.1.1.1.1') role_a = build_role(CTImageStorage, 
scp_role=True, scu_role=True) role_b = build_role('1.2.3.4', scp_role=True, scu_role=True) role_c = build_role( '1.2.840.10008.1.1.1.1.1.1.1', scp_role=True, scu_role=True ) assoc = ae.associate( 'localhost', 11112, evt_handlers=scu_handler, ext_neg=[role_a, role_b, role_c], ) assert assoc.is_established result = assoc.send_c_get( self.ds, PatientRootQueryRetrieveInformationModelGet ) (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0x0000 assert ds is None assoc.release() assert assoc.is_released assert store_pname == ["Known^Public", "Private", "Unknown^Public"] scp.shutdown() def test_unrestricted_failure(self, enable_unrestricted): """Test unrestricted storage with failures""" store_pname = [] def handle_get(event): yield 3 self.good.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian self.good.PatientName = "Known^Public" yield 0xFF00, self.good self.good.SOPClassUID = "1.2.3.4" self.good.PatientName = "Private" yield 0xFF00, self.good self.good.SOPClassUID = "1.2.840.10008.1.1.1.1.1.1.1" self.good.PatientName = "Unknown^Public" yield 0xFF00, self.good def handle_store(event): store_pname.append(event.dataset.PatientName) return 0x0000 scu_handler = [(evt.EVT_C_STORE, handle_store)] scp_handler = [(evt.EVT_C_GET, handle_get)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) scp = ae.start_server(('', 11112), block=False, evt_handlers=scp_handler) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) ae.add_requested_context(CTImageStorage) ae.add_requested_context('1.2.3.4') ae.add_requested_context('1.2.840.10008.1.1.1.1.1.1.1') role_c = build_role( '1.2.840.10008.1.1.1.1.1.1.1', scp_role=True, scu_role=True ) 
assoc = ae.associate( 'localhost', 11112, evt_handlers=scu_handler, ext_neg=[role_c], ) assert assoc.is_established result = assoc.send_c_get( self.ds, PatientRootQueryRetrieveInformationModelGet ) (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0xff00 assert ds is None (status, ds) = next(result) assert status.Status == 0xB000 assert ds.FailedSOPInstanceUIDList == ['1.1.1', '1.1.1'] assoc.release() assert assoc.is_released assert store_pname == ["Unknown^Public"] scp.shutdown() class TestAssociationSendCMove: """Run tests on Assocation send_c_move.""" def setup(self): """Run prior to each test""" self.ds = Dataset() self.ds.PatientName = '*' self.ds.QueryRetrieveLevel = "PATIENT" self.good = Dataset() self.good.file_meta = FileMetaDataset() self.good.file_meta.TransferSyntaxUID = ImplicitVRLittleEndian self.good.SOPClassUID = CTImageStorage self.good.SOPInstanceUID = '1.1.1' self.good.PatientName = 'Test' self.ae = None def teardown(self): """Clear any active threads""" if self.ae: self.ae.shutdown() def test_must_be_associated(self): """Test can't send without association.""" # Test raise if assoc not established self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assoc.release() assert assoc.is_released assert not assoc.is_established with pytest.raises(RuntimeError): next(assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove) ) scp.shutdown() def test_no_abstract_syntax_match(self): """Test when no accepted abstract syntax""" self.ae = ae = AE() ae.acse_timeout = 5 
ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) assert assoc.is_established with pytest.raises(ValueError): next(assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove) ) assoc.release() assert assoc.is_released scp.shutdown() def test_bad_query_model(self): """Test bad query model parameter""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) assoc = ae.associate('localhost', 11112) assert assoc.is_established with pytest.raises(ValueError): next(assoc.send_c_move(self.ds, 'TESTMOVE', query_model='X')) assoc.release() assert assoc.is_released scp.shutdown() def test_fail_encode_identifier(self): """Test a failure in encoding the Identifier dataset""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context( PatientRootQueryRetrieveInformationModelMove, ExplicitVRLittleEndian ) assoc = ae.associate('localhost', 11112) assert assoc.is_established DATASET.PerimeterValue = b'\x00\x01' with pytest.raises(ValueError): next(assoc.send_c_move( DATASET, 'SOMEPLACE', PatientRootQueryRetrieveInformationModelMove) ) assoc.release() assert assoc.is_released del DATASET.PerimeterValue # Fix up our changes scp.shutdown() def test_move_destination_no_assoc(self): """Test move destination failed to assoc""" # Move SCP def handle_move(event): yield 'localhost', 11113 yield 2 yield 0xFF00, self.good self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 
ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) move_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) assoc = ae.associate('localhost', 11112) assert assoc.is_established for (status, ds) in assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove): assert status.Status == 0xa801 assoc.release() assert assoc.is_released move_scp.shutdown() def test_move_destination_unknown(self): """Test unknown move destination""" def handle_move(event): yield None, None yield 1 yield 0xFF00, self.good self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) move_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) assoc = ae.associate('localhost', 11112) assert assoc.is_established for (status, ds) in assoc.send_c_move( self.ds, 'UNKNOWN', PatientRootQueryRetrieveInformationModelMove): assert status.Status == 0xa801 assoc.release() assert assoc.is_released move_scp.shutdown() def test_move_destination_failed_store(self): """Test the destination AE returning failed status""" def handle_store(event): return 0xA700 def handle_move(event): yield 'localhost', 11113 yield 2 yield 0xFF00, self.good yield 0xFF00, self.good self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) move_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) ae.add_supported_context(CTImageStorage) store_scp = ae.start_server( ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) assoc = 
ae.associate('localhost', 11112) assert assoc.is_established result = assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove ) (status, ds) = next(result) assert status.Status == 0xFF00 (status, ds) = next(result) assert status.Status == 0xFF00 (status, ds) = next(result) assert status.Status == 0xA702 with pytest.raises(StopIteration): next(result) assoc.release() assert assoc.is_released store_scp.shutdown() move_scp.shutdown() def test_move_destination_warning_store(self): """Test the destination AE returning warning status""" def handle_store(event): return 0xB000 def handle_move(event): yield 'localhost', 11113 yield 2 yield 0xFF00, self.good yield 0xFF00, self.good self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) move_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) ae.add_supported_context(CTImageStorage) store_scp = ae.start_server( ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) assert assoc.is_established result = assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove ) (status, ds) = next(result) assert status.Status == 0xFF00 (status, ds) = next(result) assert status.Status == 0xFF00 (status, ds) = next(result) assert status.Status == 0xB000 assoc.release() assert assoc.is_released store_scp.shutdown() move_scp.shutdown() def test_rsp_failure(self): """Test the handler returning failure status""" def handle_store(event): return 0x0000 def handle_move(event): yield 'localhost', 11113 yield 2 yield 0xC000, None yield 0xFF00, self.good self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 
ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) move_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) ae.add_supported_context(CTImageStorage) store_scp = ae.start_server( ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) assoc = ae.associate('localhost', 11112) assert assoc.is_established result = assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove ) (status, ds) = next(result) assert status.Status == 0xC000 assert 'FailedSOPInstanceUIDList' in ds with pytest.raises(StopIteration): next(result) assoc.release() assert assoc.is_released store_scp.shutdown() move_scp.shutdown() def test_rsp_warning(self): """Test receiving a warning response from the peer""" def handle_store(event): return 0xB007 def handle_move(event): yield 'localhost', 11113 yield 2 yield 0xFF00, self.good yield 0xFF00, self.good self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) move_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) ae.add_supported_context(CTImageStorage) store_scp = ae.start_server( ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) assert assoc.is_established result = assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove ) (status, ds) = next(result) assert status.Status == 0xFF00 assert ds is None (status, ds) = next(result) assert status.Status == 0xFF00 assert ds is None (status, ds) = next(result) assert status.Status == 0xB000 assert 'FailedSOPInstanceUIDList' in ds with pytest.raises(StopIteration): next(result) 
assoc.release() assert assoc.is_released store_scp.shutdown() move_scp.shutdown() def test_rsp_cancel(self): """Test the handler returning cancel status""" def handle_store(event): return 0x0000 def handle_move(event): yield 'localhost', 11113 yield 2 yield 0xFE00, self.good yield 0xFF00, self.good self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) move_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) ae.add_supported_context(CTImageStorage) store_scp = ae.start_server( ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) assoc = ae.associate('localhost', 11112) assert assoc.is_established result = assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove ) (status, ds) = next(result) assert status.Status == 0xFE00 assoc.release() assert assoc.is_released store_scp.shutdown() move_scp.shutdown() def test_rsp_success(self): """Test the handler returning success status""" def handle_store(event): return 0x0000 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 # Storage SCP ae.add_supported_context(CTImageStorage) store_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) # Move SCP def handle_move(event): yield 'localhost', 11112 yield 2 yield 0xff00, self.good ae.add_requested_context(CTImageStorage) ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) ae.add_supported_context(StudyRootQueryRetrieveInformationModelMove) ae.add_supported_context(PatientStudyOnlyQueryRetrieveInformationModelMove) move_scp = ae.start_server( ('', 11113), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) # Move SCU ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) 
ae.add_requested_context(StudyRootQueryRetrieveInformationModelMove) ae.add_requested_context(PatientStudyOnlyQueryRetrieveInformationModelMove) assoc = ae.associate('localhost', 11113) assert assoc.is_established result = assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove ) (status, ds) = next(result) assert status.Status == 0xFF00 assert ds is None (status, ds) = next(result) assert status.Status == 0x0000 assert ds is None with pytest.raises(StopIteration): next(result) assoc.release() assert assoc.is_released store_scp.shutdown() move_scp.shutdown() def test_rsp_unknown_status(self): """Test unknown status value returned by peer""" def handle_store(event): return 0xA700 def handle_move(event): yield 'localhost', 11113 yield 2 yield 0xFFF0, self.good yield 0xFF00, self.good self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) move_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) ae.add_supported_context(CTImageStorage) store_scp = ae.start_server( ('', 11113), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) assoc = ae.associate('localhost', 11112) assert assoc.is_established for (status, ds) in assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove): assert status.Status == 0xFFF0 assoc.release() assert assoc.is_released store_scp.shutdown() move_scp.shutdown() def test_multiple_c_move(self): """Test multiple C-MOVE operation requests""" def handle_store(event): return 0x0000 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 # Storage SCP ae.add_supported_context(CTImageStorage) store_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) # Move SCP def handle_move(event): yield 'localhost', 
11112 yield 2 yield 0xff00, self.good yield 0xff00, self.good ae.add_requested_context(CTImageStorage) ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) ae.add_supported_context(StudyRootQueryRetrieveInformationModelMove) ae.add_supported_context(PatientStudyOnlyQueryRetrieveInformationModelMove) move_scp = ae.start_server( ('', 11113), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) # Move SCU ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) ae.add_requested_context(StudyRootQueryRetrieveInformationModelMove) ae.add_requested_context(PatientStudyOnlyQueryRetrieveInformationModelMove) for ii in range(20): assoc = ae.associate('localhost', 11113) assert assoc.is_established assert not assoc.is_released result = assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove ) (status, ds) = next(result) assert status.Status == 0xFF00 (status, ds) = next(result) assert status.Status == 0xFF00 (status, ds) = next(result) assert status.Status == 0x0000 with pytest.raises(StopIteration): next(result) assoc.release() assert assoc.is_released assert not assoc.is_established store_scp.shutdown() move_scp.shutdown() def test_connection_timeout(self): """Test the connection timing out""" def handle(event): yield ('localhost', 11112) yield 2 yield 0xFF00, self.good yield 0xFF00, self.good hh = [(evt.EVT_C_MOVE, handle)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) scp = ae.start_server(('', 11112), block=False, evt_handlers=hh) ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) class DummyMessage(): is_valid_response = True Identifier = None Status = 0x0000 STATUS_OPTIONAL_KEYWORDS = [] class DummyDIMSE(): def send_msg(*args, **kwargs): return def get_msg(*args, **kwargs): return None, None 
assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() assert assoc.is_established results = assoc.send_c_move( self.ds, 'TEST', PatientRootQueryRetrieveInformationModelMove ) assert next(results) == (Dataset(), None) with pytest.raises(StopIteration): next(results) assert assoc.is_aborted scp.shutdown() def test_decode_failure(self): """Test the connection timing out""" def handle(event): yield ('localhost', 11112) yield 2 yield 0xFF00, self.good yield 0xFF00, self.good hh = [(evt.EVT_C_MOVE, handle)] self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) scp = ae.start_server(('', 11112), block=False, evt_handlers=hh) ae.add_requested_context( PatientRootQueryRetrieveInformationModelMove, ExplicitVRLittleEndian ) ae.add_requested_context(CTImageStorage) assoc = ae.associate('localhost', 11112) class DummyMessage(): is_valid_response = True DataSet = None Status = 0x0000 STATUS_OPTIONAL_KEYWORDS = [] class DummyDIMSE(): msg_queue = queue.Queue() def send_msg(*args, **kwargs): return def get_msg(*args, **kwargs): def dummy(): pass rsp = C_MOVE() rsp.MessageIDBeingRespondedTo = 1 rsp.Status = 0xC000 rsp._dataset = dummy return 1, rsp assoc._reactor_checkpoint.clear() while not assoc._is_paused: time.sleep(0.01) assoc.dimse = DummyDIMSE() assert assoc.is_established results = assoc.send_c_move( self.ds, 'TEST', PatientRootQueryRetrieveInformationModelMove ) status, ds = next(results) assert status.Status == 0xC000 assert ds is None scp.shutdown() def test_rsp_not_move(self, caplog): """Test receiving a non C-MOVE/C-STORE message in response.""" with caplog.at_level(logging.ERROR, logger='pynetdicom'): ae = AE() assoc = Association(ae, 'requestor') assoc._is_paused = True dimse = assoc.dimse dimse.msg_queue.put((3, C_FIND())) cx = build_context(PatientRootQueryRetrieveInformationModelMove) cx._as_scu = True cx._as_scp = 
False cx.context_id = 1 assoc._accepted_cx = {1 : cx} identifier = Dataset() identifier.PatientID = '*' assoc.is_established = True results = assoc.send_c_move( identifier, 'A', PatientRootQueryRetrieveInformationModelMove ) status, ds = next(results) assert status == Dataset() assert ds is None with pytest.raises(StopIteration): next(results) assert ( 'Received an unexpected C-FIND message from the peer' ) in caplog.text assert assoc.is_aborted def test_rsp_invalid_move(self, caplog): """Test receiving an invalid C-MOVE message in response.""" with caplog.at_level(logging.ERROR, logger='pynetdicom'): ae = AE() assoc = Association(ae, 'requestor') assoc._is_paused = True dimse = assoc.dimse dimse.msg_queue.put((3, C_MOVE())) cx = build_context(PatientRootQueryRetrieveInformationModelMove) cx._as_scu = True cx._as_scp = False cx.context_id = 1 assoc._accepted_cx = {1 : cx} identifier = Dataset() identifier.PatientID = '*' assoc.is_established = True results = assoc.send_c_move( identifier, 'A', PatientRootQueryRetrieveInformationModelMove ) status, ds = next(results) assert status == Dataset() assert ds is None with pytest.raises(StopIteration): next(results) assert ( 'Received an invalid C-MOVE response from the peer' ) in caplog.text assert assoc.is_aborted def test_query_uid_public(self): """Test using a public UID for the query model""" def handle_store(event): return 0x0000 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 # Storage SCP ae.add_supported_context(CTImageStorage) store_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) # Move SCP def handle_move(event): yield 'localhost', 11112 yield 2 yield 0xff00, self.good ae.add_requested_context(CTImageStorage) ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove) ae.add_supported_context(StudyRootQueryRetrieveInformationModelMove) ae.add_supported_context(PatientStudyOnlyQueryRetrieveInformationModelMove) 
move_scp = ae.start_server( ('', 11113), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) # Move SCU ae.add_requested_context(PatientRootQueryRetrieveInformationModelMove) ae.add_requested_context(StudyRootQueryRetrieveInformationModelMove) ae.add_requested_context(PatientStudyOnlyQueryRetrieveInformationModelMove) assoc = ae.associate('localhost', 11113) assert assoc.is_established result = assoc.send_c_move( self.ds, 'TESTMOVE', PatientRootQueryRetrieveInformationModelMove ) (status, ds) = next(result) assert status.Status == 0xFF00 assert ds is None (status, ds) = next(result) assert status.Status == 0x0000 assert ds is None with pytest.raises(StopIteration): next(result) assoc.release() assert assoc.is_released store_scp.shutdown() move_scp.shutdown() def test_query_uid_private(self, caplog): """Test using a private UID for the query model""" def handle_store(event): return 0x0000 def handle_move(event): yield 'localhost', 11112 yield 2 yield 0xff00, self.good with caplog.at_level(logging.ERROR, logger='pynetdicom'): self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 # Storage SCP ae.add_supported_context(CTImageStorage) store_scp = ae.start_server( ('', 11112), block=False, evt_handlers=[(evt.EVT_C_STORE, handle_store)] ) ae.add_requested_context(CTImageStorage) ae.add_supported_context('1.2.3.4') move_scp = ae.start_server( ('', 11113), block=False, evt_handlers=[(evt.EVT_C_MOVE, handle_move)] ) # Move SCU ae.add_requested_context('1.2.3.4') assoc = ae.associate('localhost', 11113) assert assoc.is_established result = assoc.send_c_move(self.ds, 'TESTMOVE', '1.2.3.4') store_scp.shutdown() move_scp.shutdown() msg = ( "No supported service class available for the SOP Class " "UID '1.2.3.4'" ) assert msg in caplog.text class TestGetValidContext: """Tests for Association._get_valid_context.""" def setup(self): """Run prior to each test""" self.ae = None def teardown(self): """Clear any active threads""" if self.ae: 
self.ae.shutdown() def test_id_no_abstract_syntax_match(self): """Test exception raised if with ID no abstract syntax match""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) ae.add_requested_context(CTImageStorage) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established msg = ( r"No presentation context for 'CT Image Storage' has been " r"accepted by the peer for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context(CTImageStorage, '', 'scu', context_id=1) assoc.release() scp.shutdown() def test_id_transfer_syntax(self): """Test match with context ID.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage) ae.add_supported_context( CTImageStorage, [ExplicitVRLittleEndian, JPEGBaseline8Bit] ) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) ae.add_requested_context(CTImageStorage) ae.add_requested_context(CTImageStorage, JPEGBaseline8Bit) assoc = ae.associate('localhost', 11112) assert assoc.is_established # Uncompressed accepted, different uncompressed sent cx = assoc._get_valid_context( CTImageStorage, '', 'scu', context_id=3 ) assert cx.context_id == 3 assert cx.abstract_syntax == CTImageStorage assert cx.transfer_syntax[0] == ImplicitVRLittleEndian assert cx.as_scu is True cx = assoc._get_valid_context( CTImageStorage, '', 'scu', context_id=5 ) assert cx.context_id == 5 assert cx.abstract_syntax == CTImageStorage assert cx.transfer_syntax[0] == JPEGBaseline8Bit assert cx.as_scu is True assoc.release() scp.shutdown() def test_id_no_transfer_syntax(self): """Test exception raised if with ID no transfer syntax match.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 
ae.add_supported_context(Verification) ae.add_supported_context(CTImageStorage, JPEGBaseline8Bit) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) ae.add_requested_context(CTImageStorage, JPEGBaseline8Bit) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established # Confirm otherwise OK cx = assoc._get_valid_context( '1.2.840.10008.1.1', '', 'scu', context_id=1 ) assert cx.context_id == 1 assert cx.transfer_syntax[0] == ImplicitVRLittleEndian # Uncompressed accepted, compressed sent msg = ( r"No presentation context for 'Verification SOP Class' has been " r"accepted by the peer with 'JPEG Baseline \(Process 1\)' " r"transfer syntax for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context( '1.2.840.10008.1.1', JPEGBaseline8Bit, 'scu', context_id=1 ) # Compressed (JPEGBaseline8Bit) accepted, uncompressed sent # Confirm otherwise OK cx = assoc._get_valid_context( CTImageStorage, JPEGBaseline8Bit, 'scu', context_id=3 ) assert cx.context_id == 3 assert cx.transfer_syntax[0] == JPEGBaseline8Bit msg = ( r"No presentation context for 'CT Image Storage' has been " r"accepted by the peer with 'Implicit VR Little Endian' " r"transfer syntax for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context( CTImageStorage, ImplicitVRLittleEndian, 'scu', context_id=3 ) # Compressed (JPEGBaseline8Bit) accepted, compressed (JPEG2000) sent msg = ( r"No presentation context for 'CT Image Storage' has been " r"accepted by the peer with 'JPEG 2000 Image Compression' " r"transfer syntax for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context( CTImageStorage, JPEG2000, 'scu', context_id=3 ) assoc.release() scp.shutdown() def test_id_no_role_scp(self): """Test exception raised if with ID no role match.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 
ae.add_supported_context(Verification) ae.add_supported_context(CTImageStorage, JPEGBaseline8Bit) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) ae.add_requested_context(CTImageStorage, JPEGBaseline8Bit) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established # Confirm matching otherwise OK cx = assoc._get_valid_context( '1.2.840.10008.1.1', '', 'scu', context_id=1 ) assert cx.context_id == 1 assert cx.as_scu is True # Any transfer syntax msg = ( r"No presentation context for 'Verification SOP Class' has been " r"accepted by the peer " r"for the SCP role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context( '1.2.840.10008.1.1', '', 'scp', context_id=1 ) # Transfer syntax used msg = ( r"No presentation context for 'Verification SOP Class' has been " r"accepted by the peer " r"with 'Implicit VR Little Endian' transfer syntax " r"for the SCP role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context( '1.2.840.10008.1.1', ImplicitVRLittleEndian, 'scp', context_id=1 ) assoc.release() scp.shutdown() def test_id_no_role_scu(self): """Test exception raised if with ID no role match.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet) ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet) ae.add_requested_context(CTImageStorage) role = SCP_SCU_RoleSelectionNegotiation() role.sop_class_uid = CTImageStorage role.scu_role = False role.scp_role = True ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112, ext_neg=[role]) assert assoc.is_established # Confirm matching otherwise OK cx = assoc._get_valid_context( CTImageStorage, '', 'scp', context_id=3 ) assert cx.context_id == 3 assert 
cx.as_scp is True # Any transfer syntax msg = ( r"No presentation context for 'CT Image Storage' has been " r"accepted by the peer " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context( CTImageStorage, '', 'scu', context_id=3 ) # Transfer syntax used msg = ( r"No presentation context for 'CT Image Storage' has been " r"accepted by the peer " r"with 'Implicit VR Little Endian' transfer syntax " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context( CTImageStorage, ImplicitVRLittleEndian, 'scu', context_id=3 ) assoc.release() scp.shutdown() def test_no_id_no_abstract_syntax_match(self): """Test exception raised if no abstract syntax match""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) ae.add_requested_context(CTImageStorage) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established # Test otherwise OK assoc._get_valid_context(Verification, '', 'scu') msg = ( r"No presentation context for 'CT Image Storage' has been " r"accepted by the peer " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context(CTImageStorage, '', 'scu') assoc.release() scp.shutdown() def test_no_id_transfer_syntax(self): """Test match.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) ae.add_supported_context(CTImageStorage, JPEGBaseline8Bit) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) ae.add_requested_context(CTImageStorage, JPEGBaseline8Bit) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established # Uncompressed accepted, different uncompressed sent cx = assoc._get_valid_context( '1.2.840.10008.1.1', 
ExplicitVRLittleEndian, 'scu' ) assert cx.context_id == 1 assert cx.abstract_syntax == Verification assert cx.transfer_syntax[0] == ImplicitVRLittleEndian assert cx.as_scu is True assoc.release() scp.shutdown() def test_no_id_no_transfer_syntax(self): """Test exception raised if no transfer syntax match.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) ae.add_supported_context(CTImageStorage, JPEGBaseline8Bit) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(Verification) ae.add_requested_context(CTImageStorage, JPEGBaseline8Bit) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established # Confirm otherwise OK cx = assoc._get_valid_context('1.2.840.10008.1.1', '', 'scu') assert cx.context_id == 1 assert cx.transfer_syntax[0] == ImplicitVRLittleEndian # Uncompressed accepted, compressed sent msg = ( r"No presentation context for 'Verification SOP Class' has been " r"accepted by the peer " r"with 'JPEG Baseline \(Process 1\)' transfer syntax " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context('1.2.840.10008.1.1', JPEGBaseline8Bit, 'scu') # Compressed (JPEGBaseline8Bit) accepted, uncompressed sent # Confirm otherwise OK cx = assoc._get_valid_context(CTImageStorage, JPEGBaseline8Bit, 'scu') assert cx.context_id == 3 assert cx.transfer_syntax[0] == JPEGBaseline8Bit msg = ( r"No presentation context for 'CT Image Storage' has been " r"accepted by the peer " r"with 'Implicit VR Little Endian' transfer syntax " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context( CTImageStorage, ImplicitVRLittleEndian, 'scu' ) # Compressed (JPEGBaseline8Bit) accepted, compressed (JPEG2000) sent msg = ( r"No presentation context for 'CT Image Storage' has been " r"accepted by the peer " r"with 'JPEG 2000 Image Compression' transfer syntax " r"for the SCU role" ) 
with pytest.raises(ValueError, match=msg):
            assoc._get_valid_context(CTImageStorage, JPEG2000, 'scu')

        assoc.release()
        scp.shutdown()

    def test_no_id_no_role_scp(self):
        """Test exception raised if no role match."""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(Verification)
        ae.add_supported_context(CTImageStorage, JPEGBaseline8Bit)
        scp = ae.start_server(('', 11112), block=False)

        # No role selection negotiation proposed -> contexts default to
        # SCU-only on the requestor side
        ae.add_requested_context(Verification)
        ae.add_requested_context(CTImageStorage, JPEGBaseline8Bit)
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        # Confirm matching otherwise OK
        cx = assoc._get_valid_context('1.2.840.10008.1.1', '', 'scu')
        assert cx.context_id == 1
        assert cx.as_scu is True

        # Any transfer syntax
        msg = (
            r"No presentation context for 'Verification SOP Class' has been "
            r"accepted by the peer "
            r"for the SCP role"
        )
        with pytest.raises(ValueError, match=msg):
            assoc._get_valid_context('1.2.840.10008.1.1', '', 'scp')

        # Transfer syntax used
        msg = (
            r"No presentation context for 'Verification SOP Class' has been "
            r"accepted by the peer "
            r"with 'Implicit VR Little Endian' transfer syntax "
            r"for the SCP role"
        )
        with pytest.raises(ValueError, match=msg):
            assoc._get_valid_context(
                '1.2.840.10008.1.1', ImplicitVRLittleEndian, 'scp'
            )

        assoc.release()
        scp.shutdown()

    def test_no_id_no_role_scu(self):
        """Test exception raised if no role match."""
        self.ae = ae = AE()
        ae.acse_timeout = 5
        ae.dimse_timeout = 5
        ae.network_timeout = 5
        ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
        ae.add_supported_context(CTImageStorage, scp_role=True, scu_role=True)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(PatientRootQueryRetrieveInformationModelGet)
        ae.add_requested_context(CTImageStorage)
        # Negotiate SCP-only role for CT Image Storage on the requestor
        role = SCP_SCU_RoleSelectionNegotiation()
        role.sop_class_uid = CTImageStorage
        role.scu_role = False
        role.scp_role = True

        ae.acse_timeout = 5
ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112, ext_neg=[role]) assert assoc.is_established # Confirm matching otherwise OK cx = assoc._get_valid_context(CTImageStorage, '', 'scp') assert cx.context_id == 3 assert cx.as_scp is True # Any transfer syntax msg = ( r"No presentation context for 'CT Image Storage' has been " r"accepted by the peer " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context(CTImageStorage, '', 'scu') # Transfer syntax used msg = ( r"No presentation context for 'CT Image Storage' has been " r"accepted by the peer " r"with 'Implicit VR Little Endian' transfer syntax " r"for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context( CTImageStorage, ImplicitVRLittleEndian, 'scu' ) assoc.release() scp.shutdown() def test_implicit_explicit(self): """Test matching when both implicit and explicit are available.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) ae.add_supported_context(CTImageStorage, ImplicitVRLittleEndian) ae.add_supported_context(CTImageStorage, ExplicitVRLittleEndian) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(CTImageStorage, ImplicitVRLittleEndian) ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established cx = assoc._get_valid_context( CTImageStorage, ExplicitVRLittleEndian, 'scu' ) assert cx.context_id == 3 assert cx.abstract_syntax == CTImageStorage assert cx.transfer_syntax[0] == ExplicitVRLittleEndian assert cx.as_scu is True cx = assoc._get_valid_context( CTImageStorage, ImplicitVRLittleEndian, 'scu' ) assert cx.context_id == 1 assert cx.abstract_syntax == CTImageStorage assert cx.transfer_syntax[0] == ImplicitVRLittleEndian assert cx.as_scu is True assoc.release() scp.shutdown() def test_explicit_implicit(self): """Test matching 
when both implicit and explicit are available.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) ae.add_supported_context(CTImageStorage, ExplicitVRLittleEndian) ae.add_supported_context(CTImageStorage, ImplicitVRLittleEndian) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian) ae.add_requested_context(CTImageStorage, ImplicitVRLittleEndian) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established cx = assoc._get_valid_context( CTImageStorage, ExplicitVRLittleEndian, 'scu' ) assert cx.context_id == 1 assert cx.abstract_syntax == CTImageStorage assert cx.transfer_syntax[0] == ExplicitVRLittleEndian assert cx.as_scu is True cx = assoc._get_valid_context( CTImageStorage, ImplicitVRLittleEndian, 'scu' ) assert cx.context_id == 3 assert cx.abstract_syntax == CTImageStorage assert cx.transfer_syntax[0] == ImplicitVRLittleEndian assert cx.as_scu is True assoc.release() scp.shutdown def test_little_big(self): """Test no match from little to big endian.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) ae.add_supported_context(MRImageStorage, ExplicitVRLittleEndian) ae.add_supported_context(CTImageStorage, ImplicitVRLittleEndian) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(MRImageStorage, ExplicitVRBigEndian) ae.add_requested_context(MRImageStorage, ExplicitVRLittleEndian) ae.add_requested_context(CTImageStorage, ImplicitVRLittleEndian) ae.acse_timeout = 5 ae.dimse_timeout = 5 assoc = ae.associate('localhost', 11112) assert assoc.is_established msg = ( r"No presentation context for 'MR Image Storage' has been " r"accepted by the peer with 'Explicit VR Big Endian' transfer " r"syntax for the SCU role" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context( 
MRImageStorage, ExplicitVRBigEndian, 'scu'
            )

        assoc.release()
        scp.shutdown()

    def test_ups_push_action(self, caplog):
        """Test matching UPS Push to other UPS contexts."""
        def handle(event, cx):
            # Record which presentation context the request arrived on
            cx.append(event.context)
            return 0x0000, None

        self.ae = ae = AE()
        ae.network_timeout = 5
        ae.dimse_timeout = 5
        ae.acse_timeout = 5
        # Acceptor only supports UPS Pull, not the UPS Push used by the SCU
        ae.add_supported_context(UnifiedProcedureStepPull)
        contexts = []
        handlers = [(evt.EVT_N_ACTION, handle, [contexts])]
        scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)

        ae.add_requested_context(UnifiedProcedureStepPull)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        msg = (
            r"No exact matching context found for 'Unified Procedure Step "
            r"- Push SOP Class', checking accepted contexts for other UPS "
            r"SOP classes"
        )
        ds = Dataset()
        ds.TransactionUID = '1.2.3.4'
        with caplog.at_level(logging.DEBUG, logger='pynetdicom'):
            # N-ACTION against UPS Push should fall back to the accepted
            # UPS Pull context and log the fallback
            status, rsp = assoc.send_n_action(
                ds, 1, UnifiedProcedureStepPush, '1.2.3'
            )
            assert msg in caplog.text

        assoc.release()

        # The request was actually sent on the UPS Pull context
        assert contexts[0].abstract_syntax == UnifiedProcedureStepPull
        scp.shutdown()

    def test_ups_push_get(self, caplog):
        """Test matching UPS Push to other UPS contexts."""
        self.ae = ae = AE()
        ae.network_timeout = 5
        ae.dimse_timeout = 5
        ae.acse_timeout = 5
        ae.add_supported_context(UnifiedProcedureStepPull)
        scp = ae.start_server(('', 11112), block=False)

        ae.add_requested_context(UnifiedProcedureStepPull)
        assoc = ae.associate('localhost', 11112)
        assert assoc.is_established

        msg = (
            r"No exact matching context found for 'Unified Procedure Step "
            r"- Push SOP Class', checking accepted contexts for other UPS "
            r"SOP classes"
        )
        with caplog.at_level(logging.DEBUG, logger='pynetdicom'):
            # N-GET against UPS Push should also fall back to UPS Pull
            status, rsp = assoc.send_n_get(
                [0x00100010], UnifiedProcedureStepPush, '1.2.3'
            )
            assert msg in caplog.text

        assoc.release()
        scp.shutdown()

    def test_ups_push_set(self, caplog):
        """Test matching UPS Push to other UPS contexts."""
        self.ae = ae = AE()
        ae.network_timeout = 5
        ae.dimse_timeout = 5
        ae.acse_timeout = 5
ae.add_supported_context(UnifiedProcedureStepPull) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(UnifiedProcedureStepPull) assoc = ae.associate('localhost', 11112) assert assoc.is_established msg = ( r"No exact matching context found for 'Unified Procedure Step " r"- Push SOP Class', checking accepted contexts for other UPS " r"SOP classes" ) ds = Dataset() ds.TransactionUID = '1.2.3.4' with caplog.at_level(logging.DEBUG, logger='pynetdicom'): status, rsp = assoc.send_n_set( ds, UnifiedProcedureStepPush, '1.2.3' ) assert msg in caplog.text assoc.release() scp.shutdown() def test_ups_push_er(self, caplog): """Test matching UPS Push to other UPS contexts.""" self.ae = ae = AE() ae.network_timeout = 5 ae.dimse_timeout = 5 ae.acse_timeout = 5 ae.add_supported_context(UnifiedProcedureStepPull) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(UnifiedProcedureStepPull) assoc = ae.associate('localhost', 11112) assert assoc.is_established msg = ( r"No exact matching context found for 'Unified Procedure Step " r"- Push SOP Class', checking accepted contexts for other UPS " r"SOP classes" ) ds = Dataset() ds.TransactionUID = '1.2.3.4' with caplog.at_level(logging.DEBUG, logger='pynetdicom'): status, rsp = assoc.send_n_event_report( ds, 1, UnifiedProcedureStepPush, '1.2.3' ) assert msg in caplog.text assoc.release() scp.shutdown() def test_ups_push_find(self, caplog): """Test matching UPS Push to other UPS contexts.""" self.ae = ae = AE() ae.network_timeout = 5 ae.dimse_timeout = 5 ae.acse_timeout = 5 ae.add_supported_context(UnifiedProcedureStepPull) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(UnifiedProcedureStepPull) assoc = ae.associate('localhost', 11112) assert assoc.is_established msg = ( r"No exact matching context found for 'Unified Procedure Step " r"- Push SOP Class', checking accepted contexts for other UPS " r"SOP classes" ) ds = Dataset() ds.TransactionUID = '1.2.3.4' with 
caplog.at_level(logging.DEBUG, logger='pynetdicom'): responses = assoc.send_c_find(ds, UnifiedProcedureStepPush) assert msg in caplog.text assoc.release() scp.shutdown() def test_allow_conversion(self): """Test allow_conversion=False.""" self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(CTImageStorage, ImplicitVRLittleEndian) ae.add_supported_context(CTImageStorage, ExplicitVRLittleEndian) scp = ae.start_server(('', 11112), block=False) ae.add_requested_context(CTImageStorage, ImplicitVRLittleEndian) #ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian) assoc = ae.associate('localhost', 11112) assert assoc.is_established msg = ( r"No presentation context for 'CT Image Storage' has been " r"accepted by the peer with 'Explicit VR" ) with pytest.raises(ValueError, match=msg): assoc._get_valid_context( CTImageStorage, ExplicitVRLittleEndian, 'scu', allow_conversion=False ) assoc.release() scp.shutdown() class TestEventHandlingAcceptor: """Test the transport events and handling as acceptor.""" def setup(self): self.ae = None _config.LOG_HANDLER_LEVEL = 'none' def teardown(self): if self.ae: self.ae.shutdown() _config.LOG_HANDLER_LEVEL = 'standard' def test_no_handlers(self): """Test with no association event handlers bound.""" self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), block=False) assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) 
== [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ABORTED) == [] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assert child.get_handlers(evt.EVT_ESTABLISHED) == [] assert child.get_handlers(evt.EVT_REJECTED) == [] assert child.get_handlers(evt.EVT_RELEASED) == [] assert child.get_handlers(evt.EVT_REQUESTED) == [] assoc.release() scp.shutdown() def test_no_handlers_unbind(self): """Test unbinding a handler that's not bound.""" _config.LOG_HANDLER_LEVEL = 'standard' def dummy(event): pass self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), block=False) assert dummy not in scp._handlers[evt.EVT_DIMSE_SENT] scp.unbind(evt.EVT_DIMSE_SENT, dummy) assert dummy not in scp._handlers[evt.EVT_DIMSE_SENT] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert dummy not in assoc._handlers[evt.EVT_DIMSE_SENT] assoc.unbind(evt.EVT_DIMSE_SENT, dummy) assert dummy not in assoc._handlers[evt.EVT_DIMSE_SENT] child = scp.active_associations[0] assert dummy not in child._handlers[evt.EVT_DIMSE_SENT] child.unbind(evt.EVT_DIMSE_SENT, dummy) assert dummy not in child._handlers[evt.EVT_DIMSE_SENT] assoc.release() scp.shutdown() def test_unbind_intervention(self): """Test unbinding a user intervention handler.""" def dummy(event): pass self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), block=False) 
scp.bind(evt.EVT_C_ECHO, dummy) assert scp.get_handlers(evt.EVT_C_ECHO) == (dummy, None) scp.unbind(evt.EVT_C_ECHO, dummy) assert scp.get_handlers(evt.EVT_C_ECHO) != (dummy, None) assert scp.get_handlers(evt.EVT_C_ECHO) == (evt._c_echo_handler, None) scp.shutdown() def test_unbind_intervention_assoc(self): """Test unbinding a user intervention handler.""" def dummy(event): pass self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), block=False) scp.bind(evt.EVT_C_ECHO, dummy) assert scp.get_handlers(evt.EVT_C_ECHO) == (dummy, None) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 child = scp.active_associations[0] assert child.get_handlers(evt.EVT_C_ECHO) == (dummy, None) scp.unbind(evt.EVT_C_ECHO, dummy) assert scp.get_handlers(evt.EVT_C_ECHO) != (dummy, None) assert scp.get_handlers(evt.EVT_C_ECHO) == (evt._c_echo_handler, None) assert child.get_handlers(evt.EVT_C_ECHO) != (dummy, None) assert child.get_handlers(evt.EVT_C_ECHO) == ( evt._c_echo_handler, None ) assoc.release() scp.shutdown() def test_abort(self): """Test starting with handler bound to EVT_ABORTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ABORTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)] assert 
scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ABORTED) == [(handle, None)] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assert child.get_handlers(evt.EVT_ESTABLISHED) == [] assert child.get_handlers(evt.EVT_REJECTED) == [] assert child.get_handlers(evt.EVT_RELEASED) == [] assert child.get_handlers(evt.EVT_REQUESTED) == [] assoc.abort() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_ABORTED' scp.shutdown() def test_abort_bind(self): """Test binding a handler to EVT_ABORTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ABORTED, handle)] scp = ae.start_server(('', 11112), block=False) assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert 
scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ABORTED) == [] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assert child.get_handlers(evt.EVT_ESTABLISHED) == [] assert child.get_handlers(evt.EVT_REJECTED) == [] assert child.get_handlers(evt.EVT_RELEASED) == [] assert child.get_handlers(evt.EVT_REQUESTED) == [] scp.bind(evt.EVT_ABORTED, handle) assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ABORTED) == [(handle, None)] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assert child.get_handlers(evt.EVT_ESTABLISHED) == [] assert child.get_handlers(evt.EVT_REJECTED) == [] assert child.get_handlers(evt.EVT_RELEASED) == [] assert child.get_handlers(evt.EVT_REQUESTED) == [] assoc.abort() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, 
Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_ABORTED' scp.shutdown() def test_abort_unbind(self): """Test starting with handler bound to EVT_ABORTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ABORTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ABORTED) == [(handle, None)] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assert child.get_handlers(evt.EVT_ESTABLISHED) == [] assert child.get_handlers(evt.EVT_REJECTED) == [] assert child.get_handlers(evt.EVT_RELEASED) == [] assert child.get_handlers(evt.EVT_REQUESTED) == [] scp.unbind(evt.EVT_ABORTED, handle) assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert 
scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ABORTED) == [] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assert child.get_handlers(evt.EVT_ESTABLISHED) == [] assert child.get_handlers(evt.EVT_REJECTED) == [] assert child.get_handlers(evt.EVT_RELEASED) == [] assert child.get_handlers(evt.EVT_REQUESTED) == [] assoc.abort() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 0 scp.shutdown() def test_abort_local(self): """Test the handler bound to EVT_ABORTED with local requested abort.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ABORTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 scp.active_associations[0].abort() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_ABORTED' scp.shutdown() def test_abort_raises(self, caplog): """Test the handler for EVT_ACCEPTED raising exception.""" def handle(event): raise NotImplementedError("Exception description") self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = 
[(evt.EVT_ABORTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.abort() while scp.active_associations: time.sleep(0.05) scp.shutdown() msg = ( "Exception raised in user's 'evt.EVT_ABORTED' event handler" " 'handle'" ) assert msg in caplog.text assert "Exception description" in caplog.text def test_accept(self): """Test starting with handler bound to EVT_ACCEPTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ACCEPTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ABORTED) == [] assert child.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)] assert 
child.get_handlers(evt.EVT_ESTABLISHED) == [] assert child.get_handlers(evt.EVT_REJECTED) == [] assert child.get_handlers(evt.EVT_RELEASED) == [] assert child.get_handlers(evt.EVT_REQUESTED) == [] assoc.abort() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_ACCEPTED' scp.shutdown() def test_accept_bind(self): """Test binding a handler to EVT_ACCEPTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ACCEPTED, handle)] scp = ae.start_server(('', 11112), block=False) assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assert len(triggered) == 0 scp.bind(evt.EVT_ACCEPTED, handle) assert scp.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert child.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)] assoc2 = ae.associate('localhost', 11112) assoc.release() assoc2.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 assert triggered[0].event.name == 'EVT_ACCEPTED' scp.shutdown() def test_accept_unbind(self): """Test starting with handler bound to EVT_ACCEPTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ACCEPTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assert scp.get_handlers(evt.EVT_ACCEPTED) == 
[(handle, None)] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)] assert len(triggered) == 1 assert triggered[0].event.name == "EVT_ACCEPTED" scp.unbind(evt.EVT_ACCEPTED, handle) assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assoc2 = ae.associate('localhost', 11112) assoc.release() assoc2.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 scp.shutdown() def test_accept_raises(self, caplog): """Test the handler for EVT_ACCEPTED raising exception.""" def handle(event): raise NotImplementedError("Exception description") self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ACCEPTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.abort() while scp.active_associations: time.sleep(0.05) scp.shutdown() msg = ( "Exception raised in user's 'evt.EVT_ACCEPTED' event handler" " 'handle'" ) assert msg in caplog.text assert "Exception description" in caplog.text def test_release(self): """Test starting with handler bound to EVT_RELEASED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_RELEASED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert 
scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [(handle, None)] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [(handle, None)] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ABORTED) == [] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assert child.get_handlers(evt.EVT_ESTABLISHED) == [] assert child.get_handlers(evt.EVT_REJECTED) == [] assert child.get_handlers(evt.EVT_RELEASED) == [(handle, None)] assert child.get_handlers(evt.EVT_REQUESTED) == [] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_RELEASED' scp.shutdown() def test_release_bind(self): """Test binding a handler to EVT_RELEASED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_RELEASED, handle)] scp = ae.start_server(('', 11112), block=False) assert scp.get_handlers(evt.EVT_RELEASED) == [] assoc = ae.associate('localhost', 11112) assert assoc.is_established 
assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_RELEASED) == [] scp.bind(evt.EVT_RELEASED, handle) assert scp.get_handlers(evt.EVT_RELEASED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_RELEASED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_RELEASED) == [(handle, None)] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_RELEASED' scp.shutdown() def test_release_unbind(self): """Test starting with handler bound to EVT_ABORTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_RELEASED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 scp.unbind(evt.EVT_RELEASED, handle) assert scp.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_RELEASED) == [] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 0 scp.shutdown() def test_release_local(self): """Test the handler bound to EVT_RELEASED with local requested abort.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_RELEASED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert 
len(scp.active_associations) == 1 scp.active_associations[0].release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_RELEASED' scp.shutdown() def test_release_raises(self, caplog): """Test the handler for EVT_RELEASED raising exception.""" def handle(event): raise NotImplementedError("Exception description") self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_RELEASED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() while scp.active_associations: time.sleep(0.05) scp.shutdown() msg = ( "Exception raised in user's 'evt.EVT_RELEASED' event handler" " 'handle'" ) assert msg in caplog.text assert "Exception description" in caplog.text def test_established(self): """Test starting with handler bound to EVT_ESTABLISHED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ESTABLISHED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [(handle, None)] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == 
[(handle, None)] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ABORTED) == [] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assert child.get_handlers(evt.EVT_ESTABLISHED) == [(handle, None)] assert child.get_handlers(evt.EVT_REJECTED) == [] assert child.get_handlers(evt.EVT_RELEASED) == [] assert child.get_handlers(evt.EVT_REQUESTED) == [] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_ESTABLISHED' scp.shutdown() def test_established_bind(self): """Test binding a handler to EVT_ESTABLISHED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ESTABLISHED, handle)] scp = ae.start_server(('', 11112), block=False) assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] scp.bind(evt.EVT_ESTABLISHED, handle) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ESTABLISHED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ESTABLISHED) == [(handle, None)] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, 
Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_ESTABLISHED' scp.shutdown() def test_established_unbind(self): """Test starting with handler bound to EVT_ABORTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ESTABLISHED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) scp.unbind(evt.EVT_ESTABLISHED, handle) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ESTABLISHED) == [] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 0 scp.shutdown() def test_established_raises(self, caplog): """Test the handler for EVT_ESTABLISHED raising exception.""" def handle(event): raise NotImplementedError("Exception description") self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ESTABLISHED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = ae.associate('localhost', 11112) assert assoc.is_established assoc.release() while scp.active_associations: time.sleep(0.05) scp.shutdown() msg = ( "Exception raised in user's 'evt.EVT_ESTABLISHED' event handler" " 'handle'" ) assert msg in caplog.text assert "Exception description" in caplog.text def test_requested(self): """Test starting with handler bound to EVT_REQUESTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_REQUESTED, handle)] scp 
= ae.start_server(('', 11112), block=False, evt_handlers=handlers) assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [(handle, None)] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ABORTED) == [] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assert child.get_handlers(evt.EVT_ESTABLISHED) == [] assert child.get_handlers(evt.EVT_REJECTED) == [] assert child.get_handlers(evt.EVT_RELEASED) == [] assert child.get_handlers(evt.EVT_REQUESTED) == [(handle, None)] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_REQUESTED' scp.shutdown() def test_requested_bind(self): """Test binding a handler to EVT_REQUESTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_REQUESTED, 
handle)] scp = ae.start_server(('', 11112), block=False) assert scp.get_handlers(evt.EVT_REQUESTED) == [] scp.bind(evt.EVT_REQUESTED, handle) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_REQUESTED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_REQUESTED) == [(handle, None)] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_REQUESTED' scp.shutdown() def test_requested_unbind(self): """Test starting with handler bound to EVT_ABORTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_REQUESTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) scp.unbind(evt.EVT_REQUESTED, handle) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_REQUESTED) == [] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 0 scp.shutdown() def test_requested_raises(self, caplog): """Test the handler for EVT_REQUESTED raising exception.""" def handle(event): raise NotImplementedError("Exception description") self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_REQUESTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = 
ae.associate('localhost', 11112) assert assoc.is_established assoc.release() while scp.active_associations: time.sleep(0.05) scp.shutdown() msg = ( "Exception raised in user's 'evt.EVT_REQUESTED' event handler" " 'handle'" ) assert msg in caplog.text assert "Exception description" in caplog.text def test_rejected(self): """Test starting with handler bound to EVT_REJECTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.require_called_aet = True ae.add_supported_context(CTImageStorage) ae.add_requested_context(Verification) handlers = [(evt.EVT_REJECTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [(handle, None)] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assoc = ae.associate('localhost', 11112) assert assoc.is_rejected assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [(handle, None)] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_REJECTED' scp.shutdown() def test_rejected_bind(self): """Test binding a handler to EVT_REJECTED.""" triggered = [] def handle(event): 
triggered.append(event) self.ae = ae = AE() ae.require_called_aet = True ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_REJECTED, handle)] scp = ae.start_server(('', 11112), block=False) assert scp.get_handlers(evt.EVT_REJECTED) == [] scp.bind(evt.EVT_REJECTED, handle) assoc = ae.associate('localhost', 11112) assert assoc.is_rejected assert scp.get_handlers(evt.EVT_REJECTED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_REJECTED' scp.shutdown() def test_rejected_unbind(self): """Test starting with handler bound to EVT_ABORTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.require_called_aet = True ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_REJECTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) scp.unbind(evt.EVT_REJECTED, handle) assoc = ae.associate('localhost', 11112) assert assoc.is_rejected assert scp.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assoc.release() assert len(triggered) == 0 scp.shutdown() def test_rejected_raises(self, caplog): """Test the handler for EVT_REJECTED raising exception.""" def handle(event): raise NotImplementedError("Exception description") self.ae = ae = AE() ae.require_called_aet = True ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_REJECTED, handle)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = ae.associate('localhost', 11112) assert assoc.is_rejected scp.shutdown() msg = ( "Exception raised in user's 'evt.EVT_REJECTED' event handler" " 'handle'" ) 
assert msg in caplog.text assert "Exception description" in caplog.text def test_optional_args(self): """Test passing optional arguments to the handler.""" arguments = [] def handle(event, *args): arguments.append(args) args = ['a', 1, {'test': 1}] self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ACCEPTED, handle, args)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ACCEPTED) == [(handle, args)] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ACCEPTED) == [(handle, args)] assoc.abort() while scp.active_associations: time.sleep(0.05) scp.shutdown() assert len(arguments) == 1 assert args == list(arguments[0]) def test_optional_args_intervention(self): """Test passing optional arguments to the handler.""" arguments = [] def handle_echo(event, *args): arguments.append(args) return 0x0000 args = ['a', 1, {'test': 1}] self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_C_ECHO, handle_echo, args)] scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_C_ECHO) == (handle_echo, args) child = scp.active_associations[0] assert child.get_handlers(evt.EVT_C_ECHO) == (handle_echo, args) status = assoc.send_c_echo() assert status.Status == 0x0000 assoc.abort() while scp.active_associations: time.sleep(0.05) scp.shutdown() assert len(arguments) == 1 assert args == list(arguments[0]) class TestEventHandlingRequestor: """Test the transport events and handling as acceptor.""" def setup(self): self.ae = None _config.LOG_HANDLER_LEVEL = 'none' def 
teardown(self): if self.ae: self.ae.shutdown() _config.LOG_HANDLER_LEVEL = 'standard' def test_no_handlers(self): """Test with no association event handlers bound.""" self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), block=False) assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert scp.get_handlers(evt.EVT_ABORTED) == [] assert scp.get_handlers(evt.EVT_ACCEPTED) == [] assert scp.get_handlers(evt.EVT_ESTABLISHED) == [] assert scp.get_handlers(evt.EVT_REJECTED) == [] assert scp.get_handlers(evt.EVT_RELEASED) == [] assert scp.get_handlers(evt.EVT_REQUESTED) == [] assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] child = scp.active_associations[0] assert child.get_handlers(evt.EVT_ABORTED) == [] assert child.get_handlers(evt.EVT_ACCEPTED) == [] assert child.get_handlers(evt.EVT_ESTABLISHED) == [] assert child.get_handlers(evt.EVT_REJECTED) == [] assert child.get_handlers(evt.EVT_RELEASED) == [] assert child.get_handlers(evt.EVT_REQUESTED) == [] assoc.release() scp.shutdown() def test_unbind_not_event(self): """Test unbind a handler if no events bound.""" def dummy(event): pass self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112) assert assoc.is_established 
assert len(scp.active_associations) == 1 assert assoc.get_handlers(evt.EVT_DIMSE_SENT) == [] assoc.unbind(evt.EVT_DIMSE_SENT, dummy) assert assoc.get_handlers(evt.EVT_DIMSE_SENT) == [] assoc.release() scp.shutdown() def test_unbind_notification_none(self): """Test unbinding a handler thats not bound.""" def dummy(event): pass def dummy2(event): pass self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assoc.bind(evt.EVT_DIMSE_SENT, dummy) assert assoc.get_handlers(evt.EVT_DIMSE_SENT) == [(dummy, None)] assoc.unbind(evt.EVT_DIMSE_SENT, dummy2) assert assoc.get_handlers(evt.EVT_DIMSE_SENT) == [(dummy, None)] assoc.release() scp.shutdown() def test_unbind_intervention(self): """Test unbinding a user intervention handler.""" def dummy(event): pass self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assoc.bind(evt.EVT_C_ECHO, dummy) assert assoc.get_handlers(evt.EVT_C_ECHO) == (dummy, None) assoc.unbind(evt.EVT_C_ECHO, dummy) assert assoc.get_handlers(evt.EVT_C_ECHO) != (dummy, None) assert assoc.get_handlers(evt.EVT_C_ECHO) == ( evt._c_echo_handler, None ) assoc.release() scp.shutdown() def test_abort(self): """Test starting with handler bound to EVT_ABORTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ABORTED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assert len(scp.active_associations) == 1 assert 
assoc.get_handlers(evt.EVT_ABORTED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assoc.abort() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_ABORTED' scp.shutdown() def test_abort_bind(self): """Test binding a handler to EVT_ABORTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ABORTED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112) assert assoc.is_established assert len(scp.active_associations) == 1 assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assoc.bind(evt.EVT_ABORTED, handle) assert assoc.get_handlers(evt.EVT_ABORTED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assoc.abort() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_ABORTED' scp.shutdown() def test_abort_unbind(self): 
"""Test starting with handler bound to EVT_ABORTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ABORTED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assert len(scp.active_associations) == 1 assert assoc.get_handlers(evt.EVT_ABORTED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assoc.unbind(evt.EVT_ABORTED, handle) assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assoc.abort() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 0 scp.shutdown() def test_abort_remote(self): """Test the handler bound to EVT_ABORTED with local requested abort.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ABORTED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assert len(scp.active_associations) == 1 scp.active_associations[0].abort() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_ABORTED' scp.shutdown() def 
test_abort_raises(self, caplog): """Test the handler for EVT_ACCEPTED raising exception.""" def handle(event): raise NotImplementedError("Exception description") self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ABORTED, handle)] scp = ae.start_server(('', 11112), block=False) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assoc.abort() while scp.active_associations: time.sleep(0.05) scp.shutdown() msg = ( "Exception raised in user's 'evt.EVT_ABORTED' event handler" " 'handle'" ) assert msg in caplog.text assert "Exception description" in caplog.text def test_accept(self): """Test starting with handler bound to EVT_ACCEPTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ACCEPTED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assert len(scp.active_associations) == 1 assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assoc.abort() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_ACCEPTED' scp.shutdown() def test_accept_raises(self, caplog): """Test the handler for EVT_ACCEPTED raising exception.""" def handle(event): raise NotImplementedError("Exception description") self.ae = ae = AE() 
ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ACCEPTED, handle)] scp = ae.start_server(('', 11112), block=False) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assoc.abort() while scp.active_associations: time.sleep(0.05) scp.shutdown() msg = ( "Exception raised in user's 'evt.EVT_ACCEPTED' event handler" " 'handle'" ) assert msg in caplog.text assert "Exception description" in caplog.text def test_release(self): """Test starting with handler bound to EVT_RELEASED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_RELEASED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assert len(scp.active_associations) == 1 assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_RELEASED' scp.shutdown() def test_release_bind(self): """Test binding a handler to EVT_RELEASED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_RELEASED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112) assert 
assoc.is_established assert len(scp.active_associations) == 1 assert assoc.get_handlers(evt.EVT_RELEASED) == [] assoc.bind(evt.EVT_RELEASED, handle) assert assoc.get_handlers(evt.EVT_RELEASED) == [(handle, None)] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_RELEASED' scp.shutdown() def test_release_unbind(self): """Test starting with handler bound to EVT_ABORTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_RELEASED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assert len(scp.active_associations) == 1 assert assoc.get_handlers(evt.EVT_RELEASED) == [(handle, None)] assoc.unbind(evt.EVT_RELEASED, handle) assert assoc.get_handlers(evt.EVT_RELEASED) == [] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 0 scp.shutdown() def test_release_remote(self): """Test the handler bound to EVT_RELEASED with local requested abort.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_RELEASED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assert len(scp.active_associations) == 1 scp.active_associations[0].release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 
'EVT_RELEASED' scp.shutdown() def test_release_raises(self, caplog): """Test the handler for EVT_RELEASED raising exception.""" def handle(event): raise NotImplementedError("Exception description") self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_RELEASED, handle)] scp = ae.start_server(('', 11112), block=False) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assoc.release() while scp.active_associations: time.sleep(0.05) scp.shutdown() msg = ( "Exception raised in user's 'evt.EVT_RELEASED' event handler" " 'handle'" ) assert msg in caplog.text assert "Exception description" in caplog.text def test_established(self): """Test starting with handler bound to EVT_ESTABLISHED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ESTABLISHED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assert len(scp.active_associations) == 1 assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_ESTABLISHED' scp.shutdown() def test_established_raises(self, caplog): """Test the handler for EVT_ESTABLISHED raising exception.""" def handle(event): raise 
NotImplementedError("Exception description") self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ESTABLISHED, handle)] scp = ae.start_server(('', 11112), block=False) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assoc.release() while scp.active_associations: time.sleep(0.05) scp.shutdown() msg = ( "Exception raised in user's 'evt.EVT_ESTABLISHED' event handler" " 'handle'" ) assert msg in caplog.text assert "Exception description" in caplog.text def test_requested(self): """Test starting with handler bound to EVT_REQUESTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_REQUESTED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assert len(scp.active_associations) == 1 assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [(handle, None)] assoc.release() while scp.active_associations: time.sleep(0.05) assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_REQUESTED' scp.shutdown() def test_requested_raises(self, caplog): """Test the handler for EVT_REQUESTED raising exception.""" def handle(event): raise NotImplementedError("Exception description") self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_REQUESTED, 
handle)] scp = ae.start_server(('', 11112), block=False) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assoc.release() while scp.active_associations: time.sleep(0.05) scp.shutdown() msg = ( "Exception raised in user's 'evt.EVT_REQUESTED' event handler" " 'handle'" ) assert msg in caplog.text assert "Exception description" in caplog.text def test_rejected(self): """Test starting with handler bound to EVT_REJECTED.""" triggered = [] def handle(event): triggered.append(event) self.ae = ae = AE() ae.require_called_aet = True ae.add_supported_context(CTImageStorage) ae.add_requested_context(Verification) handlers = [(evt.EVT_REJECTED, handle)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_rejected assert assoc.get_handlers(evt.EVT_ABORTED) == [] assert assoc.get_handlers(evt.EVT_ACCEPTED) == [] assert assoc.get_handlers(evt.EVT_ESTABLISHED) == [] assert assoc.get_handlers(evt.EVT_REJECTED) == [(handle, None)] assert assoc.get_handlers(evt.EVT_RELEASED) == [] assert assoc.get_handlers(evt.EVT_REQUESTED) == [] assert len(triggered) == 1 event = triggered[0] assert isinstance(event, Event) assert isinstance(event.assoc, Association) assert isinstance(event.timestamp, datetime) assert event.event.name == 'EVT_REJECTED' scp.shutdown() def test_rejected_raises(self, caplog): """Test the handler for EVT_REJECTED raising exception.""" def handle(event): raise NotImplementedError("Exception description") self.ae = ae = AE() ae.require_called_aet = True ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_REJECTED, handle)] scp = ae.start_server(('', 11112), block=False) with caplog.at_level(logging.ERROR, logger='pynetdicom'): assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_rejected scp.shutdown() msg = ( "Exception 
raised in user's 'evt.EVT_REJECTED' event handler" " 'handle'" ) assert msg in caplog.text assert "Exception description" in caplog.text def test_optional_args(self): """Test passing optional arguments to the handler.""" arguments = [] def handle(event, *args): arguments.append(args) args = ['a', 1, {'test': 1}] self.ae = ae = AE() ae.add_supported_context(Verification) ae.add_requested_context(Verification) handlers = [(evt.EVT_ACCEPTED, handle, args)] scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112, evt_handlers=handlers) assert assoc.is_established assert len(scp.active_associations) == 1 assert assoc.get_handlers(evt.EVT_ACCEPTED) == [(handle, args)] assoc.abort() while scp.active_associations: time.sleep(0.05) scp.shutdown() assert len(arguments) == 1 assert args == list(arguments[0]) @pytest.mark.skipif(not ON_WINDOWS, reason="Not running on Windows") class TestAssociationWindows: """Windows specific association tests.""" def setup(self): """This function runs prior to all test methods""" self.ae = None def teardown(self): """This function runs after all test methods""" if self.ae: self.ae.shutdown() import importlib importlib.reload(pynetdicom.utils) def get_timer_info(self): """Get the current timer resolution.""" dll = ctypes.WinDLL("NTDLL.DLL") minimum = ctypes.c_ulong() maximum = ctypes.c_ulong() current = ctypes.c_ulong() dll.NtQueryTimerResolution( ctypes.byref(maximum), ctypes.byref(minimum), ctypes.byref(current) ) return minimum.value, maximum.value, current.value @hide_modules(['ctypes']) def test_no_ctypes(self): """Test no exception raised if ctypes not available.""" # Reload pynetdicom package # Be aware doing this for important modules may cause issues import importlib importlib.reload(pynetdicom.utils) self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), 
block=False) assoc = ae.associate('localhost', 11112) assert assoc.send_c_echo().Status == 0x0000 assoc.release() assert assoc.is_released scp.shutdown() @pytest.mark.skipif(not HAVE_CTYPES, reason="No ctypes module") def test_set_timer_resolution(self): """Test setting the windows timer resolution works.""" min_val, max_val, pre_timer = self.get_timer_info() # Set the timer resolution to the minimum plus 10% pynetdicom._config.WINDOWS_TIMER_RESOLUTION = min_val * 1.10 / 10000 self.ae = ae = AE() ae.acse_timeout = 5 ae.dimse_timeout = 5 ae.network_timeout = 5 ae.add_supported_context(Verification) ae.add_requested_context(Verification) scp = ae.start_server(('', 11112), block=False) assoc = ae.associate('localhost', 11112) min_val, max_val, during_timer = self.get_timer_info() assert during_timer < pre_timer assoc.release() assert assoc.is_released scp.shutdown() min_val, max_val, post_timer = self.get_timer_info() assert post_timer > during_timer
{ "repo_name": "scaramallion/pynetdicom", "path": "pynetdicom/tests/test_assoc.py", "copies": "1", "size": "241524", "license": "mit", "hash": -5001927215942043000, "line_mean": 32.6900544009, "line_max": 99, "alpha_frac": 0.5934068664, "autogenerated": false, "ratio": 3.828184685612845, "config_test": true, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9919870956271195, "avg_score": 0.0003441191483299828, "num_lines": 7169 }
""" Associative and Commutative unification This module provides goals for associative and commutative unification. It accomplishes this through naively trying all possibilities. This was built to be used in the computer algebra systems SymPy and Theano. >>> from logpy import run, var, fact >>> from logpy.assoccomm import eq_assoccomm as eq >>> from logpy.assoccomm import commutative, associative >>> # Define some dummy Ops >>> add = 'add' >>> mul = 'mul' >>> # Declare that these ops are commutative using the facts system >>> fact(commutative, mul) >>> fact(commutative, add) >>> fact(associative, mul) >>> fact(associative, add) >>> # Define some wild variables >>> x, y = var('x'), var('y') >>> # Two expressions to match >>> pattern = (mul, (add, 1, x), y) # (1 + x) * y >>> expr = (mul, 2, (add, 3, 1)) # 2 * (3 + 1) >>> print(run(0, (x,y), eq(pattern, expr))) ((3, 2),) """ from logpy.core import (isvar, assoc, unify, conde, var, eq, fail, goaleval, lall, EarlyGoalError, condeseq, goaleval) from .goals import heado, permuteq, conso, tailo from .facts import Relation from logpy import core from .util import groupsizes, index from .util import transitive_get as walk from .term import term, arguments, operator associative = Relation('associative') commutative = Relation('commutative') def assocunify(u, v, s, eq=core.eq, n=None): """ Associative Unification See Also: eq_assoccomm """ uop, uargs = op_args(u) vop, vargs = op_args(v) if not uop and not vop: res = unify(u, v, s) if res is not False: return (res,) # TODO: iterate through all possibilities if uop and vop: s = unify(uop, vop, s) if s is False: raise StopIteration() op = walk(uop, s) sm, lg = (uargs, vargs) if len(uargs) <= len(vargs) else (vargs, uargs) ops = assocsized(op, lg, len(sm)) goal = condeseq([(eq, a, b) for a, b, in zip(sm, lg2)] for lg2 in ops) return goaleval(goal)(s) if uop: op, tail = uop, uargs b = v if vop: op, tail = vop, vargs b = u ns = [n] if n else range(2, len(tail)+1) knowns = (build(op, 
x) for n in ns for x in assocsized(op, tail, n)) goal = condeseq([(core.eq, b, k)] for k in knowns) return goaleval(goal)(s) def assocsized(op, tail, n): """ All associative combinations of x in n groups """ gsizess = groupsizes(len(tail), n) partitions = (groupsizes_to_partition(*gsizes) for gsizes in gsizess) return (makeops(op, partition(tail, part)) for part in partitions) def makeops(op, lists): """ Construct operations from an op and parition lists >>> from logpy.assoccomm import makeops >>> makeops('add', [(1, 2), (3, 4, 5)]) (('add', 1, 2), ('add', 3, 4, 5)) """ return tuple(l[0] if len(l) == 1 else build(op, l) for l in lists) def partition(tup, part): """ Partition a tuple >>> from logpy.assoccomm import partition >>> partition("abcde", [[0,1], [4,3,2]]) [('a', 'b'), ('e', 'd', 'c')] """ return [index(tup, ind) for ind in part] def groupsizes_to_partition(*gsizes): """ >>> from logpy.assoccomm import groupsizes_to_partition >>> groupsizes_to_partition(2, 3) [[0, 1], [2, 3, 4]] """ idx = 0 part = [] for gs in gsizes: l = [] for i in range(gs): l.append(idx) idx += 1 part.append(l) return part def eq_assoc(u, v, eq=core.eq, n=None): """ Goal for associative equality >>> from logpy import run, var, fact >>> from logpy.assoccomm import eq_assoc as eq >>> fact(commutative, 'add') # declare that 'add' is commutative >>> fact(associative, 'add') # declare that 'add' is associative >>> x = var() >>> run(0, x, eq(('add', 1, 2, 3), ('add', 1, x))) (('add', 2, 3),) """ uop, uargs = op_args(u) vop, vargs = op_args(v) if uop and vop: return conde([(core.eq, u, v)], [(eq, uop, vop), (associative, uop), lambda s: assocunify(u, v, s, eq, n)]) if uop or vop: if vop: uop, vop = vop, uop uargs, vargs = vargs, uargs v, u = u, v return conde([(core.eq, u, v)], [(associative, uop), lambda s: assocunify(u, v, s, eq, n)]) return (core.eq, u, v) def eq_comm(u, v, eq=None): """ Goal for commutative equality >>> from logpy import run, var, fact >>> from logpy.assoccomm import 
eq_comm as eq >>> from logpy.assoccomm import commutative, associative >>> fact(commutative, 'add') # declare that 'add' is commutative >>> fact(associative, 'add') # declare that 'add' is associative >>> x = var() >>> run(0, x, eq(('add', 1, 2, 3), ('add', 2, x, 1))) (3,) """ eq = eq or eq_comm op = var() utail = var() vtail = var() if isvar(u) and isvar(v): return (core.eq, u, v) raise EarlyGoalError() uop, uargs = op_args(u) vop, vargs = op_args(v) if not uop and not vop: return (core.eq, u, v) if vop and not uop: uop, uargs = vop, vargs v, u = u, v return (conde, ((core.eq, u, v),), ((commutative, uop), (buildo, uop, vtail, v), (permuteq, uargs, vtail, eq))) def build_tuple(op, args): try: return term(op, args) except TypeError: raise EarlyGoalError() def buildo(op, args, obj): """ obj is composed of op on args Example: in add(1,2,3) ``add`` is the op and (1,2,3) are the args Checks op_regsitry for functions to define op/arg relationships """ if not isvar(obj): oop, oargs = op_args(obj) return lall((eq, op, oop), (eq, args, oargs)) else: try: return eq(obj, build(op, args)) except TypeError: raise EarlyGoalError() raise EarlyGoalError() def build(op, args): try: return term(op, args) except NotImplementedError: raise EarlyGoalError() def op_args(x): """ Break apart x into an operation and tuple of args """ if isvar(x): return None, None try: return operator(x), arguments(x) except NotImplementedError: return None, None def eq_assoccomm(u, v): """ Associative/Commutative eq Works like logic.core.eq but supports associative/commutative expr trees tree-format: (op, *args) example: (add, 1, 2, 3) State that operations are associative or commutative with relations >>> from logpy.assoccomm import eq_assoccomm as eq >>> from logpy.assoccomm import commutative, associative >>> from logpy import fact, run, var >>> fact(commutative, 'add') # declare that 'add' is commutative >>> fact(associative, 'add') # declare that 'add' is associative >>> x = var() >>> e1 = ('add', 
1, 2, 3) >>> e2 = ('add', 1, x) >>> run(0, x, eq(e1, e2)) (('add', 2, 3), ('add', 3, 2)) """ try: uop, uargs = op_args(u) vop, vargs = op_args(v) except ValueError: return (eq, u, v) if uop and not vop and not isvar(v): return fail if vop and not uop and not isvar(u): return fail if uop and vop and not uop == vop: return fail if uop and not (uop,) in associative.facts: return (eq, u, v) if vop and not (vop,) in associative.facts: return (eq, u, v) if uop and vop: u, v = (u, v) if len(uargs) >= len(vargs) else (v, u) n = min(map(len, (uargs, vargs))) # length of shorter tail else: n = None if vop and not uop: u, v = v, u w = var() return (lall, (eq_assoc, u, w, eq_assoccomm, n), (eq_comm, v, w, eq_assoccomm))
{ "repo_name": "cpcloud/logpy", "path": "logpy/assoccomm.py", "copies": "1", "size": "7954", "license": "bsd-3-clause", "hash": 6187931501800630000, "line_mean": 27.6115107914, "line_max": 79, "alpha_frac": 0.5609756098, "autogenerated": false, "ratio": 3.1538461538461537, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9206781004321535, "avg_score": 0.0016081518649238237, "num_lines": 278 }
"""assoc_rename_and_uuid_column Revision ID: 1b385158fd32 Revises: 40dfdcb2b07c Create Date: 2016-08-17 14:37:40.391822 """ # revision identifiers, used by Alembic. revision = '1b385158fd32' down_revision = '40dfdcb2b07c' from alembic import op import sqlalchemy as sa def upgrade(): op.rename_table('knowledge_post_author', 'assoc_post_author') op.rename_table('knowledge_post_tags', 'assoc_post_tag') with op.batch_alter_table('assoc_post_author') as batch_op: batch_op.add_column(sa.Column('order', sa.Integer(), nullable=True)) with op.batch_alter_table('posts') as batch_op: batch_op.add_column(sa.Column('uuid', sa.String(length=100), nullable=True)) batch_op.create_unique_constraint('uq_uuid', ['uuid']) with op.batch_alter_table('pageviews') as batch_op: batch_op.add_column(sa.Column('object_action', sa.String(length=100), nullable=True)) batch_op.add_column(sa.Column('version', sa.String(length=100), nullable=True)) def downgrade(): with op.batch_alter_table('assoc_post_author') as batch_op: batch_op.drop_column('order') op.rename_table('assoc_post_author', 'knowledge_post_author') op.rename_table('assoc_post_tag', 'knowledge_post_tags') with op.batch_alter_table('posts') as batch_op: batch_op.drop_constraint('uq_uuid', type_='unique') batch_op.drop_column('uuid') with op.batch_alter_table('pageviews') as batch_op: batch_op.drop_column('pageviews', 'object_action') batch_op.drop_column('pageviews', 'version')
{ "repo_name": "airbnb/knowledge-repo", "path": "knowledge_repo/app/migrations/versions/1b385158fd32_assoc_rename_and_uuid_column.py", "copies": "1", "size": "1567", "license": "apache-2.0", "hash": -7529395538986187000, "line_mean": 33.0652173913, "line_max": 93, "alpha_frac": 0.6828334397, "autogenerated": false, "ratio": 3.0846456692913384, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.42674791089913383, "avg_score": null, "num_lines": null }
"Assorted class utilities and tools" class AttrDisplay: def gatherAttrs(self): attrs = [] for key in sorted(self.__dict__): attrs.append("%s=%s" % (key, getattr(self, key))) return ', '.join(attrs) def __str__(self): return '[%s: %s]' % (self.__class__.__name__, self.gatherAttrs()) class TopTest(AttrDisplay): def gatherAttrs(self): return 'Spam' class Person(AttrDisplay): def __init__(self, name, job=None, pay=0): self.name = name self.job = job self.pay = pay def lastName(self): return self.name.split()[-1] def giveRaise(self, percent): self.pay = int(self.pay * (1 + percent)) def __str__(self): return '[Person: %s, %s]' % (self.name, self.pay) __repr__ = __str__ class Manager(Person): def __init__(self, name, pay=0): Person.__init__(self, name, 'mgr', pay) def giveRaise(self, percent, bonus=.10): Person.giveRaise(self, percent + bonus) # def __getattr__(self, item): # return getattr(self.person, item) def __str__(self): return str(self.person) class Department: def __init__(self, *args): self.members = list(args) def addMember(self, person): self.members.append(person) def giveRaises(self, percent): for person in self.members: person.giveRaise(percent) def showAll(self): for person in self.members: print(person)
{ "repo_name": "zhayangtao/HelloPython", "path": "python01/PythonExample.py", "copies": "1", "size": "1505", "license": "apache-2.0", "hash": -4680060669774696000, "line_mean": 22.8888888889, "line_max": 73, "alpha_frac": 0.5627906977, "autogenerated": false, "ratio": 3.591885441527446, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.9650499029787178, "avg_score": 0.000835421888053467, "num_lines": 63 }
"""Assorted commands. """ import os import threading import sublime import sublime_plugin from Vintageous.state import _init_vintageous from Vintageous.state import State from Vintageous.vi import settings from Vintageous.vi import cmd_defs from Vintageous.vi.dot_file import DotFile from Vintageous.vi.utils import modes from Vintageous.vi.utils import regions_transformer class _vi_slash_on_parser_done(sublime_plugin.WindowCommand): def run(self, key=None): state = State(self.window.active_view()) state.motion = cmd_defs.ViSearchForwardImpl() state.last_buffer_search = (state.motion._inp or state.last_buffer_search) class _vi_question_mark_on_parser_done(sublime_plugin.WindowCommand): def run(self, key=None): state = State(self.window.active_view()) state.motion = cmd_defs.ViSearchBackwardImpl() state.last_buffer_search = (state.motion._inp or state.last_buffer_search) # TODO: Test me. class VintageStateTracker(sublime_plugin.EventListener): def on_post_save(self, view): # Ensure the carets are within valid bounds. For instance, this is a # concern when `trim_trailing_white_space_on_save` is set to true. state = State(view) view.run_command('_vi_adjust_carets', {'mode': state.mode}) def on_query_context(self, view, key, operator, operand, match_all): vintage_state = State(view) return vintage_state.context.check(key, operator, operand, match_all) def on_close(self, view): settings.destroy(view) class ViMouseTracker(sublime_plugin.EventListener): def on_text_command(self, view, command, args): if command == 'drag_select': state = State(view) if state.mode in (modes.VISUAL, modes.VISUAL_LINE, modes.VISUAL_BLOCK): if (args.get('extend') or (args.get('by') == 'words') or args.get('additive')): return elif not args.get('extend'): return ('sequence', {'commands': [ ['drag_select', args], ['_enter_normal_mode', { 'mode': state.mode}] ]}) elif state.mode == modes.NORMAL: # TODO(guillermooo): Dragging the mouse does not seem to # fire a different event than simply clicking. 
This makes it # hard to update the xpos. if args.get('extend') or (args.get('by') == 'words'): return ('sequence', {'commands': [ ['drag_select', args], ['_enter_visual_mode', { 'mode': state.mode}] ]}) # TODO: Test me. class ViFocusRestorerEvent(sublime_plugin.EventListener): def __init__(self): self.timer = None def action(self): self.timer = None def on_activated(self, view): if self.timer: self.timer.cancel() # Switching to a different view; enter normal mode. _init_vintageous(view) else: # Switching back from another application. Ignore. pass def on_deactivated(self, view): self.timer = threading.Timer(0.25, self.action) self.timer.start() class _vi_adjust_carets(sublime_plugin.TextCommand): def run(self, edit, mode=None): def f(view, s): if mode in (modes.NORMAL, modes.INTERNAL_NORMAL): if ((view.substr(s.b) == '\n' or s.b == view.size()) and not view.line(s.b).empty()): return sublime.Region(s.b - 1) return s regions_transformer(self.view, f) class Sequence(sublime_plugin.TextCommand): """Required so that mark_undo_groups_for_gluing and friends work. """ def run(self, edit, commands): for cmd, args in commands: self.view.run_command(cmd, args) class ResetVintageous(sublime_plugin.WindowCommand): def run(self): v = self.window.active_view() v.settings().erase('vintage') _init_vintageous(v) DotFile.from_user().run() print("Package.Vintageous: State reset.") sublime.status_message("Vintageous: State reset") class ForceExitFromCommandMode(sublime_plugin.WindowCommand): """ A sort of a panic button. """ def run(self): v = self.window.active_view() v.settings().erase('vintage') # XXX: What happens exactly when the user presses Esc again now? Which # more are we in? 
v.settings().set('command_mode', False) v.settings().set('inverse_caret_state', False) print("Vintageous: Exiting from command mode.") sublime.status_message("Vintageous: Exiting from command mode.") class VintageousToggleCtrlKeys(sublime_plugin.WindowCommand): def run(self): prefs = sublime.load_settings('Preferences.sublime-settings') value = prefs.get('vintageous_use_ctrl_keys', False) prefs.set('vintageous_use_ctrl_keys', (not value)) sublime.save_settings('Preferences.sublime-settings') status = 'enabled' if (not value) else 'disabled' print("Package.Vintageous: Use of Ctrl- keys {0}.".format(status)) sublime.status_message("Vintageous: Use of Ctrl- keys {0}" .format(status)) class ReloadVintageousSettings(sublime_plugin.TextCommand): def run(self, edit): DotFile.from_user().run() class VintageousOpenConfigFile(sublime_plugin.WindowCommand): """Opens or creates $packages/User/.vintageousrc. """ def run(self): path = os.path.realpath(os.path.join(sublime.packages_path(), 'User/.vintageousrc')) if os.path.exists(path): self.window.open_file(path) else: with open(path, 'w'): pass self.window.open_file(path)
{ "repo_name": "vastcharade/Vintageous", "path": "xsupport.py", "copies": "9", "size": "6043", "license": "mit", "hash": -7732688025031808000, "line_mean": 31.1436170213, "line_max": 78, "alpha_frac": 0.5973854046, "autogenerated": false, "ratio": 3.739480198019802, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.8836865602619802, "avg_score": null, "num_lines": null }
'''
Assorted exceptions.
'''

class CreateError(Exception):
    '''
    Exception thrown when unable to create a new database record.
    '''

    def __init__(self, message="Failed to create new database record!",
                 orig_exception=None):
        # Store both the message and the underlying cause in args.
        super(CreateError, self).__init__(message, orig_exception)

class NotReadyError(Exception):
    '''
    Exception thrown when attempting to retrieve results that are not yet ready.
    '''

    def __init__(self,
                 message="Tried to access results before they were ready!",
                 orig_exception=None):
        # Store both the message and the underlying cause in args.
        super(NotReadyError, self).__init__(message, orig_exception)

class ReadError(Exception):
    '''
    Exception thrown when unable to retrieve a desired database record.
    '''

    def __init__(self, message="Failed to retrieve database record!",
                 orig_exception=None):
        # Store both the message and the underlying cause in args.
        super(ReadError, self).__init__(message, orig_exception)

class UpdateError(Exception):
    '''
    Exception thrown when unable to update a database record.
    '''

    def __init__(self, message="Failed to update database record!",
                 orig_exception=None):
        # Store both the message and the underlying cause in args.
        super(UpdateError, self).__init__(message, orig_exception)
{ "repo_name": "ChaseSnapshot/smcity", "path": "smcity/misc/errors.py", "copies": "1", "size": "1095", "license": "unlicense", "hash": 4591989279140732000, "line_mean": 42.8, "line_max": 103, "alpha_frac": 0.6885844749, "autogenerated": false, "ratio": 4.600840336134453, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.05148405065013439, "num_lines": 25 }
# Assorted functions that are needed frequently. import my_cnf import mysql.connector import sys import unicodecsv as csv def connect_as_db_user(): """ Open a connection as the database user. """ db_option_group = "client%s" % my_cnf.database try: cnx = mysql.connector.connect(option_files="my.cnf", option_groups=db_option_group, database=my_cnf.database, raise_on_warnings=False, get_warnings=True) cursor = cnx.cursor() except mysql.connector.Error as err: print "Connect to MySQL server as db user failed:\n%s" % err sys.exit(1) return (cnx, cursor) def is_table_empty(cnx, cursor, table): """ Return True if the given table is empty. Use this to check if a table has been populated, to avoid re-running the query that populated it. """ cmd = "SELECT COUNT(*) FROM %s;" % table count = 0 try: cursor.execute(cmd) count = cursor.fetchone()[0] except mysql.connector.Error as err: print "Select count(*) failed:\n%s" % err sys.exit(1) return count == 0 def do_modify(cnx, cursor, cmd): """ Execute a command that alters the schema or data, and thus requires a commit. """ try: cursor.execute(cmd) exec_warnings = cursor.fetchwarnings() cnx.commit() commit_warnings = cursor.fetchwarnings() except mysql.connector.Error as err: print "Query failed:\n%s\n%s" % (cmd, err) cnx.close() sys.exit(1) # The warnings do not seem to be returned, even in cases when warnings # are known to occur. return (exec_warnings, commit_warnings) def do_select(cnx, cursor, cmd, csv_file=None, csv_headers=None): """ Execute a select query, i.e. something that returns results. Read the results. Optionally write them to a csv file. """ try: cursor.execute(cmd) # A commit is not allowed when the cursor has data. except mysql.connector.Error as err: print "Query failed:\n%s\n%s" % (cmd, err) cnx.close() sys.exit(1) rows = cursor.fetchall() if csv_file: # Write them out. 
with open(csv_file, "wb") as csv_handle: writer = csv.writer(csv_handle, delimiter=";") writer.writerow(csv_headers) writer.writerows(rows) return rows def do_cmd(cnx, cursor, cmd, commit=False): """ Execute a query that does not return results, and optionally do a commit. Queries that do not need a commit include state setting queries that take immediate effect, or a direct dump to a file. Queries that do require a commit include inserts. A commit is not allowed after a select query that returns results. """ try: cursor.execute(cmd) if commit: cnx.commit() except mysql.connector.Error as err: print "Query failed:\n%s\n%s" % (cmd, err) cnx.close() sys.exit(1) def populate_table(cnx, cursor, table, cmd, verbose=False): """ Execute a command that will populate a table, if that table is empty. This allows re-running a script that may have had an error part-way through, without worry that an insert into a table will be repeated. """ # First check if this table is populated. if not is_table_empty(table): if verbose: print "Table %s already contains data." % table return # Here, it's safe to add data. do_modify(cnx, cursor, cmd)
{ "repo_name": "ptressel/yelp_friend_analysis", "path": "sql_utilities.py", "copies": "1", "size": "3678", "license": "mit", "hash": 8510684639453304000, "line_mean": 29.6583333333, "line_max": 78, "alpha_frac": 0.6033170201, "autogenerated": false, "ratio": 3.916932907348243, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.002918875479359351, "num_lines": 120 }
"""Assorted functions to deal with specific WP tasks or objects that don't fit anywhere else""" import datetime import warnings from django.db import models from django.core.urlresolvers import reverse def wp_nav_menu(blog, menu_slug, root_id=None): """Return a tree of menu items for a given menu""" if not hasattr(blog.models, 'NavMenu'): raise ConfigurationError("Blog %s has no NavMenu model") if not hasattr(blog.models.NavMenu, 'navmenuitem_set'): raise ConfigurationError("Blog %s has no NavMenuItem related manager in the NavMenu model") # don't trap try: menu = blog.models.NavMenu.objects.select_related('term').get(term__slug=menu_slug) except ObjectDoesNotExist: raise ValueError("No such menu.") menu_items = list() menu_map = dict(root=list()) item_posts = set() item_taxonomies = set() qs = menu.navmenuitem_set.prefetch_postmeta().defer('content', 'content_filtered') for post in qs: menu_item = dict( id=post.id, label=post.title, title=post.title, menu_order=post.menu_order, object=None ) for meta in post.wp_prefetched_postmeta: name = str(meta.name.replace('_menu_item_', '')) value = meta.value if name in ('object_id', 'menu_item_parent'): value = None if not value else int(value) elif name == 'classes': continue else: value = None if not value else value if name == 'object': name = 'object_type' if name == 'menu_item_parent': name = 'parent' menu_item[name] = value if menu_item['type'] == 'post_type': item_posts.add((menu_item['object_type'], menu_item['object_id'])) elif menu_item['type'] == 'taxonomy': item_taxonomies.add((menu_item['object_type'], menu_item['object_id'])) menu_items.append(menu_item) if menu_item['parent']: menu_map.setdefault(menu_item['parent'], list()).append((post.menu_order, menu_item)) else: menu_map['root'].append((post.menu_order, menu_item)) if item_posts: item_posts = dict( (p.id, p) for p in blog.models.BasePost.objects.filter( id__in=[t[1] for t in item_posts] ).defer( 'content', 'content_filtered', '_excerpt' ) ) if 
item_taxonomies: item_taxonomies = dict( ((t.term_id, t.taxonomy), t) for t in blog.models.Taxonomy.objects.filter( term__id__in=[t[1] for t in item_taxonomies] ).select_related('term') ) for menu_item in menu_items: menu_item['children'] = [l[1] for l in sorted(menu_map.get(menu_item['id'], list()))] if menu_item['type'] == 'custom': continue if menu_item['type'] == 'post_type': obj = item_posts[menu_item['object_id']] if obj.post_type != menu_item['object_type']: warnings.warn( "Menu item %s has post type '%s', but post %s has object type '%s'", (menu_item['id'], menu_item['object_type'], obj.id, obj.post_type) ) menu_item['object'] = obj menu_item['title'] = obj.title menu_item['url'] = obj.get_absolute_url() elif menu_item['type'] == 'taxonomy': obj = item_taxonomies[(menu_item['object_id'], menu_item['object_type'])] menu_item['object'] = obj menu_item['title'] = obj.term.name menu_item['url'] = obj.get_absolute_url() if root_id: # slightly inefficient, but who cares... try: return menu, [l[1] for l in sorted(menu_map[int(root_id)])] except KeyError: raise ValueError("No item in menu '%s' with id of '%s'." 
% (menu_slug, root_id)) # type = post_type | taxonomy return menu, [l[1] for l in sorted(menu_map['root'])] def month_archives(blog, asc=False, orderby_count=False, num=None): ordering = ('-year', '-month') if not asc else ('year', 'month') if orderby_count: ordering = ('-id__count',) + ordering qs = blog.models.Post.objects.published().extra( select=dict(year='year(post_date)', month='month(post_date)') ).values_list('year', 'month').annotate(models.Count('id')).order_by(*ordering) if num is not None: qs = qs[:num] archives = [] for year, month, count in qs: archives.append(dict( year=year, month=month, count=count, dt=datetime.date(year=year, month=month, day=1), get_absolute_url=reverse('wpf_archive', kwargs=dict(year='%04d' % year, month='%02d' % month)) )) return archives def year_archives(blog, asc=False, orderby_count=False, num=None): ordering = ('-year',) if not asc else ('year',) if orderby_count: ordering = ('-id__count',) + ordering qs = blog.models.Post.objects.published().extra( select=dict(year='year(post_date)') ).values_list('year').annotate(models.Count('id')).order_by(*ordering) if num is not None: qs = qs[:num] archives = [] for year, count in qs: archives.append(dict( year=year, month=1, count=count, dt=datetime.date(year=year, month=1, day=1), get_absolute_url=reverse('wpf_archive', kwargs=dict(year='%04d' % year)) )) return archives
{ "repo_name": "ludoo/wpkit", "path": "attic/ngfrontman/wp_frontman/wp_helpers.py", "copies": "2", "size": "5568", "license": "bsd-3-clause", "hash": 2069767758599601400, "line_mean": 39.9411764706, "line_max": 106, "alpha_frac": 0.5666307471, "autogenerated": false, "ratio": 3.663157894736842, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 1, "avg_score": 0.0036412001739514386, "num_lines": 136 }
"""Assorted helper methods""" from collections.abc import Iterable import numpy as np def appendsolwarning(msg, data, result, category="uncategorized"): "Append a particular category of warnings to a solution." if "warnings" not in result: result["warnings"] = {} if category not in result["warnings"]: result["warnings"][category] = [] result["warnings"][category].append((msg, data)) @np.vectorize def isnan(element): "Determine if something of arbitrary type is a numpy nan." try: return np.isnan(element) except TypeError: return False def maybe_flatten(value): "Extract values from 0-d numpy arrays, if necessary" if hasattr(value, "shape") and not value.shape: return value.flatten()[0] # 0-d numpy arrays return value def try_str_without(item, excluded, *, latex=False): "Try to call item.str_without(excluded); fall back to str(item)" if latex and hasattr(item, "latex"): return item.latex(excluded) if hasattr(item, "str_without"): return item.str_without(excluded) return str(item) def mag(c): "Return magnitude of a Number or Quantity" return getattr(c, "magnitude", c) def is_sweepvar(sub): "Determines if a given substitution indicates a sweep." return splitsweep(sub)[0] def splitsweep(sub): "Splits a substitution into (is_sweepvar, sweepval)" try: sweep, value = sub if sweep is "sweep" and (isinstance(value, Iterable) or # pylint: disable=literal-comparison hasattr(value, "__call__")): return True, value except (TypeError, ValueError): pass return False, None
{ "repo_name": "hoburg/gpkit", "path": "gpkit/small_scripts.py", "copies": "1", "size": "1709", "license": "mit", "hash": -7182357385960519000, "line_mean": 27.9661016949, "line_max": 101, "alpha_frac": 0.6512580456, "autogenerated": false, "ratio": 3.9107551487414187, "config_test": false, "has_no_keywords": false, "few_assignments": false, "quality_score": 0.5062013194341419, "avg_score": null, "num_lines": null }